From: Dominik Csapak <d.csapak@proxmox.com>
To: pbs-devel@lists.proxmox.com
Subject: [pbs-devel] [RFC PATCH 5/5] proxmox-backup-client: add file-restore commands
Date: Mon, 21 Dec 2020 12:25:07 +0100 [thread overview]
Message-ID: <20201221112507.30450-5-d.csapak@proxmox.com> (raw)
In-Reply-To: <20201221112507.30450-1-d.csapak@proxmox.com>
For now we only have 'list' and 'extract', and they are only supported
for 'pxar.didx' files.

This should be the foundation for a general file-restore interface that
is shared with file restore from block-level backups.
Signed-off-by: Dominik Csapak <d.csapak@proxmox.com>
---
This patch is mostly for @Stefan so that we can coordinate the interface
for file restore.

I am not completely sure about how I handle the zip/non-zip case here
(it seems a bit too automagic), but an explicit 'zip' parameter does not
make much sense either, since printing a directory to stdout cannot
really work any other way (we cannot print partial pxar files).
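
To make the intended interface a bit more concrete, here is a rough
sketch of how I expect the two commands to be invoked (the repository,
snapshot and archive names below are made-up placeholders; only the
subcommand and parameter names are taken from this patch, and the
--repository option is omitted after the first example for brevity):

  # list the top-level archives of a snapshot (path "/")
  proxmox-backup-client file-restore list host/myhost/2020-12-21T10:00:00Z / --repository root@pam@localhost:mystore

  # list a directory inside a pxar archive
  proxmox-backup-client file-restore list host/myhost/2020-12-21T10:00:00Z /root.pxar.didx/etc

  # extract a directory into the current working directory (no target given)
  proxmox-backup-client file-restore extract host/myhost/2020-12-21T10:00:00Z /root.pxar.didx/etc

  # stream a single file to stdout
  proxmox-backup-client file-restore extract host/myhost/2020-12-21T10:00:00Z /root.pxar.didx/etc/hostname -

  # extract a directory to stdout, which implicitly produces a zip stream
  proxmox-backup-client file-restore extract host/myhost/2020-12-21T10:00:00Z /root.pxar.didx/etc - > etc.zip

The last two show the 'automagic' part: with '-' as target, a single
file is streamed verbatim while a directory is wrapped into a zip,
since that is the only way a directory can go to stdout at all.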
Cargo.toml | 2 +-
src/bin/proxmox-backup-client.rs | 1 +
src/bin/proxmox_backup_client/file_restore.rs | 329 ++++++++++++++++++
src/bin/proxmox_backup_client/mod.rs | 2 +
4 files changed, 333 insertions(+), 1 deletion(-)
create mode 100644 src/bin/proxmox_backup_client/file_restore.rs
diff --git a/Cargo.toml b/Cargo.toml
index bfe39e75..66f536f6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -60,7 +60,7 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
siphasher = "0.3"
syslog = "4.0"
-tokio = { version = "0.2.9", features = [ "blocking", "fs", "dns", "io-util", "macros", "process", "rt-threaded", "signal", "stream", "tcp", "time", "uds" ] }
+tokio = { version = "0.2.9", features = [ "blocking", "fs", "dns", "io-util", "io-std", "macros", "process", "rt-threaded", "signal", "stream", "tcp", "time", "uds" ] }
tokio-openssl = "0.4.0"
tokio-util = { version = "0.3", features = [ "codec" ] }
tower-service = "0.3.0"
diff --git a/src/bin/proxmox-backup-client.rs b/src/bin/proxmox-backup-client.rs
index 6cf81952..8585d24f 100644
--- a/src/bin/proxmox-backup-client.rs
+++ b/src/bin/proxmox-backup-client.rs
@@ -1869,6 +1869,7 @@ fn main() {
.insert("version", version_cmd_def)
.insert("benchmark", benchmark_cmd_def)
.insert("change-owner", change_owner_cmd_def)
+ .insert("file-restore", file_restore_mgmt_cli())
.alias(&["files"], &["snapshot", "files"])
.alias(&["forget"], &["snapshot", "forget"])
diff --git a/src/bin/proxmox_backup_client/file_restore.rs b/src/bin/proxmox_backup_client/file_restore.rs
new file mode 100644
index 00000000..0cb30117
--- /dev/null
+++ b/src/bin/proxmox_backup_client/file_restore.rs
@@ -0,0 +1,329 @@
+use std::sync::Arc;
+use std::path::PathBuf;
+use std::ffi::OsStr;
+use std::os::unix::ffi::OsStrExt;
+
+use anyhow::{bail, format_err, Error};
+use serde_json::{json, Value};
+
+use proxmox::api::{
+ api,
+ cli::{
+ CliCommandMap,
+ CliCommand,
+ },
+};
+use pxar::accessor::aio::Accessor;
+
+use proxmox_backup::pxar::{create_zip, extract_sub_dir};
+use proxmox_backup::tools;
+use proxmox_backup::backup::CryptMode;
+use proxmox_backup::backup::LocalDynamicReadAt;
+use proxmox_backup::client::{
+ BackupReader,
+ RemoteChunkReader,
+};
+use crate::{
+ CryptConfig,
+ keyfile_parameters,
+ BackupDir,
+ CATALOG_NAME,
+ decrypt_key,
+ complete_repository,
+ KEYFD_SCHEMA,
+ BufferedDynamicReader,
+ CatalogReader,
+ connect,
+ extract_repository_from_value,
+ KEYFILE_SCHEMA,
+ REPO_URL_SCHEMA,
+ complete_group_or_snapshot,
+ key,
+ IndexFile,
+};
+
+enum ExtractPath {
+ ListArchives,
+ Pxar(String, Vec<u8>),
+}
+
+fn parse_path(path: String, base64: bool) -> Result<ExtractPath, Error> {
+ let mut bytes = if base64 {
+ base64::decode(path)?
+ } else {
+ path.into_bytes()
+ };
+
+ if bytes == b"/" {
+ return Ok(ExtractPath::ListArchives);
+ }
+
+ while bytes.len() > 0 && bytes[0] == b'/' {
+ bytes.remove(0);
+ }
+
+ let (file, path) = {
+ let slash_pos = bytes.iter().position(|c| *c == b'/').unwrap_or(bytes.len());
+ let path = bytes.split_off(slash_pos);
+ let file = String::from_utf8(bytes)?;
+ (file, path)
+ };
+
+ if file.ends_with(".pxar.didx") {
+ Ok(ExtractPath::Pxar(file, path))
+ } else {
+ bail!("'{}' is not supported for file-restore", file);
+ }
+}
+
+#[api(
+ input: {
+ properties: {
+ repository: {
+ schema: REPO_URL_SCHEMA,
+ optional: true,
+ },
+ snapshot: {
+ type: String,
+ description: "Group/Snapshot path.",
+ },
+ "path": {
+ description: "Path to restore. Directories will be restored as .zip files.",
+ type: String,
+ },
+ "base64": {
+ type: Boolean,
+ description: "If set, 'path' will be interpreted as base64 encoded.",
+ optional: true,
+ default: false,
+ },
+ keyfile: {
+ schema: KEYFILE_SCHEMA,
+ optional: true,
+ },
+ "keyfd": {
+ schema: KEYFD_SCHEMA,
+ optional: true,
+ },
+ "crypt-mode": {
+ type: CryptMode,
+ optional: true,
+ },
+ }
+ }
+)]
+/// List a directory from a backup snapshot.
+async fn list(param: Value) -> Result<Vec<Value>, Error> {
+ let repo = extract_repository_from_value(&param)?;
+ let base64 = param["base64"].as_bool().unwrap_or(false);
+ let path = parse_path(tools::required_string_param(&param, "path")?.to_string(), base64)?;
+ let snapshot: BackupDir = tools::required_string_param(&param, "snapshot")?.parse()?;
+
+ let (keydata, _crypt_mode) = keyfile_parameters(&param)?;
+ let crypt_config = match keydata {
+ None => None,
+ Some(key) => {
+ let (key, _, fingerprint) = decrypt_key(&key, &key::get_encryption_key_password)?;
+ eprintln!("Encryption key fingerprint: '{}'", fingerprint);
+ Some(Arc::new(CryptConfig::new(key)?))
+ }
+ };
+
+ let client = connect(&repo)?;
+ let client = BackupReader::start(
+ client,
+ crypt_config.clone(),
+ repo.store(),
+ &snapshot.group().backup_type(),
+ &snapshot.group().backup_id(),
+ snapshot.backup_time(),
+ true,
+ ).await?;
+
+ let (manifest, _) = client.download_manifest().await?;
+ manifest.check_fingerprint(crypt_config.as_ref().map(Arc::as_ref))?;
+
+ match path {
+ ExtractPath::ListArchives => {
+ let mut entries = vec![];
+ let mut has_fidx = false;
+ for file in manifest.files() {
+ match file.filename.rsplitn(2, '.').next().unwrap() {
+ "didx" => {},
+ "fidx" => {
+ has_fidx = true;
+ continue;
+ }
+ _ => continue, // ignore all non fidx/didx
+ }
+ let path = format!("/{}", file.filename);
+ entries.push(json!({
+ "path": path.clone(),
+ "base64": base64::encode(path.into_bytes()),
+ "leaf": false,
+ }))
+ }
+ if has_fidx {
+ entries.push(json!({
+ "path": "/block",
+ "base64": base64::encode(b"/block"),
+ "leaf": false,
+ }));
+ }
+
+ Ok(entries.into())
+ },
+ ExtractPath::Pxar(file, mut path) => {
+ let index = client.download_dynamic_index(&manifest, CATALOG_NAME).await?;
+ let most_used = index.find_most_used_chunks(8);
+ let file_info = manifest.lookup_file_info(&CATALOG_NAME)?;
+ let chunk_reader = RemoteChunkReader::new(client.clone(), crypt_config, file_info.chunk_crypt_mode(), most_used);
+ let reader = BufferedDynamicReader::new(index, chunk_reader);
+ let mut catalog_reader = CatalogReader::new(reader);
+
+ let mut fullpath = file.into_bytes();
+ fullpath.append(&mut path);
+
+ catalog_reader.list_dir_content(&fullpath)
+ },
+ }
+}
+
+#[api(
+ input: {
+ properties: {
+ repository: {
+ schema: REPO_URL_SCHEMA,
+ optional: true,
+ },
+ snapshot: {
+ type: String,
+ description: "Group/Snapshot path.",
+ },
+ "path": {
+ description: "Path to restore. Directories will be restored as .zip files if extracted to stdout.",
+ type: String,
+ },
+ "base64": {
+ type: Boolean,
+ description: "If set, 'path' will be interpreted as base64 encoded.",
+ optional: true,
+ default: false,
+ },
+ target: {
+ type: String,
+ optional: true,
+ description: "Target directory path. Use '-' to write to standard output.",
+ },
+ keyfile: {
+ schema: KEYFILE_SCHEMA,
+ optional: true,
+ },
+ "keyfd": {
+ schema: KEYFD_SCHEMA,
+ optional: true,
+ },
+ "crypt-mode": {
+ type: CryptMode,
+ optional: true,
+ },
+ verbose: {
+ type: Boolean,
+ description: "Print verbose information",
+ optional: true,
+ default: false,
+ }
+ }
+ }
+)]
+/// Restore files from a backup snapshot.
+async fn extract(param: Value) -> Result<Value, Error> {
+ let repo = extract_repository_from_value(&param)?;
+ let verbose = param["verbose"].as_bool().unwrap_or(false);
+ let base64 = param["base64"].as_bool().unwrap_or(false);
+ let orig_path = tools::required_string_param(&param, "path")?.to_string();
+ let path = parse_path(orig_path.clone(), base64)?;
+
+ let target = match param["target"].as_str() {
+ Some(target) if target == "-" => None,
+ Some(target) => Some(PathBuf::from(target)),
+ None => Some(std::env::current_dir()?),
+ };
+
+ let snapshot: BackupDir = tools::required_string_param(&param, "snapshot")?.parse()?;
+
+ let (keydata, _crypt_mode) = keyfile_parameters(&param)?;
+ let crypt_config = match keydata {
+ None => None,
+ Some(key) => {
+ let (key, _, fingerprint) = decrypt_key(&key, &key::get_encryption_key_password)?;
+ eprintln!("Encryption key fingerprint: '{}'", fingerprint);
+ Some(Arc::new(CryptConfig::new(key)?))
+ }
+ };
+
+ match path {
+ ExtractPath::Pxar(archive_name, path) => {
+ let client = connect(&repo)?;
+ let client = BackupReader::start(
+ client,
+ crypt_config.clone(),
+ repo.store(),
+ &snapshot.group().backup_type(),
+ &snapshot.group().backup_id(),
+ snapshot.backup_time(),
+ true,
+ ).await?;
+ let (manifest, _) = client.download_manifest().await?;
+ let file_info = manifest.lookup_file_info(&archive_name)?;
+ let index = client.download_dynamic_index(&manifest, &archive_name).await?;
+ let most_used = index.find_most_used_chunks(8);
+ let chunk_reader = RemoteChunkReader::new(client.clone(), crypt_config, file_info.chunk_crypt_mode(), most_used);
+ let reader = BufferedDynamicReader::new(index, chunk_reader);
+
+ let archive_size = reader.archive_size();
+ let reader = LocalDynamicReadAt::new(reader);
+ let decoder = Accessor::new(reader, archive_size).await?;
+
+ let root = decoder.open_root().await?;
+ let file = root
+ .lookup(OsStr::from_bytes(&path)).await?
+ .ok_or(format_err!("error opening '{:?}'", path))?;
+
+ if let Some(target) = target {
+ extract_sub_dir(target, decoder, OsStr::from_bytes(&path), verbose).await?;
+ } else {
+ match file.kind() {
+ pxar::EntryKind::File { .. } => {
+ tokio::io::copy(&mut file.contents().await?, &mut tokio::io::stdout()).await?;
+ }
+ _ => {
+ create_zip(tokio::io::stdout(), decoder, OsStr::from_bytes(&path), verbose).await?;
+ }
+ }
+ }
+ },
+ _ => {
+ bail!("cannot extract '{}'", orig_path);
+ }
+ }
+
+ Ok(Value::Null)
+}
+
+pub fn file_restore_mgmt_cli() -> CliCommandMap {
+ let list_cmd_def = CliCommand::new(&API_METHOD_LIST)
+ .arg_param(&["snapshot", "path"])
+ .completion_cb("repository", complete_repository)
+ .completion_cb("snapshot", complete_group_or_snapshot);
+
+ let restore_cmd_def = CliCommand::new(&API_METHOD_EXTRACT)
+ .arg_param(&["snapshot", "path", "target"])
+ .completion_cb("repository", complete_repository)
+ .completion_cb("snapshot", complete_group_or_snapshot)
+ .completion_cb("target", tools::complete_file_name);
+
+ CliCommandMap::new()
+ .insert("list", list_cmd_def)
+ .insert("extract", restore_cmd_def)
+}
diff --git a/src/bin/proxmox_backup_client/mod.rs b/src/bin/proxmox_backup_client/mod.rs
index a14b0dc1..7787e91a 100644
--- a/src/bin/proxmox_backup_client/mod.rs
+++ b/src/bin/proxmox_backup_client/mod.rs
@@ -10,6 +10,8 @@ mod catalog;
pub use catalog::*;
mod snapshot;
pub use snapshot::*;
+mod file_restore;
+pub use file_restore::*;
pub mod key;
--
2.20.1