* [pbs-devel] [PATCH v2 proxmox-backup 1/5] datastore: move `ArchiveType` to PBS api types crate
2024-07-10 7:48 [pbs-devel] [PATCH v2 proxmox-backup 0/5] introduce dedicated archive name api type Christian Ebner
@ 2024-07-10 7:48 ` Christian Ebner
2024-07-10 7:48 ` [pbs-devel] [PATCH v2 proxmox-backup 2/5] api types: introduce `BackupArchiveName` type Christian Ebner
` (5 subsequent siblings)
6 siblings, 0 replies; 8+ messages in thread
From: Christian Ebner @ 2024-07-10 7:48 UTC (permalink / raw)
To: pbs-devel
In preparation for introducing a dedicated `BackupArchiveName` api
type, which allows setting the corresponding archive type variant
when parsing the archive name based on its filename.
Signed-off-by: Christian Ebner <c.ebner@proxmox.com>
---
changes since version 1:
- inlined variable name in `from_path` bail branch
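For reference, a minimal usage sketch of the relocated type (standalone
setup assumed; not part of this patch):
```
use anyhow::Error;
use pbs_api_types::ArchiveType;

fn classify(name: &str) -> Result<(), Error> {
    // `from_path` only inspects the trailing extension of the given path.
    match ArchiveType::from_path(name)? {
        ArchiveType::DynamicIndex => println!("{name}: dynamic index (.didx)"),
        ArchiveType::FixedIndex => println!("{name}: fixed index (.fidx)"),
        ArchiveType::Blob => println!("{name}: blob (.blob)"),
    }
    Ok(())
}

fn main() -> Result<(), Error> {
    classify("root.pxar.didx")?;
    classify("drive-scsi0.img.fidx")?;
    classify("index.json.blob")?;
    // classify("something.txt") would bail with "unknown archive type: ..."
    Ok(())
}
```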
pbs-api-types/src/datastore.rs | 23 ++++++++++++++++++++++-
pbs-client/src/backup_writer.rs | 4 ++--
pbs-datastore/src/datastore.rs | 6 +++---
pbs-datastore/src/manifest.rs | 22 +---------------------
pbs-datastore/src/snapshot_reader.rs | 4 ++--
proxmox-backup-client/src/main.rs | 10 ++++------
src/api2/backup/mod.rs | 3 +--
src/api2/reader/mod.rs | 7 +++----
src/api2/tape/restore.rs | 10 +++++-----
src/backup/verify.rs | 7 ++++---
src/server/pull.rs | 10 ++++------
11 files changed, 51 insertions(+), 55 deletions(-)
diff --git a/pbs-api-types/src/datastore.rs b/pbs-api-types/src/datastore.rs
index 31767417a..dfa6bb259 100644
--- a/pbs-api-types/src/datastore.rs
+++ b/pbs-api-types/src/datastore.rs
@@ -1,5 +1,5 @@
use std::fmt;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
use anyhow::{bail, format_err, Error};
use const_format::concatcp;
@@ -1569,3 +1569,24 @@ pub fn print_store_and_ns(store: &str, ns: &BackupNamespace) -> String {
format!("datastore '{}', namespace '{}'", store, ns)
}
}
+
+#[derive(PartialEq, Eq)]
+/// Allowed variants of backup archives to be contained in a snapshot's manifest
+pub enum ArchiveType {
+ FixedIndex,
+ DynamicIndex,
+ Blob,
+}
+
+impl ArchiveType {
+ pub fn from_path(archive_name: impl AsRef<Path>) -> Result<Self, Error> {
+ let archive_name = archive_name.as_ref();
+ let archive_type = match archive_name.extension().and_then(|ext| ext.to_str()) {
+ Some("didx") => ArchiveType::DynamicIndex,
+ Some("fidx") => ArchiveType::FixedIndex,
+ Some("blob") => ArchiveType::Blob,
+ _ => bail!("unknown archive type: {archive_name:?}"),
+ };
+ Ok(archive_type)
+ }
+}
diff --git a/pbs-client/src/backup_writer.rs b/pbs-client/src/backup_writer.rs
index 813c8d602..45f975d05 100644
--- a/pbs-client/src/backup_writer.rs
+++ b/pbs-client/src/backup_writer.rs
@@ -12,12 +12,12 @@ use tokio::io::AsyncReadExt;
use tokio::sync::{mpsc, oneshot};
use tokio_stream::wrappers::ReceiverStream;
-use pbs_api_types::{BackupDir, BackupNamespace};
+use pbs_api_types::{ArchiveType, BackupDir, BackupNamespace};
use pbs_datastore::data_blob::{ChunkInfo, DataBlob, DataChunkBuilder};
use pbs_datastore::dynamic_index::DynamicIndexReader;
use pbs_datastore::fixed_index::FixedIndexReader;
use pbs_datastore::index::IndexFile;
-use pbs_datastore::manifest::{ArchiveType, BackupManifest, MANIFEST_BLOB_NAME};
+use pbs_datastore::manifest::{BackupManifest, MANIFEST_BLOB_NAME};
use pbs_datastore::{CATALOG_NAME, PROXMOX_BACKUP_PROTOCOL_ID_V1};
use pbs_tools::crypt_config::CryptConfig;
diff --git a/pbs-datastore/src/datastore.rs b/pbs-datastore/src/datastore.rs
index 5e7d674e8..6c0f6815a 100644
--- a/pbs-datastore/src/datastore.rs
+++ b/pbs-datastore/src/datastore.rs
@@ -19,8 +19,9 @@ use proxmox_sys::WorkerTaskContext;
use proxmox_sys::{task_log, task_warn};
use pbs_api_types::{
- Authid, BackupNamespace, BackupType, ChunkOrder, DataStoreConfig, DatastoreFSyncLevel,
- DatastoreTuning, GarbageCollectionStatus, MaintenanceMode, MaintenanceType, Operation, UPID,
+ ArchiveType, Authid, BackupNamespace, BackupType, ChunkOrder, DataStoreConfig,
+ DatastoreFSyncLevel, DatastoreTuning, GarbageCollectionStatus, MaintenanceMode,
+ MaintenanceType, Operation, UPID,
};
use crate::backup_info::{BackupDir, BackupGroup, BackupGroupDeleteStats};
@@ -29,7 +30,6 @@ use crate::dynamic_index::{DynamicIndexReader, DynamicIndexWriter};
use crate::fixed_index::{FixedIndexReader, FixedIndexWriter};
use crate::hierarchy::{ListGroups, ListGroupsType, ListNamespaces, ListNamespacesRecursive};
use crate::index::IndexFile;
-use crate::manifest::ArchiveType;
use crate::task_tracking::{self, update_active_operations};
use crate::DataBlob;
diff --git a/pbs-datastore/src/manifest.rs b/pbs-datastore/src/manifest.rs
index 347af9337..f65eb9602 100644
--- a/pbs-datastore/src/manifest.rs
+++ b/pbs-datastore/src/manifest.rs
@@ -5,7 +5,7 @@ use anyhow::{bail, format_err, Error};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
-use pbs_api_types::{BackupType, CryptMode, Fingerprint};
+use pbs_api_types::{ArchiveType, BackupType, CryptMode, Fingerprint};
use pbs_tools::crypt_config::CryptConfig;
pub const MANIFEST_BLOB_NAME: &str = "index.json.blob";
@@ -56,26 +56,6 @@ pub struct BackupManifest {
pub signature: Option<String>,
}
-#[derive(PartialEq, Eq)]
-pub enum ArchiveType {
- FixedIndex,
- DynamicIndex,
- Blob,
-}
-
-impl ArchiveType {
- pub fn from_path(archive_name: impl AsRef<Path>) -> Result<Self, Error> {
- let archive_name = archive_name.as_ref();
- let archive_type = match archive_name.extension().and_then(|ext| ext.to_str()) {
- Some("didx") => ArchiveType::DynamicIndex,
- Some("fidx") => ArchiveType::FixedIndex,
- Some("blob") => ArchiveType::Blob,
- _ => bail!("unknown archive type: {:?}", archive_name),
- };
- Ok(archive_type)
- }
-}
-
//#[deprecated(note = "use ArchivType::from_path instead")] later...
pub fn archive_type<P: AsRef<Path>>(archive_name: P) -> Result<ArchiveType, Error> {
ArchiveType::from_path(archive_name)
diff --git a/pbs-datastore/src/snapshot_reader.rs b/pbs-datastore/src/snapshot_reader.rs
index f9c772079..432701ea0 100644
--- a/pbs-datastore/src/snapshot_reader.rs
+++ b/pbs-datastore/src/snapshot_reader.rs
@@ -8,13 +8,13 @@ use nix::dir::Dir;
use proxmox_sys::fs::lock_dir_noblock_shared;
-use pbs_api_types::{print_store_and_ns, BackupNamespace, Operation};
+use pbs_api_types::{print_store_and_ns, ArchiveType, BackupNamespace, Operation};
use crate::backup_info::BackupDir;
use crate::dynamic_index::DynamicIndexReader;
use crate::fixed_index::FixedIndexReader;
use crate::index::IndexFile;
-use crate::manifest::{ArchiveType, CLIENT_LOG_BLOB_NAME, MANIFEST_BLOB_NAME};
+use crate::manifest::{CLIENT_LOG_BLOB_NAME, MANIFEST_BLOB_NAME};
use crate::DataStore;
/// Helper to access the contents of a datastore backup snapshot
diff --git a/proxmox-backup-client/src/main.rs b/proxmox-backup-client/src/main.rs
index 6a7d09047..3c19dc348 100644
--- a/proxmox-backup-client/src/main.rs
+++ b/proxmox-backup-client/src/main.rs
@@ -25,9 +25,9 @@ use pxar::accessor::aio::Accessor;
use pxar::accessor::{MaybeReady, ReadAt, ReadAtOperation};
use pbs_api_types::{
- Authid, BackupDir, BackupGroup, BackupNamespace, BackupPart, BackupType, CryptMode,
- Fingerprint, GroupListItem, PruneJobOptions, PruneListItem, RateLimitConfig, SnapshotListItem,
- StorageStatus, BACKUP_ID_SCHEMA, BACKUP_NAMESPACE_SCHEMA, BACKUP_TIME_SCHEMA,
+ ArchiveType, Authid, BackupDir, BackupGroup, BackupNamespace, BackupPart, BackupType,
+ CryptMode, Fingerprint, GroupListItem, PruneJobOptions, PruneListItem, RateLimitConfig,
+ SnapshotListItem, StorageStatus, BACKUP_ID_SCHEMA, BACKUP_NAMESPACE_SCHEMA, BACKUP_TIME_SCHEMA,
BACKUP_TYPE_SCHEMA, TRAFFIC_CONTROL_BURST_SCHEMA, TRAFFIC_CONTROL_RATE_SCHEMA,
};
use pbs_client::catalog_shell::Shell;
@@ -54,9 +54,7 @@ use pbs_datastore::chunk_store::verify_chunk_size;
use pbs_datastore::dynamic_index::{BufferedDynamicReader, DynamicIndexReader, LocalDynamicReadAt};
use pbs_datastore::fixed_index::FixedIndexReader;
use pbs_datastore::index::IndexFile;
-use pbs_datastore::manifest::{
- ArchiveType, BackupManifest, ENCRYPTED_KEY_BLOB_NAME, MANIFEST_BLOB_NAME,
-};
+use pbs_datastore::manifest::{BackupManifest, ENCRYPTED_KEY_BLOB_NAME, MANIFEST_BLOB_NAME};
use pbs_datastore::read_chunk::AsyncReadChunk;
use pbs_datastore::CATALOG_NAME;
use pbs_key_config::{decrypt_key, rsa_encrypt_key_config, KeyConfig};
diff --git a/src/api2/backup/mod.rs b/src/api2/backup/mod.rs
index 915fb2a84..8597a6398 100644
--- a/src/api2/backup/mod.rs
+++ b/src/api2/backup/mod.rs
@@ -17,13 +17,12 @@ use proxmox_schema::*;
use proxmox_sortable_macro::sortable;
use pbs_api_types::{
- Authid, BackupNamespace, BackupType, Operation, SnapshotVerifyState, VerifyState,
+ ArchiveType, Authid, BackupNamespace, BackupType, Operation, SnapshotVerifyState, VerifyState,
BACKUP_ARCHIVE_NAME_SCHEMA, BACKUP_ID_SCHEMA, BACKUP_NAMESPACE_SCHEMA, BACKUP_TIME_SCHEMA,
BACKUP_TYPE_SCHEMA, CHUNK_DIGEST_SCHEMA, DATASTORE_SCHEMA, PRIV_DATASTORE_BACKUP,
};
use pbs_config::CachedUserInfo;
use pbs_datastore::index::IndexFile;
-use pbs_datastore::manifest::ArchiveType;
use pbs_datastore::{DataStore, PROXMOX_BACKUP_PROTOCOL_ID_V1};
use pbs_tools::json::{required_array_param, required_integer_param, required_string_param};
use proxmox_rest_server::{H2Service, WorkerTask};
diff --git a/src/api2/reader/mod.rs b/src/api2/reader/mod.rs
index 23492f64c..a258bf983 100644
--- a/src/api2/reader/mod.rs
+++ b/src/api2/reader/mod.rs
@@ -17,13 +17,12 @@ use proxmox_schema::{BooleanSchema, ObjectSchema};
use proxmox_sortable_macro::sortable;
use pbs_api_types::{
- Authid, Operation, BACKUP_ARCHIVE_NAME_SCHEMA, BACKUP_ID_SCHEMA, BACKUP_NAMESPACE_SCHEMA,
- BACKUP_TIME_SCHEMA, BACKUP_TYPE_SCHEMA, CHUNK_DIGEST_SCHEMA, DATASTORE_SCHEMA,
- PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_READ,
+ ArchiveType, Authid, Operation, BACKUP_ARCHIVE_NAME_SCHEMA, BACKUP_ID_SCHEMA,
+ BACKUP_NAMESPACE_SCHEMA, BACKUP_TIME_SCHEMA, BACKUP_TYPE_SCHEMA, CHUNK_DIGEST_SCHEMA,
+ DATASTORE_SCHEMA, PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_READ,
};
use pbs_config::CachedUserInfo;
use pbs_datastore::index::IndexFile;
-use pbs_datastore::manifest::ArchiveType;
use pbs_datastore::{DataStore, PROXMOX_BACKUP_READER_PROTOCOL_ID_V1};
use pbs_tools::json::required_string_param;
use proxmox_rest_server::{H2Service, WorkerTask};
diff --git a/src/api2/tape/restore.rs b/src/api2/tape/restore.rs
index 79200d1b9..d297ff4bf 100644
--- a/src/api2/tape/restore.rs
+++ b/src/api2/tape/restore.rs
@@ -17,10 +17,10 @@ use proxmox_sys::{task_log, task_warn, WorkerTaskContext};
use proxmox_uuid::Uuid;
use pbs_api_types::{
- parse_ns_and_snapshot, print_ns_and_snapshot, Authid, BackupDir, BackupNamespace, CryptMode,
- NotificationMode, Operation, TapeRestoreNamespace, Userid, DATASTORE_MAP_ARRAY_SCHEMA,
- DATASTORE_MAP_LIST_SCHEMA, DRIVE_NAME_SCHEMA, MAX_NAMESPACE_DEPTH, PRIV_DATASTORE_BACKUP,
- PRIV_DATASTORE_MODIFY, PRIV_TAPE_READ, TAPE_RESTORE_NAMESPACE_SCHEMA,
+ parse_ns_and_snapshot, print_ns_and_snapshot, ArchiveType, Authid, BackupDir, BackupNamespace,
+ CryptMode, NotificationMode, Operation, TapeRestoreNamespace, Userid,
+ DATASTORE_MAP_ARRAY_SCHEMA, DATASTORE_MAP_LIST_SCHEMA, DRIVE_NAME_SCHEMA, MAX_NAMESPACE_DEPTH,
+ PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_MODIFY, PRIV_TAPE_READ, TAPE_RESTORE_NAMESPACE_SCHEMA,
TAPE_RESTORE_SNAPSHOT_SCHEMA, UPID_SCHEMA,
};
use pbs_client::tools::handle_root_with_optional_format_version_prelude;
@@ -28,7 +28,7 @@ use pbs_config::CachedUserInfo;
use pbs_datastore::dynamic_index::DynamicIndexReader;
use pbs_datastore::fixed_index::FixedIndexReader;
use pbs_datastore::index::IndexFile;
-use pbs_datastore::manifest::{ArchiveType, BackupManifest, MANIFEST_BLOB_NAME};
+use pbs_datastore::manifest::{BackupManifest, MANIFEST_BLOB_NAME};
use pbs_datastore::{DataBlob, DataStore};
use pbs_tape::{
BlockReadError, MediaContentHeader, TapeRead, PROXMOX_BACKUP_CONTENT_HEADER_MAGIC_1_0,
diff --git a/src/backup/verify.rs b/src/backup/verify.rs
index 8dde7dacc..feecd069d 100644
--- a/src/backup/verify.rs
+++ b/src/backup/verify.rs
@@ -9,12 +9,13 @@ use anyhow::{bail, format_err, Error};
use proxmox_sys::{task_log, WorkerTaskContext};
use pbs_api_types::{
- print_ns_and_snapshot, print_store_and_ns, Authid, BackupNamespace, BackupType, CryptMode,
- SnapshotVerifyState, VerifyState, PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_VERIFY, UPID,
+ print_ns_and_snapshot, print_store_and_ns, ArchiveType, Authid, BackupNamespace, BackupType,
+ CryptMode, SnapshotVerifyState, VerifyState, PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_VERIFY,
+ UPID,
};
use pbs_datastore::backup_info::{BackupDir, BackupGroup, BackupInfo};
use pbs_datastore::index::IndexFile;
-use pbs_datastore::manifest::{ArchiveType, BackupManifest, FileInfo};
+use pbs_datastore::manifest::{BackupManifest, FileInfo};
use pbs_datastore::{DataBlob, DataStore, StoreProgress};
use proxmox_sys::fs::lock_dir_noblock_shared;
diff --git a/src/server/pull.rs b/src/server/pull.rs
index d144e2f2d..2ef916c3c 100644
--- a/src/server/pull.rs
+++ b/src/server/pull.rs
@@ -16,9 +16,9 @@ use proxmox_sys::{task_log, task_warn};
use serde_json::json;
use pbs_api_types::{
- print_store_and_ns, Authid, BackupDir, BackupGroup, BackupNamespace, CryptMode, GroupFilter,
- GroupListItem, Operation, RateLimitConfig, Remote, SnapshotListItem, MAX_NAMESPACE_DEPTH,
- PRIV_DATASTORE_AUDIT, PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_READ,
+ print_store_and_ns, ArchiveType, Authid, BackupDir, BackupGroup, BackupNamespace, CryptMode,
+ GroupFilter, GroupListItem, Operation, RateLimitConfig, Remote, SnapshotListItem,
+ MAX_NAMESPACE_DEPTH, PRIV_DATASTORE_AUDIT, PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_READ,
};
use pbs_client::{BackupReader, BackupRepository, HttpClient, RemoteChunkReader};
use pbs_config::CachedUserInfo;
@@ -26,9 +26,7 @@ use pbs_datastore::data_blob::DataBlob;
use pbs_datastore::dynamic_index::DynamicIndexReader;
use pbs_datastore::fixed_index::FixedIndexReader;
use pbs_datastore::index::IndexFile;
-use pbs_datastore::manifest::{
- ArchiveType, BackupManifest, FileInfo, CLIENT_LOG_BLOB_NAME, MANIFEST_BLOB_NAME,
-};
+use pbs_datastore::manifest::{BackupManifest, FileInfo, CLIENT_LOG_BLOB_NAME, MANIFEST_BLOB_NAME};
use pbs_datastore::read_chunk::AsyncReadChunk;
use pbs_datastore::{
check_backup_owner, DataStore, ListNamespacesRecursive, LocalChunkReader, StoreProgress,
--
2.39.2
* [pbs-devel] [PATCH v2 proxmox-backup 3/5] client/server: use dedicated api type for all archive names
2024-07-10 7:48 [pbs-devel] [PATCH v2 proxmox-backup 0/5] introduce dedicated archive name api type Christian Ebner
2024-07-10 7:48 ` [pbs-devel] [PATCH v2 proxmox-backup 1/5] datastore: move `ArchiveType` to PBS api types crate Christian Ebner
2024-07-10 7:48 ` [pbs-devel] [PATCH v2 proxmox-backup 2/5] api types: introduce `BackupArchiveName` type Christian Ebner
@ 2024-07-10 7:48 ` Christian Ebner
2024-07-10 7:48 ` [pbs-devel] [PATCH v2 proxmox-backup 4/5] client: drop unused parse_archive_type helper Christian Ebner
` (3 subsequent siblings)
6 siblings, 0 replies; 8+ messages in thread
From: Christian Ebner @ 2024-07-10 7:48 UTC (permalink / raw)
To: pbs-devel
Instead of using a plain String or slices of it, use the dedicated
api type and its methods to parse and check the archive type based on
the archive's filename extension.
This keeps the checks and mappings in the api type and, thanks to the
additional type checks, reduces the risk of misusing functions by
passing incorrect parameters.
A side effect of this is that commands now also optionally accept the
archive type extension when passing the archive name.
E.g.
```
proxmox-backup-client restore <snapshot> <name>.pxar.didx <target>
```
is equal to
```
proxmox-backup-client restore <snapshot> <name>.pxar <target>
```
This works because the correct extension for the allowed archive
variants is either checked or mapped by the api type, depending on
the given input.
Signed-off-by: Christian Ebner <c.ebner@proxmox.com>
---
changes since version 1:
- drop additional leftover `has_pxar_filename_extension` import
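As a quick illustration of the call pattern this patch switches to (the
`BackupArchiveName` type itself lands in patch 2/5, so the signatures
below are assumed from the call sites in this diff):
```
use anyhow::Error;
use pbs_api_types::{ArchiveType, BackupArchiveName};

fn main() -> Result<(), Error> {
    // Explicit server-side type extension ...
    let explicit: BackupArchiveName = "root.pxar.didx".try_into()?;
    // ... or without it; the api type maps ".pxar" to a dynamic index.
    let implicit: BackupArchiveName = "root.pxar".try_into()?;

    assert!(explicit.archive_type() == ArchiveType::DynamicIndex);
    assert!(implicit.archive_type() == ArchiveType::DynamicIndex);

    // Names with an unknown extension are expected to be rejected at parse
    // time, so helpers further down no longer need to re-validate them.
    Ok(())
}
```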
pbs-client/src/backup_reader.rs | 14 ++--
pbs-client/src/backup_writer.rs | 22 +++---
pbs-client/src/tools/mod.rs | 24 +++---
pbs-datastore/src/manifest.rs | 30 ++++---
proxmox-backup-client/src/catalog.rs | 25 +++---
proxmox-backup-client/src/helper.rs | 7 +-
proxmox-backup-client/src/main.rs | 112 ++++++++++++++++-----------
proxmox-backup-client/src/mount.rs | 33 ++++----
proxmox-file-restore/src/main.rs | 20 +++--
src/api2/admin/datastore.rs | 60 +++++++-------
src/bin/proxmox_backup_debug/diff.rs | 16 ++--
src/server/pull.rs | 16 ++--
12 files changed, 209 insertions(+), 170 deletions(-)
diff --git a/pbs-client/src/backup_reader.rs b/pbs-client/src/backup_reader.rs
index 36d8ebcf3..728322765 100644
--- a/pbs-client/src/backup_reader.rs
+++ b/pbs-client/src/backup_reader.rs
@@ -7,7 +7,7 @@ use std::sync::Arc;
use futures::future::AbortHandle;
use serde_json::{json, Value};
-use pbs_api_types::{BackupDir, BackupNamespace};
+use pbs_api_types::{BackupArchiveName, BackupDir, BackupNamespace};
use pbs_datastore::data_blob::DataBlob;
use pbs_datastore::data_blob_reader::DataBlobReader;
use pbs_datastore::dynamic_index::DynamicIndexReader;
@@ -146,7 +146,7 @@ impl BackupReader {
pub async fn download_blob(
&self,
manifest: &BackupManifest,
- name: &str,
+ name: &BackupArchiveName,
) -> Result<DataBlobReader<'_, File>, Error> {
let mut tmpfile = std::fs::OpenOptions::new()
.write(true)
@@ -154,7 +154,7 @@ impl BackupReader {
.custom_flags(libc::O_TMPFILE)
.open("/tmp")?;
- self.download(name, &mut tmpfile).await?;
+ self.download(name.as_ref(), &mut tmpfile).await?;
tmpfile.seek(SeekFrom::Start(0))?;
let (csum, size) = sha256(&mut tmpfile)?;
@@ -172,7 +172,7 @@ impl BackupReader {
pub async fn download_dynamic_index(
&self,
manifest: &BackupManifest,
- name: &str,
+ name: &BackupArchiveName,
) -> Result<DynamicIndexReader, Error> {
let mut tmpfile = std::fs::OpenOptions::new()
.write(true)
@@ -180,7 +180,7 @@ impl BackupReader {
.custom_flags(libc::O_TMPFILE)
.open("/tmp")?;
- self.download(name, &mut tmpfile).await?;
+ self.download(name.as_ref(), &mut tmpfile).await?;
let index = DynamicIndexReader::new(tmpfile)
.map_err(|err| format_err!("unable to read dynamic index '{}' - {}", name, err))?;
@@ -199,7 +199,7 @@ impl BackupReader {
pub async fn download_fixed_index(
&self,
manifest: &BackupManifest,
- name: &str,
+ name: &BackupArchiveName,
) -> Result<FixedIndexReader, Error> {
let mut tmpfile = std::fs::OpenOptions::new()
.write(true)
@@ -207,7 +207,7 @@ impl BackupReader {
.custom_flags(libc::O_TMPFILE)
.open("/tmp")?;
- self.download(name, &mut tmpfile).await?;
+ self.download(name.as_ref(), &mut tmpfile).await?;
let index = FixedIndexReader::new(tmpfile)
.map_err(|err| format_err!("unable to read fixed index '{}' - {}", name, err))?;
diff --git a/pbs-client/src/backup_writer.rs b/pbs-client/src/backup_writer.rs
index 45f975d05..6fd8ad728 100644
--- a/pbs-client/src/backup_writer.rs
+++ b/pbs-client/src/backup_writer.rs
@@ -12,7 +12,7 @@ use tokio::io::AsyncReadExt;
use tokio::sync::{mpsc, oneshot};
use tokio_stream::wrappers::ReceiverStream;
-use pbs_api_types::{ArchiveType, BackupDir, BackupNamespace};
+use pbs_api_types::{ArchiveType, BackupArchiveName, BackupDir, BackupNamespace};
use pbs_datastore::data_blob::{ChunkInfo, DataBlob, DataChunkBuilder};
use pbs_datastore::dynamic_index::DynamicIndexReader;
use pbs_datastore::fixed_index::FixedIndexReader;
@@ -265,7 +265,7 @@ impl BackupWriter {
pub async fn upload_stream(
&self,
- archive_name: &str,
+ archive_name: &BackupArchiveName,
stream: impl Stream<Item = Result<bytes::BytesMut, Error>>,
options: UploadOptions,
injections: Option<std::sync::mpsc::Receiver<InjectChunks>>,
@@ -291,13 +291,13 @@ impl BackupWriter {
if !manifest
.files()
.iter()
- .any(|file| file.filename == archive_name)
+ .any(|file| file.filename == archive_name.as_ref())
{
log::info!("Previous manifest does not contain an archive called '{archive_name}', skipping download..");
} else {
// try, but ignore errors
- match ArchiveType::from_path(archive_name) {
- Ok(ArchiveType::FixedIndex) => {
+ match archive_name.archive_type() {
+ ArchiveType::FixedIndex => {
if let Err(err) = self
.download_previous_fixed_index(
archive_name,
@@ -309,7 +309,7 @@ impl BackupWriter {
log::warn!("Error downloading .fidx from previous manifest: {}", err);
}
}
- Ok(ArchiveType::DynamicIndex) => {
+ ArchiveType::DynamicIndex => {
if let Err(err) = self
.download_previous_dynamic_index(
archive_name,
@@ -352,9 +352,9 @@ impl BackupWriter {
let size_dirty = upload_stats.size - upload_stats.size_reused;
let size: HumanByte = upload_stats.size.into();
let archive = if log::log_enabled!(log::Level::Debug) {
- archive_name
+ archive_name.to_string()
} else {
- pbs_tools::format::strip_server_file_extension(archive_name)
+ archive_name.without_type_extension()
};
if upload_stats.chunk_injected > 0 {
@@ -365,7 +365,7 @@ impl BackupWriter {
);
}
- if archive_name != CATALOG_NAME {
+ if archive_name.as_ref() != CATALOG_NAME {
let speed: HumanByte =
((size_dirty * 1_000_000) / (upload_stats.duration.as_micros() as usize)).into();
let size_dirty: HumanByte = size_dirty.into();
@@ -527,7 +527,7 @@ impl BackupWriter {
pub async fn download_previous_fixed_index(
&self,
- archive_name: &str,
+ archive_name: &BackupArchiveName,
manifest: &BackupManifest,
known_chunks: Arc<Mutex<HashSet<[u8; 32]>>>,
) -> Result<FixedIndexReader, Error> {
@@ -566,7 +566,7 @@ impl BackupWriter {
pub async fn download_previous_dynamic_index(
&self,
- archive_name: &str,
+ archive_name: &BackupArchiveName,
manifest: &BackupManifest,
known_chunks: Arc<Mutex<HashSet<[u8; 32]>>>,
) -> Result<DynamicIndexReader, Error> {
diff --git a/pbs-client/src/tools/mod.rs b/pbs-client/src/tools/mod.rs
index 90fac696e..4e51ed198 100644
--- a/pbs-client/src/tools/mod.rs
+++ b/pbs-client/src/tools/mod.rs
@@ -18,7 +18,10 @@ use proxmox_router::cli::{complete_file_name, shellword_split};
use proxmox_schema::*;
use proxmox_sys::fs::file_get_json;
-use pbs_api_types::{Authid, BackupNamespace, RateLimitConfig, UserWithTokens, BACKUP_REPO_URL};
+use pbs_api_types::{
+ ArchiveType, Authid, BackupArchiveName, BackupNamespace, RateLimitConfig, UserWithTokens,
+ BACKUP_REPO_URL,
+};
use pbs_datastore::catalog::{ArchiveEntry, DirEntryAttribute};
use pbs_datastore::BackupManifest;
use pxar::accessor::aio::Accessor;
@@ -554,12 +557,12 @@ pub fn place_xdg_file(
}
pub fn get_pxar_archive_names(
- archive_name: &str,
+ archive_name: &BackupArchiveName,
manifest: &BackupManifest,
-) -> Result<(String, Option<String>), Error> {
- let (filename, ext) = match archive_name.strip_suffix(".didx") {
- Some(filename) => (filename, ".didx"),
- None => (archive_name, ""),
+) -> Result<(BackupArchiveName, Option<BackupArchiveName>), Error> {
+ let (filename, ext) = match archive_name.archive_type() {
+ ArchiveType::DynamicIndex => (archive_name.without_type_extension(), ".didx"),
+ _ => (archive_name.to_string(), ""),
};
// Check if archive with given extension is present
@@ -574,8 +577,8 @@ pub fn get_pxar_archive_names(
.or_else(|| filename.strip_suffix(".ppxar"))
{
return Ok((
- format!("{base}.mpxar{ext}"),
- Some(format!("{base}.ppxar{ext}")),
+ format!("{base}.mpxar{ext}").as_str().try_into()?,
+ Some(format!("{base}.ppxar{ext}").as_str().try_into()?),
));
}
return Ok((archive_name.to_owned(), None));
@@ -583,7 +586,10 @@ pub fn get_pxar_archive_names(
// if not, try fallback from regular to split archive
if let Some(base) = filename.strip_suffix(".pxar") {
- return get_pxar_archive_names(&format!("{base}.mpxar{ext}"), manifest);
+ return get_pxar_archive_names(
+ &format!("{base}.mpxar{ext}").as_str().try_into()?,
+ manifest,
+ );
}
bail!("archive not found in manifest");
diff --git a/pbs-datastore/src/manifest.rs b/pbs-datastore/src/manifest.rs
index f65eb9602..1f11ecbf7 100644
--- a/pbs-datastore/src/manifest.rs
+++ b/pbs-datastore/src/manifest.rs
@@ -5,7 +5,7 @@ use anyhow::{bail, format_err, Error};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
-use pbs_api_types::{ArchiveType, BackupType, CryptMode, Fingerprint};
+use pbs_api_types::{ArchiveType, BackupArchiveName, BackupType, CryptMode, Fingerprint};
use pbs_tools::crypt_config::CryptConfig;
pub const MANIFEST_BLOB_NAME: &str = "index.json.blob";
@@ -75,14 +75,13 @@ impl BackupManifest {
pub fn add_file(
&mut self,
- filename: String,
+ filename: &BackupArchiveName,
size: u64,
csum: [u8; 32],
crypt_mode: CryptMode,
) -> Result<(), Error> {
- let _archive_type = ArchiveType::from_path(&filename)?; // check type
self.files.push(FileInfo {
- filename,
+ filename: filename.to_string(),
size,
csum,
crypt_mode,
@@ -94,8 +93,11 @@ impl BackupManifest {
&self.files[..]
}
- pub fn lookup_file_info(&self, name: &str) -> Result<&FileInfo, Error> {
- let info = self.files.iter().find(|item| item.filename == name);
+ pub fn lookup_file_info(&self, name: &BackupArchiveName) -> Result<&FileInfo, Error> {
+ let info = self
+ .files
+ .iter()
+ .find(|item| item.filename == name.as_ref());
match info {
None => bail!("manifest does not contain file '{}'", name),
@@ -103,7 +105,12 @@ impl BackupManifest {
}
}
- pub fn verify_file(&self, name: &str, csum: &[u8; 32], size: u64) -> Result<(), Error> {
+ pub fn verify_file(
+ &self,
+ name: &BackupArchiveName,
+ csum: &[u8; 32],
+ size: u64,
+ ) -> Result<(), Error> {
let info = self.lookup_file_info(name)?;
if size != info.size {
@@ -263,8 +270,13 @@ fn test_manifest_signature() -> Result<(), Error> {
let mut manifest = BackupManifest::new("host/elsa/2020-06-26T13:56:05Z".parse()?);
- manifest.add_file("test1.img.fidx".into(), 200, [1u8; 32], CryptMode::Encrypt)?;
- manifest.add_file("abc.blob".into(), 200, [2u8; 32], CryptMode::None)?;
+ manifest.add_file(
+ &"test1.img.fidx".try_into()?,
+ 200,
+ [1u8; 32],
+ CryptMode::Encrypt,
+ )?;
+ manifest.add_file(&"abc.blob".try_into()?, 200, [2u8; 32], CryptMode::None)?;
manifest.unprotected["note"] = "This is not protected by the signature.".into();
diff --git a/proxmox-backup-client/src/catalog.rs b/proxmox-backup-client/src/catalog.rs
index 276457c1d..86ecc2a0d 100644
--- a/proxmox-backup-client/src/catalog.rs
+++ b/proxmox-backup-client/src/catalog.rs
@@ -8,8 +8,7 @@ use serde_json::Value;
use proxmox_router::cli::*;
use proxmox_schema::api;
-use pbs_api_types::BackupNamespace;
-use pbs_client::tools::has_pxar_filename_extension;
+use pbs_api_types::{BackupArchiveName, BackupNamespace};
use pbs_client::tools::key_source::get_encryption_key_password;
use pbs_client::{BackupReader, RemoteChunkReader};
use pbs_tools::crypt_config::CryptConfig;
@@ -89,13 +88,14 @@ async fn dump_catalog(param: Value) -> Result<Value, Error> {
let (manifest, _) = client.download_manifest().await?;
manifest.check_fingerprint(crypt_config.as_ref().map(Arc::as_ref))?;
+ let catalog_name: BackupArchiveName = CATALOG_NAME.try_into()?;
let index = client
- .download_dynamic_index(&manifest, CATALOG_NAME)
+ .download_dynamic_index(&manifest, &catalog_name)
.await?;
let most_used = index.find_most_used_chunks(8);
- let file_info = manifest.lookup_file_info(CATALOG_NAME)?;
+ let file_info = manifest.lookup_file_info(&catalog_name)?;
let chunk_reader = RemoteChunkReader::new(
client.clone(),
@@ -138,8 +138,7 @@ async fn dump_catalog(param: Value) -> Result<Value, Error> {
description: "Group/Snapshot path.",
},
"archive-name": {
- type: String,
- description: "Backup archive name.",
+ type: BackupArchiveName,
},
"repository": {
optional: true,
@@ -163,7 +162,8 @@ async fn catalog_shell(param: Value) -> Result<(), Error> {
let client = connect(&repo)?;
let backup_ns = optional_ns_param(¶m)?;
let path = required_string_param(¶m, "snapshot")?;
- let archive_name = required_string_param(¶m, "archive-name")?;
+ let server_archive_name: BackupArchiveName =
+ required_string_param(¶m, "archive-name")?.try_into()?;
let backup_dir = dir_or_last_from_group(&client, &repo, &backup_ns, path).await?;
@@ -182,9 +182,7 @@ async fn catalog_shell(param: Value) -> Result<(), Error> {
}
};
- let server_archive_name = if has_pxar_filename_extension(archive_name, false) {
- format!("{}.didx", archive_name)
- } else {
+ if !server_archive_name.has_pxar_filename_extension() {
bail!("Can only mount pxar archives.");
};
@@ -219,13 +217,14 @@ async fn catalog_shell(param: Value) -> Result<(), Error> {
let index = DynamicIndexReader::new(tmpfile)
.map_err(|err| format_err!("unable to read catalog index - {}", err))?;
+ let catalog_name: BackupArchiveName = CATALOG_NAME.try_into().unwrap();
// Note: do not use values stored in index (not trusted) - instead, computed them again
let (csum, size) = index.compute_csum();
- manifest.verify_file(CATALOG_NAME, &csum, size)?;
+ manifest.verify_file(&catalog_name, &csum, size)?;
let most_used = index.find_most_used_chunks(8);
- let file_info = manifest.lookup_file_info(CATALOG_NAME)?;
+ let file_info = manifest.lookup_file_info(&catalog_name)?;
let chunk_reader = RemoteChunkReader::new(
client.clone(),
crypt_config,
@@ -244,7 +243,7 @@ async fn catalog_shell(param: Value) -> Result<(), Error> {
catalogfile.seek(SeekFrom::Start(0))?;
let catalog_reader = CatalogReader::new(catalogfile);
- let state = Shell::new(catalog_reader, &server_archive_name, decoder).await?;
+ let state = Shell::new(catalog_reader, &server_archive_name.as_ref(), decoder).await?;
log::info!("Starting interactive shell");
state.shell().await?;
diff --git a/proxmox-backup-client/src/helper.rs b/proxmox-backup-client/src/helper.rs
index 60355d7d0..642d66a7b 100644
--- a/proxmox-backup-client/src/helper.rs
+++ b/proxmox-backup-client/src/helper.rs
@@ -1,6 +1,7 @@
use std::sync::Arc;
use anyhow::Error;
+use pbs_api_types::BackupArchiveName;
use pbs_client::{BackupReader, RemoteChunkReader};
use pbs_datastore::BackupManifest;
use pbs_tools::crypt_config::CryptConfig;
@@ -8,7 +9,7 @@ use pbs_tools::crypt_config::CryptConfig;
use crate::{BufferedDynamicReadAt, BufferedDynamicReader, IndexFile};
pub(crate) async fn get_pxar_fuse_accessor(
- archive_name: &str,
+ archive_name: &BackupArchiveName,
client: Arc<BackupReader>,
manifest: &BackupManifest,
crypt_config: Option<Arc<CryptConfig>>,
@@ -44,7 +45,7 @@ pub(crate) async fn get_pxar_fuse_accessor(
}
pub(crate) async fn get_pxar_fuse_reader(
- archive_name: &str,
+ archive_name: &BackupArchiveName,
client: Arc<BackupReader>,
manifest: &BackupManifest,
crypt_config: Option<Arc<CryptConfig>>,
@@ -57,7 +58,7 @@ pub(crate) async fn get_pxar_fuse_reader(
}
pub(crate) async fn get_buffered_pxar_reader(
- archive_name: &str,
+ archive_name: &BackupArchiveName,
client: Arc<BackupReader>,
manifest: &BackupManifest,
crypt_config: Option<Arc<CryptConfig>>,
diff --git a/proxmox-backup-client/src/main.rs b/proxmox-backup-client/src/main.rs
index 3c19dc348..d079b3d60 100644
--- a/proxmox-backup-client/src/main.rs
+++ b/proxmox-backup-client/src/main.rs
@@ -25,10 +25,11 @@ use pxar::accessor::aio::Accessor;
use pxar::accessor::{MaybeReady, ReadAt, ReadAtOperation};
use pbs_api_types::{
- ArchiveType, Authid, BackupDir, BackupGroup, BackupNamespace, BackupPart, BackupType,
- CryptMode, Fingerprint, GroupListItem, PruneJobOptions, PruneListItem, RateLimitConfig,
- SnapshotListItem, StorageStatus, BACKUP_ID_SCHEMA, BACKUP_NAMESPACE_SCHEMA, BACKUP_TIME_SCHEMA,
- BACKUP_TYPE_SCHEMA, TRAFFIC_CONTROL_BURST_SCHEMA, TRAFFIC_CONTROL_RATE_SCHEMA,
+ ArchiveType, Authid, BackupArchiveName, BackupDir, BackupGroup, BackupNamespace, BackupPart,
+ BackupType, CryptMode, Fingerprint, GroupListItem, PruneJobOptions, PruneListItem,
+ RateLimitConfig, SnapshotListItem, StorageStatus, BACKUP_ID_SCHEMA, BACKUP_NAMESPACE_SCHEMA,
+ BACKUP_TIME_SCHEMA, BACKUP_TYPE_SCHEMA, TRAFFIC_CONTROL_BURST_SCHEMA,
+ TRAFFIC_CONTROL_RATE_SCHEMA,
};
use pbs_client::catalog_shell::Shell;
use pbs_client::pxar::{ErrorHandler as PxarErrorHandler, MetadataArchiveReader, PxarPrevRef};
@@ -36,7 +37,7 @@ use pbs_client::tools::{
complete_archive_name, complete_auth_id, complete_backup_group, complete_backup_snapshot,
complete_backup_source, complete_chunk_size, complete_group_or_snapshot,
complete_img_archive_name, complete_namespace, complete_pxar_archive_name, complete_repository,
- connect, connect_rate_limited, extract_repository_from_value, has_pxar_filename_extension,
+ connect, connect_rate_limited, extract_repository_from_value,
key_source::{
crypto_parameters, format_key_source, get_encryption_key_password, KEYFD_SCHEMA,
KEYFILE_SCHEMA, MASTER_PUBKEY_FD_SCHEMA, MASTER_PUBKEY_FILE_SCHEMA,
@@ -196,8 +197,8 @@ pub async fn dir_or_last_from_group(
async fn backup_directory<P: AsRef<Path>>(
client: &BackupWriter,
dir_path: P,
- archive_name: &str,
- payload_target: Option<&str>,
+ archive_name: &BackupArchiveName,
+ payload_target: Option<&BackupArchiveName>,
chunk_size: Option<usize>,
catalog: Option<Arc<Mutex<CatalogWriter<TokioWriterAdapter<StdChannelWriter<Error>>>>>>,
pxar_create_options: pbs_client::pxar::PxarCreateOptions,
@@ -276,7 +277,7 @@ async fn backup_directory<P: AsRef<Path>>(
async fn backup_image<P: AsRef<Path>>(
client: &BackupWriter,
image_path: P,
- archive_name: &str,
+ archive_name: &BackupArchiveName,
chunk_size: Option<usize>,
upload_options: UploadOptions,
) -> Result<BackupStats, Error> {
@@ -601,8 +602,9 @@ fn spawn_catalog_upload(
};
tokio::spawn(async move {
+ let catalog_name: BackupArchiveName = CATALOG_NAME.try_into().unwrap();
let catalog_upload_result = client
- .upload_stream(CATALOG_NAME, catalog_chunk_stream, upload_options, None)
+ .upload_stream(&catalog_name, catalog_chunk_stream, upload_options, None)
.await;
if let Err(ref err) = catalog_upload_result {
@@ -1013,13 +1015,21 @@ async fn create_backup(
};
for (backup_type, filename, target_base, extension, size) in upload_list {
- let target = format!("{target_base}.{extension}");
+ let target: BackupArchiveName = format!("{target_base}.{extension}").as_str().try_into()?;
match (backup_type, dry_run) {
// dry-run
- (BackupSpecificationType::CONFIG, true) => log_file("config file", &filename, &target),
- (BackupSpecificationType::LOGFILE, true) => log_file("log file", &filename, &target),
- (BackupSpecificationType::PXAR, true) => log_file("directory", &filename, &target),
- (BackupSpecificationType::IMAGE, true) => log_file("image", &filename, &target),
+ (BackupSpecificationType::CONFIG, true) => {
+ log_file("config file", &filename, target.as_ref())
+ }
+ (BackupSpecificationType::LOGFILE, true) => {
+ log_file("log file", &filename, target.as_ref())
+ }
+ (BackupSpecificationType::PXAR, true) => {
+ log_file("directory", &filename, target.as_ref())
+ }
+ (BackupSpecificationType::IMAGE, true) => {
+ log_file("image", &filename, &target.as_ref())
+ }
// no dry-run
(BackupSpecificationType::CONFIG, false) => {
let upload_options = UploadOptions {
@@ -1028,11 +1038,11 @@ async fn create_backup(
..UploadOptions::default()
};
- log_file("config file", &filename, &target);
+ log_file("config file", &filename, target.as_ref());
let stats = client
- .upload_blob_from_file(&filename, &target, upload_options)
+ .upload_blob_from_file(&filename, target.as_ref(), upload_options)
.await?;
- manifest.add_file(target, stats.size, stats.csum, crypto.mode)?;
+ manifest.add_file(&target, stats.size, stats.csum, crypto.mode)?;
}
(BackupSpecificationType::LOGFILE, false) => {
// fixme: remove - not needed anymore ?
@@ -1042,11 +1052,11 @@ async fn create_backup(
..UploadOptions::default()
};
- log_file("log file", &filename, &target);
+ log_file("log file", &filename, target.as_ref());
let stats = client
- .upload_blob_from_file(&filename, &target, upload_options)
+ .upload_blob_from_file(&filename, target.as_ref(), upload_options)
.await?;
- manifest.add_file(target, stats.size, stats.csum, crypto.mode)?;
+ manifest.add_file(&target, stats.size, stats.csum, crypto.mode)?;
}
(BackupSpecificationType::PXAR, false) => {
let target_base = if let Some(base) = target_base.strip_suffix(".pxar") {
@@ -1058,8 +1068,14 @@ async fn create_backup(
let (target, payload_target) =
if detection_mode.is_metadata() || detection_mode.is_data() {
(
- format!("{target_base}.mpxar.{extension}"),
- Some(format!("{target_base}.ppxar.{extension}")),
+ format!("{target_base}.mpxar.{extension}")
+ .as_str()
+ .try_into()?,
+ Some(
+ format!("{target_base}.ppxar.{extension}")
+ .as_str()
+ .try_into()?,
+ ),
)
} else {
(target, None)
@@ -1073,12 +1089,12 @@ async fn create_backup(
catalog_result_rx = Some(catalog_upload_res.result);
}
- log_file("directory", &filename, &target);
+ log_file("directory", &filename, target.as_ref());
if let Some(catalog) = catalog.as_ref() {
catalog
.lock()
.unwrap()
- .start_directory(std::ffi::CString::new(target.as_str())?.as_c_str())?;
+ .start_directory(std::ffi::CString::new(target.as_ref())?.as_c_str())?;
}
let mut previous_ref = None;
@@ -1145,7 +1161,7 @@ async fn create_backup(
&client,
&filename,
&target,
- payload_target.as_deref(),
+ payload_target.as_ref().as_deref(),
chunk_size_opt,
catalog.as_ref().cloned(),
pxar_options,
@@ -1155,20 +1171,20 @@ async fn create_backup(
if let Some(payload_stats) = payload_stats {
manifest.add_file(
- payload_target
+ &payload_target
.ok_or_else(|| format_err!("missing payload target archive"))?,
payload_stats.size,
payload_stats.csum,
crypto.mode,
)?;
}
- manifest.add_file(target, stats.size, stats.csum, crypto.mode)?;
+ manifest.add_file(&target, stats.size, stats.csum, crypto.mode)?;
if let Some(catalog) = catalog.as_ref() {
catalog.lock().unwrap().end_directory()?;
}
}
(BackupSpecificationType::IMAGE, false) => {
- log_file("image", &filename, &target);
+ log_file("image", &filename, target.as_ref());
let upload_options = UploadOptions {
previous_manifest: previous_manifest.clone(),
@@ -1180,7 +1196,7 @@ async fn create_backup(
let stats =
backup_image(&client, &filename, &target, chunk_size_opt, upload_options)
.await?;
- manifest.add_file(target, stats.size, stats.csum, crypto.mode)?;
+ manifest.add_file(&target, stats.size, stats.csum, crypto.mode)?;
}
}
}
@@ -1202,12 +1218,17 @@ async fn create_backup(
if let Some(catalog_result_rx) = catalog_result_rx {
let stats = catalog_result_rx.await??;
- manifest.add_file(CATALOG_NAME.to_owned(), stats.size, stats.csum, crypto.mode)?;
+ manifest.add_file(
+ &CATALOG_NAME.try_into()?,
+ stats.size,
+ stats.csum,
+ crypto.mode,
+ )?;
}
}
if let Some(rsa_encrypted_key) = rsa_encrypted_key {
- let target = ENCRYPTED_KEY_BLOB_NAME;
+ let target: BackupArchiveName = ENCRYPTED_KEY_BLOB_NAME.try_into()?;
log::info!("Upload RSA encoded key to '{}' as {}", repo, target);
let options = UploadOptions {
compress: false,
@@ -1215,9 +1236,9 @@ async fn create_backup(
..UploadOptions::default()
};
let stats = client
- .upload_blob_from_data(rsa_encrypted_key, target, options)
+ .upload_blob_from_data(rsa_encrypted_key, target.as_ref(), options)
.await?;
- manifest.add_file(target.to_string(), stats.size, stats.csum, crypto.mode)?;
+ manifest.add_file(&target, stats.size, stats.csum, crypto.mode)?;
}
// create manifest (index.json)
// manifests are never encrypted, but include a signature
@@ -1246,7 +1267,7 @@ async fn create_backup(
}
async fn prepare_reference(
- target: &str,
+ target: &BackupArchiveName,
manifest: Arc<BackupManifest>,
backup_writer: &BackupWriter,
backup_reader: Arc<BackupReader>,
@@ -1258,7 +1279,11 @@ async fn prepare_reference(
Ok((target, payload_target)) => (target, payload_target),
Err(_) => return Ok(None),
};
- let payload_target = payload_target.unwrap_or_default();
+ let payload_target = if let Some(payload_target) = payload_target {
+ payload_target
+ } else {
+ return Ok(None);
+ };
let metadata_ref_index = if let Ok(index) = backup_reader
.download_dynamic_index(&manifest, &target)
@@ -1307,7 +1332,7 @@ async fn prepare_reference(
Ok(Some(pbs_client::pxar::PxarPrevRef {
accessor,
payload_index: payload_ref_index,
- archive_name: target,
+ archive_name: target.to_string(),
}))
}
@@ -1497,7 +1522,8 @@ async fn restore(
) -> Result<Value, Error> {
let repo = extract_repository_from_value(¶m)?;
- let archive_name = json::required_string_param(¶m, "archive-name")?;
+ let archive_name: BackupArchiveName =
+ json::required_string_param(¶m, "archive-name")?.try_into()?;
let rate = match param["rate"].as_str() {
Some(s) => Some(s.parse::<HumanByte>()?),
@@ -1545,11 +1571,9 @@ async fn restore(
)
.await?;
- let (archive_name, archive_type) = parse_archive_type(archive_name);
-
let (manifest, backup_index_data) = client.download_manifest().await?;
- if archive_name == ENCRYPTED_KEY_BLOB_NAME && crypt_config.is_none() {
+ if archive_name.as_ref() == ENCRYPTED_KEY_BLOB_NAME && crypt_config.is_none() {
log::info!("Restoring encrypted key blob without original key - skipping manifest fingerprint check!")
} else {
if manifest.signature.is_some() {
@@ -1563,7 +1587,7 @@ async fn restore(
manifest.check_fingerprint(crypt_config.as_ref().map(Arc::as_ref))?;
}
- if archive_name == MANIFEST_BLOB_NAME {
+ if archive_name.as_ref() == MANIFEST_BLOB_NAME {
if let Some(target) = target {
replace_file(target, &backup_index_data, CreateOptions::new(), false)?;
} else {
@@ -1577,7 +1601,7 @@ async fn restore(
return Ok(Value::Null);
}
- if archive_type == ArchiveType::Blob {
+ if archive_name.archive_type() == ArchiveType::Blob {
let mut reader = client.download_blob(&manifest, &archive_name).await?;
if let Some(target) = target {
@@ -1596,7 +1620,7 @@ async fn restore(
std::io::copy(&mut reader, &mut writer)
.map_err(|err| format_err!("unable to pipe data - {}", err))?;
}
- } else if archive_type == ArchiveType::DynamicIndex {
+ } else if archive_name.archive_type() == ArchiveType::DynamicIndex {
let (archive_name, payload_archive_name) =
pbs_client::tools::get_pxar_archive_names(&archive_name, &manifest)?;
@@ -1700,7 +1724,7 @@ async fn restore(
std::io::copy(&mut reader, &mut writer)
.map_err(|err| format_err!("unable to pipe data - {}", err))?;
}
- } else if archive_type == ArchiveType::FixedIndex {
+ } else if archive_name.archive_type() == ArchiveType::FixedIndex {
let file_info = manifest.lookup_file_info(&archive_name)?;
let index = client
.download_fixed_index(&manifest, &archive_name)
diff --git a/proxmox-backup-client/src/mount.rs b/proxmox-backup-client/src/mount.rs
index 6c75021ea..ff743cdc6 100644
--- a/proxmox-backup-client/src/mount.rs
+++ b/proxmox-backup-client/src/mount.rs
@@ -17,8 +17,7 @@ use proxmox_router::{cli::*, ApiHandler, ApiMethod, RpcEnvironment};
use proxmox_schema::*;
use proxmox_sortable_macro::sortable;
-use pbs_api_types::BackupNamespace;
-use pbs_client::tools::has_pxar_filename_extension;
+use pbs_api_types::{ArchiveType, BackupArchiveName, BackupNamespace};
use pbs_client::tools::key_source::get_encryption_key_password;
use pbs_client::{BackupReader, RemoteChunkReader};
use pbs_datastore::cached_chunk_reader::CachedChunkReader;
@@ -46,11 +45,7 @@ const API_METHOD_MOUNT: ApiMethod = ApiMethod::new(
false,
&StringSchema::new("Group/Snapshot path.").schema()
),
- (
- "archive-name",
- false,
- &StringSchema::new("Backup archive name.").schema()
- ),
+ ("archive-name", false, &BackupArchiveName::API_SCHEMA),
(
"target",
false,
@@ -86,11 +81,7 @@ WARNING: Only do this with *trusted* backups!",
false,
&StringSchema::new("Group/Snapshot path.").schema()
),
- (
- "archive-name",
- false,
- &StringSchema::new("Backup archive name.").schema()
- ),
+ ("archive-name", false, &BackupArchiveName::API_SCHEMA),
("repository", true, &REPO_URL_SCHEMA),
(
"keyfile",
@@ -207,7 +198,8 @@ fn mount(
async fn mount_do(param: Value, pipe: Option<OwnedFd>) -> Result<Value, Error> {
let repo = extract_repository_from_value(¶m)?;
- let archive_name = required_string_param(¶m, "archive-name")?;
+ let server_archive_name: BackupArchiveName =
+ required_string_param(¶m, "archive-name")?.try_into()?;
let client = connect(&repo)?;
let target = param["target"].as_str();
@@ -229,16 +221,14 @@ async fn mount_do(param: Value, pipe: Option<OwnedFd>) -> Result<Value, Error> {
}
};
- let server_archive_name = if has_pxar_filename_extension(archive_name, false) {
+ if server_archive_name.has_pxar_filename_extension() {
if target.is_none() {
bail!("use the 'mount' command to mount pxar archives");
}
- format!("{}.didx", archive_name)
- } else if archive_name.ends_with(".img") {
+ } else if server_archive_name.ends_with(".img.fidx") {
if target.is_some() {
bail!("use the 'map' command to map drive images");
}
- format!("{}.fidx", archive_name)
} else {
bail!("Can only mount/map pxar archives and drive images.");
};
@@ -290,7 +280,7 @@ async fn mount_do(param: Value, pipe: Option<OwnedFd>) -> Result<Value, Error> {
let mut interrupt =
futures::future::select(interrupt_int.recv().boxed(), interrupt_term.recv().boxed());
- if server_archive_name.ends_with(".didx") {
+ if server_archive_name.archive_type() == ArchiveType::DynamicIndex {
let decoder = helper::get_pxar_fuse_accessor(
&server_archive_name,
client.clone(),
@@ -311,7 +301,7 @@ async fn mount_do(param: Value, pipe: Option<OwnedFd>) -> Result<Value, Error> {
// exit on interrupted
}
}
- } else if server_archive_name.ends_with(".fidx") {
+ } else if server_archive_name.archive_type() == ArchiveType::FixedIndex {
let file_info = manifest.lookup_file_info(&server_archive_name)?;
let index = client
.download_fixed_index(&manifest, &server_archive_name)
@@ -325,7 +315,10 @@ async fn mount_do(param: Value, pipe: Option<OwnedFd>) -> Result<Value, Error> {
);
let reader = CachedChunkReader::new(chunk_reader, index, 8).seekable();
- let name = &format!("{}:{}/{}", repo, path, archive_name);
+ let name = &format!(
+ "{repo}:{path}/{}",
+ server_archive_name.without_type_extension()
+ );
let name_escaped = proxmox_sys::systemd::escape_unit(name, false);
let mut session =
diff --git a/proxmox-file-restore/src/main.rs b/proxmox-file-restore/src/main.rs
index 69d811fc1..0be88491f 100644
--- a/proxmox-file-restore/src/main.rs
+++ b/proxmox-file-restore/src/main.rs
@@ -5,6 +5,7 @@ use std::sync::Arc;
use anyhow::{bail, format_err, Error};
use futures::StreamExt;
+use pbs_api_types::BackupArchiveName;
use serde_json::{json, Value};
use tokio::io::AsyncWriteExt;
@@ -147,9 +148,10 @@ async fn list_files(
Ok(entries)
}
ExtractPath::Pxar(file, mut path) => {
- if let Ok(file_info) = manifest.lookup_file_info(CATALOG_NAME) {
+ let catalog_name: BackupArchiveName = CATALOG_NAME.try_into()?;
+ if let Ok(file_info) = manifest.lookup_file_info(&catalog_name) {
let index = client
- .download_dynamic_index(&manifest, CATALOG_NAME)
+ .download_dynamic_index(&manifest, &catalog_name)
.await?;
let most_used = index.find_most_used_chunks(8);
let chunk_reader = RemoteChunkReader::new(
@@ -170,6 +172,7 @@ async fn list_files(
path = vec![b'/'];
}
+ let file: BackupArchiveName = file.as_str().try_into()?;
let (archive_name, _payload_archive_name) =
pbs_client::tools::get_pxar_archive_names(&file, &manifest)?;
@@ -186,8 +189,12 @@ async fn list_files(
let accessor = Accessor::new(reader, archive_size).await?;
let path = OsStr::from_bytes(&path);
- pbs_client::tools::pxar_metadata_catalog_lookup(accessor, path, Some(&archive_name))
- .await
+ pbs_client::tools::pxar_metadata_catalog_lookup(
+ accessor,
+ path,
+ Some(archive_name.as_ref()),
+ )
+ .await
}
}
ExtractPath::VM(file, path) => {
@@ -354,7 +361,7 @@ async fn list(
}
async fn get_remote_pxar_reader(
- archive_name: &str,
+ archive_name: &BackupArchiveName,
client: Arc<BackupReader>,
manifest: &BackupManifest,
crypt_config: Option<Arc<CryptConfig>>,
@@ -495,10 +502,11 @@ async fn extract(
match path {
ExtractPath::Pxar(archive_name, path) => {
+ let archive_name: BackupArchiveName = archive_name.as_str().try_into()?;
let (archive_name, payload_archive_name) =
pbs_client::tools::get_pxar_archive_names(&archive_name, &manifest)?;
let (reader, archive_size) = get_remote_pxar_reader(
- &archive_name,
+ &archive_name.try_into()?,
client.clone(),
&manifest,
crypt_config.clone(),
diff --git a/src/api2/admin/datastore.rs b/src/api2/admin/datastore.rs
index 82221c503..8bbb469fa 100644
--- a/src/api2/admin/datastore.rs
+++ b/src/api2/admin/datastore.rs
@@ -33,15 +33,15 @@ use pxar::accessor::aio::Accessor;
use pxar::EntryKind;
use pbs_api_types::{
- print_ns_and_snapshot, print_store_and_ns, Authid, BackupContent, BackupNamespace, BackupType,
- Counts, CryptMode, DataStoreConfig, DataStoreListItem, DataStoreStatus,
- GarbageCollectionJobStatus, GroupListItem, JobScheduleStatus, KeepOptions, Operation,
- PruneJobOptions, RRDMode, RRDTimeFrame, SnapshotListItem, SnapshotVerifyState,
- BACKUP_ARCHIVE_NAME_SCHEMA, BACKUP_ID_SCHEMA, BACKUP_NAMESPACE_SCHEMA, BACKUP_TIME_SCHEMA,
- BACKUP_TYPE_SCHEMA, DATASTORE_SCHEMA, IGNORE_VERIFIED_BACKUPS_SCHEMA, MAX_NAMESPACE_DEPTH,
- NS_MAX_DEPTH_SCHEMA, PRIV_DATASTORE_AUDIT, PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_MODIFY,
- PRIV_DATASTORE_PRUNE, PRIV_DATASTORE_READ, PRIV_DATASTORE_VERIFY, UPID, UPID_SCHEMA,
- VERIFICATION_OUTDATED_AFTER_SCHEMA,
+ print_ns_and_snapshot, print_store_and_ns, ArchiveType, Authid, BackupArchiveName,
+ BackupContent, BackupNamespace, BackupType, Counts, CryptMode, DataStoreConfig,
+ DataStoreListItem, DataStoreStatus, GarbageCollectionJobStatus, GroupListItem,
+ JobScheduleStatus, KeepOptions, Operation, PruneJobOptions, RRDMode, RRDTimeFrame,
+ SnapshotListItem, SnapshotVerifyState, BACKUP_ARCHIVE_NAME_SCHEMA, BACKUP_ID_SCHEMA,
+ BACKUP_NAMESPACE_SCHEMA, BACKUP_TIME_SCHEMA, BACKUP_TYPE_SCHEMA, DATASTORE_SCHEMA,
+ IGNORE_VERIFIED_BACKUPS_SCHEMA, MAX_NAMESPACE_DEPTH, NS_MAX_DEPTH_SCHEMA, PRIV_DATASTORE_AUDIT,
+ PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_MODIFY, PRIV_DATASTORE_PRUNE, PRIV_DATASTORE_READ,
+ PRIV_DATASTORE_VERIFY, UPID, UPID_SCHEMA, VERIFICATION_OUTDATED_AFTER_SCHEMA,
};
use pbs_client::pxar::{create_tar, create_zip};
use pbs_config::CachedUserInfo;
@@ -1471,12 +1471,13 @@ pub fn download_file_decoded(
&backup_dir_api.group,
)?;
- let file_name = required_string_param(¶m, "file-name")?.to_owned();
+ let file_name: BackupArchiveName =
+ required_string_param(¶m, "file-name")?.try_into()?;
let backup_dir = datastore.backup_dir(backup_ns.clone(), backup_dir_api.clone())?;
let (manifest, files) = read_backup_index(&backup_dir)?;
for file in files {
- if file.filename == file_name && file.crypt_mode == Some(CryptMode::Encrypt) {
+ if file.filename == file_name.as_ref() && file.crypt_mode == Some(CryptMode::Encrypt) {
bail!("cannot decode '{}' - is encrypted", file_name);
}
}
@@ -1491,12 +1492,10 @@ pub fn download_file_decoded(
let mut path = datastore.base_path();
path.push(backup_dir.relative_path());
- path.push(&file_name);
+ path.push(file_name.as_ref());
- let (_, extension) = file_name.rsplit_once('.').unwrap();
-
- let body = match extension {
- "didx" => {
+ let body = match file_name.archive_type() {
+ ArchiveType::DynamicIndex => {
let index = DynamicIndexReader::open(&path).map_err(|err| {
format_err!("unable to read dynamic index '{:?}' - {}", &path, err)
})?;
@@ -1510,7 +1509,7 @@ pub fn download_file_decoded(
err
}))
}
- "fidx" => {
+ ArchiveType::FixedIndex => {
let index = FixedIndexReader::open(&path).map_err(|err| {
format_err!("unable to read fixed index '{:?}' - {}", &path, err)
})?;
@@ -1529,7 +1528,7 @@ pub fn download_file_decoded(
),
)
}
- "blob" => {
+ ArchiveType::Blob => {
let file = std::fs::File::open(&path)
.map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;
@@ -1544,9 +1543,6 @@ pub fn download_file_decoded(
),
)
}
- extension => {
- bail!("cannot download '{}' files", extension);
- }
};
// fixme: set other headers ?
@@ -1661,7 +1657,7 @@ fn decode_path(path: &str) -> Result<Vec<u8>, Error> {
type: String,
},
"archive-name": {
- schema: BACKUP_ARCHIVE_NAME_SCHEMA,
+ type: BackupArchiveName,
optional: true,
},
},
@@ -1678,12 +1674,12 @@ pub async fn catalog(
ns: Option<BackupNamespace>,
backup_dir: pbs_api_types::BackupDir,
filepath: String,
- archive_name: Option<String>,
+ archive_name: Option<BackupArchiveName>,
rpcenv: &mut dyn RpcEnvironment,
) -> Result<Vec<ArchiveEntry>, Error> {
let file_name = archive_name
.clone()
- .unwrap_or_else(|| CATALOG_NAME.to_string());
+ .unwrap_or_else(|| CATALOG_NAME.try_into().unwrap());
let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
@@ -1703,7 +1699,7 @@ pub async fn catalog(
let (manifest, files) = read_backup_index(&backup_dir)?;
for file in files {
- if file.filename == file_name && file.crypt_mode == Some(CryptMode::Encrypt) {
+ if file.filename == file_name.as_ref() && file.crypt_mode == Some(CryptMode::Encrypt) {
bail!("cannot decode '{file_name}' - is encrypted");
}
}
@@ -1712,7 +1708,7 @@ pub async fn catalog(
tokio::task::spawn_blocking(move || {
let mut path = datastore.base_path();
path.push(backup_dir.relative_path());
- path.push(&file_name);
+ path.push(file_name.as_ref());
let index = DynamicIndexReader::open(&path)
.map_err(|err| format_err!("unable to read dynamic index '{path:?}' - {err}"))?;
@@ -1762,7 +1758,7 @@ pub const API_METHOD_PXAR_FILE_DOWNLOAD: ApiMethod = ApiMethod::new(
("backup-time", false, &BACKUP_TIME_SCHEMA),
("filepath", false, &StringSchema::new("Base64 encoded path").schema()),
("tar", true, &BooleanSchema::new("Download as .tar.zst").schema()),
- ("archive-name", true, &BACKUP_ARCHIVE_NAME_SCHEMA),
+ ("archive-name", true, &BackupArchiveName::API_SCHEMA),
]),
)
).access(
@@ -1777,11 +1773,11 @@ fn get_local_pxar_reader(
datastore: Arc<DataStore>,
manifest: &BackupManifest,
backup_dir: &BackupDir,
- pxar_name: &str,
+ pxar_name: &BackupArchiveName,
) -> Result<(LocalDynamicReadAt<LocalChunkReader>, u64), Error> {
let mut path = datastore.base_path();
path.push(backup_dir.relative_path());
- path.push(pxar_name);
+ path.push(pxar_name.as_ref());
let index = DynamicIndexReader::open(&path)
.map_err(|err| format_err!("unable to read dynamic index '{:?}' - {}", &path, err))?;
@@ -1839,16 +1835,16 @@ pub fn pxar_file_download(
let file_path = split.next().unwrap_or(b"/");
(pxar_name.to_owned(), file_path.to_owned())
};
- let pxar_name = std::str::from_utf8(&pxar_name)?;
+ let pxar_name: BackupArchiveName = std::str::from_utf8(&pxar_name)?.try_into()?;
let (manifest, files) = read_backup_index(&backup_dir)?;
for file in files {
- if file.filename == pxar_name && file.crypt_mode == Some(CryptMode::Encrypt) {
+ if file.filename == pxar_name.as_ref() && file.crypt_mode == Some(CryptMode::Encrypt) {
bail!("cannot decode '{}' - is encrypted", pxar_name);
}
}
let (pxar_name, payload_archive_name) =
- pbs_client::tools::get_pxar_archive_names(pxar_name, &manifest)?;
+ pbs_client::tools::get_pxar_archive_names(&pxar_name, &manifest)?;
let (reader, archive_size) =
get_local_pxar_reader(datastore.clone(), &manifest, &backup_dir, &pxar_name)?;
diff --git a/src/bin/proxmox_backup_debug/diff.rs b/src/bin/proxmox_backup_debug/diff.rs
index e6767c17c..4c0485e51 100644
--- a/src/bin/proxmox_backup_debug/diff.rs
+++ b/src/bin/proxmox_backup_debug/diff.rs
@@ -13,7 +13,7 @@ use proxmox_human_byte::HumanByte;
use proxmox_router::cli::{CliCommand, CliCommandMap, CommandLineInterface};
use proxmox_schema::api;
-use pbs_api_types::{BackupNamespace, BackupPart};
+use pbs_api_types::{BackupArchiveName, BackupNamespace, BackupPart};
use pbs_client::tools::key_source::{
crypto_parameters, format_key_source, get_encryption_key_password, KEYFD_SCHEMA,
};
@@ -70,8 +70,7 @@ pub fn diff_commands() -> CommandLineInterface {
type: String,
},
"archive-name": {
- description: "Name of the .pxar archive",
- type: String,
+ type: BackupArchiveName,
},
"repository": {
optional: true,
@@ -106,7 +105,7 @@ pub fn diff_commands() -> CommandLineInterface {
async fn diff_archive_cmd(
prev_snapshot: String,
snapshot: String,
- archive_name: String,
+ archive_name: BackupArchiveName,
compare_content: bool,
color: Option<ColorMode>,
ns: Option<BackupNamespace>,
@@ -140,12 +139,11 @@ async fn diff_archive_cmd(
let output_params = OutputParams { color };
- if archive_name.ends_with(".pxar") {
- let file_name = format!("{}.didx", archive_name);
+ if archive_name.ends_with(".pxar.didx") {
diff_archive(
&prev_snapshot,
&snapshot,
- &file_name,
+ &archive_name,
&repo_params,
compare_content,
&output_params,
@@ -161,7 +159,7 @@ async fn diff_archive_cmd(
async fn diff_archive(
snapshot_a: &str,
snapshot_b: &str,
- file_name: &str,
+ file_name: &BackupArchiveName,
repo_params: &RepoParams,
compare_contents: bool,
output_params: &OutputParams,
@@ -249,7 +247,7 @@ struct OutputParams {
async fn open_dynamic_index(
snapshot: &str,
- archive_name: &str,
+ archive_name: &BackupArchiveName,
params: &RepoParams,
) -> Result<(DynamicIndexReader, Accessor), Error> {
let backup_reader = create_backup_reader(snapshot, params).await?;
diff --git a/src/server/pull.rs b/src/server/pull.rs
index 2ef916c3c..8d026f9cf 100644
--- a/src/server/pull.rs
+++ b/src/server/pull.rs
@@ -16,9 +16,10 @@ use proxmox_sys::{task_log, task_warn};
use serde_json::json;
use pbs_api_types::{
- print_store_and_ns, ArchiveType, Authid, BackupDir, BackupGroup, BackupNamespace, CryptMode,
- GroupFilter, GroupListItem, Operation, RateLimitConfig, Remote, SnapshotListItem,
- MAX_NAMESPACE_DEPTH, PRIV_DATASTORE_AUDIT, PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_READ,
+ print_store_and_ns, ArchiveType, Authid, BackupArchiveName, BackupDir, BackupGroup,
+ BackupNamespace, CryptMode, GroupFilter, GroupListItem, Operation, RateLimitConfig, Remote,
+ SnapshotListItem, MAX_NAMESPACE_DEPTH, PRIV_DATASTORE_AUDIT, PRIV_DATASTORE_BACKUP,
+ PRIV_DATASTORE_READ,
};
use pbs_client::{BackupReader, BackupRepository, HttpClient, RemoteChunkReader};
use pbs_config::CachedUserInfo;
@@ -872,11 +873,12 @@ async fn pull_snapshot<'a>(
path.push(&item.filename);
if path.exists() {
- match ArchiveType::from_path(&item.filename)? {
+ let filename: BackupArchiveName = item.filename.as_str().try_into()?;
+ match filename.archive_type() {
ArchiveType::DynamicIndex => {
let index = DynamicIndexReader::open(&path)?;
let (csum, size) = index.compute_csum();
- match manifest.verify_file(&item.filename, &csum, size) {
+ match manifest.verify_file(&filename, &csum, size) {
Ok(_) => continue,
Err(err) => {
task_log!(worker, "detected changed file {:?} - {}", path, err);
@@ -886,7 +888,7 @@ async fn pull_snapshot<'a>(
ArchiveType::FixedIndex => {
let index = FixedIndexReader::open(&path)?;
let (csum, size) = index.compute_csum();
- match manifest.verify_file(&item.filename, &csum, size) {
+ match manifest.verify_file(&filename, &csum, size) {
Ok(_) => continue,
Err(err) => {
task_log!(worker, "detected changed file {:?} - {}", path, err);
@@ -896,7 +898,7 @@ async fn pull_snapshot<'a>(
ArchiveType::Blob => {
let mut tmpfile = std::fs::File::open(&path)?;
let (csum, size) = sha256(&mut tmpfile)?;
- match manifest.verify_file(&item.filename, &csum, size) {
+ match manifest.verify_file(&filename, &csum, size) {
Ok(_) => continue,
Err(err) => {
task_log!(worker, "detected changed file {:?} - {}", path, err);
--
2.39.2