* [pbs-devel] [PATCH proxmox v3 1/1] router: add init_cli_logger helper function
2022-06-15 8:19 [pbs-devel] [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Hannes Laimer
@ 2022-06-15 8:19 ` Hannes Laimer
2022-06-21 8:40 ` [pbs-devel] applied: " Wolfgang Bumiller
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 01/10] bins: init cli logger Hannes Laimer
` (10 subsequent siblings)
11 siblings, 1 reply; 14+ messages in thread
From: Hannes Laimer @ 2022-06-15 8:19 UTC (permalink / raw)
To: pbs-devel
Signed-off-by: Hannes Laimer <h.laimer@proxmox.com>
---
proxmox-router/Cargo.toml | 1 +
proxmox-router/src/cli/mod.rs | 11 +++++++++++
2 files changed, 12 insertions(+)
diff --git a/proxmox-router/Cargo.toml b/proxmox-router/Cargo.toml
index 576a29e..1e16e23 100644
--- a/proxmox-router/Cargo.toml
+++ b/proxmox-router/Cargo.toml
@@ -10,6 +10,7 @@ exclude = [ "debian" ]
[dependencies]
anyhow = "1.0"
+env_logger = "0.9"
http = "0.2"
hyper = { version = "0.14", features = [ "full" ] }
nix = "0.24.1"
diff --git a/proxmox-router/src/cli/mod.rs b/proxmox-router/src/cli/mod.rs
index cc72402..41944dc 100644
--- a/proxmox-router/src/cli/mod.rs
+++ b/proxmox-router/src/cli/mod.rs
@@ -49,6 +49,17 @@ pub use readline::*;
/// return a list of all possible values.
pub type CompletionFunction = fn(&str, &HashMap<String, String>) -> Vec<String>;
+/// Initialize default logger for CLI binaries
+pub fn init_cli_logger(env_var_name: &str, default_log_level: &str) {
+ env_logger::Builder::from_env(env_logger::Env::new().filter_or(env_var_name, default_log_level))
+ .write_style(env_logger::WriteStyle::Never)
+ .format_level(false)
+ .format_module_path(false)
+ .format_target(false)
+ .format_timestamp(None)
+ .init();
+}
+
/// Define a simple CLI command.
pub struct CliCommand {
/// The Schema definition.
--
2.30.2
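For context, a minimal usage sketch of the new helper (not part of the patch itself); the environment variable name and default level mirror the call sites added later in this series:

    use proxmox_router::cli::init_cli_logger;

    fn main() {
        // Read PBS_LOG if set (e.g. PBS_LOG=debug), otherwise default to "info".
        init_cli_logger("PBS_LOG", "info");

        log::info!("shown at the default level");
        log::debug!("only shown when PBS_LOG raises the level to debug or trace");
    }

With the builder options above (no level, target, module path or timestamp, write style Never), messages come out as bare text on stderr, which keeps the output close to the println!/eprintln! calls this series replaces.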
* [pbs-devel] applied: [PATCH proxmox v3 1/1] router: add init_cli_logger helper function
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox v3 1/1] router: add init_cli_logger helper function Hannes Laimer
@ 2022-06-21 8:40 ` Wolfgang Bumiller
0 siblings, 0 replies; 14+ messages in thread
From: Wolfgang Bumiller @ 2022-06-21 8:40 UTC (permalink / raw)
To: Hannes Laimer; +Cc: pbs-devel
applied & bumped proxmox-router
On Wed, Jun 15, 2022 at 08:19:47AM +0000, Hannes Laimer wrote:
> Signed-off-by: Hannes Laimer <h.laimer@proxmox.com>
> ---
> proxmox-router/Cargo.toml | 1 +
> proxmox-router/src/cli/mod.rs | 11 +++++++++++
> 2 files changed, 12 insertions(+)
>
> diff --git a/proxmox-router/Cargo.toml b/proxmox-router/Cargo.toml
> index 576a29e..1e16e23 100644
> --- a/proxmox-router/Cargo.toml
> +++ b/proxmox-router/Cargo.toml
> @@ -10,6 +10,7 @@ exclude = [ "debian" ]
>
> [dependencies]
> anyhow = "1.0"
> +env_logger = "0.9"
> http = "0.2"
> hyper = { version = "0.14", features = [ "full" ] }
> nix = "0.24.1"
> diff --git a/proxmox-router/src/cli/mod.rs b/proxmox-router/src/cli/mod.rs
> index cc72402..41944dc 100644
> --- a/proxmox-router/src/cli/mod.rs
> +++ b/proxmox-router/src/cli/mod.rs
> @@ -49,6 +49,17 @@ pub use readline::*;
> /// return a list of all possible values.
> pub type CompletionFunction = fn(&str, &HashMap<String, String>) -> Vec<String>;
>
> +/// Initialize default logger for CLI binaries
> +pub fn init_cli_logger(env_var_name: &str, default_log_level: &str) {
> + env_logger::Builder::from_env(env_logger::Env::new().filter_or(env_var_name, default_log_level))
> + .write_style(env_logger::WriteStyle::Never)
> + .format_level(false)
> + .format_module_path(false)
> + .format_target(false)
> + .format_timestamp(None)
> + .init();
> +}
> +
> /// Define a simple CLI command.
> pub struct CliCommand {
> /// The Schema definition.
> --
> 2.30.2
* [pbs-devel] [PATCH proxmox-backup v3 01/10] bins: init cli logger
2022-06-15 8:19 [pbs-devel] [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Hannes Laimer
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox v3 1/1] router: add init_cli_logger helper function Hannes Laimer
@ 2022-06-15 8:19 ` Hannes Laimer
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 02/10] pbs-client: replace print with log macro Hannes Laimer
` (9 subsequent siblings)
11 siblings, 0 replies; 14+ messages in thread
From: Hannes Laimer @ 2022-06-15 8:19 UTC (permalink / raw)
To: pbs-devel
Signed-off-by: Hannes Laimer <h.laimer@proxmox.com>
---
pbs-tape/src/bin/pmt.rs | 2 ++
pbs-tape/src/bin/pmtx.rs | 2 ++
proxmox-backup-client/src/main.rs | 1 +
proxmox-file-restore/src/main.rs | 6 ++++--
pxar-bin/src/main.rs | 2 ++
src/bin/proxmox-backup-debug.rs | 4 +++-
src/bin/proxmox-backup-manager.rs | 2 ++
src/bin/proxmox-tape.rs | 2 ++
src/bin/sg-tape-cmd.rs | 2 ++
9 files changed, 20 insertions(+), 3 deletions(-)
diff --git a/pbs-tape/src/bin/pmt.rs b/pbs-tape/src/bin/pmt.rs
index 5f4f6074..75e8e93a 100644
--- a/pbs-tape/src/bin/pmt.rs
+++ b/pbs-tape/src/bin/pmt.rs
@@ -801,6 +801,8 @@ fn options(
}
fn main() -> Result<(), Error> {
+ init_cli_logger("PBS_LOG", "info");
+
let uid = nix::unistd::Uid::current();
let username = match nix::unistd::User::from_uid(uid)? {
diff --git a/pbs-tape/src/bin/pmtx.rs b/pbs-tape/src/bin/pmtx.rs
index cea62c74..1d58d737 100644
--- a/pbs-tape/src/bin/pmtx.rs
+++ b/pbs-tape/src/bin/pmtx.rs
@@ -387,6 +387,8 @@ fn scan(param: Value) -> Result<(), Error> {
}
fn main() -> Result<(), Error> {
+ init_cli_logger("PBS_LOG", "info");
+
let uid = nix::unistd::Uid::current();
let username = match nix::unistd::User::from_uid(uid)? {
diff --git a/proxmox-backup-client/src/main.rs b/proxmox-backup-client/src/main.rs
index 6d0e533a..0d6558e7 100644
--- a/proxmox-backup-client/src/main.rs
+++ b/proxmox-backup-client/src/main.rs
@@ -1616,6 +1616,7 @@ impl ReadAt for BufferedDynamicReadAt {
fn main() {
pbs_tools::setup_libc_malloc_opts();
+ init_cli_logger("PBS_LOG", "info");
let backup_cmd_def = CliCommand::new(&API_METHOD_CREATE_BACKUP)
.arg_param(&["backupspec"])
diff --git a/proxmox-file-restore/src/main.rs b/proxmox-file-restore/src/main.rs
index 3420ea8e..5795b994 100644
--- a/proxmox-file-restore/src/main.rs
+++ b/proxmox-file-restore/src/main.rs
@@ -8,8 +8,8 @@ use serde_json::{json, Value};
use proxmox_router::cli::{
complete_file_name, default_table_format_options, format_and_print_result_full,
- get_output_format, run_cli_command, CliCommand, CliCommandMap, CliEnvironment, ColumnConfig,
- OUTPUT_FORMAT,
+ get_output_format, init_cli_logger, run_cli_command, CliCommand, CliCommandMap, CliEnvironment,
+ ColumnConfig, OUTPUT_FORMAT,
};
use proxmox_router::{http_err, HttpError};
use proxmox_schema::api;
@@ -530,6 +530,8 @@ where
}
fn main() {
+ init_cli_logger("PBS_LOG", "info");
+
let list_cmd_def = CliCommand::new(&API_METHOD_LIST)
.arg_param(&["snapshot", "path"])
.completion_cb("repository", complete_repository)
diff --git a/pxar-bin/src/main.rs b/pxar-bin/src/main.rs
index 78ec31c8..e3eba213 100644
--- a/pxar-bin/src/main.rs
+++ b/pxar-bin/src/main.rs
@@ -442,6 +442,8 @@ fn dump_archive(archive: String, verbose: bool) -> Result<(), Error> {
}
fn main() {
+ init_cli_logger("PXAR_LOG", "info");
+
let cmd_def = CliCommandMap::new()
.insert(
"create",
diff --git a/src/bin/proxmox-backup-debug.rs b/src/bin/proxmox-backup-debug.rs
index e467c956..c8ea0539 100644
--- a/src/bin/proxmox-backup-debug.rs
+++ b/src/bin/proxmox-backup-debug.rs
@@ -1,5 +1,5 @@
use proxmox_router::{
- cli::{run_cli_command, CliCommandMap, CliEnvironment},
+ cli::{init_cli_logger, run_cli_command, CliCommandMap, CliEnvironment},
RpcEnvironment,
};
@@ -7,6 +7,8 @@ mod proxmox_backup_debug;
use proxmox_backup_debug::*;
fn main() {
+ init_cli_logger("PBS_LOG", "info");
+
let cmd_def = CliCommandMap::new()
.insert("inspect", inspect::inspect_commands())
.insert("recover", recover::recover_commands())
diff --git a/src/bin/proxmox-backup-manager.rs b/src/bin/proxmox-backup-manager.rs
index 89598c90..0ca3e990 100644
--- a/src/bin/proxmox-backup-manager.rs
+++ b/src/bin/proxmox-backup-manager.rs
@@ -413,6 +413,8 @@ async fn get_versions(verbose: bool, param: Value) -> Result<Value, Error> {
}
async fn run() -> Result<(), Error> {
+ init_cli_logger("PBS_LOG", "info");
+
let cmd_def = CliCommandMap::new()
.insert("acl", acl_commands())
.insert("datastore", datastore_commands())
diff --git a/src/bin/proxmox-tape.rs b/src/bin/proxmox-tape.rs
index 94114a6e..9cb3978f 100644
--- a/src/bin/proxmox-tape.rs
+++ b/src/bin/proxmox-tape.rs
@@ -989,6 +989,8 @@ async fn catalog_media(mut param: Value) -> Result<(), Error> {
}
fn main() {
+ init_cli_logger("PBS_LOG", "info");
+
let cmd_def = CliCommandMap::new()
.insert(
"backup",
diff --git a/src/bin/sg-tape-cmd.rs b/src/bin/sg-tape-cmd.rs
index fe301ba2..402fe9cb 100644
--- a/src/bin/sg-tape-cmd.rs
+++ b/src/bin/sg-tape-cmd.rs
@@ -123,6 +123,8 @@ fn set_encryption(
}
fn main() -> Result<(), Error> {
+ init_cli_logger("PBS_LOG", "info");
+
// check if we are user root or backup
let backup_uid = pbs_config::backup_user()?.uid;
let backup_gid = pbs_config::backup_group()?.gid;
--
2.30.2
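A note on runtime behaviour, assuming env_logger's standard filter syntax: the variable consulted by init_cli_logger accepts either a plain level or a list of per-target directives, so verbosity can be raised for a single crate without a --verbose flag. A hypothetical sketch:

    // Sketch only: setting the variable programmatically here is equivalent to
    // exporting it in the shell before starting the binary.
    std::env::set_var("PBS_LOG", "info,pbs_client=debug"); // default info, debug for pbs-client
    init_cli_logger("PBS_LOG", "info");

pxar-bin consults PXAR_LOG instead of PBS_LOG, as in the hunk above.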
* [pbs-devel] [PATCH proxmox-backup v3 02/10] pbs-client: replace print with log macro
2022-06-15 8:19 [pbs-devel] [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Hannes Laimer
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox v3 1/1] router: add init_cli_logger helper function Hannes Laimer
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 01/10] bins: init cli logger Hannes Laimer
@ 2022-06-15 8:19 ` Hannes Laimer
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 03/10] pbs-datastore: " Hannes Laimer
` (8 subsequent siblings)
11 siblings, 0 replies; 14+ messages in thread
From: Hannes Laimer @ 2022-06-15 8:19 UTC (permalink / raw)
To: pbs-devel
Signed-off-by: Hannes Laimer <h.laimer@proxmox.com>
---
pbs-client/Cargo.toml | 1 +
pbs-client/src/backup_writer.rs | 103 +++++++++++----------------
pbs-client/src/catalog_shell.rs | 4 +-
pbs-client/src/http_client.rs | 14 ++--
pbs-client/src/pxar/create.rs | 2 -
 pbs-client/src/pxar/extract.rs | 70 +++++-------------
pbs-client/src/pxar/fuse.rs | 18 ++---
pbs-client/src/pxar/metadata.rs | 4 +-
pbs-client/src/pxar_backup_stream.rs | 6 +-
pbs-client/src/task_log.rs | 8 +--
pbs-client/src/tools/key_source.rs | 2 +-
11 files changed, 87 insertions(+), 145 deletions(-)
diff --git a/pbs-client/Cargo.toml b/pbs-client/Cargo.toml
index 70d04caf..e2226db9 100644
--- a/pbs-client/Cargo.toml
+++ b/pbs-client/Cargo.toml
@@ -17,6 +17,7 @@ hyper = { version = "0.14", features = [ "full" ] }
lazy_static = "1.4"
libc = "0.2"
nix = "0.24"
+log = "0.4"
openssl = "0.10"
percent-encoding = "2.1"
pin-project-lite = "0.2"
diff --git a/pbs-client/src/backup_writer.rs b/pbs-client/src/backup_writer.rs
index a976bd43..6cf3543c 100644
--- a/pbs-client/src/backup_writer.rs
+++ b/pbs-client/src/backup_writer.rs
@@ -28,7 +28,6 @@ use super::{H2Client, HttpClient};
pub struct BackupWriter {
h2: H2Client,
abort: AbortHandle,
- verbose: bool,
crypt_config: Option<Arc<CryptConfig>>,
}
@@ -66,17 +65,11 @@ type UploadQueueSender = mpsc::Sender<(MergedChunkInfo, Option<h2::client::Respo
type UploadResultReceiver = oneshot::Receiver<Result<(), Error>>;
impl BackupWriter {
- fn new(
- h2: H2Client,
- abort: AbortHandle,
- crypt_config: Option<Arc<CryptConfig>>,
- verbose: bool,
- ) -> Arc<Self> {
+ fn new(h2: H2Client, abort: AbortHandle, crypt_config: Option<Arc<CryptConfig>>) -> Arc<Self> {
Arc::new(Self {
h2,
abort,
crypt_config,
- verbose,
})
}
@@ -117,7 +110,7 @@ impl BackupWriter {
.start_h2_connection(req, String::from(PROXMOX_BACKUP_PROTOCOL_ID_V1!()))
.await?;
- Ok(BackupWriter::new(h2, abort, crypt_config, debug))
+ Ok(BackupWriter::new(h2, abort, crypt_config))
}
pub async fn get(&self, path: &str, param: Option<Value>) -> Result<Value, Error> {
@@ -338,23 +331,23 @@ impl BackupWriter {
None
},
options.compress,
- self.verbose,
)
.await?;
let size_dirty = upload_stats.size - upload_stats.size_reused;
let size: HumanByte = upload_stats.size.into();
- let archive = if self.verbose {
+ let archive = if log::log_enabled!(log::Level::Debug) {
archive_name
} else {
pbs_tools::format::strip_server_file_extension(archive_name)
};
+
if archive_name != CATALOG_NAME {
let speed: HumanByte =
((size_dirty * 1_000_000) / (upload_stats.duration.as_micros() as usize)).into();
let size_dirty: HumanByte = size_dirty.into();
let size_compressed: HumanByte = upload_stats.size_compressed.into();
- println!(
+ log::info!(
"{}: had to backup {} of {} (compressed {}) in {:.2}s",
archive,
size_dirty,
@@ -362,30 +355,34 @@ impl BackupWriter {
size_compressed,
upload_stats.duration.as_secs_f64()
);
- println!("{}: average backup speed: {}/s", archive, speed);
+ log::info!("{}: average backup speed: {}/s", archive, speed);
} else {
- println!("Uploaded backup catalog ({})", size);
+ log::info!("Uploaded backup catalog ({})", size);
}
if upload_stats.size_reused > 0 && upload_stats.size > 1024 * 1024 {
let reused_percent = upload_stats.size_reused as f64 * 100. / upload_stats.size as f64;
let reused: HumanByte = upload_stats.size_reused.into();
- println!(
+ log::info!(
"{}: backup was done incrementally, reused {} ({:.1}%)",
- archive, reused, reused_percent
+ archive,
+ reused,
+ reused_percent
);
}
- if self.verbose && upload_stats.chunk_count > 0 {
- println!(
+ if log::log_enabled!(log::Level::Debug) && upload_stats.chunk_count > 0 {
+ log::debug!(
"{}: Reused {} from {} chunks.",
- archive, upload_stats.chunk_reused, upload_stats.chunk_count
+ archive,
+ upload_stats.chunk_reused,
+ upload_stats.chunk_count
);
- println!(
+ log::debug!(
"{}: Average chunk size was {}.",
archive,
HumanByte::from(upload_stats.size / upload_stats.chunk_count)
);
- println!(
+ log::debug!(
"{}: Average time per request: {} microseconds.",
archive,
(upload_stats.duration.as_micros()) / (upload_stats.chunk_count as u128)
@@ -405,9 +402,7 @@ impl BackupWriter {
})
}
- fn response_queue(
- verbose: bool,
- ) -> (
+ fn response_queue() -> (
mpsc::Sender<h2::client::ResponseFuture>,
oneshot::Receiver<Result<(), Error>>,
) {
@@ -435,11 +430,7 @@ impl BackupWriter {
response
.map_err(Error::from)
.and_then(H2Client::h2api_response)
- .map_ok(move |result| {
- if verbose {
- println!("RESPONSE: {:?}", result)
- }
- })
+ .map_ok(move |result| log::debug!("RESPONSE: {:?}", result))
.map_err(|err| format_err!("pipelined request failed: {}", err))
})
.map(|result| {
@@ -454,7 +445,6 @@ impl BackupWriter {
h2: H2Client,
wid: u64,
path: String,
- verbose: bool,
) -> (UploadQueueSender, UploadResultReceiver) {
let (verify_queue_tx, verify_queue_rx) = mpsc::channel(64);
let (verify_result_tx, verify_result_rx) = oneshot::channel();
@@ -491,7 +481,7 @@ impl BackupWriter {
digest_list.push(hex::encode(&digest));
offset_list.push(offset);
}
- if verbose { println!("append chunks list len ({})", digest_list.len()); }
+ log::debug!("append chunks list len ({})", digest_list.len());
let param = json!({ "wid": wid, "digest-list": digest_list, "offset-list": offset_list });
let request = H2Client::request_builder("localhost", "PUT", &path, None, Some("application/json")).unwrap();
let param_data = bytes::Bytes::from(param.to_string().into_bytes());
@@ -547,13 +537,11 @@ impl BackupWriter {
known_chunks.insert(*index.index_digest(i).unwrap());
}
- if self.verbose {
- println!(
- "{}: known chunks list length is {}",
- archive_name,
- index.index_count()
- );
- }
+ log::debug!(
+ "{}: known chunks list length is {}",
+ archive_name,
+ index.index_count()
+ );
Ok(index)
}
@@ -588,13 +576,11 @@ impl BackupWriter {
known_chunks.insert(*index.index_digest(i).unwrap());
}
- if self.verbose {
- println!(
- "{}: known chunks list length is {}",
- archive_name,
- index.index_count()
- );
- }
+ log::debug!(
+ "{}: known chunks list length is {}",
+ archive_name,
+ index.index_count()
+ );
Ok(index)
}
@@ -641,7 +627,6 @@ impl BackupWriter {
known_chunks: Arc<Mutex<HashSet<[u8; 32]>>>,
crypt_config: Option<Arc<CryptConfig>>,
compress: bool,
- verbose: bool,
) -> impl Future<Output = Result<UploadStats, Error>> {
let total_chunks = Arc::new(AtomicUsize::new(0));
let total_chunks2 = total_chunks.clone();
@@ -660,7 +645,7 @@ impl BackupWriter {
let is_fixed_chunk_size = prefix == "fixed";
let (upload_queue, upload_result) =
- Self::append_chunk_queue(h2.clone(), wid, append_chunk_path, verbose);
+ Self::append_chunk_queue(h2.clone(), wid, append_chunk_path);
let start_time = std::time::Instant::now();
@@ -721,12 +706,12 @@ impl BackupWriter {
let digest = chunk_info.digest;
let digest_str = hex::encode(&digest);
- /* too verbose, needs finer verbosity setting granularity
- if verbose {
- println!("upload new chunk {} ({} bytes, offset {})", digest_str,
- chunk_info.chunk_len, offset);
- }
- */
+ log::trace!(
+ "upload new chunk {} ({} bytes, offset {})",
+ digest_str,
+ chunk_info.chunk_len,
+ offset
+ );
let chunk_data = chunk_info.chunk.into_inner();
let param = json!({
@@ -793,7 +778,7 @@ impl BackupWriter {
}
/// Upload speed test - prints result to stderr
- pub async fn upload_speedtest(&self, verbose: bool) -> Result<f64, Error> {
+ pub async fn upload_speedtest(&self) -> Result<f64, Error> {
let mut data = vec![];
// generate pseudo random byte sequence
for i in 0..1024 * 1024 {
@@ -807,7 +792,7 @@ impl BackupWriter {
let mut repeat = 0;
- let (upload_queue, upload_result) = Self::response_queue(verbose);
+ let (upload_queue, upload_result) = Self::response_queue();
let start_time = std::time::Instant::now();
@@ -817,9 +802,7 @@ impl BackupWriter {
break;
}
- if verbose {
- eprintln!("send test data ({} bytes)", data.len());
- }
+ log::debug!("send test data ({} bytes)", data.len());
let request =
H2Client::request_builder("localhost", "POST", "speedtest", None, None).unwrap();
let request_future = self
@@ -834,13 +817,13 @@ impl BackupWriter {
let _ = upload_result.await?;
- eprintln!(
+ log::info!(
"Uploaded {} chunks in {} seconds.",
repeat,
start_time.elapsed().as_secs()
);
let speed = ((item_len * (repeat as usize)) as f64) / start_time.elapsed().as_secs_f64();
- eprintln!(
+ log::info!(
"Time per request: {} microseconds.",
(start_time.elapsed().as_micros()) / (repeat as u128)
);
diff --git a/pbs-client/src/catalog_shell.rs b/pbs-client/src/catalog_shell.rs
index 85508891..c401e434 100644
--- a/pbs-client/src/catalog_shell.rs
+++ b/pbs-client/src/catalog_shell.rs
@@ -101,7 +101,7 @@ fn complete_path(complete_me: &str, _map: &HashMap<String, String>) -> Vec<Strin
match shell.complete_path(complete_me) {
Ok(list) => list,
Err(err) => {
- eprintln!("error during completion: {}", err);
+ log::error!("error during completion: {}", err);
Vec::new()
}
}
@@ -456,7 +456,7 @@ impl Shell {
let args = match cli::shellword_split(&line) {
Ok(args) => args,
Err(err) => {
- println!("Error: {}", err);
+ log::error!("Error: {}", err);
continue;
}
};
diff --git a/pbs-client/src/http_client.rs b/pbs-client/src/http_client.rs
index c9104a03..6d5d9ad5 100644
--- a/pbs-client/src/http_client.rs
+++ b/pbs-client/src/http_client.rs
@@ -347,14 +347,14 @@ impl HttpClient {
if let Err(err) =
store_fingerprint(prefix.as_ref().unwrap(), &server, &fingerprint)
{
- eprintln!("{}", err);
+ log::error!("{}", err);
}
}
*verified_fingerprint.lock().unwrap() = Some(fingerprint);
true
}
Err(err) => {
- eprintln!("certificate validation failed - {}", err);
+ log::error!("certificate validation failed - {}", err);
false
}
},
@@ -457,7 +457,7 @@ impl HttpClient {
*auth2.write().unwrap() = auth;
}
Err(err) => {
- eprintln!("re-authentication failed: {}", err);
+ log::error!("re-authentication failed: {}", err);
return;
}
}
@@ -580,14 +580,14 @@ impl HttpClient {
if expected_fingerprint == fp_string {
return Ok(Some(fp_string));
} else {
- eprintln!("WARNING: certificate fingerprint does not match expected fingerprint!");
- eprintln!("expected: {}", expected_fingerprint);
+ log::warn!("WARNING: certificate fingerprint does not match expected fingerprint!");
+ log::warn!("expected: {}", expected_fingerprint);
}
}
// If we're on a TTY, query the user
if interactive && tty::stdin_isatty() {
- eprintln!("fingerprint: {}", fp_string);
+ log::info!("fingerprint: {}", fp_string);
loop {
eprint!("Are you sure you want to continue connecting? (y/n): ");
let _ = std::io::stdout().flush();
@@ -775,7 +775,7 @@ impl HttpClient {
.handshake(upgraded)
.await?;
- let connection = connection.map_err(|_| eprintln!("HTTP/2.0 connection failed"));
+ let connection = connection.map_err(|_| log::error!("HTTP/2.0 connection failed"));
let (connection, abort) = futures::future::abortable(connection);
// A cancellable future returns an Option which is None when cancelled and
diff --git a/pbs-client/src/pxar/create.rs b/pbs-client/src/pxar/create.rs
index d58bab20..5df324cc 100644
--- a/pbs-client/src/pxar/create.rs
+++ b/pbs-client/src/pxar/create.rs
@@ -43,8 +43,6 @@ pub struct PxarCreateOptions {
pub entries_max: usize,
/// Skip lost+found directory
pub skip_lost_and_found: bool,
- /// Verbose output
- pub verbose: bool,
}
fn detect_fs_type(fd: RawFd) -> Result<i64, Error> {
diff --git a/pbs-client/src/pxar/extract.rs b/pbs-client/src/pxar/extract.rs
index 9a8854c0..95530e48 100644
--- a/pbs-client/src/pxar/extract.rs
+++ b/pbs-client/src/pxar/extract.rs
@@ -532,12 +532,7 @@ where
}
/// Creates a tar file from `path` and writes it into `output`
-pub async fn create_tar<T, W, P>(
- output: W,
- accessor: Accessor<T>,
- path: P,
- verbose: bool,
-) -> Result<(), Error>
+pub async fn create_tar<T, W, P>(output: W, accessor: Accessor<T>, path: P) -> Result<(), Error>
where
T: Clone + pxar::accessor::ReadAt + Unpin + Send + Sync + 'static,
W: tokio::io::AsyncWrite + Unpin + Send + 'static,
@@ -596,9 +591,7 @@ where
let metadata = realfile.entry().metadata();
let realpath = Path::new(link);
- if verbose {
- eprintln!("adding '{}' to tar", path.display());
- }
+ log::debug!("adding '{}' to tar", path.display());
let stripped_path = match realpath.strip_prefix(prefix) {
Ok(path) => path,
@@ -632,9 +625,7 @@ where
}
}
EntryKind::Symlink(link) if !link.data.is_empty() => {
- if verbose {
- eprintln!("adding '{}' to tar", path.display());
- }
+ log::debug!("adding '{}' to tar", path.display());
let realpath = Path::new(link);
let mut header = tar::Header::new_gnu();
header.set_entry_type(tar::EntryType::Symlink);
@@ -646,9 +637,7 @@ where
.map_err(|err| format_err!("could not send symlink entry: {}", err))?;
}
EntryKind::Fifo => {
- if verbose {
- eprintln!("adding '{}' to tar", path.display());
- }
+ log::debug!("adding '{}' to tar", path.display());
let mut header = tar::Header::new_gnu();
header.set_entry_type(tar::EntryType::Fifo);
add_metadata_to_header(&mut header, metadata);
@@ -662,9 +651,7 @@ where
.map_err(|err| format_err!("could not send fifo entry: {}", err))?;
}
EntryKind::Directory => {
- if verbose {
- eprintln!("adding '{}' to tar", path.display());
- }
+ log::debug!("adding '{}' to tar", path.display());
// we cannot add the root path itself
if path != Path::new("/") {
let mut header = tar::Header::new_gnu();
@@ -679,9 +666,7 @@ where
}
}
EntryKind::Device(device) => {
- if verbose {
- eprintln!("adding '{}' to tar", path.display());
- }
+ log::debug!("adding '{}' to tar", path.display());
let entry_type = if metadata.stat.is_chardev() {
tar::EntryType::Char
} else {
@@ -704,18 +689,13 @@ where
}
tarencoder.finish().await.map_err(|err| {
- eprintln!("error during finishing of zip: {}", err);
+ log::error!("error during finishing of zip: {}", err);
err
})?;
Ok(())
}
-pub async fn create_zip<T, W, P>(
- output: W,
- accessor: Accessor<T>,
- path: P,
- verbose: bool,
-) -> Result<(), Error>
+pub async fn create_zip<T, W, P>(output: W, accessor: Accessor<T>, path: P) -> Result<(), Error>
where
T: Clone + pxar::accessor::ReadAt + Unpin + Send + Sync + 'static,
W: tokio::io::AsyncWrite + Unpin + Send + 'static,
@@ -758,9 +738,7 @@ where
match entry.kind() {
EntryKind::File { .. } => {
- if verbose {
- eprintln!("adding '{}' to zip", path.display());
- }
+ log::debug!("adding '{}' to zip", path.display());
let entry = ZipEntry::new(
path,
metadata.stat.mtime.secs,
@@ -778,9 +756,7 @@ where
.ok_or(format_err!("error looking up '{:?}'", path))?;
let realfile = accessor.follow_hardlink(&entry).await?;
let metadata = realfile.entry().metadata();
- if verbose {
- eprintln!("adding '{}' to zip", path.display());
- }
+ log::debug!("adding '{}' to zip", path.display());
let entry = ZipEntry::new(
path,
metadata.stat.mtime.secs,
@@ -792,9 +768,7 @@ where
.map_err(|err| format_err!("could not send file entry: {}", err))?;
}
EntryKind::Directory => {
- if verbose {
- eprintln!("adding '{}' to zip", path.display());
- }
+ log::debug!("adding '{}' to zip", path.display());
let entry = ZipEntry::new(
path,
metadata.stat.mtime.secs,
@@ -851,7 +825,6 @@ pub async fn extract_sub_dir<T, DEST, PATH>(
destination: DEST,
decoder: Accessor<T>,
path: PATH,
- verbose: bool,
) -> Result<(), Error>
where
T: Clone + pxar::accessor::ReadAt + Unpin + Send + Sync + 'static,
@@ -870,13 +843,12 @@ where
.await?
.ok_or(format_err!("error opening '{:?}'", path.as_ref()))?;
- recurse_files_extractor(&mut extractor, file, verbose).await
+ recurse_files_extractor(&mut extractor, file).await
}
pub async fn extract_sub_dir_seq<S, DEST>(
destination: DEST,
mut decoder: Decoder<S>,
- verbose: bool,
) -> Result<(), Error>
where
S: pxar::decoder::SeqRead + Unpin + Send + 'static,
@@ -891,8 +863,8 @@ where
let mut extractor = get_extractor(destination, root.metadata().clone())?;
- if let Err(err) = seq_files_extractor(&mut extractor, decoder, verbose).await {
- eprintln!("error extracting pxar archive: {}", err);
+ if let Err(err) = seq_files_extractor(&mut extractor, decoder).await {
+ log::error!("error extracting pxar archive: {}", err);
}
Ok(())
@@ -948,7 +920,6 @@ fn get_filename(entry: &Entry) -> Result<(OsString, CString), Error> {
async fn recurse_files_extractor<'a, T>(
extractor: &'a mut Extractor,
file: FileEntry<T>,
- verbose: bool,
) -> Result<(), Error>
where
T: Clone + pxar::accessor::ReadAt + Unpin + Send + Sync + 'static,
@@ -957,9 +928,7 @@ where
let metadata = entry.metadata();
let (file_name_os, file_name) = get_filename(entry)?;
- if verbose {
- eprintln!("extracting: {}", file.path().display());
- }
+ log::debug!("extracting: {}", file.path().display());
match file.kind() {
EntryKind::Directory => {
@@ -970,7 +939,7 @@ where
let dir = file.enter_directory().await?;
let mut seq_decoder = dir.decode_full().await?;
seq_decoder.enable_goodbye_entries(true);
- seq_files_extractor(extractor, seq_decoder, verbose).await?;
+ seq_files_extractor(extractor, seq_decoder).await?;
extractor.leave_directory()?;
}
EntryKind::File { size, .. } => {
@@ -994,7 +963,6 @@ where
async fn seq_files_extractor<'a, T>(
extractor: &'a mut Extractor,
mut decoder: pxar::decoder::aio::Decoder<T>,
- verbose: bool,
) -> Result<(), Error>
where
T: pxar::decoder::SeqRead,
@@ -1009,8 +977,8 @@ where
let metadata = entry.metadata();
let (file_name_os, file_name) = get_filename(&entry)?;
- if verbose && !matches!(entry.kind(), EntryKind::GoodbyeTable) {
- eprintln!("extracting: {}", entry.path().display());
+ if !matches!(entry.kind(), EntryKind::GoodbyeTable) {
+ log::debug!("extracting: {}", entry.path().display());
}
if let Err(err) = async {
@@ -1044,7 +1012,7 @@ where
.await
{
let display = entry.path().display().to_string();
- eprintln!(
+ log::error!(
"error extracting {}: {}",
if matches!(entry.kind(), EntryKind::GoodbyeTable) {
"<directory>"
diff --git a/pbs-client/src/pxar/fuse.rs b/pbs-client/src/pxar/fuse.rs
index 594930ef..e9e1b6f3 100644
--- a/pbs-client/src/pxar/fuse.rs
+++ b/pbs-client/src/pxar/fuse.rs
@@ -240,8 +240,8 @@ impl SessionImpl {
/// Here's how we deal with errors:
///
- /// Any error will be printed if the verbose flag was set, otherwise the message will be
- /// silently dropped.
+ /// Any error will be logged if a log level of at least 'debug' was set, otherwise the
+ /// message will be silently dropped.
///
/// Opaque errors will cause the fuse main loop to bail out with that error.
///
@@ -255,8 +255,8 @@ impl SessionImpl {
) {
let final_result = match err.downcast::<io::Error>() {
Ok(err) => {
- if err.kind() == io::ErrorKind::Other && self.verbose {
- eprintln!("an IO error occurred: {}", err);
+ if err.kind() == io::ErrorKind::Other {
+ log::error!("an IO error occurred: {}", err);
}
// fail the request
@@ -264,9 +264,7 @@ impl SessionImpl {
}
Err(err) => {
// `bail` (non-`io::Error`) is used for fatal errors which should actually cancel:
- if self.verbose {
- eprintln!("internal error: {}, bailing out", err);
- }
+ log::error!("internal error: {}, bailing out", err);
Err(err)
}
};
@@ -297,7 +295,7 @@ impl SessionImpl {
},
err = err_recv.next() => match err {
Some(err) => if self.verbose {
- eprintln!("cancelling fuse main loop due to error: {}", err);
+ log::error!("cancelling fuse main loop due to error: {}", err);
return Err(err);
},
None => panic!("error channel was closed unexpectedly"),
@@ -385,9 +383,7 @@ impl SessionImpl {
}
}
other => {
- if self.verbose {
- eprintln!("Received unexpected fuse request");
- }
+ log::error!("Received unexpected fuse request");
other.fail(libc::ENOSYS).map_err(Error::from)
}
};
diff --git a/pbs-client/src/pxar/metadata.rs b/pbs-client/src/pxar/metadata.rs
index 415922ef..22bc5f9d 100644
--- a/pbs-client/src/pxar/metadata.rs
+++ b/pbs-client/src/pxar/metadata.rs
@@ -208,7 +208,7 @@ fn apply_xattrs(
}
if !xattr::is_valid_xattr_name(xattr.name()) {
- eprintln!("skipping invalid xattr named {:?}", xattr.name());
+ log::info!("skipping invalid xattr named {:?}", xattr.name());
continue;
}
@@ -269,7 +269,7 @@ fn apply_acls(
acl.add_entry_full(acl::ACL_GROUP_OBJ, None, mode)?;
if !metadata.acl.users.is_empty() || !metadata.acl.groups.is_empty() {
- eprintln!(
+ log::warn!(
"Warning: {:?}: Missing GROUP_OBJ entry in ACL, resetting to value of MASK",
path_info,
);
diff --git a/pbs-client/src/pxar_backup_stream.rs b/pbs-client/src/pxar_backup_stream.rs
index 8ffcf36e..22a6ffdc 100644
--- a/pbs-client/src/pxar_backup_stream.rs
+++ b/pbs-client/src/pxar_backup_stream.rs
@@ -53,17 +53,13 @@ impl PxarBackupStream {
StdChannelWriter::new(tx),
));
- let verbose = options.verbose;
-
let writer = pxar::encoder::sync::StandardWriter::new(writer);
if let Err(err) = crate::pxar::create_archive(
dir,
writer,
crate::pxar::Flags::DEFAULT,
move |path| {
- if verbose {
- println!("{:?}", path);
- }
+ log::debug!("{:?}", path);
Ok(())
},
Some(catalog),
diff --git a/pbs-client/src/task_log.rs b/pbs-client/src/task_log.rs
index b38ae10c..a396a451 100644
--- a/pbs-client/src/task_log.rs
+++ b/pbs-client/src/task_log.rs
@@ -31,10 +31,10 @@ pub async fn display_task_log(
let abort_future = async move {
while signal_stream.recv().await.is_some() {
- println!("got shutdown request (SIGINT)");
+ log::info!("got shutdown request (SIGINT)");
let prev_count = abort_count2.fetch_add(1, Ordering::SeqCst);
if prev_count >= 1 {
- println!("forced exit (task still running)");
+ log::info!("forced exit (task still running)");
break;
}
}
@@ -77,9 +77,9 @@ pub async fn display_task_log(
}
if strip_date && t.len() > 27 && &t[25..27] == ": " {
let line = &t[27..];
- println!("{}", line);
+ log::info!("{}", line);
} else {
- println!("{}", t);
+ log::info!("{}", t);
}
start += 1;
}
diff --git a/pbs-client/src/tools/key_source.rs b/pbs-client/src/tools/key_source.rs
index 3ff592e1..f7a8c7fb 100644
--- a/pbs-client/src/tools/key_source.rs
+++ b/pbs-client/src/tools/key_source.rs
@@ -231,7 +231,7 @@ fn do_crypto_parameters(param: &Value, keep_keyfd_open: bool) -> Result<CryptoPa
(None, master_pubkey) => match read_optional_default_encryption_key()? {
None => bail!("--crypt-mode without --keyfile and no default key file available"),
enc_key => {
- eprintln!("Encrypting with default encryption key!");
+ log::info!("Encrypting with default encryption key!");
let master_pubkey = match master_pubkey {
None => read_optional_default_master_pubkey()?,
master_pubkey => master_pubkey,
--
2.30.2
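The pattern applied throughout this patch, shown as a hedged before/after sketch with illustrative names (not code from the repository):

    // before: verbosity threaded through as a bool parameter
    fn upload_report_old(archive: &str, verbose: bool) {
        if verbose {
            println!("{}: starting upload", archive);
        }
    }

    // after: the logger decides, based on the level configured via init_cli_logger()
    fn upload_report_new(archive: &str) {
        log::debug!("{}: starting upload", archive);

        // where behaviour (not just output) depended on the flag, the patch
        // queries the logger instead, e.g. to keep the unstripped archive name:
        if log::log_enabled!(log::Level::Debug) {
            // use the full archive name
        }
    }

Errors that previously went to stderr via eprintln! map to log::error!/log::warn!, and former unconditional println! output maps to log::info!, which stays visible at the default level set by init_cli_logger.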
* [pbs-devel] [PATCH proxmox-backup v3 03/10] pbs-datastore: replace print with log macro
2022-06-15 8:19 [pbs-devel] [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Hannes Laimer
` (2 preceding siblings ...)
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 02/10] pbs-client: replace print with log macro Hannes Laimer
@ 2022-06-15 8:19 ` Hannes Laimer
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 04/10] pbs-fuse+pbs-tape: " Hannes Laimer
` (7 subsequent siblings)
11 siblings, 0 replies; 14+ messages in thread
From: Hannes Laimer @ 2022-06-15 8:19 UTC (permalink / raw)
To: pbs-devel
Signed-off-by: Hannes Laimer <h.laimer@proxmox.com>
---
pbs-datastore/src/catalog.rs | 6 +++---
pbs-datastore/src/datastore.rs | 2 +-
pbs-datastore/src/dynamic_index.rs | 2 +-
pbs-datastore/src/fixed_index.rs | 14 +++++++-------
pbs-datastore/src/paperkey.rs | 2 +-
5 files changed, 13 insertions(+), 13 deletions(-)
diff --git a/pbs-datastore/src/catalog.rs b/pbs-datastore/src/catalog.rs
index c07b71a6..f1224041 100644
--- a/pbs-datastore/src/catalog.rs
+++ b/pbs-datastore/src/catalog.rs
@@ -636,7 +636,7 @@ impl<R: Read + Seek> CatalogReader<R> {
match etype {
CatalogEntryType::Directory => {
- println!("{} {:?}", etype, path);
+ log::info!("{} {:?}", etype, path);
if offset > start {
bail!("got wrong directory offset ({} > {})", offset, start);
}
@@ -649,10 +649,10 @@ impl<R: Read + Seek> CatalogReader<R> {
mtime_string = s;
}
- println!("{} {:?} {} {}", etype, path, size, mtime_string,);
+ log::info!("{} {:?} {} {}", etype, path, size, mtime_string,);
}
_ => {
- println!("{} {:?}", etype, path);
+ log::info!("{} {:?}", etype, path);
}
}
diff --git a/pbs-datastore/src/datastore.rs b/pbs-datastore/src/datastore.rs
index 3cd1d98f..1ae5bcb6 100644
--- a/pbs-datastore/src/datastore.rs
+++ b/pbs-datastore/src/datastore.rs
@@ -233,7 +233,7 @@ impl DataStore {
match serde_json::from_str(&state) {
Ok(state) => state,
Err(err) => {
- eprintln!("error reading gc-status: {}", err);
+ log::error!("error reading gc-status: {}", err);
GarbageCollectionStatus::default()
}
}
diff --git a/pbs-datastore/src/dynamic_index.rs b/pbs-datastore/src/dynamic_index.rs
index fdfcecb3..6c2ee685 100644
--- a/pbs-datastore/src/dynamic_index.rs
+++ b/pbs-datastore/src/dynamic_index.rs
@@ -465,7 +465,7 @@ impl DynamicChunkWriter {
self.stat.disk_size += compressed_size;
}
- println!(
+ log::info!(
"ADD CHUNK {:016x} {} {}% {} {}",
self.chunk_offset,
chunk_size,
diff --git a/pbs-datastore/src/fixed_index.rs b/pbs-datastore/src/fixed_index.rs
index 5fc3d2ab..936bd2e7 100644
--- a/pbs-datastore/src/fixed_index.rs
+++ b/pbs-datastore/src/fixed_index.rs
@@ -51,7 +51,7 @@ unsafe impl Sync for FixedIndexReader {}
impl Drop for FixedIndexReader {
fn drop(&mut self) {
if let Err(err) = self.unmap() {
- eprintln!("Unable to unmap file - {}", err);
+ log::error!("Unable to unmap file - {}", err);
}
}
}
@@ -144,16 +144,16 @@ impl FixedIndexReader {
}
pub fn print_info(&self) {
- println!("Size: {}", self.size);
- println!("ChunkSize: {}", self.chunk_size);
+ log::info!("Size: {}", self.size);
+ log::info!("ChunkSize: {}", self.chunk_size);
let mut ctime_str = self.ctime.to_string();
if let Ok(s) = proxmox_time::strftime_local("%c", self.ctime) {
ctime_str = s;
}
- println!("CTime: {}", ctime_str);
- println!("UUID: {:?}", self.uuid);
+ log::info!("CTime: {}", ctime_str);
+ log::info!("UUID: {:?}", self.uuid);
}
}
@@ -247,7 +247,7 @@ impl Drop for FixedIndexWriter {
fn drop(&mut self) {
let _ = std::fs::remove_file(&self.tmp_filename); // ignore errors
if let Err(err) = self.unmap() {
- eprintln!("Unable to unmap file {:?} - {}", self.tmp_filename, err);
+ log::error!("Unable to unmap file {:?} - {}", self.tmp_filename, err);
}
}
}
@@ -418,7 +418,7 @@ impl FixedIndexWriter {
let digest = &chunk_info.digest;
- println!(
+ log::info!(
"ADD CHUNK {} {} {}% {} {}",
idx,
chunk_len,
diff --git a/pbs-datastore/src/paperkey.rs b/pbs-datastore/src/paperkey.rs
index 8caca0b9..27ef1028 100644
--- a/pbs-datastore/src/paperkey.rs
+++ b/pbs-datastore/src/paperkey.rs
@@ -63,7 +63,7 @@ pub fn generate_paper_key<W: Write>(
(lines, false)
}
Err(err) => {
- eprintln!("Couldn't parse data as KeyConfig - {}", err);
+ log::error!("Couldn't parse data as KeyConfig - {}", err);
bail!("Neither a PEM-formatted private key, nor a PBS key file.");
}
}
--
2.30.2
* [pbs-devel] [PATCH proxmox-backup v3 04/10] pbs-fuse+pbs-tape: replace print with log macro
2022-06-15 8:19 [pbs-devel] [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Hannes Laimer
` (3 preceding siblings ...)
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 03/10] pbs-datastore: " Hannes Laimer
@ 2022-06-15 8:19 ` Hannes Laimer
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 05/10] proxmox-backup-client: " Hannes Laimer
` (6 subsequent siblings)
11 siblings, 0 replies; 14+ messages in thread
From: Hannes Laimer @ 2022-06-15 8:19 UTC (permalink / raw)
To: pbs-devel
Signed-off-by: Hannes Laimer <h.laimer@proxmox.com>
---
pbs-fuse-loop/Cargo.toml | 3 ++-
pbs-fuse-loop/src/fuse_loop.rs | 12 ++++++------
pbs-tape/Cargo.toml | 1 +
pbs-tape/src/bin/pmt.rs | 10 +++++-----
pbs-tape/src/bin/pmtx.rs | 8 ++++----
pbs-tape/src/sg_pt_changer.rs | 4 ++--
6 files changed, 20 insertions(+), 18 deletions(-)
diff --git a/pbs-fuse-loop/Cargo.toml b/pbs-fuse-loop/Cargo.toml
index cc608848..c9b260e5 100644
--- a/pbs-fuse-loop/Cargo.toml
+++ b/pbs-fuse-loop/Cargo.toml
@@ -11,9 +11,10 @@ futures = "0.3"
lazy_static = "1.4"
libc = "0.2"
nix = "0.24"
+log = "0.4"
regex = "1.5"
tokio = { version = "1.6", features = [] }
proxmox-time = "1"
proxmox-fuse = "0.1.3"
-proxmox-sys = "0.3"
\ No newline at end of file
+proxmox-sys = "0.3"
diff --git a/pbs-fuse-loop/src/fuse_loop.rs b/pbs-fuse-loop/src/fuse_loop.rs
index c92a5390..8660f264 100644
--- a/pbs-fuse-loop/src/fuse_loop.rs
+++ b/pbs-fuse-loop/src/fuse_loop.rs
@@ -141,7 +141,7 @@ impl<R: AsyncRead + AsyncSeek + Unpin> FuseLoopSession<R> {
let cleanup = |session: futures::stream::Fuse<Fuse>| {
// only warn for errors on cleanup, if these fail nothing is lost
if let Err(err) = loopdev::unassign(&loopdev_path) {
- eprintln!(
+ log::warn!(
"cleanup: warning: could not unassign file {} from loop device {} - {}",
&fuse_path, &loopdev_path, err,
);
@@ -151,13 +151,13 @@ impl<R: AsyncRead + AsyncSeek + Unpin> FuseLoopSession<R> {
std::mem::drop(session);
if let Err(err) = remove_file(&fuse_path) {
- eprintln!(
+ log::warn!(
"cleanup: warning: could not remove temporary file {} - {}",
&fuse_path, err,
);
}
if let Err(err) = remove_file(&pid_path) {
- eprintln!(
+ log::warn!(
"cleanup: warning: could not remove PID file {} - {}",
&pid_path, err,
);
@@ -200,7 +200,7 @@ impl<R: AsyncRead + AsyncSeek + Unpin> FuseLoopSession<R> {
},
Some(_) => {
// only FUSE requests necessary for loop-mapping are implemented
- eprintln!("Unimplemented FUSE request type encountered");
+ log::error!("Unimplemented FUSE request type encountered");
Ok(())
},
None => {
@@ -239,7 +239,7 @@ pub fn cleanup_unused_run_files(filter_name: Option<String>) {
// does nothing if files are already stagnant (e.g. instance crashed etc...)
if unmap_from_backing(&path, None).is_ok() {
// we have reaped some leftover instance, tell the user
- eprintln!(
+ log::info!(
"Cleaned up dangling mapping '{}': no loop device assigned",
&name
);
@@ -291,7 +291,7 @@ fn get_backing_file(loopdev: &str) -> Result<String, Error> {
// call in broken state: we found the mapping, but the client is already dead,
// only thing to do is clean up what we can
fn emerg_cleanup(loopdev: Option<&str>, mut backing_file: PathBuf) {
- eprintln!(
+ log::warn!(
"warning: found mapping with dead process ({:?}), attempting cleanup",
&backing_file
);
diff --git a/pbs-tape/Cargo.toml b/pbs-tape/Cargo.toml
index dfe65778..1ad80c18 100644
--- a/pbs-tape/Cargo.toml
+++ b/pbs-tape/Cargo.toml
@@ -8,6 +8,7 @@ description = "LTO tape support"
[dependencies]
lazy_static = "1.4"
libc = "0.2"
+log = "0.4"
anyhow = "1.0"
thiserror = "1.0"
endian_trait = { version = "0.6", features = ["arrays"] }
diff --git a/pbs-tape/src/bin/pmt.rs b/pbs-tape/src/bin/pmt.rs
index 75e8e93a..adf95db2 100644
--- a/pbs-tape/src/bin/pmt.rs
+++ b/pbs-tape/src/bin/pmt.rs
@@ -55,24 +55,24 @@ fn get_tape_handle(param: &Value) -> Result<SgTape, Error> {
if let Some(name) = param["drive"].as_str() {
let (config, _digest) = pbs_config::drive::config()?;
let drive: LtoTapeDrive = config.lookup("lto", name)?;
- eprintln!("using device {}", drive.path);
+ log::info!("using device {}", drive.path);
return SgTape::new(open_lto_tape_device(&drive.path)?);
}
if let Some(device) = param["device"].as_str() {
- eprintln!("using device {}", device);
+ log::info!("using device {}", device);
return SgTape::new(open_lto_tape_device(device)?);
}
if let Ok(name) = std::env::var("PROXMOX_TAPE_DRIVE") {
let (config, _digest) = pbs_config::drive::config()?;
let drive: LtoTapeDrive = config.lookup("lto", &name)?;
- eprintln!("using device {}", drive.path);
+ log::info!("using device {}", drive.path);
return SgTape::new(open_lto_tape_device(&drive.path)?);
}
if let Ok(device) = std::env::var("TAPE") {
- eprintln!("using device {}", device);
+ log::info!("using device {}", device);
return SgTape::new(open_lto_tape_device(&device)?);
}
@@ -89,7 +89,7 @@ fn get_tape_handle(param: &Value) -> Result<SgTape, Error> {
if drive_names.len() == 1 {
let name = drive_names[0];
let drive: LtoTapeDrive = config.lookup("lto", name)?;
- eprintln!("using device {}", drive.path);
+ log::info!("using device {}", drive.path);
return SgTape::new(open_lto_tape_device(&drive.path)?);
}
diff --git a/pbs-tape/src/bin/pmtx.rs b/pbs-tape/src/bin/pmtx.rs
index 1d58d737..6f34bc44 100644
--- a/pbs-tape/src/bin/pmtx.rs
+++ b/pbs-tape/src/bin/pmtx.rs
@@ -33,12 +33,12 @@ fn get_changer_handle(param: &Value) -> Result<File, Error> {
if let Some(name) = param["changer"].as_str() {
let (config, _digest) = pbs_config::drive::config()?;
let changer_config: ScsiTapeChanger = config.lookup("changer", name)?;
- eprintln!("using device {}", changer_config.path);
+ log::info!("using device {}", changer_config.path);
return sg_pt_changer::open(&changer_config.path);
}
if let Some(device) = param["device"].as_str() {
- eprintln!("using device {}", device);
+ log::info!("using device {}", device);
return sg_pt_changer::open(device);
}
@@ -47,13 +47,13 @@ fn get_changer_handle(param: &Value) -> Result<File, Error> {
let drive: LtoTapeDrive = config.lookup("lto", &name)?;
if let Some(changer) = drive.changer {
let changer_config: ScsiTapeChanger = config.lookup("changer", &changer)?;
- eprintln!("using device {}", changer_config.path);
+ log::info!("using device {}", changer_config.path);
return sg_pt_changer::open(&changer_config.path);
}
}
if let Ok(device) = std::env::var("CHANGER") {
- eprintln!("using device {}", device);
+ log::info!("using device {}", device);
return sg_pt_changer::open(device);
}
diff --git a/pbs-tape/src/sg_pt_changer.rs b/pbs-tape/src/sg_pt_changer.rs
index eb581d6c..d413c1e0 100644
--- a/pbs-tape/src/sg_pt_changer.rs
+++ b/pbs-tape/src/sg_pt_changer.rs
@@ -86,11 +86,11 @@ fn execute_scsi_command<F: AsRawFd>(
let msg = err.to_string();
if let Some(ref last) = last_msg {
if &msg != last {
- eprintln!("{}", err);
+ log::error!("{}", err);
last_msg = Some(msg);
}
} else {
- eprintln!("{}", err);
+ log::error!("{}", err);
last_msg = Some(msg);
}
--
2.30.2
* [pbs-devel] [PATCH proxmox-backup v3 05/10] proxmox-backup-client: replace print with log macro
2022-06-15 8:19 [pbs-devel] [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Hannes Laimer
` (4 preceding siblings ...)
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 04/10] pbs-fuse+pbs-tape: " Hannes Laimer
@ 2022-06-15 8:19 ` Hannes Laimer
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 06/10] proxmox-file-restore: " Hannes Laimer
` (5 subsequent siblings)
11 siblings, 0 replies; 14+ messages in thread
From: Hannes Laimer @ 2022-06-15 8:19 UTC (permalink / raw)
To: pbs-devel
Signed-off-by: Hannes Laimer <h.laimer@proxmox.com>
---
proxmox-backup-client/Cargo.toml | 1 +
proxmox-backup-client/src/benchmark.rs | 36 ++++------
proxmox-backup-client/src/catalog.rs | 6 +-
proxmox-backup-client/src/key.rs | 27 ++++----
proxmox-backup-client/src/main.rs | 91 ++++++++++----------------
proxmox-backup-client/src/mount.rs | 12 ++--
6 files changed, 71 insertions(+), 102 deletions(-)
diff --git a/proxmox-backup-client/Cargo.toml b/proxmox-backup-client/Cargo.toml
index 5142719d..bd110961 100644
--- a/proxmox-backup-client/Cargo.toml
+++ b/proxmox-backup-client/Cargo.toml
@@ -10,6 +10,7 @@ futures = "0.3"
hyper = { version = "0.14", features = [ "full" ] }
libc = "0.2"
nix = "0.24"
+log = "0.4"
openssl = "0.10"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
diff --git a/proxmox-backup-client/src/benchmark.rs b/proxmox-backup-client/src/benchmark.rs
index 9cab2221..a306cbaa 100644
--- a/proxmox-backup-client/src/benchmark.rs
+++ b/proxmox-backup-client/src/benchmark.rs
@@ -109,11 +109,6 @@ static BENCHMARK_RESULT_2020_TOP: BenchmarkResult = BenchmarkResult {
schema: REPO_URL_SCHEMA,
optional: true,
},
- verbose: {
- description: "Verbose output.",
- type: bool,
- optional: true,
- },
keyfile: {
schema: KEYFILE_SCHEMA,
optional: true,
@@ -135,8 +130,6 @@ pub async fn benchmark(
let keyfile = param["keyfile"].as_str().map(PathBuf::from);
- let verbose = param["verbose"].as_bool().unwrap_or(false);
-
let output_format = get_output_format(¶m);
let crypt_config = match keyfile {
@@ -152,10 +145,10 @@ pub async fn benchmark(
// do repo tests first, because this may prompt for a password
if let Some(repo) = repo {
- test_upload_speed(&mut benchmark_result, repo, crypt_config.clone(), verbose).await?;
+ test_upload_speed(&mut benchmark_result, repo, crypt_config.clone()).await?;
}
- test_crypt_speed(&mut benchmark_result, verbose)?;
+ test_crypt_speed(&mut benchmark_result)?;
render_result(&output_format, &benchmark_result)?;
@@ -228,16 +221,13 @@ async fn test_upload_speed(
benchmark_result: &mut BenchmarkResult,
repo: BackupRepository,
crypt_config: Option<Arc<CryptConfig>>,
- verbose: bool,
) -> Result<(), Error> {
let backup_time = proxmox_time::epoch_i64();
let client = connect(&repo)?;
record_repository(&repo);
- if verbose {
- eprintln!("Connecting to backup server");
- }
+ log::debug!("Connecting to backup server");
let client = BackupWriter::start(
client,
crypt_config.clone(),
@@ -249,12 +239,10 @@ async fn test_upload_speed(
)
.await?;
- if verbose {
- eprintln!("Start TLS speed test");
- }
- let speed = client.upload_speedtest(verbose).await?;
+ log::debug!("Start TLS speed test");
+ let speed = client.upload_speedtest().await?;
- eprintln!("TLS speed: {:.2} MB/s", speed / 1_000_000.0);
+ log::info!("TLS speed: {:.2} MB/s", speed / 1_000_000.0);
benchmark_result.tls.speed = Some(speed);
@@ -262,7 +250,7 @@ async fn test_upload_speed(
}
// test hash/crypt/compress speed
-fn test_crypt_speed(benchmark_result: &mut BenchmarkResult, _verbose: bool) -> Result<(), Error> {
+fn test_crypt_speed(benchmark_result: &mut BenchmarkResult) -> Result<(), Error> {
let pw = b"test";
let kdf = KeyDerivationConfig::Scrypt {
@@ -301,7 +289,7 @@ fn test_crypt_speed(benchmark_result: &mut BenchmarkResult, _verbose: bool) -> R
let speed = (bytes as f64) / start_time.elapsed().as_secs_f64();
benchmark_result.sha256.speed = Some(speed);
- eprintln!("SHA256 speed: {:.2} MB/s", speed / 1_000_000.0);
+ log::info!("SHA256 speed: {:.2} MB/s", speed / 1_000_000.0);
let start_time = std::time::Instant::now();
@@ -317,7 +305,7 @@ fn test_crypt_speed(benchmark_result: &mut BenchmarkResult, _verbose: bool) -> R
let speed = (bytes as f64) / start_time.elapsed().as_secs_f64();
benchmark_result.compress.speed = Some(speed);
- eprintln!("Compression speed: {:.2} MB/s", speed / 1_000_000.0);
+ log::info!("Compression speed: {:.2} MB/s", speed / 1_000_000.0);
let start_time = std::time::Instant::now();
@@ -338,7 +326,7 @@ fn test_crypt_speed(benchmark_result: &mut BenchmarkResult, _verbose: bool) -> R
let speed = (bytes as f64) / start_time.elapsed().as_secs_f64();
benchmark_result.decompress.speed = Some(speed);
- eprintln!("Decompress speed: {:.2} MB/s", speed / 1_000_000.0);
+ log::info!("Decompress speed: {:.2} MB/s", speed / 1_000_000.0);
let start_time = std::time::Instant::now();
@@ -354,7 +342,7 @@ fn test_crypt_speed(benchmark_result: &mut BenchmarkResult, _verbose: bool) -> R
let speed = (bytes as f64) / start_time.elapsed().as_secs_f64();
benchmark_result.aes256_gcm.speed = Some(speed);
- eprintln!("AES256/GCM speed: {:.2} MB/s", speed / 1_000_000.0);
+ log::info!("AES256/GCM speed: {:.2} MB/s", speed / 1_000_000.0);
let start_time = std::time::Instant::now();
@@ -371,7 +359,7 @@ fn test_crypt_speed(benchmark_result: &mut BenchmarkResult, _verbose: bool) -> R
let speed = (bytes as f64) / start_time.elapsed().as_secs_f64();
benchmark_result.verify.speed = Some(speed);
- eprintln!("Verify speed: {:.2} MB/s", speed / 1_000_000.0);
+ log::info!("Verify speed: {:.2} MB/s", speed / 1_000_000.0);
Ok(())
}
diff --git a/proxmox-backup-client/src/catalog.rs b/proxmox-backup-client/src/catalog.rs
index d0c7b52f..6566147e 100644
--- a/proxmox-backup-client/src/catalog.rs
+++ b/proxmox-backup-client/src/catalog.rs
@@ -64,7 +64,7 @@ async fn dump_catalog(param: Value) -> Result<Value, Error> {
Some(key) => {
let (key, _created, _fingerprint) = decrypt_key(&key.key, &get_encryption_key_password)
.map_err(|err| {
- eprintln!("{}", format_key_source(&key.source, "encryption"));
+ log::error!("{}", format_key_source(&key.source, "encryption"));
err
})?;
let crypt_config = CryptConfig::new(key)?;
@@ -172,7 +172,7 @@ async fn catalog_shell(param: Value) -> Result<(), Error> {
Some(key) => {
let (key, _created, _fingerprint) = decrypt_key(&key.key, &get_encryption_key_password)
.map_err(|err| {
- eprintln!("{}", format_key_source(&key.source, "encryption"));
+ log::error!("{}", format_key_source(&key.source, "encryption"));
err
})?;
let crypt_config = CryptConfig::new(key)?;
@@ -253,7 +253,7 @@ async fn catalog_shell(param: Value) -> Result<(), Error> {
let catalog_reader = CatalogReader::new(catalogfile);
let state = Shell::new(catalog_reader, &server_archive_name, decoder).await?;
- println!("Starting interactive shell");
+ log::info!("Starting interactive shell");
state.shell().await?;
record_repository(&repo);
diff --git a/proxmox-backup-client/src/key.rs b/proxmox-backup-client/src/key.rs
index c498afd3..c7997ea7 100644
--- a/proxmox-backup-client/src/key.rs
+++ b/proxmox-backup-client/src/key.rs
@@ -79,7 +79,7 @@ fn create(kdf: Option<Kdf>, path: Option<String>, hint: Option<String>) -> Resul
Some(path) => PathBuf::from(path),
None => {
let path = place_default_encryption_key()?;
- println!("creating default key at: {:?}", path);
+ log::info!("creating default key at: {:?}", path);
path
}
};
@@ -157,7 +157,7 @@ async fn import_with_master_key(
if path.exists() {
bail!("Please remove default encryption key at {:?} before importing to default location (or choose a non-default one).", path);
}
- println!("Importing key to default location at: {:?}", path);
+ log::info!("Importing key to default location at: {:?}", path);
path
}
};
@@ -230,7 +230,7 @@ fn change_passphrase(
let path = find_default_encryption_key()?.ok_or_else(|| {
format_err!("no encryption file provided and no default file found")
})?;
- println!("updating default key at: {:?}", path);
+ log::info!("updating default key at: {:?}", path);
path
}
};
@@ -338,10 +338,10 @@ fn import_master_pubkey(path: String) -> Result<(), Error> {
match openssl::pkey::PKey::public_key_from_pem(&pem_data) {
Ok(key) => {
let info = RsaPubKeyInfo::try_from(key.rsa()?)?;
- println!("Found following key at {:?}", path);
- println!("Modulus: {}", info.modulus);
- println!("Exponent: {}", info.exponent);
- println!("Length: {}", info.length);
+ log::info!("Found following key at {:?}", path);
+ log::info!("Modulus: {}", info.modulus);
+ log::info!("Exponent: {}", info.exponent);
+ log::info!("Length: {}", info.length);
}
Err(err) => bail!("Unable to decode PEM data - {}", err),
};
@@ -350,7 +350,7 @@ fn import_master_pubkey(path: String) -> Result<(), Error> {
replace_file(&target_path, &pem_data, CreateOptions::new(), true)?;
- println!("Imported public master key to {:?}", target_path);
+ log::info!("Imported public master key to {:?}", target_path);
Ok(())
}
@@ -365,14 +365,13 @@ fn create_master_key() -> Result<(), Error> {
}
let bits = 4096;
- println!("Generating {}-bit RSA key..", bits);
+ log::info!("Generating {}-bit RSA key..", bits);
let rsa = openssl::rsa::Rsa::generate(bits)?;
let public =
openssl::rsa::Rsa::from_public_components(rsa.n().to_owned()?, rsa.e().to_owned()?)?;
let info = RsaPubKeyInfo::try_from(public)?;
- println!("Modulus: {}", info.modulus);
- println!("Exponent: {}", info.exponent);
- println!();
+ log::info!("Modulus: {}", info.modulus);
+ log::info!("Exponent: {}\n", info.exponent);
let pkey = openssl::pkey::PKey::from_rsa(rsa)?;
@@ -380,7 +379,7 @@ fn create_master_key() -> Result<(), Error> {
let pub_key: Vec<u8> = pkey.public_key_to_pem()?;
let filename_pub = "master-public.pem";
- println!("Writing public master key to {}", filename_pub);
+ log::info!("Writing public master key to {}", filename_pub);
replace_file(filename_pub, pub_key.as_slice(), CreateOptions::new(), true)?;
let cipher = openssl::symm::Cipher::aes_256_cbc();
@@ -388,7 +387,7 @@ fn create_master_key() -> Result<(), Error> {
pkey.private_key_to_pem_pkcs8_passphrase(cipher, password.as_bytes())?;
let filename_priv = "master-private.pem";
- println!("Writing private master key to {}", filename_priv);
+ log::info!("Writing private master key to {}", filename_priv);
replace_file(
filename_priv,
priv_key.as_slice(),
diff --git a/proxmox-backup-client/src/main.rs b/proxmox-backup-client/src/main.rs
index 0d6558e7..fc7dc1fa 100644
--- a/proxmox-backup-client/src/main.rs
+++ b/proxmox-backup-client/src/main.rs
@@ -461,7 +461,7 @@ async fn api_version(param: Value) -> Result<(), Error> {
match client.get("api2/json/version", None).await {
Ok(mut result) => version_info["server"] = result["data"].take(),
- Err(e) => eprintln!("could not connect to server - {}", e),
+ Err(e) => log::error!("could not connect to server - {}", e),
}
}
if output_format == "text" {
@@ -547,7 +547,7 @@ fn spawn_catalog_upload(
.await;
if let Err(ref err) = catalog_upload_result {
- eprintln!("catalog upload error - {}", err);
+ log::error!("catalog upload error - {}", err);
client.cancel();
}
@@ -657,12 +657,6 @@ fn spawn_catalog_upload(
optional: true,
default: pbs_client::pxar::ENCODER_MAX_ENTRIES as isize,
},
- "verbose": {
- type: Boolean,
- description: "Verbose output.",
- optional: true,
- default: false,
- },
"dry-run": {
type: Boolean,
description: "Just show what backup would do, but do not upload anything.",
@@ -678,7 +672,6 @@ async fn create_backup(
all_file_systems: bool,
skip_lost_and_found: bool,
dry_run: bool,
- verbose: bool,
_info: &ApiMethod,
_rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {
@@ -837,16 +830,16 @@ async fn create_backup(
let snapshot = BackupDir::from((backup_type, backup_id.to_owned(), backup_time));
if backup_ns.is_root() {
- println!("Starting backup: {snapshot}");
+ log::info!("Starting backup: {snapshot}");
} else {
- println!("Starting backup: [{backup_ns}]:{snapshot}");
+ log::info!("Starting backup: [{backup_ns}]:{snapshot}");
}
- println!("Client name: {}", proxmox_sys::nodename());
+ log::info!("Client name: {}", proxmox_sys::nodename());
let start_time = std::time::Instant::now();
- println!(
+ log::info!(
"Starting backup protocol: {}",
strftime_local("%c", epoch_i64())?
);
@@ -854,20 +847,20 @@ async fn create_backup(
let (crypt_config, rsa_encrypted_key) = match crypto.enc_key {
None => (None, None),
Some(key_with_source) => {
- println!(
+ log::info!(
"{}",
format_key_source(&key_with_source.source, "encryption")
);
let (key, created, fingerprint) =
decrypt_key(&key_with_source.key, &get_encryption_key_password)?;
- println!("Encryption key fingerprint: {}", fingerprint);
+ log::info!("Encryption key fingerprint: {}", fingerprint);
let crypt_config = CryptConfig::new(key)?;
match crypto.master_pubkey {
Some(pem_with_source) => {
- println!("{}", format_key_source(&pem_with_source.source, "master"));
+ log::info!("{}", format_key_source(&pem_with_source.source, "master"));
let rsa = openssl::rsa::Rsa::public_key_from_pem(&pem_with_source.key)?;
@@ -889,21 +882,21 @@ async fn create_backup(
repo.store(),
&backup_ns,
&snapshot,
- verbose,
+ true,
false,
)
.await?;
let download_previous_manifest = match client.previous_backup_time().await {
Ok(Some(backup_time)) => {
- println!(
+ log::info!(
"Downloading previous manifest ({})",
strftime_local("%c", backup_time)?
);
true
}
Ok(None) => {
- println!("No previous manifest available.");
+ log::info!("No previous manifest available.");
false
}
Err(_) => {
@@ -918,13 +911,13 @@ async fn create_backup(
match previous_manifest.check_fingerprint(crypt_config.as_ref().map(Arc::as_ref)) {
Ok(()) => Some(Arc::new(previous_manifest)),
Err(err) => {
- println!("Couldn't re-use previous manifest - {}", err);
+ log::error!("Couldn't re-use previous manifest - {}", err);
None
}
}
}
Err(err) => {
- println!("Couldn't download previous manifest - {}", err);
+ log::error!("Couldn't download previous manifest - {}", err);
None
}
}
@@ -939,7 +932,7 @@ async fn create_backup(
let log_file = |desc: &str, file: &str, target: &str| {
let what = if dry_run { "Would upload" } else { "Upload" };
- println!("{} {} '{}' to '{}' as {}", what, desc, file, repo, target);
+ log::info!("{} {} '{}' to '{}' as {}", what, desc, file, repo, target);
};
for (backup_type, filename, target, size) in upload_list {
@@ -998,7 +991,6 @@ async fn create_backup(
patterns: pattern_list.clone(),
entries_max: entries_max as usize,
skip_lost_and_found,
- verbose,
};
let upload_options = UploadOptions {
@@ -1040,7 +1032,7 @@ async fn create_backup(
}
if dry_run {
- println!("dry-run: no upload happened");
+ log::info!("dry-run: no upload happend");
return Ok(Value::Null);
}
@@ -1062,7 +1054,7 @@ async fn create_backup(
if let Some(rsa_encrypted_key) = rsa_encrypted_key {
let target = ENCRYPTED_KEY_BLOB_NAME;
- println!("Upload RSA encoded key to '{:?}' as {}", repo, target);
+ log::info!("Upload RSA encoded key to '{:?}' as {}", repo, target);
let options = UploadOptions {
compress: false,
encrypt: false,
@@ -1079,9 +1071,8 @@ async fn create_backup(
.to_string(crypt_config.as_ref().map(Arc::as_ref))
.map_err(|err| format_err!("unable to format manifest - {}", err))?;
- if verbose {
- println!("Upload index.json to '{}'", repo)
- };
+ log::debug!("Upload index.json to '{}'", repo);
+
let options = UploadOptions {
compress: true,
encrypt: false,
@@ -1095,10 +1086,8 @@ async fn create_backup(
let end_time = std::time::Instant::now();
let elapsed = end_time.duration_since(start_time);
- println!("Duration: {:.2}s", elapsed.as_secs_f64());
-
- println!("End Time: {}", strftime_local("%c", epoch_i64())?);
-
+ log::info!("Duration: {:.2}s", elapsed.as_secs_f64());
+ log::info!("End Time: {}", strftime_local("%c", epoch_i64())?);
Ok(Value::Null)
}
@@ -1108,7 +1097,6 @@ async fn dump_image<W: Write>(
crypt_mode: CryptMode,
index: FixedIndexReader,
mut writer: W,
- verbose: bool,
) -> Result<(), Error> {
let most_used = index.find_most_used_chunks(8);
@@ -1125,23 +1113,21 @@ async fn dump_image<W: Write>(
let raw_data = chunk_reader.read_chunk(digest).await?;
writer.write_all(&raw_data)?;
bytes += raw_data.len();
- if verbose {
- let next_per = ((pos + 1) * 100) / index.index_count();
- if per != next_per {
- eprintln!(
- "progress {}% (read {} bytes, duration {} sec)",
- next_per,
- bytes,
- start_time.elapsed().as_secs()
- );
- per = next_per;
- }
+ let next_per = ((pos + 1) * 100) / index.index_count();
+ if per != next_per {
+ log::debug!(
+ "progress {}% (read {} bytes, duration {} sec)",
+ next_per,
+ bytes,
+ start_time.elapsed().as_secs()
+ );
+ per = next_per;
}
}
let end_time = std::time::Instant::now();
let elapsed = end_time.duration_since(start_time);
- eprintln!(
+ log::info!(
"restore image complete (bytes={}, duration={:.2}s, speed={:.2}MB/s)",
bytes,
elapsed.as_secs_f64(),
@@ -1222,8 +1208,6 @@ We do not extract '.pxar' archives when writing to standard output.
async fn restore(param: Value) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?;
- let verbose = param["verbose"].as_bool().unwrap_or(false);
-
let allow_existing_dirs = param["allow-existing-dirs"].as_bool().unwrap_or(false);
let archive_name = json::required_string_param(&param, "archive-name")?;
@@ -1257,7 +1241,7 @@ async fn restore(param: Value) -> Result<Value, Error> {
Some(ref key) => {
let (key, _, _) =
decrypt_key(&key.key, &get_encryption_key_password).map_err(|err| {
- eprintln!("{}", format_key_source(&key.source, "encryption"));
+ log::error!("{}", format_key_source(&key.source, "encryption"));
err
})?;
Some(Arc::new(CryptConfig::new(key)?))
@@ -1279,14 +1263,14 @@ async fn restore(param: Value) -> Result<Value, Error> {
let (manifest, backup_index_data) = client.download_manifest().await?;
if archive_name == ENCRYPTED_KEY_BLOB_NAME && crypt_config.is_none() {
- eprintln!("Restoring encrypted key blob without original key - skipping manifest fingerprint check!")
+ log::info!("Restoring encrypted key blob without original key - skipping manifest fingerprint check!")
} else {
if manifest.signature.is_some() {
if let Some(key) = &crypto.enc_key {
- eprintln!("{}", format_key_source(&key.source, "encryption"));
+ log::info!("{}", format_key_source(&key.source, "encryption"));
}
if let Some(config) = &crypt_config {
- eprintln!("Fingerprint: {}", Fingerprint::new(config.fingerprint()));
+ log::info!("Fingerprint: {}", Fingerprint::new(config.fingerprint()));
}
}
manifest.check_fingerprint(crypt_config.as_ref().map(Arc::as_ref))?;
@@ -1356,9 +1340,7 @@ async fn restore(param: Value) -> Result<Value, Error> {
Path::new(target),
pbs_client::pxar::Flags::DEFAULT,
|path| {
- if verbose {
- println!("{:?}", path);
- }
+ log::debug!("{:?}", path);
},
options,
)
@@ -1397,7 +1379,6 @@ async fn restore(param: Value) -> Result<Value, Error> {
file_info.chunk_crypt_mode(),
index,
&mut writer,
- verbose,
)
.await?;
}
diff --git a/proxmox-backup-client/src/mount.rs b/proxmox-backup-client/src/mount.rs
index 268be580..be933efd 100644
--- a/proxmox-backup-client/src/mount.rs
+++ b/proxmox-backup-client/src/mount.rs
@@ -211,9 +211,9 @@ async fn mount_do(param: Value, pipe: Option<Fd>) -> Result<Value, Error> {
let crypt_config = match keyfile {
None => None,
Some(path) => {
- println!("Encryption key file: '{:?}'", path);
+ log::info!("Encryption key file: '{:?}'", path);
let (key, _, fingerprint) = load_and_decrypt_key(&path, &get_encryption_key_password)?;
- println!("Encryption key fingerprint: '{}'", fingerprint);
+ log::info!("Encryption key fingerprint: '{}'", fingerprint);
Some(Arc::new(CryptConfig::new(key)?))
}
};
@@ -356,7 +356,7 @@ async fn mount_do(param: Value, pipe: Option<Fd>) -> Result<Value, Error> {
}
// daemonize only now to be able to print mapped loopdev or startup errors
- println!("Image '{}' mapped on {}", name, loopdev);
+ log::info!("Image '{}' mapped on {}", name, loopdev);
daemonize()?;
// continue polling until complete or interrupted (which also happens on unmap)
@@ -370,7 +370,7 @@ async fn mount_do(param: Value, pipe: Option<Fd>) -> Result<Value, Error> {
}
}
- println!("Image unmapped");
+ log::info!("Image unmapped");
} else {
bail!("unknown archive file extension (expected .pxar or .img)");
}
@@ -390,7 +390,7 @@ fn unmap(
let mut any = false;
for (backing, loopdev) in pbs_fuse_loop::find_all_mappings()? {
let name = proxmox_sys::systemd::unescape_unit(&backing)?;
- println!(
+ log::info!(
"{}:\t{}",
loopdev.unwrap_or_else(|| "(unmapped)".to_string()),
name
@@ -398,7 +398,7 @@ fn unmap(
any = true;
}
if !any {
- println!("Nothing mapped.");
+ log::info!("Nothing mapped.");
}
return Ok(Value::Null);
}
--
2.30.2
^ permalink raw reply [flat|nested] 14+ messages in thread
* [pbs-devel] [PATCH proxmox-backup v3 06/10] proxmox-file-restore: replace print with log macro
2022-06-15 8:19 [pbs-devel] [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Hannes Laimer
` (5 preceding siblings ...)
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 05/10] proxmox-backup-client: " Hannes Laimer
@ 2022-06-15 8:19 ` Hannes Laimer
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 07/10] proxmox-rest-server: " Hannes Laimer
` (4 subsequent siblings)
11 siblings, 0 replies; 14+ messages in thread
From: Hannes Laimer @ 2022-06-15 8:19 UTC (permalink / raw)
To: pbs-devel
Signed-off-by: Hannes Laimer <h.laimer@proxmox.com>
---
proxmox-file-restore/Cargo.toml | 1 +
proxmox-file-restore/src/block_driver_qemu.rs | 6 +++---
proxmox-file-restore/src/main.rs | 20 ++++++-------------
proxmox-file-restore/src/qemu_helper.rs | 18 ++++++++---------
4 files changed, 18 insertions(+), 27 deletions(-)
diff --git a/proxmox-file-restore/Cargo.toml b/proxmox-file-restore/Cargo.toml
index de719b46..37428c8d 100644
--- a/proxmox-file-restore/Cargo.toml
+++ b/proxmox-file-restore/Cargo.toml
@@ -10,6 +10,7 @@ base64 = "0.13"
futures = "0.3"
libc = "0.2"
nix = "0.24"
+log = "0.4"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
tokio = { version = "1.6", features = [ "io-std", "rt", "rt-multi-thread", "time" ] }
diff --git a/proxmox-file-restore/src/block_driver_qemu.rs b/proxmox-file-restore/src/block_driver_qemu.rs
index d203d72a..6e00c88d 100644
--- a/proxmox-file-restore/src/block_driver_qemu.rs
+++ b/proxmox-file-restore/src/block_driver_qemu.rs
@@ -98,7 +98,7 @@ async fn cleanup_map(map: &mut HashMap<String, VMState>) -> bool {
if res.is_err() {
// VM is not reachable, remove from map and inform user
to_remove.push(name.clone());
- eprintln!(
+ log::warn!(
"VM '{}' (pid: {}, cid: {}) was not reachable, removing from map",
name, state.pid, state.cid
);
@@ -134,7 +134,7 @@ async fn ensure_running(details: &SnapRestoreDetails) -> Result<VsockClient, Err
return Ok(client);
}
Err(err) => {
- eprintln!("stale VM detected, restarting ({})", err);
+ log::warn!("stale VM detected, restarting ({})", err);
// VM is dead, restart
let _ = super::qemu_helper::try_kill_vm(vm.pid);
let vms = start_vm(vm.cid, details).await?;
@@ -233,7 +233,7 @@ impl BlockRestoreDriver for QemuBlockDriver {
)
.await
{
- eprintln!("reading file extraction stream failed - {}", err);
+ log::error!("reading file extraction stream failed - {}", err);
std::process::exit(1);
}
});
diff --git a/proxmox-file-restore/src/main.rs b/proxmox-file-restore/src/main.rs
index 5795b994..562c8ca7 100644
--- a/proxmox-file-restore/src/main.rs
+++ b/proxmox-file-restore/src/main.rs
@@ -261,7 +261,7 @@ async fn list(
Some(ref key) => {
let (key, _, _) =
decrypt_key(&key.key, &get_encryption_key_password).map_err(|err| {
- eprintln!("{}", format_key_source(&key.source, "encryption"));
+ log::error!("{}", format_key_source(&key.source, "encryption"));
err
})?;
Some(Arc::new(CryptConfig::new(key)?))
@@ -392,7 +392,6 @@ async fn extract(
path: String,
base64: bool,
target: Option<String>,
- verbose: bool,
param: Value,
) -> Result<(), Error> {
let repo = extract_repository_from_value(&param)?;
@@ -414,7 +413,7 @@ async fn extract(
Some(ref key) => {
let (key, _, _) =
decrypt_key(&key.key, &get_encryption_key_password).map_err(|err| {
- eprintln!("{}", format_key_source(&key.source, "encryption"));
+ log::error!("{}", format_key_source(&key.source, "encryption"));
err
})?;
Some(Arc::new(CryptConfig::new(key)?))
@@ -451,7 +450,7 @@ async fn extract(
let archive_size = reader.archive_size();
let reader = LocalDynamicReadAt::new(reader);
let decoder = Accessor::new(reader, archive_size).await?;
- extract_to_target(decoder, &path, target, verbose).await?;
+ extract_to_target(decoder, &path, target).await?;
}
ExtractPath::VM(file, path) => {
let details = SnapRestoreDetails {
@@ -469,7 +468,7 @@ async fn extract(
if let Some(mut target) = target {
let reader = data_extract(driver, details, file, path.clone(), true).await?;
let decoder = Decoder::from_tokio(reader).await?;
- extract_sub_dir_seq(&target, decoder, verbose).await?;
+ extract_sub_dir_seq(&target, decoder).await?;
// we extracted a .pxarexclude-cli file auto-generated by the VM when encoding the
// archive, this file is of no use for the user, so try to remove it
@@ -494,7 +493,6 @@ async fn extract_to_target<T>(
decoder: Accessor<T>,
path: &[u8],
target: Option<PathBuf>,
- verbose: bool,
) -> Result<(), Error>
where
T: pxar::accessor::ReadAt + Clone + Send + Sync + Unpin + 'static,
@@ -508,20 +506,14 @@ where
.ok_or_else(|| format_err!("error opening '{:?}'", path))?;
if let Some(target) = target {
- extract_sub_dir(target, decoder, OsStr::from_bytes(path), verbose).await?;
+ extract_sub_dir(target, decoder, OsStr::from_bytes(path)).await?;
} else {
match file.kind() {
pxar::EntryKind::File { .. } => {
tokio::io::copy(&mut file.contents().await?, &mut tokio::io::stdout()).await?;
}
_ => {
- create_zip(
- tokio::io::stdout(),
- decoder,
- OsStr::from_bytes(path),
- verbose,
- )
- .await?;
+ create_zip(tokio::io::stdout(), decoder, OsStr::from_bytes(path)).await?;
}
}
}
diff --git a/proxmox-file-restore/src/qemu_helper.rs b/proxmox-file-restore/src/qemu_helper.rs
index 4ed7e8b6..20558817 100644
--- a/proxmox-file-restore/src/qemu_helper.rs
+++ b/proxmox-file-restore/src/qemu_helper.rs
@@ -154,7 +154,7 @@ pub async fn start_vm(
let mut logrotate = LogRotate::new(logfile, false, Some(16), None)?;
if let Err(err) = logrotate.do_rotate() {
- eprintln!("warning: logrotate for QEMU log file failed - {}", err);
+ log::warn!("warning: logrotate for QEMU log file failed - {}", err);
}
let mut logfd = OpenOptions::new()
@@ -297,11 +297,11 @@ pub async fn start_vm(
bail!("CID '{}' in use, but max attempts reached, aborting", cid);
}
// CID in use, try next higher one
- eprintln!("CID '{}' in use by other VM, attempting next one", cid);
+ log::info!("CID '{}' in use by other VM, attempting next one", cid);
// skip special-meaning low values
cid = cid.wrapping_add(1).max(10);
} else {
- eprint!("{}", out);
+ log::error!("{}", out);
bail!("Starting VM failed. See output above for more information.");
}
}
@@ -317,12 +317,10 @@ pub async fn start_vm(
if let Ok(Ok(_)) =
time::timeout(Duration::from_secs(2), client.get("api2/json/status", None)).await
{
- if debug {
- eprintln!(
- "Connect to '/run/proxmox-backup/file-restore-serial-{}.sock' for shell access",
- cid
- )
- }
+ log::debug!(
+ "Connect to '/run/proxmox-backup/file-restore-serial-{}.sock' for shell access",
+ cid
+ );
return Ok((pid, cid as i32));
}
if kill(pid_t, None).is_err() {
@@ -338,7 +336,7 @@ pub async fn start_vm(
// start failed
if let Err(err) = try_kill_vm(pid) {
- eprintln!("killing failed VM failed: {}", err);
+ log::error!("killing failed VM failed: {}", err);
}
bail!("starting VM timed out");
}
--
2.30.2
^ permalink raw reply [flat|nested] 14+ messages in thread
* [pbs-devel] [PATCH proxmox-backup v3 07/10] proxmox-rest-server: replace print with log macro
2022-06-15 8:19 [pbs-devel] [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Hannes Laimer
` (6 preceding siblings ...)
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 06/10] proxmox-file-restore: " Hannes Laimer
@ 2022-06-15 8:19 ` Hannes Laimer
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 08/10] " Hannes Laimer
` (3 subsequent siblings)
11 siblings, 0 replies; 14+ messages in thread
From: Hannes Laimer @ 2022-06-15 8:19 UTC (permalink / raw)
To: pbs-devel
Signed-off-by: Hannes Laimer <h.laimer@proxmox.com>
---
proxmox-rest-server/src/api_config.rs | 4 ++--
proxmox-rest-server/src/command_socket.rs | 13 +++++++------
proxmox-rest-server/src/file_logger.rs | 2 +-
3 files changed, 10 insertions(+), 9 deletions(-)
diff --git a/proxmox-rest-server/src/api_config.rs b/proxmox-rest-server/src/api_config.rs
index c63106d3..b05e06d0 100644
--- a/proxmox-rest-server/src/api_config.rs
+++ b/proxmox-rest-server/src/api_config.rs
@@ -229,7 +229,7 @@ impl ApiConfig {
self.request_log = Some(Arc::clone(&request_log));
commando_sock.register_command("api-access-log-reopen".into(), move |_args| {
- println!("re-opening access-log file");
+ log::info!("re-opening access-log file");
request_log.lock().unwrap().reopen()?;
Ok(serde_json::Value::Null)
})?;
@@ -269,7 +269,7 @@ impl ApiConfig {
self.auth_log = Some(Arc::clone(&auth_log));
commando_sock.register_command("api-auth-log-reopen".into(), move |_args| {
- println!("re-opening auth-log file");
+ log::info!("re-opening auth-log file");
auth_log.lock().unwrap().reopen()?;
Ok(serde_json::Value::Null)
})?;
diff --git a/proxmox-rest-server/src/command_socket.rs b/proxmox-rest-server/src/command_socket.rs
index a7fc7576..bfa42b01 100644
--- a/proxmox-rest-server/src/command_socket.rs
+++ b/proxmox-rest-server/src/command_socket.rs
@@ -35,7 +35,7 @@ where
let (conn, _addr) = match socket.accept().await {
Ok(data) => data,
Err(err) => {
- eprintln!("failed to accept on control socket {:?}: {}", path, err);
+ log::error!("failed to accept on control socket {:?}: {}", path, err);
continue;
}
};
@@ -44,7 +44,7 @@ where
let cred = match socket::getsockopt(conn.as_raw_fd(), opt) {
Ok(cred) => cred,
Err(err) => {
- eprintln!("no permissions - unable to read peer credential - {}", err);
+ log::error!("no permissions - unable to read peer credential - {}", err);
continue;
}
};
@@ -52,7 +52,7 @@ where
// check permissions (same gid, root user, or backup group)
let mygid = unsafe { libc::getgid() };
if !(cred.uid() == 0 || cred.gid() == mygid || cred.gid() == gid) {
- eprintln!("no permissions for {:?}", cred);
+ log::error!("no permissions for {:?}", cred);
continue;
}
@@ -80,7 +80,7 @@ where
Ok(0) => break,
Ok(_) => (),
Err(err) => {
- eprintln!("control socket {:?} read error: {}", path, err);
+ log::error!("control socket {:?} read error: {}", path, err);
return;
}
}
@@ -94,9 +94,10 @@ where
};
if let Err(err) = tx.write_all(response.as_bytes()).await {
- eprintln!(
+ log::error!(
"control socket {:?} write response error: {}",
- path, err
+ path,
+ err
);
return;
}
diff --git a/proxmox-rest-server/src/file_logger.rs b/proxmox-rest-server/src/file_logger.rs
index eeabadab..2bb1fac6 100644
--- a/proxmox-rest-server/src/file_logger.rs
+++ b/proxmox-rest-server/src/file_logger.rs
@@ -125,7 +125,7 @@ impl FileLogger {
if let Err(err) = self.file.write_all(line.as_bytes()) {
// avoid panicking, log methods should not do that
// FIXME: or, return result???
- eprintln!("error writing to log file - {}", err);
+ log::error!("error writing to log file - {}", err);
}
}
}
--
2.30.2
^ permalink raw reply [flat|nested] 14+ messages in thread
* [pbs-devel] [PATCH proxmox-backup v3 08/10] replace print with log macro
2022-06-15 8:19 [pbs-devel] [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Hannes Laimer
` (7 preceding siblings ...)
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 07/10] proxmox-rest-server: " Hannes Laimer
@ 2022-06-15 8:19 ` Hannes Laimer
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 09/10] docs: add note for setting verbosity level Hannes Laimer
` (2 subsequent siblings)
11 siblings, 0 replies; 14+ messages in thread
From: Hannes Laimer @ 2022-06-15 8:19 UTC (permalink / raw)
To: pbs-devel
Signed-off-by: Hannes Laimer <h.laimer@proxmox.com>
---
examples/upload-speed.rs | 2 +-
.../src/proxmox_restore_daemon/api.rs | 1 -
pxar-bin/Cargo.toml | 1 +
pxar-bin/src/main.rs | 49 +++++--------------
src/api2/admin/datastore.rs | 6 +--
src/bin/sg-tape-cmd.rs | 10 ++--
6 files changed, 20 insertions(+), 49 deletions(-)
diff --git a/examples/upload-speed.rs b/examples/upload-speed.rs
index 3579db96..f9fc52a8 100644
--- a/examples/upload-speed.rs
+++ b/examples/upload-speed.rs
@@ -29,7 +29,7 @@ async fn upload_speed() -> Result<f64, Error> {
.await?;
println!("start upload speed test");
- let res = client.upload_speedtest(true).await?;
+ let res = client.upload_speedtest().await?;
Ok(res)
}
diff --git a/proxmox-restore-daemon/src/proxmox_restore_daemon/api.rs b/proxmox-restore-daemon/src/proxmox_restore_daemon/api.rs
index aeb5a71d..676deb59 100644
--- a/proxmox-restore-daemon/src/proxmox_restore_daemon/api.rs
+++ b/proxmox-restore-daemon/src/proxmox_restore_daemon/api.rs
@@ -336,7 +336,6 @@ fn extract(
entries_max: ENCODER_MAX_ENTRIES,
device_set: None,
patterns,
- verbose: false,
skip_lost_and_found: false,
};
diff --git a/pxar-bin/Cargo.toml b/pxar-bin/Cargo.toml
index a5ecb039..960b0539 100644
--- a/pxar-bin/Cargo.toml
+++ b/pxar-bin/Cargo.toml
@@ -12,6 +12,7 @@ path = "src/main.rs"
anyhow = "1.0"
futures = "0.3"
nix = "0.24"
+log = "0.4"
serde_json = "1.0"
tokio = { version = "1.6", features = [ "rt", "rt-multi-thread" ] }
diff --git a/pxar-bin/src/main.rs b/pxar-bin/src/main.rs
index e3eba213..3714eb03 100644
--- a/pxar-bin/src/main.rs
+++ b/pxar-bin/src/main.rs
@@ -23,7 +23,6 @@ fn extract_archive_from_reader<R: std::io::Read>(
reader: &mut R,
target: &str,
feature_flags: Flags,
- verbose: bool,
options: PxarExtractOptions,
) -> Result<(), Error> {
pbs_client::pxar::extract_archive(
@@ -31,9 +30,7 @@ fn extract_archive_from_reader<R: std::io::Read>(
Path::new(target),
feature_flags,
|path| {
- if verbose {
- println!("{:?}", path);
- }
+ log::debug!("{:?}", path);
},
options,
)
@@ -58,11 +55,6 @@ fn extract_archive_from_reader<R: std::io::Read>(
description: "Target directory",
optional: true,
},
- verbose: {
- description: "Verbose output.",
- optional: true,
- default: false,
- },
"no-xattrs": {
description: "Ignore extended file attributes.",
optional: true,
@@ -116,7 +108,6 @@ fn extract_archive(
archive: String,
pattern: Option<Vec<String>>,
target: Option<String>,
- verbose: bool,
no_xattrs: bool,
no_fcaps: bool,
no_acls: bool,
@@ -179,7 +170,7 @@ fn extract_archive(
// otherwise we want to log them but not act on them
Some(Box::new(move |err| {
was_ok.store(false, Ordering::Release);
- eprintln!("error: {}", err);
+ log::error!("error: {}", err);
Ok(())
})
as Box<dyn FnMut(Error) -> Result<(), Error> + Send>)
@@ -195,14 +186,12 @@ fn extract_archive(
if archive == "-" {
let stdin = std::io::stdin();
let mut reader = stdin.lock();
- extract_archive_from_reader(&mut reader, target, feature_flags, verbose, options)?;
+ extract_archive_from_reader(&mut reader, target, feature_flags, options)?;
} else {
- if verbose {
- println!("PXAR extract: {}", archive);
- }
+ log::debug!("PXAR extract: {}", archive);
let file = std::fs::File::open(archive)?;
let mut reader = std::io::BufReader::new(file);
- extract_archive_from_reader(&mut reader, target, feature_flags, verbose, options)?;
+ extract_archive_from_reader(&mut reader, target, feature_flags, options)?;
}
if !was_ok.load(Ordering::Acquire) {
@@ -221,11 +210,6 @@ fn extract_archive(
source: {
description: "Source directory.",
},
- verbose: {
- description: "Verbose output.",
- optional: true,
- default: false,
- },
"no-xattrs": {
description: "Ignore extended file attributes.",
optional: true,
@@ -285,7 +269,6 @@ fn extract_archive(
async fn create_archive(
archive: String,
source: String,
- verbose: bool,
no_xattrs: bool,
no_fcaps: bool,
no_acls: bool,
@@ -318,7 +301,6 @@ async fn create_archive(
entries_max: entries_max as usize,
device_set,
patterns,
- verbose,
skip_lost_and_found: false,
};
@@ -363,9 +345,7 @@ async fn create_archive(
writer,
feature_flags,
move |path| {
- if verbose {
- println!("{:?}", path);
- }
+ log::debug!("{:?}", path);
Ok(())
},
None,
@@ -404,9 +384,7 @@ async fn mount_archive(archive: String, mountpoint: String, verbose: bool) -> Re
select! {
res = session.fuse() => res?,
_ = interrupt.recv().fuse() => {
- if verbose {
- eprintln!("interrupted");
- }
+ log::debug!("interrupted");
}
}
@@ -419,23 +397,18 @@ async fn mount_archive(archive: String, mountpoint: String, verbose: bool) -> Re
archive: {
description: "Archive name.",
},
- verbose: {
- description: "Verbose output.",
- optional: true,
- default: false,
- },
},
},
)]
/// List the contents of an archive.
-fn dump_archive(archive: String, verbose: bool) -> Result<(), Error> {
+fn dump_archive(archive: String) -> Result<(), Error> {
for entry in pxar::decoder::Decoder::open(archive)? {
let entry = entry?;
- if verbose {
- println!("{}", format_single_line_entry(&entry));
+ if log::log_enabled!(log::Level::Debug) {
+ log::debug!("{}", format_single_line_entry(&entry));
} else {
- println!("{:?}", entry.path());
+ log::info!("{:?}", entry.path());
}
}
Ok(())
diff --git a/src/api2/admin/datastore.rs b/src/api2/admin/datastore.rs
index ad2744b7..5448fa10 100644
--- a/src/api2/admin/datastore.rs
+++ b/src/api2/admin/datastore.rs
@@ -1702,11 +1702,10 @@ pub fn pxar_file_download(
channelwriter,
decoder,
path.clone(),
- false,
));
let zstdstream = ZstdEncoder::new(ReceiverStream::new(receiver))?;
Body::wrap_stream(zstdstream.map_err(move |err| {
- eprintln!("error during streaming of tar.zst '{:?}' - {}", path, err);
+ log::error!("error during streaming of tar.zst '{:?}' - {}", path, err);
err
}))
} else {
@@ -1714,10 +1713,9 @@ pub fn pxar_file_download(
channelwriter,
decoder,
path.clone(),
- false,
));
Body::wrap_stream(ReceiverStream::new(receiver).map_err(move |err| {
- eprintln!("error during streaming of zip '{:?}' - {}", path, err);
+ log::error!("error during streaming of zip '{:?}' - {}", path, err);
err
}))
}
diff --git a/src/bin/sg-tape-cmd.rs b/src/bin/sg-tape-cmd.rs
index 402fe9cb..c4567896 100644
--- a/src/bin/sg-tape-cmd.rs
+++ b/src/bin/sg-tape-cmd.rs
@@ -25,13 +25,13 @@ fn get_tape_handle(param: &Value) -> Result<LtoTapeHandle, Error> {
let handle = if let Some(name) = param["drive"].as_str() {
let (config, _digest) = pbs_config::drive::config()?;
let drive: LtoTapeDrive = config.lookup("lto", name)?;
- eprintln!("using device {}", drive.path);
+ log::info!("using device {}", drive.path);
open_lto_tape_drive(&drive)?
} else if let Some(device) = param["device"].as_str() {
- eprintln!("using device {}", device);
+ log::info!("using device {}", device);
LtoTapeHandle::new(open_lto_tape_device(device)?)?
} else if let Some(true) = param["stdin"].as_bool() {
- eprintln!("using stdin");
+ log::info!("using stdin");
let fd = std::io::stdin().as_raw_fd();
let file = unsafe { File::from_raw_fd(fd) };
check_tape_is_lto_tape_device(&file)?;
@@ -39,7 +39,7 @@ fn get_tape_handle(param: &Value) -> Result<LtoTapeHandle, Error> {
} else if let Ok(name) = std::env::var("PROXMOX_TAPE_DRIVE") {
let (config, _digest) = pbs_config::drive::config()?;
let drive: LtoTapeDrive = config.lookup("lto", &name)?;
- eprintln!("using device {}", drive.path);
+ log::info!("using device {}", drive.path);
open_lto_tape_drive(&drive)?
} else {
let (config, _digest) = pbs_config::drive::config()?;
@@ -55,7 +55,7 @@ fn get_tape_handle(param: &Value) -> Result<LtoTapeHandle, Error> {
if drive_names.len() == 1 {
let name = drive_names[0];
let drive: LtoTapeDrive = config.lookup("lto", name)?;
- eprintln!("using device {}", drive.path);
+ log::info!("using device {}", drive.path);
open_lto_tape_drive(&drive)?
} else {
bail!("no drive/device specified");
--
2.30.2
^ permalink raw reply [flat|nested] 14+ messages in thread
* [pbs-devel] [PATCH proxmox-backup v3 09/10] docs: add note for setting verbosity level
2022-06-15 8:19 [pbs-devel] [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Hannes Laimer
` (8 preceding siblings ...)
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 08/10] " Hannes Laimer
@ 2022-06-15 8:19 ` Hannes Laimer
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 10/10] fixed_index: remove unused 'print_info' function Hannes Laimer
2022-06-21 8:46 ` [pbs-devel] applied-series: [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Wolfgang Bumiller
11 siblings, 0 replies; 14+ messages in thread
From: Hannes Laimer @ 2022-06-15 8:19 UTC (permalink / raw)
To: pbs-devel
Signed-off-by: Hannes Laimer <h.laimer@proxmox.com>
---
docs/command-syntax.rst | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/docs/command-syntax.rst b/docs/command-syntax.rst
index a24b9cdc..b2ea330a 100644
--- a/docs/command-syntax.rst
+++ b/docs/command-syntax.rst
@@ -1,6 +1,10 @@
Command Syntax
==============
+.. NOTE:: Logging verbosity for the command line tools can be controlled with the
+ ``PBS_LOG`` (for ``pxar``: ``PXAR_LOG``) environment variable. Possible values are `off`,
+ `error`, `warn`, `info`, `debug` and `trace` with `info` being the default.
+
``proxmox-backup-client``
-------------------------
--
2.30.2
^ permalink raw reply [flat|nested] 14+ messages in thread
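The documentation note above replaces the old per-command verbose switches: verbosity is now chosen at run time through the PBS_LOG (or, for pxar, PXAR_LOG) environment variable. As a rough sketch of what that means for a binary built on the log crate (illustrative only, assuming env_logger-style filtering; the actual setup is done by the init_cli_logger helper from the proxmox-router patch and is not reproduced here):

    // Illustrative sketch only: assumes the `log` and `env_logger` crates are
    // available as dependencies; the real binaries use init_cli_logger instead.
    fn main() {
        // Filter messages by the PBS_LOG environment variable, defaulting to
        // "info" as described in the documentation note above.
        env_logger::init_from_env(env_logger::Env::default().filter_or("PBS_LOG", "info"));

        log::error!("shown unless PBS_LOG=off");
        log::info!("shown at the default level (info)");
        log::debug!("only shown with PBS_LOG=debug or PBS_LOG=trace");
    }

Running such a binary with PBS_LOG=debug surfaces the messages that previously required a verbose flag; PBS_LOG=off silences it entirely.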
* [pbs-devel] [PATCH proxmox-backup v3 10/10] fixed_index: remove unused 'print_info' function
2022-06-15 8:19 [pbs-devel] [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Hannes Laimer
` (9 preceding siblings ...)
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 09/10] docs: add note for setting verbosity level Hannes Laimer
@ 2022-06-15 8:19 ` Hannes Laimer
2022-06-21 8:46 ` [pbs-devel] applied-series: [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Wolfgang Bumiller
11 siblings, 0 replies; 14+ messages in thread
From: Hannes Laimer @ 2022-06-15 8:19 UTC (permalink / raw)
To: pbs-devel
Signed-off-by: Hannes Laimer <h.laimer@proxmox.com>
---
pbs-datastore/src/fixed_index.rs | 13 -------------
1 file changed, 13 deletions(-)
diff --git a/pbs-datastore/src/fixed_index.rs b/pbs-datastore/src/fixed_index.rs
index 936bd2e7..9f703cce 100644
--- a/pbs-datastore/src/fixed_index.rs
+++ b/pbs-datastore/src/fixed_index.rs
@@ -142,19 +142,6 @@ impl FixedIndexReader {
Ok(())
}
-
- pub fn print_info(&self) {
- log::info!("Size: {}", self.size);
- log::info!("ChunkSize: {}", self.chunk_size);
-
- let mut ctime_str = self.ctime.to_string();
- if let Ok(s) = proxmox_time::strftime_local("%c", self.ctime) {
- ctime_str = s;
- }
-
- log::info!("CTime: {}", ctime_str);
- log::info!("UUID: {:?}", self.uuid);
- }
}
impl IndexFile for FixedIndexReader {
--
2.30.2
^ permalink raw reply [flat|nested] 14+ messages in thread
* [pbs-devel] applied-series: [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries
2022-06-15 8:19 [pbs-devel] [PATCH-SERIES proxmox-backup/proxmox v3 00/10] replace print by log macro in libraries Hannes Laimer
` (10 preceding siblings ...)
2022-06-15 8:19 ` [pbs-devel] [PATCH proxmox-backup v3 10/10] fixed_index: remove unused 'print_info' function Hannes Laimer
@ 2022-06-21 8:46 ` Wolfgang Bumiller
11 siblings, 0 replies; 14+ messages in thread
From: Wolfgang Bumiller @ 2022-06-21 8:46 UTC (permalink / raw)
To: Hannes Laimer; +Cc: pbs-devel
applied series, thanks
On Wed, Jun 15, 2022 at 08:19:46AM +0000, Hannes Laimer wrote:
> This series mostly replaces print with the log macro in libs; it also replaces print
> in binaries where it is used to log information rather than to output the result of a command.
> In the process of replacing prints with log macros, a few parameters controlling verbosity
> became obsolete and were removed (outputs that were behind an if(verbose) check are now log::debug);
> other 'verbose' parameters influenced the control flow and were therefore kept.
>
> The changes were split up into 7 separate patches [2-8]; this was done
> to avoid one huge patch file and improve readability. Those (maybe also 1)
> should be squashed when applied, since they are not necessarily buildable.
> The reason for that is that in a few places 'verbose' parameters were removed.
>
> A version bump is also needed since patch 1 (and indirectly 2-8) depends on
> the function added to proxmox-router.
>
> v3, thanks @Wolfgang Bumiller <w.bumiller@proxmox.com>:
> * updated verbosity in some places
> * used log_enabled!(...)
> * removed unused 'print_info' function
> * rebased onto master
>
> * proxmox
> Hannes Laimer (1):
> router: add init_cli_logger helper function
>
> proxmox-router/Cargo.toml | 1 +
> proxmox-router/src/cli/mod.rs | 11 +++++++++++
> 2 files changed, 12 insertions(+)
>
> * proxmox-backup
> Hannes Laimer (10):
> bins: init cli logger
> pbs-client: replace print with log macro
> pbs-datastore: replace print with log macro
> pbs-fuse+pbs-tape: replace print with log macro
> proxmox-backup-client: replace print with log macro
> proxmox-file-restore: replace print with log macro
> proxmox-rest-server: replace print with log macro
> replace print with log macro
> docs: add note for setting verbosity level
> fixed_index: remove unused 'print_info' function
>
> docs/command-syntax.rst | 4 +
> examples/upload-speed.rs | 2 +-
> pbs-client/Cargo.toml | 1 +
> pbs-client/src/backup_writer.rs | 103 ++++++++----------
> pbs-client/src/catalog_shell.rs | 4 +-
> pbs-client/src/http_client.rs | 14 +--
> pbs-client/src/pxar/create.rs | 2 -
> pbs-client/src/pxar/extract.rs | 70 ++++--------
> pbs-client/src/pxar/fuse.rs | 18 ++-
> pbs-client/src/pxar/metadata.rs | 4 +-
> pbs-client/src/pxar_backup_stream.rs | 6 +-
> pbs-client/src/task_log.rs | 8 +-
> pbs-client/src/tools/key_source.rs | 2 +-
> pbs-datastore/src/catalog.rs | 6 +-
> pbs-datastore/src/datastore.rs | 2 +-
> pbs-datastore/src/dynamic_index.rs | 2 +-
> pbs-datastore/src/fixed_index.rs | 19 +---
> pbs-datastore/src/paperkey.rs | 2 +-
> pbs-fuse-loop/Cargo.toml | 3 +-
> pbs-fuse-loop/src/fuse_loop.rs | 12 +-
> pbs-tape/Cargo.toml | 1 +
> pbs-tape/src/bin/pmt.rs | 12 +-
> pbs-tape/src/bin/pmtx.rs | 10 +-
> pbs-tape/src/sg_pt_changer.rs | 4 +-
> proxmox-backup-client/Cargo.toml | 1 +
> proxmox-backup-client/src/benchmark.rs | 36 ++----
> proxmox-backup-client/src/catalog.rs | 6 +-
> proxmox-backup-client/src/key.rs | 27 +++--
> proxmox-backup-client/src/main.rs | 92 +++++++---------
> proxmox-backup-client/src/mount.rs | 12 +-
> proxmox-file-restore/Cargo.toml | 1 +
> proxmox-file-restore/src/block_driver_qemu.rs | 6 +-
> proxmox-file-restore/src/main.rs | 26 ++---
> proxmox-file-restore/src/qemu_helper.rs | 18 ++-
> proxmox-rest-server/src/api_config.rs | 4 +-
> proxmox-rest-server/src/command_socket.rs | 13 ++-
> proxmox-rest-server/src/file_logger.rs | 2 +-
> .../src/proxmox_restore_daemon/api.rs | 1 -
> pxar-bin/Cargo.toml | 1 +
> pxar-bin/src/main.rs | 51 +++------
> src/api2/admin/datastore.rs | 6 +-
> src/bin/proxmox-backup-debug.rs | 4 +-
> src/bin/proxmox-backup-manager.rs | 2 +
> src/bin/proxmox-tape.rs | 2 +
> src/bin/sg-tape-cmd.rs | 12 +-
> 45 files changed, 259 insertions(+), 375 deletions(-)
>
> --
> 2.30.2
^ permalink raw reply [flat|nested] 14+ messages in thread
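To illustrate the conversion pattern the cover letter describes (verbose flags dropped, formerly conditional output moved to log::debug!, normal output to log::info!, and log_enabled! used where the active level should pick the output format, as in the pxar dump_archive change in patch 08 above), here is a hedged before/after sketch. The function names are made up for illustration; this is not code from the series.

    // Before (sketch): a verbose flag threaded through the call chain.
    fn dump_entries_old(entries: &[String], verbose: bool) {
        for entry in entries {
            if verbose {
                println!("entry (detailed): {:?}", entry);
            } else {
                println!("{}", entry);
            }
        }
        eprintln!("{} entries dumped", entries.len());
    }

    // After (sketch): no verbose parameter; the active log level decides.
    // Detailed output moves to debug, normal output to info, and
    // log_enabled! selects the representation up front.
    fn dump_entries_new(entries: &[String]) {
        for entry in entries {
            if log::log_enabled!(log::Level::Debug) {
                log::debug!("entry (detailed): {:?}", entry);
            } else {
                log::info!("{}", entry);
            }
        }
        log::info!("{} entries dumped", entries.len());
    }

Which branch runs is then governed by the same PBS_LOG mechanism documented in patch 09, rather than by a per-command flag.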