From mboxrd@z Thu Jan 1 00:00:00 1970
Return-Path: <pbs-devel-bounces@lists.proxmox.com>
Received: from firstgate.proxmox.com (firstgate.proxmox.com [212.224.123.68])
	by lore.proxmox.com (Postfix) with ESMTPS id 59BEA1FF170
	for <inbox@lore.proxmox.com>; Tue, 3 Dec 2024 12:07:06 +0100 (CET)
Received: from firstgate.proxmox.com (localhost [127.0.0.1])
	by firstgate.proxmox.com (Proxmox) with ESMTP id EE0EC1724;
	Tue, 3 Dec 2024 12:07:11 +0100 (CET)
From: Gabriel Goller <g.goller@proxmox.com>
To: pbs-devel@lists.proxmox.com
Date: Tue, 3 Dec 2024 12:06:32 +0100
Message-Id: <20241203110632.190866-1-g.goller@proxmox.com>
X-Mailer: git-send-email 2.39.5
MIME-Version: 1.0
Subject: [pbs-devel] [PATCH proxmox-backup] pbs-client: remove `log` dependency and migrate to `tracing`
X-BeenThere: pbs-devel@lists.proxmox.com
X-Mailman-Version: 2.1.29
Precedence: list
List-Id: Proxmox Backup Server development discussion <pbs-devel.lists.proxmox.com>
List-Unsubscribe: <https://lists.proxmox.com/cgi-bin/mailman/options/pbs-devel>,
 <mailto:pbs-devel-request@lists.proxmox.com?subject=unsubscribe>
List-Archive: <http://lists.proxmox.com/pipermail/pbs-devel/>
List-Post: <mailto:pbs-devel@lists.proxmox.com>
List-Help: <mailto:pbs-devel-request@lists.proxmox.com?subject=help>
List-Subscribe: <https://lists.proxmox.com/cgi-bin/mailman/listinfo/pbs-devel>,
 <mailto:pbs-devel-request@lists.proxmox.com?subject=subscribe>
Reply-To: Proxmox Backup Server development discussion <pbs-devel@lists.proxmox.com>
Content-Type: text/plain; charset="us-ascii"
Content-Transfer-Encoding: 7bit
Errors-To: pbs-devel-bounces@lists.proxmox.com
Sender: "pbs-devel" <pbs-devel-bounces@lists.proxmox.com>

Remove the `log` dependency from pbs-client and switch all logging
invocations over to the `tracing` macros re-exported by `proxmox-log`.
No functional change intended.
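
For context, the substitution is mechanical. A minimal sketch of the
pattern, with illustrative call sites (the variable names below are
made up for the example, not taken from this patch):

    // before: macros from the `log` facade
    log::info!("processed {} chunks", count);
    if log::log_enabled!(log::Level::Debug) {
        log::debug!("chunk digest: {digest}");
    }

    // after: the same call sites with the tracing macros re-exported
    // by proxmox-log
    use proxmox_log::{debug, enabled, info, Level};

    info!("processed {} chunks", count);
    if enabled!(Level::DEBUG) {
        debug!("chunk digest: {digest}");
    }

The only non-trivial substitution is `log::log_enabled!(log::Level::Debug)`
becoming `enabled!(Level::DEBUG)`: tracing uses associated `Level`
constants rather than the enum variants of the `log` crate.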
Signed-off-by: Gabriel Goller <g.goller@proxmox.com>
---
 pbs-client/Cargo.toml                |  2 +-
 pbs-client/src/backup_writer.rs      | 51 ++++++++++++-------------
 pbs-client/src/catalog_shell.rs      |  5 ++-
 pbs-client/src/http_client.rs        | 21 ++++++-----
 pbs-client/src/pxar/create.rs        | 56 ++++++++++++++--------------
 pbs-client/src/pxar/extract.rs       | 35 ++++++++---------
 pbs-client/src/pxar/metadata.rs      |  9 +++--
 pbs-client/src/pxar/tools.rs         | 11 +++---
 pbs-client/src/pxar_backup_stream.rs |  3 +-
 pbs-client/src/task_log.rs           |  5 ++-
 pbs-client/src/tools/key_source.rs   |  3 +-
 11 files changed, 103 insertions(+), 98 deletions(-)

diff --git a/pbs-client/Cargo.toml b/pbs-client/Cargo.toml
index 00c18b83a6ac..212f62f2a95e 100644
--- a/pbs-client/Cargo.toml
+++ b/pbs-client/Cargo.toml
@@ -15,7 +15,6 @@ hex.workspace = true
 http.workspace = true
 hyper.workspace = true
 libc.workspace = true
-log.workspace = true
 nix.workspace = true
 openssl.workspace = true
 percent-encoding.workspace = true
@@ -38,6 +37,7 @@ proxmox-compression.workspace = true
 proxmox-http = { workspace = true, features = [ "rate-limiter" ] }
 proxmox-human-byte.workspace = true
 proxmox-io = { workspace = true, features = [ "tokio" ] }
+proxmox-log = { workspace = true }
 proxmox-router = { workspace = true, features = [ "cli", "server" ] }
 proxmox-schema.workspace = true
 proxmox-sys.workspace = true
diff --git a/pbs-client/src/backup_writer.rs b/pbs-client/src/backup_writer.rs
index f321ea403c84..1253ef561530 100644
--- a/pbs-client/src/backup_writer.rs
+++ b/pbs-client/src/backup_writer.rs
@@ -25,6 +25,7 @@ use pbs_datastore::PROXMOX_BACKUP_PROTOCOL_ID_V1;
 use pbs_tools::crypt_config::CryptConfig;

 use proxmox_human_byte::HumanByte;
+use proxmox_log::{debug, enabled, info, trace, warn, Level};
 use proxmox_time::TimeSpan;

 use super::backup_stats::{BackupStats, UploadCounters, UploadStats};
@@ -391,7 +392,7 @@ impl BackupWriter {
             .iter()
             .any(|file| file.filename == archive_name.as_ref())
         {
-            log::info!("Previous manifest does not contain an archive called '{archive_name}', skipping download..");
+            info!("Previous manifest does not contain an archive called '{archive_name}', skipping download..");
         } else {
             // try, but ignore errors
             match archive_name.archive_type() {
@@ -404,7 +405,7 @@ impl BackupWriter {
                     )
                     .await
                     {
-                        log::warn!("Error downloading .fidx from previous manifest: {}", err);
+                        warn!("Error downloading .fidx from previous manifest: {}", err);
                     }
                 }
                 ArchiveType::DynamicIndex => {
@@ -416,7 +417,7 @@ impl BackupWriter {
                     )
                     .await
                     {
-                        log::warn!("Error downloading .didx from previous manifest: {}", err);
+                        warn!("Error downloading .didx from previous manifest: {}", err);
                     }
                 }
                 _ => { /* do nothing */ }
@@ -450,14 +451,14 @@ impl BackupWriter {
         let size_dirty = upload_stats.size - upload_stats.size_reused;
         let size: HumanByte = upload_stats.size.into();

-        let archive = if log::log_enabled!(log::Level::Debug) {
+        let archive = if enabled!(Level::DEBUG) {
             archive_name.to_string()
         } else {
             archive_name.without_type_extension()
         };

         if upload_stats.chunk_injected > 0 {
-            log::info!(
+            info!(
                 "{archive}: reused {} from previous snapshot for unchanged files ({} chunks)",
                 HumanByte::from(upload_stats.size_injected),
                 upload_stats.chunk_injected,
@@ -469,37 +470,33 @@ impl BackupWriter {
             ((size_dirty * 1_000_000) / (upload_stats.duration.as_micros() as usize)).into();
         let size_dirty: HumanByte = size_dirty.into();
         let size_compressed: HumanByte = upload_stats.size_compressed.into();
-        log::info!(
+        info!(
             "{archive}: had to backup {size_dirty} of {size} (compressed {size_compressed}) in {:.2} s (average {speed}/s)",
             upload_stats.duration.as_secs_f64()
         );
     } else {
-        log::info!("Uploaded backup catalog ({})", size);
+        info!("Uploaded backup catalog ({})", size);
     }

     if upload_stats.size_reused > 0 && upload_stats.size > 1024 * 1024 {
         let reused_percent = upload_stats.size_reused as f64 * 100. / upload_stats.size as f64;
         let reused: HumanByte = upload_stats.size_reused.into();
-        log::info!(
+        info!(
             "{}: backup was done incrementally, reused {} ({:.1}%)",
-            archive,
-            reused,
-            reused_percent
+            archive, reused, reused_percent
         );
     }
-    if log::log_enabled!(log::Level::Debug) && upload_stats.chunk_count > 0 {
-        log::debug!(
+    if enabled!(Level::DEBUG) && upload_stats.chunk_count > 0 {
+        debug!(
             "{}: Reused {} from {} chunks.",
-            archive,
-            upload_stats.chunk_reused,
-            upload_stats.chunk_count
+            archive, upload_stats.chunk_reused, upload_stats.chunk_count
         );
-        log::debug!(
+        debug!(
             "{}: Average chunk size was {}.",
             archive,
             HumanByte::from(upload_stats.size / upload_stats.chunk_count)
         );
-        log::debug!(
+        debug!(
             "{}: Average time per request: {} microseconds.",
             archive,
             (upload_stats.duration.as_micros()) / (upload_stats.chunk_count as u128)
@@ -544,7 +541,7 @@ impl BackupWriter {
             response
                 .map_err(Error::from)
                 .and_then(H2Client::h2api_response)
-                .map_ok(move |result| log::debug!("RESPONSE: {:?}", result))
+                .map_ok(move |result| debug!("RESPONSE: {:?}", result))
                 .map_err(|err| format_err!("pipelined request failed: {}", err))
         })
         .map(|result| {
@@ -602,7 +599,7 @@ impl BackupWriter {
             digest_list.push(hex::encode(digest));
             offset_list.push(offset);
         }
-        log::debug!("append chunks list len ({})", digest_list.len());
+        debug!("append chunks list len ({})", digest_list.len());
         let param = json!({ "wid": wid, "digest-list": digest_list, "offset-list": offset_list });
         let request = H2Client::request_builder("localhost", "PUT", &path, None, Some("application/json")).unwrap();
         let param_data = bytes::Bytes::from(param.to_string().into_bytes());
@@ -654,7 +651,7 @@ impl BackupWriter {
             known_chunks.insert(*index.index_digest(i).unwrap());
         }

-        log::debug!(
+        debug!(
             "{}: known chunks list length is {}",
             archive_name,
             index.index_count()
@@ -688,7 +685,7 @@ impl BackupWriter {
             known_chunks.insert(*index.index_digest(i).unwrap());
         }

-        log::debug!(
+        debug!(
             "{}: known chunks list length is {}",
             archive_name,
             index.index_count()
@@ -860,7 +857,7 @@ impl BackupWriter {
                     let size_uploaded = HumanByte::from(uploaded_len.load(Ordering::SeqCst));
                     let elapsed = TimeSpan::from(start_time.elapsed());

-                    log::info!("processed {size} in {elapsed}, uploaded {size_uploaded}");
+                    info!("processed {size} in {elapsed}, uploaded {size_uploaded}");
                 }
             }))
         } else {
@@ -876,7 +873,7 @@ impl BackupWriter {
                 let digest = chunk_info.digest;
                 let digest_str = hex::encode(digest);

-                log::trace!(
+                trace!(
                     "upload new chunk {} ({} bytes, offset {})",
                     digest_str,
                     chunk_info.chunk_len,
@@ -967,7 +964,7 @@ impl BackupWriter {
                     break;
                 }

-                log::debug!("send test data ({} bytes)", data.len());
+                debug!("send test data ({} bytes)", data.len());
                 let request =
                     H2Client::request_builder("localhost", "POST", "speedtest", None, None).unwrap();
                 let request_future = self
@@ -982,13 +979,13 @@ impl BackupWriter {

         let _ = upload_result.await?;

-        log::info!(
+        info!(
             "Uploaded {} chunks in {} seconds.",
             repeat,
             start_time.elapsed().as_secs()
         );
         let speed = ((item_len * (repeat as usize)) as f64) / start_time.elapsed().as_secs_f64();
-        log::info!(
+        info!(
             "Time per request: {} microseconds.",
             (start_time.elapsed().as_micros()) / (repeat as u128)
         );
diff --git a/pbs-client/src/catalog_shell.rs b/pbs-client/src/catalog_shell.rs
index 7dace86bbe2f..08e64f90aa93 100644
--- a/pbs-client/src/catalog_shell.rs
+++ b/pbs-client/src/catalog_shell.rs
@@ -23,6 +23,7 @@ use pxar::{EntryKind, Metadata};
 use pbs_datastore::catalog::{self, DirEntryAttribute};

 use proxmox_async::runtime::{block_in_place, block_on};
+use proxmox_log::error;

 use crate::pxar::Flags;
@@ -106,7 +107,7 @@ fn complete_path(complete_me: &str, _map: &HashMap<String, String>) -> Vec<Strin
     match shell.complete_path(complete_me) {
         Ok(list) => list,
         Err(err) => {
-            log::error!("error during completion: {}", err);
+            error!("error during completion: {}", err);
             Vec::new()
         }
     }
@@ -418,7 +419,7 @@ impl Shell {
             let args = match cli::shellword_split(&line) {
                 Ok(args) => args,
                 Err(err) => {
-                    log::error!("Error: {}", err);
+                    error!("Error: {}", err);
                     continue;
                 }
             };
diff --git a/pbs-client/src/http_client.rs b/pbs-client/src/http_client.rs
index 8ae5edaa031d..e97b4e549e8f 100644
--- a/pbs-client/src/http_client.rs
+++ b/pbs-client/src/http_client.rs
@@ -25,6 +25,7 @@ use proxmox_async::broadcast_future::BroadcastFuture;
 use proxmox_http::client::HttpsConnector;
 use proxmox_http::uri::{build_authority, json_object_to_query};
 use proxmox_http::{ProxyConfig, RateLimiter};
+use proxmox_log::{error, info, warn};

 use pbs_api_types::percent_encoding::DEFAULT_ENCODE_SET;
 use pbs_api_types::{Authid, RateLimitConfig, Userid};
@@ -348,14 +349,14 @@ impl HttpClient {
                             if let Err(err) =
                                 store_fingerprint(prefix.as_ref().unwrap(), &server, &fingerprint)
                             {
-                                log::error!("{}", err);
+                                error!("{}", err);
                             }
                         }
                         *verified_fingerprint.lock().unwrap() = Some(fingerprint);
                         true
                     }
                     Err(err) => {
-                        log::error!("certificate validation failed - {}", err);
+                        error!("certificate validation failed - {}", err);
                         false
                     }
                 },
@@ -393,7 +394,7 @@ impl HttpClient {
         let proxy_config = ProxyConfig::from_proxy_env()?;
         if let Some(config) = proxy_config {
-            log::info!("Using proxy connection: {}:{}", config.host, config.port);
+            info!("Using proxy connection: {}:{}", config.host, config.port);
             https.set_proxy(config);
         }
@@ -461,14 +462,14 @@ impl HttpClient {
                             &auth.token,
                         ) {
                             if std::io::stdout().is_terminal() {
-                                log::error!("storing login ticket failed: {}", err);
+                                error!("storing login ticket failed: {}", err);
                             }
                         }
                     }
                     *auth2.write().unwrap() = auth;
                 }
                 Err(err) => {
-                    log::error!("re-authentication failed: {}", err);
+                    error!("re-authentication failed: {}", err);
                 }
             }
         }
@@ -498,7 +499,7 @@ impl HttpClient {
                 &auth.token,
             ) {
                 if std::io::stdout().is_terminal() {
-                    log::error!("storing login ticket failed: {}", err);
+                    error!("storing login ticket failed: {}", err);
                 }
             }
         }
@@ -600,14 +601,14 @@ impl HttpClient {
             if expected_fingerprint == fp_string {
                 return Ok(Some(fp_string));
             } else {
-                log::warn!("WARNING: certificate fingerprint does not match expected fingerprint!");
-                log::warn!("expected: {}", expected_fingerprint);
+                warn!("WARNING: certificate fingerprint does not match expected fingerprint!");
+                warn!("expected: {}", expected_fingerprint);
             }
         }

         // If we're on a TTY, query the user
         if interactive && std::io::stdin().is_terminal() {
-            log::info!("fingerprint: {}", fp_string);
+            info!("fingerprint: {}", fp_string);
             loop {
(y/n): "); let _ = std::io::stdout().flush(); @@ -797,7 +798,7 @@ impl HttpClient { .handshake(upgraded) .await?; - let connection = connection.map_err(|_| log::error!("HTTP/2.0 connection failed")); + let connection = connection.map_err(|_| error!("HTTP/2.0 connection failed")); let (connection, abort) = futures::future::abortable(connection); // A cancellable future returns an Option which is None when cancelled and diff --git a/pbs-client/src/pxar/create.rs b/pbs-client/src/pxar/create.rs index d5c2b451cec1..c7d274b8c3a2 100644 --- a/pbs-client/src/pxar/create.rs +++ b/pbs-client/src/pxar/create.rs @@ -27,6 +27,7 @@ use pxar::{EntryKind, Metadata, PxarVariant}; use proxmox_human_byte::HumanByte; use proxmox_io::vec; +use proxmox_log::{debug, error, info, warn}; use proxmox_sys::fs::{self, acl, xattr}; use pbs_datastore::catalog::BackupCatalogWriter; @@ -315,25 +316,25 @@ where encoder.close().await?; if metadata_mode { - log::info!("Change detection summary:"); - log::info!( + info!("Change detection summary:"); + info!( " - {} total files ({} hardlinks)", archiver.reuse_stats.files_reused_count + archiver.reuse_stats.files_reencoded_count + archiver.reuse_stats.files_hardlink_count, archiver.reuse_stats.files_hardlink_count, ); - log::info!( + info!( " - {} unchanged, reusable files with {} data", archiver.reuse_stats.files_reused_count, HumanByte::from(archiver.reuse_stats.total_reused_payload_size), ); - log::info!( + info!( " - {} changed or non-reusable files with {} data", archiver.reuse_stats.files_reencoded_count, HumanByte::from(archiver.reuse_stats.total_reencoded_size), ); - log::info!( + info!( " - {} padding in {} partially reused chunks", HumanByte::from( archiver.reuse_stats.total_injected_size @@ -434,18 +435,18 @@ impl Archiver { { let range = *offset..*offset + size + size_of::<pxar::format::Header>() as u64; - log::debug!( + debug!( "reusable: {file_name:?} at range {range:?} has unchanged metadata." 
                     );
                     return Ok(Some(range));
                 }
-                log::debug!("re-encode: {file_name:?} not a regular file.");
+                debug!("re-encode: {file_name:?} not a regular file.");
                 return Ok(None);
             }
-            log::debug!("re-encode: {file_name:?} metadata did not match.");
+            debug!("re-encode: {file_name:?} metadata did not match.");
             return Ok(None);
         }
-        log::debug!("re-encode: {file_name:?} not found in previous archive.");
+        debug!("re-encode: {file_name:?} not found in previous archive.");
     }

     Ok(None)
@@ -481,7 +482,7 @@ impl Archiver {
                 Ok(None)
             }
             Err(Errno::EACCES) => {
-                log::warn!("failed to open file: {:?}: access denied", file_name);
+                warn!("failed to open file: {:?}: access denied", file_name);
                 Ok(None)
             }
             Err(Errno::ESTALE) => {
@@ -515,10 +516,9 @@ impl Archiver {
             let line = match line {
                 Ok(line) => line,
                 Err(err) => {
-                    log::warn!(
+                    warn!(
                         "ignoring .pxarexclude after read error in {:?}: {}",
-                        self.path,
-                        err,
+                        self.path, err,
                     );
                     self.patterns.truncate(old_pattern_count);
                     return Ok(());
@@ -558,7 +558,7 @@ impl Archiver {
                     }
                 }
                 Err(err) => {
-                    log::error!("bad pattern in {:?}: {}", self.path, err);
+                    error!("bad pattern in {:?}: {}", self.path, err);
                 }
             }
         }
@@ -640,7 +640,7 @@ impl Archiver {
             match match_result {
                 Ok(Some(MatchType::Exclude)) => {
-                    log::debug!("matched by exclude pattern '{full_path:?}'");
+                    debug!("matched by exclude pattern '{full_path:?}'");
                     continue;
                 }
                 Ok(_) => (),
@@ -692,22 +692,22 @@ impl Archiver {
     fn report_stale_file_handle(&self, path: Option<&PathBuf>) {
         let path = path.unwrap_or(&self.path);
-        log::warn!("warning: stale file handle encountered while reading: {path:?}");
+        warn!("warning: stale file handle encountered while reading: {path:?}");
     }

     fn report_vanished_file(&self) {
-        log::warn!("warning: file vanished while reading: {:?}", self.path);
+        warn!("warning: file vanished while reading: {:?}", self.path);
     }

     fn report_file_shrunk_while_reading(&self) {
-        log::warn!(
+        warn!(
             "warning: file size shrunk while reading: {:?}, file will be padded with zeros!",
             self.path,
         );
     }

     fn report_file_grew_while_reading(&self) {
-        log::warn!(
+        warn!(
             "warning: file size increased while reading: {:?}, file will be truncated!",
             self.path,
         );
@@ -766,7 +766,7 @@ impl Archiver {
         // Avoid having to many open file handles in cached entries
         if self.cache.is_full() {
-            log::debug!("Max cache size reached, reuse cached entries");
+            debug!("Max cache size reached, reuse cached entries");
             self.flush_cached_reusing_if_below_threshold(encoder, true)
                 .await?;
         }
@@ -803,7 +803,7 @@ impl Archiver {
             .await?
         {
             if !self.cache.try_extend_range(payload_range.clone()) {
-                log::debug!("Cache range has hole, new range: {payload_range:?}");
+                debug!("Cache range has hole, new range: {payload_range:?}");
                 self.flush_cached_reusing_if_below_threshold(encoder, true)
                     .await?;
                 // range has to be set after flushing of cached entries, which resets the range
@@ -814,7 +814,7 @@ impl Archiver {
             // actual chunks, which needs to be added before encoding the payload reference
             let offset =
                 PayloadOffset::default().add(payload_range.start - self.cache.range().start);
-            log::debug!("Offset relative to range start: {offset:?}");
+            debug!("Offset relative to range start: {offset:?}");

             self.cache.insert(
                 fd,
@@ -1018,7 +1018,7 @@ impl Archiver {
             // do not reuse chunks if introduced padding higher than threshold
             // opt for re-encoding in that case
             if ratio > CHUNK_PADDING_THRESHOLD {
-                log::debug!(
+                debug!(
                     "Padding ratio: {ratio} > {CHUNK_PADDING_THRESHOLD}, padding: {}, total {}, chunks: {}",
                     HumanByte::from(padding),
                     HumanByte::from(total_size),
@@ -1027,7 +1027,7 @@ impl Archiver {
                 self.cache.update_last_chunk(prev_last_chunk);
                 self.encode_entries_to_archive(encoder, None).await?;
             } else {
-                log::debug!(
+                debug!(
                     "Padding ratio: {ratio} < {CHUNK_PADDING_THRESHOLD}, padding: {}, total {}, chunks: {}",
                     HumanByte::from(padding),
                     HumanByte::from(total_size),
@@ -1078,7 +1078,7 @@ impl Archiver {
         let (entries, start_path) = self.cache.take_and_reset();
         let old_path = self.path.clone();
         self.path = start_path;
-        log::debug!(
+        debug!(
             "Got {} cache entries to encode: reuse is {}",
             entries.len(),
             base_offset.is_some()
@@ -1147,7 +1147,7 @@ impl Archiver {
         let mut size = PayloadOffset::default();
         for chunk in chunks.iter() {
-            log::debug!(
+            debug!(
                 "Injecting chunk with {} padding (chunk size {})",
                 HumanByte::from(chunk.padding),
                 HumanByte::from(chunk.size()),
@@ -1175,7 +1175,7 @@ impl Archiver {
             };

             injection_boundary = injection_boundary.add(size.raw());
-            log::debug!("Advance payload position by: {size:?}");
+            debug!("Advance payload position by: {size:?}");
             encoder.advance(size)?;
         }
@@ -1225,7 +1225,7 @@ impl Archiver {
         }

         let result = if skip_contents {
-            log::info!("skipping mount point: {:?}", self.path);
+            info!("skipping mount point: {:?}", self.path);
             Ok(())
         } else {
             let mut dir_accessor = None;
diff --git a/pbs-client/src/pxar/extract.rs b/pbs-client/src/pxar/extract.rs
index 60d8ce142e36..d21bf7ea41f3 100644
--- a/pbs-client/src/pxar/extract.rs
+++ b/pbs-client/src/pxar/extract.rs
@@ -22,6 +22,7 @@ use pxar::format::Device;
 use pxar::{Entry, EntryKind, Metadata};

 use proxmox_io::{sparse_copy, sparse_copy_async};
+use proxmox_log::{debug, error, info};
 use proxmox_sys::c_result;
 use proxmox_sys::fs::{create_path, CreateOptions};
@@ -140,10 +141,10 @@ where
             if let pxar::EntryKind::Prelude(ref prelude) = entry.kind() {
                 prelude_file.write_all(prelude.as_ref())?;
             } else {
-                log::info!("unexpected entry kind for prelude");
+                info!("unexpected entry kind for prelude");
             }
         } else {
-            log::info!("No prelude entry found, skip prelude restore.");
+            info!("No prelude entry found, skip prelude restore.");
         }
     }
@@ -887,7 +888,7 @@ where
                 let metadata = realfile.entry().metadata();
                 let realpath = Path::new(link);

-                log::debug!("adding '{}' to tar", path.display());
+                debug!("adding '{}' to tar", path.display());

                 let stripped_path = match realpath.strip_prefix(prefix) {
                     Ok(path) => path,
@@ -916,7 +917,7 @@ where
                 }
             }
             EntryKind::Symlink(link) if !link.data.is_empty() => {
-                log::debug!("adding '{}' to tar", path.display());
debug!("adding '{}' to tar", path.display()); let realpath = Path::new(link); let mut header = tar::Header::new_gnu(); header.set_entry_type(tar::EntryType::Symlink); @@ -928,7 +929,7 @@ where .context("could not send symlink entry")?; } EntryKind::Fifo => { - log::debug!("adding '{}' to tar", path.display()); + debug!("adding '{}' to tar", path.display()); let mut header = tar::Header::new_gnu(); header.set_entry_type(tar::EntryType::Fifo); add_metadata_to_header(&mut header, metadata); @@ -942,7 +943,7 @@ where .context("could not send fifo entry")?; } EntryKind::Directory => { - log::debug!("adding '{}' to tar", path.display()); + debug!("adding '{}' to tar", path.display()); // we cannot add the root path itself if path != Path::new("/") { let mut header = tar::Header::new_gnu(); @@ -957,7 +958,7 @@ where } } EntryKind::Device(device) => { - log::debug!("adding '{}' to tar", path.display()); + debug!("adding '{}' to tar", path.display()); let entry_type = if metadata.stat.is_chardev() { tar::EntryType::Char } else { @@ -980,7 +981,7 @@ where } tarencoder.finish().await.map_err(|err| { - log::error!("error during finishing of zip: {}", err); + error!("error during finishing of zip: {}", err); err })?; Ok(()) @@ -1029,7 +1030,7 @@ where match entry.kind() { EntryKind::File { .. } => { - log::debug!("adding '{}' to zip", path.display()); + debug!("adding '{}' to zip", path.display()); let entry = ZipEntry::new( path, metadata.stat.mtime.secs, @@ -1048,7 +1049,7 @@ where .with_context(|| format!("error looking up {:?}", path))?; let realfile = accessor.follow_hardlink(&entry).await?; let metadata = realfile.entry().metadata(); - log::debug!("adding '{}' to zip", path.display()); + debug!("adding '{}' to zip", path.display()); let entry = ZipEntry::new( path, metadata.stat.mtime.secs, @@ -1061,7 +1062,7 @@ where .context("could not send file entry")?; } EntryKind::Directory => { - log::debug!("adding '{}' to zip", path.display()); + debug!("adding '{}' to zip", path.display()); let entry = ZipEntry::new( path, metadata.stat.mtime.secs, @@ -1151,7 +1152,7 @@ where let mut extractor = get_extractor(destination, root.metadata().clone())?; if let Err(err) = seq_files_extractor(&mut extractor, decoder).await { - log::error!("error extracting pxar archive: {}", err); + error!("error extracting pxar archive: {}", err); } Ok(()) @@ -1215,7 +1216,7 @@ where let metadata = entry.metadata(); let (file_name_os, file_name) = get_filename(entry)?; - log::debug!("extracting: {}", file.path().display()); + debug!("extracting: {}", file.path().display()); match file.kind() { EntryKind::Directory => { @@ -1267,7 +1268,7 @@ where let (file_name_os, file_name) = get_filename(&entry)?; if !matches!(entry.kind(), EntryKind::GoodbyeTable) { - log::debug!("extracting: {}", entry.path().display()); + debug!("extracting: {}", entry.path().display()); } if let Err(err) = async { @@ -1303,13 +1304,13 @@ where } .await { - let display = entry.path().display().to_string(); - log::error!( + let display_string = entry.path().display().to_string(); + error!( "error extracting {}: {}", if matches!(entry.kind(), EntryKind::GoodbyeTable) { "<directory>" } else { - &display + &display_string }, err ); diff --git a/pbs-client/src/pxar/metadata.rs b/pbs-client/src/pxar/metadata.rs index ad6332157cc6..d03cf7e24706 100644 --- a/pbs-client/src/pxar/metadata.rs +++ b/pbs-client/src/pxar/metadata.rs @@ -9,6 +9,7 @@ use nix::sys::stat::Mode; use pxar::Metadata; +use proxmox_log::{info, warn}; use proxmox_sys::c_result; use 
 use proxmox_sys::error::SysError;
 use proxmox_sys::fs::{self, acl, xattr};
@@ -221,7 +222,7 @@ fn apply_xattrs(
         }

         if !xattr::is_valid_xattr_name(xattr.name()) {
-            log::info!("skipping invalid xattr named {:?}", xattr.name());
+            info!("skipping invalid xattr named {:?}", xattr.name());
             continue;
         }
@@ -282,7 +283,7 @@ fn apply_acls(
         acl.add_entry_full(acl::ACL_GROUP_OBJ, None, mode)?;

         if !metadata.acl.users.is_empty() || !metadata.acl.groups.is_empty() {
-            log::warn!(
+            warn!(
                 "Warning: {:?}: Missing GROUP_OBJ entry in ACL, resetting to value of MASK",
                 path_info,
             );
@@ -300,7 +301,7 @@ fn apply_acls(
     }

     if !acl.is_valid() {
-        log::warn!("Warning: {path_info:?} - ACL invalid, attempting restore anyway..");
+        warn!("Warning: {path_info:?} - ACL invalid, attempting restore anyway..");
     }

     acl.set_file(c_proc_path, acl::ACL_TYPE_ACCESS)?;
@@ -329,7 +330,7 @@ fn apply_acls(
         }

         if !acl.is_valid() {
-            log::warn!("Warning: {path_info:?} - ACL invalid, attempting restore anyway..");
+            warn!("Warning: {path_info:?} - ACL invalid, attempting restore anyway..");
         }

         acl.set_file(c_proc_path, acl::ACL_TYPE_DEFAULT)?;
diff --git a/pbs-client/src/pxar/tools.rs b/pbs-client/src/pxar/tools.rs
index 483ef19b8e36..d71d6e6ddf6a 100644
--- a/pbs-client/src/pxar/tools.rs
+++ b/pbs-client/src/pxar/tools.rs
@@ -21,6 +21,7 @@ use pbs_datastore::dynamic_index::{BufferedDynamicReader, LocalDynamicReadAt};
 use pbs_datastore::index::IndexFile;
 use pbs_datastore::BackupManifest;
 use pbs_tools::crypt_config::CryptConfig;
+use proxmox_log::{debug, info};

 use crate::{BackupReader, RemoteChunkReader};
@@ -307,11 +308,11 @@ pub fn handle_root_with_optional_format_version_prelude<R: pxar::decoder::SeqRea
     match first.kind() {
         pxar::EntryKind::Directory => {
             let version = pxar::format::FormatVersion::Version1;
-            log::debug!("pxar format version '{version:?}'");
+            debug!("pxar format version '{version:?}'");
             Ok((first, None))
         }
         pxar::EntryKind::Version(version) => {
-            log::debug!("pxar format version '{version:?}'");
+            debug!("pxar format version '{version:?}'");
             let second = decoder
                 .next()
                 .ok_or_else(|| format_err!("missing root entry"))??;
@@ -405,14 +406,14 @@ pub async fn pxar_metadata_catalog_dump_dir<T: Clone + Send + Sync + ReadAt>(
                 if let Ok(s) = proxmox_time::strftime_local("%FT%TZ", mtime) {
                     mtime_string = s;
                 }
-                log::info!("{etype} {entry_path:?} {size} {mtime_string}");
+                info!("{etype} {entry_path:?} {size} {mtime_string}");
             }
             DirEntryAttribute::Directory { .. } => {
-                log::info!("{etype} {entry_path:?}");
+                info!("{etype} {entry_path:?}");
                 let dir = entry.enter_directory().await?;
                 pxar_metadata_catalog_dump_dir(dir, path_prefix).await?;
             }
-            _ => log::info!("{etype} {entry_path:?}"),
+            _ => info!("{etype} {entry_path:?}"),
         }
     }
diff --git a/pbs-client/src/pxar_backup_stream.rs b/pbs-client/src/pxar_backup_stream.rs
index 4370da6cc2d0..2bfb5cf29e70 100644
--- a/pbs-client/src/pxar_backup_stream.rs
+++ b/pbs-client/src/pxar_backup_stream.rs
@@ -14,6 +14,7 @@ use nix::sys::stat::Mode;

 use proxmox_async::blocking::TokioWriterAdapter;
 use proxmox_io::StdChannelWriter;
+use proxmox_log::debug;

 use pbs_datastore::catalog::{BackupCatalogWriter, CatalogWriter};
@@ -88,7 +89,7 @@ impl PxarBackupStream {
                 ),
                 crate::pxar::Flags::DEFAULT,
                 move |path| {
-                    log::debug!("{:?}", path);
+                    debug!("{:?}", path);
                     Ok(())
                 },
                 options,
diff --git a/pbs-client/src/task_log.rs b/pbs-client/src/task_log.rs
index d299e20e9721..9bf274d71998 100644
--- a/pbs-client/src/task_log.rs
+++ b/pbs-client/src/task_log.rs
@@ -8,6 +8,7 @@ use futures::*;
 use serde_json::{json, Value};
 use tokio::signal::unix::{signal, SignalKind};

+use proxmox_log::info;
 use proxmox_router::cli::format_and_print_result;

 use pbs_api_types::percent_encoding::percent_encode_component;
@@ -32,10 +33,10 @@ pub async fn display_task_log(
     let abort_future = async move {
         while signal_stream.recv().await.is_some() {
-            log::info!("got shutdown request (SIGINT)");
+            info!("got shutdown request (SIGINT)");
             let prev_count = abort_count2.fetch_add(1, Ordering::SeqCst);
             if prev_count >= 1 {
-                log::info!("forced exit (task still running)");
+                info!("forced exit (task still running)");
                 break;
             }
         }
diff --git a/pbs-client/src/tools/key_source.rs b/pbs-client/src/tools/key_source.rs
index c039de26f73c..7968e0c2af91 100644
--- a/pbs-client/src/tools/key_source.rs
+++ b/pbs-client/src/tools/key_source.rs
@@ -5,6 +5,7 @@ use std::path::PathBuf;
 use anyhow::{bail, format_err, Error};
 use serde_json::Value;

+use proxmox_log::info;
 use proxmox_schema::*;
 use proxmox_sys::fs::file_get_contents;
 use proxmox_sys::linux::tty;
@@ -230,7 +231,7 @@ fn do_crypto_parameters(param: &Value, keep_keyfd_open: bool) -> Result<CryptoPa
         (None, master_pubkey) => match read_optional_default_encryption_key()? {
             None => bail!("--crypt-mode without --keyfile and no default key file available"),
             enc_key => {
-                log::info!("Encrypting with default encryption key!");
+                info!("Encrypting with default encryption key!");
                 let master_pubkey = match master_pubkey {
                     None => read_optional_default_master_pubkey()?,
                     master_pubkey => master_pubkey,
-- 
2.39.5


_______________________________________________
pbs-devel mailing list
pbs-devel@lists.proxmox.com
https://lists.proxmox.com/cgi-bin/mailman/listinfo/pbs-devel