From mboxrd@z Thu Jan 1 00:00:00 1970 Return-Path: Received: from firstgate.proxmox.com (firstgate.proxmox.com [212.224.123.68]) (using TLSv1.3 with cipher TLS_AES_256_GCM_SHA384 (256/256 bits) key-exchange X25519 server-signature RSA-PSS (2048 bits) server-digest SHA256) (No client certificate requested) by lists.proxmox.com (Postfix) with ESMTPS id A7CE39A157 for ; Fri, 5 May 2023 16:39:14 +0200 (CEST) Received: from firstgate.proxmox.com (localhost [127.0.0.1]) by firstgate.proxmox.com (Proxmox) with ESMTP id 81A6F290E4 for ; Fri, 5 May 2023 16:38:44 +0200 (CEST) Received: from proxmox-new.maurer-it.com (proxmox-new.maurer-it.com [94.136.29.106]) (using TLSv1.3 with cipher TLS_AES_256_GCM_SHA384 (256/256 bits) key-exchange X25519 server-signature RSA-PSS (2048 bits) server-digest SHA256) (No client certificate requested) by firstgate.proxmox.com (Proxmox) with ESMTPS for ; Fri, 5 May 2023 16:38:43 +0200 (CEST) Received: from proxmox-new.maurer-it.com (localhost.localdomain [127.0.0.1]) by proxmox-new.maurer-it.com (Proxmox) with ESMTP id 21BFC47633 for ; Fri, 5 May 2023 16:38:43 +0200 (CEST) From: Dominik Csapak To: pbs-devel@lists.proxmox.com Date: Fri, 5 May 2023 16:38:42 +0200 Message-Id: <20230505143842.1108765-1-d.csapak@proxmox.com> X-Mailer: git-send-email 2.30.2 MIME-Version: 1.0 Content-Transfer-Encoding: 8bit X-SPAM-LEVEL: Spam detection results: 0 AWL 0.015 Adjusted score from AWL reputation of From: address BAYES_00 -1.9 Bayes spam probability is 0 to 1% DMARC_MISSING 0.1 Missing DMARC policy KAM_DMARC_STATUS 0.01 Test Rule for DKIM or SPF Failure with Strict Alignment SPF_HELO_NONE 0.001 SPF: HELO does not publish an SPF Record SPF_PASS -0.001 SPF: sender matches SPF record T_SCC_BODY_TEXT_LINE -0.01 - Subject: [pbs-devel] [PATCH proxmox-backup] tape restore: show required tape list on single snapshot restore X-BeenThere: pbs-devel@lists.proxmox.com X-Mailman-Version: 2.1.29 Precedence: list List-Id: Proxmox Backup Server development discussion 
List-Unsubscribe: <https://lists.proxmox.com/cgi-bin/mailman/options/pbs-devel>,
 <mailto:pbs-devel-request@lists.proxmox.com?subject=unsubscribe>
List-Archive: <http://lists.proxmox.com/pipermail/pbs-devel/>
List-Post: <mailto:pbs-devel@lists.proxmox.com>
List-Help: <mailto:pbs-devel-request@lists.proxmox.com?subject=help>
List-Subscribe: <https://lists.proxmox.com/cgi-bin/mailman/listinfo/pbs-devel>,
 <mailto:pbs-devel-request@lists.proxmox.com?subject=subscribe>
X-List-Received-Date: Fri, 05 May 2023 14:39:14 -0000

like we do on a full restore, for that refactor the functionality
into 'log_required_tapes' to be reused across the workers

Signed-off-by: Dominik Csapak <d.csapak@proxmox.com>
---
 src/api2/tape/restore.rs | 32 ++++++++++++++++++++++++++------
 1 file changed, 26 insertions(+), 6 deletions(-)

diff --git a/src/api2/tape/restore.rs b/src/api2/tape/restore.rs
index 43d34473..7b4fee9c 100644
--- a/src/api2/tape/restore.rs
+++ b/src/api2/tape/restore.rs
@@ -495,12 +495,11 @@ fn restore_full_worker(
         .join(", ");
     task_log!(worker, "Datastore(s): {datastore_list}",);
     task_log!(worker, "Drive: {drive_name}");
-    let required_media = media_id_list
-        .iter()
-        .map(|media_id| media_id.label.label_text.as_str())
-        .collect::<Vec<&str>>()
-        .join(";");
-    task_log!(worker, "Required media list: {required_media}",);
+    log_required_tapes(
+        &worker,
+        &inventory,
+        media_id_list.iter().map(|id| &id.label.uuid),
+    );
 
     let mut datastore_locks = Vec::new();
     for (target, _) in used_datastores.values() {
@@ -607,6 +606,25 @@ fn check_snapshot_restorable(
     Ok(can_restore_some)
 }
 
+fn log_required_tapes<'a>(
+    worker: &WorkerTask,
+    inventory: &Inventory,
+    list: impl Iterator<Item = &'a Uuid>,
+) {
+    let mut tape_list = list
+        .map(|uuid| {
+            inventory
+                .lookup_media(uuid)
+                .unwrap()
+                .label
+                .label_text
+                .as_str()
+        })
+        .collect::<Vec<&str>>();
+    tape_list.sort_unstable();
+    task_log!(worker, "Required media list: {}", tape_list.join(";"));
+}
+
 #[allow(clippy::too_many_arguments)]
 fn restore_list_worker(
     worker: Arc<WorkerTask>,
@@ -751,6 +769,7 @@ fn restore_list_worker(
     }
 
     task_log!(worker, "Phase 1: temporarily restore snapshots to temp dir");
+    log_required_tapes(&worker, &inventory, snapshot_file_hash.keys());
     let mut datastore_chunk_map: HashMap<String, HashSet<[u8; 32]>> = HashMap::new();
     let mut tmp_paths = Vec::new();
     for (media_uuid, file_list) in snapshot_file_hash.iter_mut() {
@@ -804,6 +823,7 @@ fn restore_list_worker(
     if
!media_file_chunk_map.is_empty() {
         task_log!(worker, "Phase 2: restore chunks to datastores");
+        log_required_tapes(&worker, &inventory, media_file_chunk_map.keys());
     } else {
         task_log!(worker, "All chunks are already present, skip phase 2...");
     }
-- 
2.30.2