From: Thomas Lamprecht
To: Proxmox Backup Server development discussion <pbs-devel@lists.proxmox.com>, Dominik Csapak
Date: Fri, 2 Apr 2021 14:32:41 +0200
Message-ID: <59ee51d7-4d1d-18be-ec41-ae299253b8e8@proxmox.com>
In-Reply-To: <20210401141123.12964-6-d.csapak@proxmox.com>
References: <20210401141123.12964-1-d.csapak@proxmox.com> <20210401141123.12964-6-d.csapak@proxmox.com>
Subject: Re: [pbs-devel] [PATCH proxmox-backup v2 5/5] server/rest: compress static files

On 01.04.21 16:11, Dominik Csapak wrote:
> compress them on the fly
>
> Signed-off-by: Dominik Csapak
> ---
>  src/server/rest.rs | 93 ++++++++++++++++++++++++++++++++--------------
>  1 file changed, 66 insertions(+), 27 deletions(-)
>
> diff --git a/src/server/rest.rs b/src/server/rest.rs
> index 357d1b81..6b1bb0cb 100644
> --- a/src/server/rest.rs
> +++ b/src/server/rest.rs
> @@ -40,6 +40,7 @@ use crate::auth_helpers::*;
>  use crate::config::cached_user_info::CachedUserInfo;
>  use crate::tools;
>  use crate::tools::compression::{CompressionMethod, DeflateEncoder, Level};
> +use crate::tools::AsyncReaderStream;
>  use crate::tools::FileLogger;
>
>  extern "C" {
> @@ -432,16 +433,18 @@ pub async fn handle_api_request
>      let resp = match compression {
>          Some(CompressionMethod::Deflate) => {
> -            resp.headers_mut()
> -                .insert(header::CONTENT_ENCODING, CompressionMethod::Deflate.content_encoding());
> -            resp.map(|body|
> +            resp.headers_mut().insert(
> +                header::CONTENT_ENCODING,
> +                CompressionMethod::Deflate.content_encoding(),
> +            );
> +            resp.map(|body| {
>                  Body::wrap_stream(DeflateEncoder::with_quality(
>                      body.map_err(|err| {
>                          proxmox::io_format_err!("error during compression: {}", err)
>                      }),
>                      Level::Fastest,
> -                )),
> -            )
> +                ))
> +            })
>          }
>          Some(_other) => {
>              // fixme: implement other compression algorithms
> @@ -546,9 +549,11 @@ fn extension_to_content_type(filename: &Path) -> (&'static str, bool) {
>      ("application/octet-stream", false)
>  }
>
> -async fn simple_static_file_download(filename: PathBuf) -> Result<Response<Body>, Error> {
> -    let (content_type, _nocomp) = extension_to_content_type(&filename);
> -
> +async fn simple_static_file_download(
> +    filename: PathBuf,
> +    content_type: &'static str,
> +    compression: Option<CompressionMethod>,
> +) -> Result<Response<Body>, Error> {
>      use tokio::io::AsyncReadExt;
>
>      let mut file = File::open(filename)
> @@ -556,46 +561,79 @@ async fn simple_static_file_download(filename: PathBuf) -> Result<Response<Body
>          .map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;
>
>      let mut data: Vec<u8> = Vec::new();
> -    file.read_to_end(&mut data)
> -        .await
> -        .map_err(|err| http_err!(BAD_REQUEST, "File read failed: {}", err))?;
>
> -    let mut response = Response::new(data.into());
> +    let mut response = match compression {
> +        Some(CompressionMethod::Deflate) => {
> +            let mut enc = DeflateEncoder::with_quality(data, Level::Fastest);
> +            enc.compress_vec(&mut file, 32 * 1024).await?;

as talked off-list, that value should really be in a `const CHUNK_RESPONSE_LIMIT` or the like.
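
Something like the following, just as a rough sketch of what I mean (untested;
the const name is the one suggested above, the u64 type and the cast are only
one way to make it fit both call sites):

    /// size threshold / read buffer size for static file responses (32 KiB)
    const CHUNK_RESPONSE_LIMIT: u64 = 32 * 1024;

    // in simple_static_file_download(), assuming compress_vec() takes a usize here:
    let mut enc = DeflateEncoder::with_quality(data, Level::Fastest);
    enc.compress_vec(&mut file, CHUNK_RESPONSE_LIMIT as usize).await?;
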
> +            let mut response = Response::new(enc.into_inner().into());
> +            response.headers_mut().insert(
> +                header::CONTENT_ENCODING,
> +                CompressionMethod::Deflate.content_encoding(),
> +            );
> +            response
> +        }
> +        Some(_) | None => {
> +            file.read_to_end(&mut data)
> +                .await
> +                .map_err(|err| http_err!(BAD_REQUEST, "File read failed: {}", err))?;
> +            Response::new(data.into())
> +        }
> +    };
> +
>      response.headers_mut().insert(
>          header::CONTENT_TYPE,
>          header::HeaderValue::from_static(content_type),
>      );
> +
>      Ok(response)
>  }
>
> -async fn chuncked_static_file_download(filename: PathBuf) -> Result<Response<Body>, Error> {
> -    let (content_type, _nocomp) = extension_to_content_type(&filename);
> +async fn chuncked_static_file_download(
> +    filename: PathBuf,
> +    content_type: &'static str,
> +    compression: Option<CompressionMethod>,
> +) -> Result<Response<Body>, Error> {
> +    let mut resp = Response::builder()
> +        .status(StatusCode::OK)
> +        .header(header::CONTENT_TYPE, content_type);
>
>      let file = File::open(filename)
>          .await
>          .map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;
>
> -    let payload = tokio_util::codec::FramedRead::new(file, tokio_util::codec::BytesCodec::new())
> -        .map_ok(|bytes| bytes.freeze());
> -    let body = Body::wrap_stream(payload);
> +    let body = match compression {
> +        Some(CompressionMethod::Deflate) => {
> +            resp = resp.header(
> +                header::CONTENT_ENCODING,
> +                CompressionMethod::Deflate.content_encoding(),
> +            );
> +            Body::wrap_stream(DeflateEncoder::with_quality(
> +                AsyncReaderStream::new(file),
> +                Level::Fastest,
> +            ))
> +        }
> +        Some(_) | None => Body::wrap_stream(AsyncReaderStream::new(file)),
> +    };
>
> -    // FIXME: set other headers ?
> -    Ok(Response::builder()
> -        .status(StatusCode::OK)
> -        .header(header::CONTENT_TYPE, content_type)
> -        .body(body)
> -        .unwrap())
> +    Ok(resp.body(body).unwrap())
>  }
>
> -async fn handle_static_file_download(filename: PathBuf) -> Result<Response<Body>, Error> {
> +async fn handle_static_file_download(
> +    filename: PathBuf,
> +    compression: Option<CompressionMethod>,
> +) -> Result<Response<Body>, Error> {
>      let metadata = tokio::fs::metadata(filename.clone())
>          .map_err(|err| http_err!(BAD_REQUEST, "File access problems: {}", err))
>          .await?;
>
> +    let (content_type, nocomp) = extension_to_content_type(&filename);
> +    let compression = if nocomp { None } else { compression };
> +
>      if metadata.len() < 1024 * 32 {

the const from above could then be used here too (rough sketch at the end of this mail).

> -        simple_static_file_download(filename).await
> +        simple_static_file_download(filename, content_type, compression).await
>      } else {
> -        chuncked_static_file_download(filename).await
> +        chuncked_static_file_download(filename, content_type, compression).await
>      }
>  }
>
> @@ -773,7 +811,8 @@ async fn handle_request(
>          }
>      } else {
>          let filename = api.find_alias(&components);
> -        return handle_static_file_download(filename).await;
> +        let compression = extract_compression_method(&parts.headers);
> +        return handle_static_file_download(filename, compression).await;
>      }
>  }
>
>
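
Regarding the "used here too" comment above, roughly like this (again untested,
using the u64 const from the earlier sketch so metadata.len() needs no cast):

    if metadata.len() < CHUNK_RESPONSE_LIMIT {
        simple_static_file_download(filename, content_type, compression).await
    } else {
        chuncked_static_file_download(filename, content_type, compression).await
    }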