From 151b92a0cfa454907fa5684e583ac971dd10a419 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Sun, 28 Jan 2024 19:25:30 -0500 Subject: [PATCH] Decode sdists async --- Cargo.toml | 2 + crates/puffin-build/src/lib.rs | 10 +- .../src/distribution_database.rs | 5 +- crates/puffin-distribution/src/source/mod.rs | 81 ++---- crates/puffin-distribution/src/unzip.rs | 6 +- crates/puffin-extract/Cargo.toml | 4 +- crates/puffin-extract/src/error.rs | 19 ++ crates/puffin-extract/src/lib.rs | 247 +----------------- crates/puffin-extract/src/stream.rs | 133 ++++++++++ crates/puffin-extract/src/sync.rs | 122 +++++++++ .../src/vendor/cloneable_seekable_reader.rs | 2 +- crates/puffin-extract/src/vendor/mod.rs | 2 +- crates/puffin/tests/pip_compile.rs | 10 +- requirements.in | 4 +- 14 files changed, 322 insertions(+), 325 deletions(-) create mode 100644 crates/puffin-extract/src/error.rs create mode 100644 crates/puffin-extract/src/stream.rs create mode 100644 crates/puffin-extract/src/sync.rs diff --git a/Cargo.toml b/Cargo.toml index ec6573679471..98fb25a72ccd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,6 +19,8 @@ license = "MIT OR Apache-2.0" [workspace.dependencies] anstream = { version = "0.6.5" } anyhow = { version = "1.0.79" } +async-compression = { version = "0.4.6" } +async-tar = { version = "0.4.2" } async_http_range_reader = { git = "https://github.com/baszalmstra/async_http_range_reader", rev = "8dab2c08ac864fec1df014465264f9a7c8eae905" } async_zip = { git = "https://github.com/charliermarsh/rs-async-zip", rev = "d76801da0943de985254fc6255c0e476b57c5836", features = ["deflate"] } base64 = { version = "0.21.7" } diff --git a/crates/puffin-build/src/lib.rs b/crates/puffin-build/src/lib.rs index 5aea85f9d479..9270ddbdd501 100644 --- a/crates/puffin-build/src/lib.rs +++ b/crates/puffin-build/src/lib.rs @@ -27,7 +27,6 @@ use tracing::{debug, info_span, instrument, Instrument}; use distribution_types::Resolution; use pep508_rs::Requirement; -use puffin_extract::extract_source; use puffin_interpreter::{Interpreter, Virtualenv}; use puffin_traits::{BuildContext, BuildKind, SetupPyStrategy, SourceBuildTrait}; @@ -297,8 +296,15 @@ impl SourceBuild { source.to_path_buf() } else { debug!("Unpacking for build: {}", source.display()); + let extracted = temp_dir.path().join("extracted"); - extract_source(source, &extracted) + + // Unzip the archive into the temporary directory. + puffin_extract::archive(source, &extracted) + .map_err(|err| Error::Extraction(extracted.clone(), err))?; + + // Extract the top-level directory from the archive. + puffin_extract::strip_component(&extracted) .map_err(|err| Error::Extraction(extracted.clone(), err))? }; let source_tree = if let Some(subdir) = subdirectory { diff --git a/crates/puffin-distribution/src/distribution_database.rs b/crates/puffin-distribution/src/distribution_database.rs index 3e1e0169e5d4..b9f0e75d91bf 100644 --- a/crates/puffin-distribution/src/distribution_database.rs +++ b/crates/puffin-distribution/src/distribution_database.rs @@ -14,7 +14,6 @@ use distribution_types::{ use platform_tags::Tags; use puffin_cache::{Cache, CacheBucket, Timestamp, WheelCache}; use puffin_client::{CacheControl, CachedClientError, RegistryClient}; -use puffin_extract::unzip_no_seek; use puffin_fs::metadata_if_exists; use puffin_git::GitSource; use puffin_traits::{BuildContext, NoBinary}; @@ -157,7 +156,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context> // Download and unzip the wheel to a temporary directory. 
         let temp_dir = tempfile::tempdir_in(self.cache.root()).map_err(Error::CacheWrite)?;
-        unzip_no_seek(reader.compat(), temp_dir.path()).await?;
+        puffin_extract::stream::unzip(reader.compat(), temp_dir.path()).await?;
 
         // Persist the temporary directory to the directory store.
         let archive = self
@@ -215,7 +214,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
         // Download and unzip the wheel to a temporary directory.
         let temp_dir = tempfile::tempdir_in(self.cache.root()).map_err(Error::CacheWrite)?;
-        unzip_no_seek(reader.compat(), temp_dir.path()).await?;
+        puffin_extract::stream::unzip(reader.compat(), temp_dir.path()).await?;
 
         // Persist the temporary directory to the directory store.
         let archive = self
diff --git a/crates/puffin-distribution/src/source/mod.rs b/crates/puffin-distribution/src/source/mod.rs
index 8baf2b68d729..f7a661bfaafb 100644
--- a/crates/puffin-distribution/src/source/mod.rs
+++ b/crates/puffin-distribution/src/source/mod.rs
@@ -8,7 +8,6 @@ use anyhow::Result;
 use fs_err::tokio as fs;
 use futures::{FutureExt, TryStreamExt};
 use reqwest::Response;
-use tempfile::TempDir;
 use tokio_util::compat::FuturesAsyncReadCompatExt;
 use tracing::{debug, info_span, instrument, Instrument};
 use url::Url;
@@ -750,78 +749,30 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
             return Ok(cache_path);
         }
 
-        // Download the source distribution to a temporary file. If it's a zip archive, we can unzip
-        // it directly into the cache.
-        if filename.ends_with(".zip") {
-            // Unzip the source distribution to a temporary directory.
-            let span = info_span!("download_unzip_source_dist", filename = filename, source_dist = %source_dist);
-            let temp_dir = tempfile::tempdir_in(self.build_context.cache().root())
-                .map_err(Error::CacheWrite)?;
-            let reader = response
-                .bytes_stream()
-                .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
-                .into_async_read();
-            puffin_extract::unzip_no_seek(reader.compat(), temp_dir.path()).await?;
-            drop(span);
-
-            // Persist the unzipped distribution to the cache.
-            fs_err::tokio::create_dir_all(cache_path.parent().expect("Cache entry to have parent"))
-                .await
-                .map_err(Error::CacheWrite)?;
-            fs_err::tokio::rename(temp_dir.path(), &cache_path)
-                .await
-                .map_err(Error::CacheWrite)?;
-        } else {
-            // Unzip the source distribution to a temporary directory.
-            let span = info_span!("download_unzip_source_dist", filename = filename, source_dist = %source_dist);
-            let temp_dir = tempfile::tempdir_in(self.build_context.cache().root())
-                .map_err(Error::CacheWrite)?;
-            let reader = response
-                .bytes_stream()
-                .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
-                .into_async_read();
-            let extracted = puffin_extract::untar_no_seek(reader, temp_dir.path()).await?;
-            drop(span);
-
-            // Persist the unzipped distribution to the cache.
-            fs_err::tokio::create_dir_all(cache_path.parent().expect("Cache entry to have parent"))
-                .await
-                .map_err(Error::CacheWrite)?;
-            fs_err::tokio::rename(extracted, &cache_path)
-                .await
-                .map_err(Error::CacheWrite)?;
-        }
-
-        Ok(cache_path)
-    }
-
-    /// Download a source distribution from a URL to a temporary file.
-    async fn download_source_dist_url(
-        &self,
-        response: Response,
-        source_dist_filename: &str,
-    ) -> Result<TempDir, puffin_client::Error> {
+        // Download and unzip the source distribution into a temporary directory.
+        let span =
+            info_span!("download_source_dist", filename = filename, source_dist = %source_dist);
+        let temp_dir =
+            tempfile::tempdir_in(self.build_context.cache().root()).map_err(Error::CacheWrite)?;
         let reader = response
             .bytes_stream()
             .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
             .into_async_read();
-        let mut reader = tokio::io::BufReader::new(reader.compat());
+        puffin_extract::stream::archive(reader.compat(), filename, temp_dir.path()).await?;
+        drop(span);
 
-        // Create a temporary directory.
-        let temp_dir = tempfile::tempdir_in(self.build_context.cache().root())
-            .map_err(puffin_client::ErrorKind::CacheWrite)?;
+        // Extract the top-level directory.
+        let extracted = puffin_extract::strip_component(temp_dir.path())?;
 
-        // Download the source distribution to a temporary file.
-        let mut writer = tokio::io::BufWriter::new(
-            fs_err::tokio::File::create(temp_dir.path().join(source_dist_filename))
-                .await
-                .map_err(puffin_client::ErrorKind::CacheWrite)?,
-        );
-        tokio::io::copy(&mut reader, &mut writer)
+        // Persist it to the cache.
+        fs_err::tokio::create_dir_all(cache_path.parent().expect("Cache entry to have parent"))
+            .await
+            .map_err(Error::CacheWrite)?;
+        fs_err::tokio::rename(extracted, &cache_path)
             .await
-            .map_err(puffin_client::ErrorKind::CacheWrite)?;
+            .map_err(Error::CacheWrite)?;
 
-        Ok(temp_dir)
+        Ok(cache_path)
     }
 
     /// Download a source distribution from a Git repository.
diff --git a/crates/puffin-distribution/src/unzip.rs b/crates/puffin-distribution/src/unzip.rs
index 37a8f6843fbd..2cf933988ada 100644
--- a/crates/puffin-distribution/src/unzip.rs
+++ b/crates/puffin-distribution/src/unzip.rs
@@ -1,6 +1,6 @@
 use std::path::Path;
 
-use puffin_extract::{unzip_archive, Error};
+use puffin_extract::Error;
 
 use crate::download::BuiltWheel;
 use crate::{DiskWheel, LocalWheel};
@@ -12,13 +12,13 @@ pub trait Unzip {
 
 impl Unzip for DiskWheel {
     fn unzip(&self, target: &Path) -> Result<(), Error> {
-        unzip_archive(fs_err::File::open(&self.path)?, target)
+        puffin_extract::unzip(fs_err::File::open(&self.path)?, target)
     }
 }
 
 impl Unzip for BuiltWheel {
     fn unzip(&self, target: &Path) -> Result<(), Error> {
-        unzip_archive(fs_err::File::open(&self.path)?, target)
+        puffin_extract::unzip(fs_err::File::open(&self.path)?, target)
     }
 }
diff --git a/crates/puffin-extract/Cargo.toml b/crates/puffin-extract/Cargo.toml
index 4ad46713d814..3ad7c39e15da 100644
--- a/crates/puffin-extract/Cargo.toml
+++ b/crates/puffin-extract/Cargo.toml
@@ -13,8 +13,8 @@ license = { workspace = true }
 workspace = true
 
 [dependencies]
-async-compression = { version = "0.4.6", features = ["gzip"] }
-async-tar = "0.4.2"
+async-compression = { workspace = true, features = ["gzip"] }
+async-tar = { workspace = true }
 async_zip = { workspace = true, features = ["tokio"] }
 flate2 = { workspace = true }
 fs-err = { workspace = true, features = ["tokio"] }
diff --git a/crates/puffin-extract/src/error.rs b/crates/puffin-extract/src/error.rs
new file mode 100644
index 000000000000..c1d2ce641822
--- /dev/null
+++ b/crates/puffin-extract/src/error.rs
@@ -0,0 +1,19 @@
+use std::path::PathBuf;
+
+use zip::result::ZipError;
+
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+    #[error(transparent)]
+    Zip(#[from] ZipError),
+    #[error(transparent)]
+    AsyncZip(#[from] async_zip::error::ZipError),
+    #[error(transparent)]
+    Io(#[from] std::io::Error),
+    #[error("Unsupported archive type: {0}")]
+    UnsupportedArchive(PathBuf),
+    #[error(
+        "The top level of the archive must only contain a single directory, but it contains: {0:?}"
+    )]
+    InvalidArchive(Vec<fs_err::DirEntry>),
+}
diff --git a/crates/puffin-extract/src/lib.rs b/crates/puffin-extract/src/lib.rs
index 9e6e54dc67b6..d9b25aeb8338 100644
--- a/crates/puffin-extract/src/lib.rs
+++ b/crates/puffin-extract/src/lib.rs
@@ -1,244 +1,7 @@
-use std::fs::OpenOptions;
-use std::path::{Path, PathBuf};
-use std::sync::Mutex;
-
-use rayon::prelude::*;
-use rustc_hash::FxHashSet;
-use tokio_util::compat::{FuturesAsyncReadCompatExt, TokioAsyncReadCompatExt};
-use zip::result::ZipError;
-use zip::ZipArchive;
-
-pub use crate::vendor::{CloneableSeekableReader, HasLength};
+pub use error::Error;
+pub use sync::*;
 
+mod error;
+pub mod stream;
+mod sync;
 mod vendor;
-
-#[derive(Debug, thiserror::Error)]
-pub enum Error {
-    #[error(transparent)]
-    Zip(#[from] ZipError),
-    #[error(transparent)]
-    AsyncZip(#[from] async_zip::error::ZipError),
-    #[error(transparent)]
-    Io(#[from] std::io::Error),
-    #[error("Unsupported archive type: {0}")]
-    UnsupportedArchive(PathBuf),
-    #[error(
-        "The top level of the archive must only contain a list directory, but it contains: {0:?}"
-    )]
-    InvalidArchive(Vec<fs_err::DirEntry>),
-}
-
-/// Unzip a `.zip` archive into the target directory without requiring Seek.
-///
-/// This is useful for unzipping files as they're being downloaded. If the archive
-/// is already fully on disk, consider using `unzip_archive`, which can use multiple
-/// threads to work faster in that case.
-pub async fn unzip_no_seek<R: tokio::io::AsyncRead + Unpin>(
-    reader: R,
-    target: &Path,
-) -> Result<(), Error> {
-    let mut reader = reader.compat();
-    let mut zip = async_zip::base::read::stream::ZipFileReader::new(&mut reader);
-
-    let mut directories = FxHashSet::default();
-
-    while let Some(mut entry) = zip.next_with_entry().await? {
-        // Construct the (expected) path to the file on-disk.
-        let path = entry.reader().entry().filename().as_str()?;
-        let path = target.join(path);
-        let is_dir = entry.reader().entry().dir()?;
-
-        // Either create the directory or write the file to disk.
-        if is_dir {
-            if directories.insert(path.clone()) {
-                fs_err::tokio::create_dir_all(path).await?;
-            }
-        } else {
-            if let Some(parent) = path.parent() {
-                if directories.insert(parent.to_path_buf()) {
-                    fs_err::tokio::create_dir_all(parent).await?;
-                }
-            }
-
-            let file = fs_err::tokio::File::create(path).await?;
-            let mut writer =
-                if let Ok(size) = usize::try_from(entry.reader().entry().uncompressed_size()) {
-                    tokio::io::BufWriter::with_capacity(size, file)
-                } else {
-                    tokio::io::BufWriter::new(file)
-                };
-            let mut reader = entry.reader_mut().compat();
-            tokio::io::copy(&mut reader, &mut writer).await?;
-        }
-
-        // Close current file to get access to the next one. See docs:
-        // https://docs.rs/async_zip/0.0.16/async_zip/base/read/stream/
-        zip = entry.skip().await?;
-    }
-
-    // On Unix, we need to set file permissions, which are stored in the central directory, at the
-    // end of the archive. The `ZipFileReader` reads until it sees a central directory signature,
-    // which indicates the first entry in the central directory. So we continue reading from there.
-    #[cfg(unix)]
-    {
-        use std::fs::Permissions;
-        use std::os::unix::fs::PermissionsExt;
-
-        // To avoid lots of small reads to `reader` when parsing the central directory, wrap it in
-        // a buffer.
-        let mut buf = futures::io::BufReader::new(reader);
-        let mut directory = async_zip::base::read::cd::CentralDirectoryReader::new(&mut buf);
-        while let Some(entry) = directory.next().await? {
-            if entry.dir()? {
-                continue;
-            }
-
-            // Construct the (expected) path to the file on-disk.
-            let path = entry.filename().as_str()?;
-            let path = target.join(path);
-
-            if let Some(mode) = entry.unix_permissions() {
-                fs_err::set_permissions(&path, Permissions::from_mode(mode))?;
-            }
-        }
-    }
-
-    Ok(())
-}
-
-/// Unzip a `.zip` archive into the target directory.
-pub fn unzip_archive<R: Send + std::io::Read + std::io::Seek + HasLength>(
-    reader: R,
-    target: &Path,
-) -> Result<(), Error> {
-    // Unzip in parallel.
-    let archive = ZipArchive::new(CloneableSeekableReader::new(reader))?;
-    let directories = Mutex::new(FxHashSet::default());
-    (0..archive.len())
-        .par_bridge()
-        .map(|file_number| {
-            let mut archive = archive.clone();
-            let mut file = archive.by_index(file_number)?;
-
-            // Determine the path of the file within the wheel.
-            let Some(enclosed_name) = file.enclosed_name() else {
-                return Ok(());
-            };
-
-            // Create necessary parent directories.
-            let path = target.join(enclosed_name);
-            if file.is_dir() {
-                let mut directories = directories.lock().unwrap();
-                if directories.insert(path.clone()) {
-                    fs_err::create_dir_all(path)?;
-                }
-                return Ok(());
-            }
-
-            if let Some(parent) = path.parent() {
-                let mut directories = directories.lock().unwrap();
-                if directories.insert(parent.to_path_buf()) {
-                    fs_err::create_dir_all(parent)?;
-                }
-            }
-
-            // Create the file, with the correct permissions (on Unix).
-            let mut options = OpenOptions::new();
-            options.write(true);
-            options.create_new(true);
-
-            #[cfg(unix)]
-            {
-                use std::os::unix::fs::OpenOptionsExt;
-
-                if let Some(mode) = file.unix_mode() {
-                    options.mode(mode);
-                }
-            }
-
-            // Copy the file contents.
-            let mut outfile = options.open(&path)?;
-            std::io::copy(&mut file, &mut outfile)?;
-
-            Ok(())
-        })
-        .collect::<Result<_, Error>>()
-}
-
-/// Extract a `.zip` or `.tar.gz` archive into the target directory.
-pub fn extract_archive(source: impl AsRef<Path>, target: impl AsRef<Path>) -> Result<(), Error> {
-    // .zip
-    if source
-        .as_ref()
-        .extension()
-        .is_some_and(|ext| ext.eq_ignore_ascii_case("zip"))
-    {
-        unzip_archive(fs_err::File::open(source.as_ref())?, target.as_ref())?;
-        return Ok(());
-    }
-
-    // .tar.gz
-    if source
-        .as_ref()
-        .extension()
-        .is_some_and(|ext| ext.eq_ignore_ascii_case("gz"))
-    {
-        if source.as_ref().file_stem().is_some_and(|stem| {
-            Path::new(stem)
-                .extension()
-                .is_some_and(|ext| ext.eq_ignore_ascii_case("tar"))
-        }) {
-            let mut archive = tar::Archive::new(flate2::read::GzDecoder::new(fs_err::File::open(
-                source.as_ref(),
-            )?));
-            // https://github.com/alexcrichton/tar-rs/issues/349
-            archive.set_preserve_mtime(false);
-            archive.unpack(target)?;
-            return Ok(());
-        }
-    }
-
-    Err(Error::UnsupportedArchive(source.as_ref().to_path_buf()))
-}
-
-/// Extract a source distribution into the target directory.
-///
-/// Returns the path to the top-level directory of the source distribution.
-pub fn extract_source(
-    source: impl AsRef<Path>,
-    target: impl AsRef<Path>,
-) -> Result<PathBuf, Error> {
-    extract_archive(&source, &target)?;
-
-    // > A .tar.gz source distribution (sdist) contains a single top-level directory called
-    // > `{name}-{version}` (e.g. foo-1.0), containing the source files of the package.
-    // TODO(konstin): Verify the name of the directory.
-    let top_level =
-        fs_err::read_dir(target.as_ref())?.collect::<std::io::Result<Vec<fs_err::DirEntry>>>()?;
-    let [root] = top_level.as_slice() else {
-        return Err(Error::InvalidArchive(top_level));
-    };
-
-    Ok(root.path())
-}
-
-
-pub async fn untar_no_seek<R: futures::io::AsyncBufRead + Unpin>(
-    reader: R,
-    target: &Path,
-) -> Result<PathBuf, Error> {
-    let decompressed_bytes = async_compression::futures::bufread::GzipDecoder::new(reader);
-    let archive = async_tar::Archive::new(decompressed_bytes);
-    archive.unpack(target).await?;
-
-    // > A .tar.gz source distribution (sdist) contains a single top-level directory called
-    // > `{name}-{version}` (e.g. foo-1.0), containing the source files of the package.
-    // TODO(konstin): Verify the name of the directory.
-    let top_level =
-        fs_err::read_dir(target)?.collect::<std::io::Result<Vec<fs_err::DirEntry>>>()?;
-    let [root] = top_level.as_slice() else {
-        return Err(Error::InvalidArchive(top_level));
-    };
-
-    Ok(root.path())
-}
\ No newline at end of file
diff --git a/crates/puffin-extract/src/stream.rs b/crates/puffin-extract/src/stream.rs
new file mode 100644
index 000000000000..2a2c813ff9b2
--- /dev/null
+++ b/crates/puffin-extract/src/stream.rs
@@ -0,0 +1,133 @@
+use std::path::Path;
+
+use rustc_hash::FxHashSet;
+use tokio_util::compat::{FuturesAsyncReadCompatExt, TokioAsyncReadCompatExt};
+
+use crate::Error;
+
+/// Unzip a `.zip` archive into the target directory, without requiring `Seek`.
+///
+/// This is useful for unzipping files as they're being downloaded. If the archive
+/// is already fully on disk, consider using `crate::unzip`, which can use multiple
+/// threads to work faster in that case.
+pub async fn unzip<R: tokio::io::AsyncRead + Unpin>(
+    reader: R,
+    target: impl AsRef<Path>,
+) -> Result<(), Error> {
+    let mut reader = reader.compat();
+    let mut zip = async_zip::base::read::stream::ZipFileReader::new(&mut reader);
+
+    let mut directories = FxHashSet::default();
+
+    while let Some(mut entry) = zip.next_with_entry().await? {
+        // Construct the (expected) path to the file on-disk.
+        let path = entry.reader().entry().filename().as_str()?;
+        let path = target.as_ref().join(path);
+        let is_dir = entry.reader().entry().dir()?;
+
+        // Either create the directory or write the file to disk.
+        if is_dir {
+            if directories.insert(path.clone()) {
+                fs_err::tokio::create_dir_all(path).await?;
+            }
+        } else {
+            if let Some(parent) = path.parent() {
+                if directories.insert(parent.to_path_buf()) {
+                    fs_err::tokio::create_dir_all(parent).await?;
+                }
+            }
+
+            let file = fs_err::tokio::File::create(path).await?;
+            let mut writer =
+                if let Ok(size) = usize::try_from(entry.reader().entry().uncompressed_size()) {
+                    tokio::io::BufWriter::with_capacity(size, file)
+                } else {
+                    tokio::io::BufWriter::new(file)
+                };
+            let mut reader = entry.reader_mut().compat();
+            tokio::io::copy(&mut reader, &mut writer).await?;
+        }
+
+        // Close current file to get access to the next one. See docs:
+        // https://docs.rs/async_zip/0.0.16/async_zip/base/read/stream/
+        zip = entry.skip().await?;
+    }
+
+    // On Unix, we need to set file permissions, which are stored in the central directory, at the
+    // end of the archive. The `ZipFileReader` reads until it sees a central directory signature,
+    // which indicates the first entry in the central directory. So we continue reading from there.
+    #[cfg(unix)]
+    {
+        use std::fs::Permissions;
+        use std::os::unix::fs::PermissionsExt;
+
+        // To avoid lots of small reads to `reader` when parsing the central directory, wrap it in
+        // a buffer.
+        let mut buf = futures::io::BufReader::new(reader);
+        let mut directory = async_zip::base::read::cd::CentralDirectoryReader::new(&mut buf);
+        while let Some(entry) = directory.next().await? {
+            if entry.dir()? {
+                continue;
+            }
+
+            // Construct the (expected) path to the file on-disk.
+            let path = entry.filename().as_str()?;
+            let path = target.as_ref().join(path);
+
+            if let Some(mode) = entry.unix_permissions() {
+                fs_err::set_permissions(&path, Permissions::from_mode(mode))?;
+            }
+        }
+    }
+
+    Ok(())
+}
+
+/// Extract a `.tar.gz` archive into the target directory, without requiring `Seek`.
+///
+/// This is useful for unpacking files as they're being downloaded.
+pub async fn untar<R: tokio::io::AsyncBufRead + Unpin>(
+    reader: R,
+    target: impl AsRef<Path>,
+) -> Result<(), Error> {
+    let decompressed_bytes = async_compression::futures::bufread::GzipDecoder::new(reader.compat());
+    let archive = async_tar::ArchiveBuilder::new(decompressed_bytes)
+        .set_preserve_permissions(false)
+        .build();
+    Ok(archive.unpack(target.as_ref()).await?)
+}
+
+/// Extract a `.zip` or `.tar.gz` archive into the target directory, without requiring `Seek`.
+pub async fn archive<R: tokio::io::AsyncBufRead + Unpin>(
+    reader: R,
+    source: impl AsRef<Path>,
+    target: impl AsRef<Path>,
+) -> Result<(), Error> {
+    // `.zip`
+    if source
+        .as_ref()
+        .extension()
+        .is_some_and(|ext| ext.eq_ignore_ascii_case("zip"))
+    {
+        unzip(reader, target).await?;
+        return Ok(());
+    }
+
+    // `.tar.gz`
+    if source
+        .as_ref()
+        .extension()
+        .is_some_and(|ext| ext.eq_ignore_ascii_case("gz"))
+    {
+        if source.as_ref().file_stem().is_some_and(|stem| {
+            Path::new(stem)
+                .extension()
+                .is_some_and(|ext| ext.eq_ignore_ascii_case("tar"))
+        }) {
+            untar(reader, target).await?;
+            return Ok(());
+        }
+    }
+
+    Err(Error::UnsupportedArchive(source.as_ref().to_path_buf()))
+}
diff --git a/crates/puffin-extract/src/sync.rs b/crates/puffin-extract/src/sync.rs
new file mode 100644
index 000000000000..6c8a15a995e0
--- /dev/null
+++ b/crates/puffin-extract/src/sync.rs
@@ -0,0 +1,122 @@
+use std::fs::OpenOptions;
+use std::path::{Path, PathBuf};
+use std::sync::Mutex;
+
+use rayon::prelude::*;
+use rustc_hash::FxHashSet;
+use zip::ZipArchive;
+
+use crate::vendor::{CloneableSeekableReader, HasLength};
+use crate::Error;
+
+/// Unzip a `.zip` archive into the target directory.
+pub fn unzip<R: Send + std::io::Read + std::io::Seek + HasLength>(
+    reader: R,
+    target: &Path,
+) -> Result<(), Error> {
+    // Unzip in parallel.
+    let archive = ZipArchive::new(CloneableSeekableReader::new(reader))?;
+    let directories = Mutex::new(FxHashSet::default());
+    (0..archive.len())
+        .par_bridge()
+        .map(|file_number| {
+            let mut archive = archive.clone();
+            let mut file = archive.by_index(file_number)?;
+
+            // Determine the path of the file within the wheel.
+            let Some(enclosed_name) = file.enclosed_name() else {
+                return Ok(());
+            };
+
+            // Create necessary parent directories.
+            let path = target.join(enclosed_name);
+            if file.is_dir() {
+                let mut directories = directories.lock().unwrap();
+                if directories.insert(path.clone()) {
+                    fs_err::create_dir_all(path)?;
+                }
+                return Ok(());
+            }
+
+            if let Some(parent) = path.parent() {
+                let mut directories = directories.lock().unwrap();
+                if directories.insert(parent.to_path_buf()) {
+                    fs_err::create_dir_all(parent)?;
+                }
+            }
+
+            // Create the file, with the correct permissions (on Unix).
+            let mut options = OpenOptions::new();
+            options.write(true);
+            options.create_new(true);
+
+            #[cfg(unix)]
+            {
+                use std::os::unix::fs::OpenOptionsExt;
+
+                if let Some(mode) = file.unix_mode() {
+                    options.mode(mode);
+                }
+            }
+
+            // Copy the file contents.
+            let mut outfile = options.open(&path)?;
+            std::io::copy(&mut file, &mut outfile)?;
+
+            Ok(())
+        })
+        .collect::<Result<_, Error>>()
+}
+
+/// Extract a `.zip` or `.tar.gz` archive into the target directory.
+pub fn archive(source: impl AsRef<Path>, target: impl AsRef<Path>) -> Result<(), Error> {
+    // `.zip`
+    if source
+        .as_ref()
+        .extension()
+        .is_some_and(|ext| ext.eq_ignore_ascii_case("zip"))
+    {
+        unzip(fs_err::File::open(source.as_ref())?, target.as_ref())?;
+        return Ok(());
+    }
+
+    // `.tar.gz`
+    if source
+        .as_ref()
+        .extension()
+        .is_some_and(|ext| ext.eq_ignore_ascii_case("gz"))
+    {
+        if source.as_ref().file_stem().is_some_and(|stem| {
+            Path::new(stem)
+                .extension()
+                .is_some_and(|ext| ext.eq_ignore_ascii_case("tar"))
+        }) {
+            let mut archive = tar::Archive::new(flate2::read::GzDecoder::new(fs_err::File::open(
+                source.as_ref(),
+            )?));
+            // https://github.com/alexcrichton/tar-rs/issues/349
+            archive.set_preserve_mtime(false);
+            archive.unpack(target)?;
+            return Ok(());
+        }
+    }
+
+    Err(Error::UnsupportedArchive(source.as_ref().to_path_buf()))
+}
+
+/// Extract the top-level directory from an unpacked archive.
+///
+/// The specification says:
+/// > A .tar.gz source distribution (sdist) contains a single top-level directory called
+/// > `{name}-{version}` (e.g. foo-1.0), containing the source files of the package.
+///
+/// This function returns the path to that top-level directory.
+pub fn strip_component(source: impl AsRef<Path>) -> Result<PathBuf, Error> {
+    // TODO(konstin): Verify the name of the directory.
+    let top_level =
+        fs_err::read_dir(source.as_ref())?.collect::<std::io::Result<Vec<fs_err::DirEntry>>>()?;
+    let [root] = top_level.as_slice() else {
+        return Err(Error::InvalidArchive(top_level));
+    };
+    Ok(root.path())
+}
diff --git a/crates/puffin-extract/src/vendor/cloneable_seekable_reader.rs b/crates/puffin-extract/src/vendor/cloneable_seekable_reader.rs
index 9048ac21eb83..b2c58e07f17d 100644
--- a/crates/puffin-extract/src/vendor/cloneable_seekable_reader.rs
+++ b/crates/puffin-extract/src/vendor/cloneable_seekable_reader.rs
@@ -26,7 +26,7 @@ pub trait HasLength {
 /// A [`Read`] which refers to its underlying stream by reference count,
 /// and thus can be cloned cheaply. It supports seeking; each cloned instance
 /// maintains its own pointer into the file, and the underlying instance
 /// is seeked prior to each read.
-pub struct CloneableSeekableReader<R: HasLength> {
+pub(crate) struct CloneableSeekableReader<R: HasLength> {
     file: Arc<Mutex<R>>,
     pos: u64,
     // TODO determine and store this once instead of per cloneable file
diff --git a/crates/puffin-extract/src/vendor/mod.rs b/crates/puffin-extract/src/vendor/mod.rs
index 22f8fc671bdb..3148e2edd99d 100644
--- a/crates/puffin-extract/src/vendor/mod.rs
+++ b/crates/puffin-extract/src/vendor/mod.rs
@@ -1,3 +1,3 @@
-pub use cloneable_seekable_reader::{CloneableSeekableReader, HasLength};
+pub(crate) use cloneable_seekable_reader::{CloneableSeekableReader, HasLength};
 
 mod cloneable_seekable_reader;
diff --git a/crates/puffin/tests/pip_compile.rs b/crates/puffin/tests/pip_compile.rs
index cb032c0d66ec..75bbd2f74e6c 100644
--- a/crates/puffin/tests/pip_compile.rs
+++ b/crates/puffin/tests/pip_compile.rs
@@ -1617,7 +1617,7 @@ fn disallowed_transitive_url_dependency() -> Result<()> {
     let venv = create_venv_py312(&temp_dir, &cache_dir);
 
     let requirements_in = temp_dir.child("requirements.in");
-    requirements_in.write_str("transitive_url_dependency @ https://github.com/astral-sh/ruff/files/13257454/transitive_url_dependency.zip")?;
+    requirements_in.write_str("transitive_url_dependency @ https://github.com/astral-sh/ruff/files/14078476/transitive_url_dependency.zip")?;
 
     insta::with_settings!({
         filters => INSTA_FILTERS.to_vec()
@@ -1654,7 +1654,7 @@ fn allowed_transitive_url_dependency() -> Result<()> {
     let venv = create_venv_py312(&temp_dir, &cache_dir);
 
     let requirements_in = temp_dir.child("requirements.in");
-    requirements_in.write_str("transitive_url_dependency @ https://github.com/astral-sh/ruff/files/13257454/transitive_url_dependency.zip")?;
+    requirements_in.write_str("transitive_url_dependency @ https://github.com/astral-sh/ruff/files/14078476/transitive_url_dependency.zip")?;
 
     let constraints_txt = temp_dir.child("constraints.txt");
     constraints_txt.write_str("werkzeug @ git+https://github.com/pallets/werkzeug@2.0.0")?;
@@ -1679,7 +1679,7 @@ fn allowed_transitive_url_dependency() -> Result<()> {
     ----- stdout -----
     # This file was autogenerated by Puffin v[VERSION] via the following command:
     #    puffin pip compile requirements.in --constraint constraints.txt --cache-dir [CACHE_DIR]
-    transitive-url-dependency @ https://github.com/astral-sh/ruff/files/13257454/transitive_url_dependency.zip
+    transitive-url-dependency @ https://github.com/astral-sh/ruff/files/14078476/transitive_url_dependency.zip
     werkzeug @ git+https://github.com/pallets/werkzeug@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74
         # via transitive-url-dependency
 
@@ -1702,7 +1702,7 @@ fn allowed_transitive_canonical_url_dependency() -> Result<()> {
     let venv = create_venv_py312(&temp_dir, &cache_dir);
 
     let requirements_in = temp_dir.child("requirements.in");
-    requirements_in.write_str("transitive_url_dependency @ https://github.com/astral-sh/ruff/files/13257454/transitive_url_dependency.zip")?;
+    requirements_in.write_str("transitive_url_dependency @ https://github.com/astral-sh/ruff/files/14078476/transitive_url_dependency.zip")?;
 
     let constraints_txt = temp_dir.child("constraints.txt");
     constraints_txt.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@2.0.0")?;
@@ -1727,7 +1727,7 @@ fn allowed_transitive_canonical_url_dependency() -> Result<()> {
     ----- stdout -----
     # This file was autogenerated by Puffin v[VERSION] via the following command:
    #    puffin pip compile requirements.in --constraint constraints.txt --cache-dir [CACHE_DIR]
-    transitive-url-dependency @ https://github.com/astral-sh/ruff/files/13257454/transitive_url_dependency.zip
+    transitive-url-dependency @ https://github.com/astral-sh/ruff/files/14078476/transitive_url_dependency.zip
     werkzeug @ git+https://github.com/pallets/werkzeug@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74
         # via transitive-url-dependency
 
diff --git a/requirements.in b/requirements.in
index e6289cf6276d..89ab7baa398c 100644
--- a/requirements.in
+++ b/requirements.in
@@ -1,3 +1,5 @@
-django @ https://files.pythonhosted.org/packages/53/82/c8e8ed137da1c72fa110e3be9ab0f26bcfcf6f3d2994601d164dfac86269/Django-5.0.1.tar.gz
+#django @ https://files.pythonhosted.org/packages/53/82/c8e8ed137da1c72fa110e3be9ab0f26bcfcf6f3d2994601d164dfac86269/Django-5.0.1.tar.gz
 flask @ https://files.pythonhosted.org/packages/b2/14/97b9137a02f57d2287f3a9731b3a339fda716d2d3a157d7d1d89c2bebf7b/flask-3.0.1.tar.gz
 markupsafe @ https://files.pythonhosted.org/packages/fb/5a/fb1326fe32913e663c8e2d6bdf7cde6f472e51f9c21f0768d9b9080fe7c5/MarkupSafe-2.1.4.tar.gz
+requests @ https://files.pythonhosted.org/packages/9d/be/10918a2eac4ae9f02f6cfe6414b7a155ccd8f7f9d4380d62fd5b955065c3/requests-2.31.0.tar.gz
+werkzeug @ https://files.pythonhosted.org/packages/0d/cc/ff1904eb5eb4b455e442834dabf9427331ac0fa02853bf83db817a7dd53d/werkzeug-3.0.1.tar.gz