Skip to content

Commit fec0ee5

Browse files
committed
Remove bz/lzma/xz support from ZIP handling
Signed-off-by: William Woodruff <william@astral.sh>

Remove CompressionMethod APIs entirely
Signed-off-by: William Woodruff <william@astral.sh>

Tear out the rest of the deprecation pathways
Signed-off-by: William Woodruff <william@astral.sh>

Fix `cargo shear`
Signed-off-by: William Woodruff <william@astral.sh>

Bump snapshots
Signed-off-by: William Woodruff <william@astral.sh>

`cargo fmt`
Signed-off-by: William Woodruff <william@astral.sh>

Specialize error messages for unsupported compression methods in zip files
Signed-off-by: William Woodruff <william@astral.sh>

Simplify matches
Signed-off-by: William Woodruff <william@astral.sh>
1 parent 45bf2f9 commit fec0ee5

16 files changed

Lines changed: 89 additions & 187 deletions

File tree

Cargo.lock

Lines changed: 1 addition & 28 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -106,11 +106,8 @@ async-compression = { version = "0.4.12", features = [
106106
async-trait = { version = "0.1.82" }
107107
async_http_range_reader = { version = "0.11.0", package = "astral_async_http_range_reader" }
108108
async_zip = { version = "0.0.17", package = "astral_async_zip", features = [
109-
"bzip2",
110109
"deflate",
111-
"lzma",
112110
"tokio",
113-
"xz",
114111
"zstd",
115112
] }
116113
axoupdater = { version = "0.10.0", default-features = false }
@@ -309,9 +306,6 @@ zeroize = { version = "1.8.1" }
309306
zip = { version = "8.1.0", default-features = false, features = [
310307
"deflate",
311308
"zstd",
312-
"bzip2",
313-
"lzma",
314-
"xz",
315309
] }
316310
zstd = { version = "0.13.3" }
317311

crates/uv-bin-install/src/lib.rs

Lines changed: 3 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -772,14 +772,9 @@ async fn download_and_unpack(
772772

773773
let id = reporter.on_download_start(binary.name(), version, size);
774774
let mut progress_reader = ProgressReader::new(reader, id, reporter);
775-
stream::archive(
776-
&download_url,
777-
&mut progress_reader,
778-
format.into(),
779-
temp_dir.path(),
780-
)
781-
.await
782-
.map_err(|e| Error::Extract { source: e })?;
775+
stream::archive(&mut progress_reader, format.into(), temp_dir.path())
776+
.await
777+
.map_err(|e| Error::Extract { source: e })?;
783778
reporter.on_download_complete(id);
784779

785780
// Find the binary in the extracted files

crates/uv-dev/src/validate_zip.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ pub(crate) async fn validate_zip(
4747

4848
let target = tempfile::TempDir::new()?;
4949

50-
uv_extract::stream::unzip(args.url.to_url(), reader.compat(), target.path()).await?;
50+
uv_extract::stream::unzip(reader.compat(), target.path()).await?;
5151

5252
Ok(())
5353
}

crates/uv-distribution/src/distribution_database.rs

Lines changed: 6 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -662,8 +662,6 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
662662
// Create an entry for the HTTP cache.
663663
let http_entry = wheel_entry.with_file(format!("{}.http", filename.cache_key()));
664664

665-
let query_url = &url.clone();
666-
667665
let download = |response: reqwest::Response| {
668666
async {
669667
let size = size.or_else(|| content_length(&response));
@@ -692,7 +690,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
692690
let mut reader = ProgressReader::new(&mut hasher, progress, &**reporter);
693691
match extension {
694692
WheelExtension::Whl => {
695-
uv_extract::stream::unzip(query_url, &mut reader, temp_dir.path())
693+
uv_extract::stream::unzip(&mut reader, temp_dir.path())
696694
.await
697695
.map_err(|err| Error::Extract(filename.to_string(), err))?
698696
}
@@ -705,7 +703,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
705703
}
706704
None => match extension {
707705
WheelExtension::Whl => {
708-
uv_extract::stream::unzip(query_url, &mut hasher, temp_dir.path())
706+
uv_extract::stream::unzip(&mut hasher, temp_dir.path())
709707
.await
710708
.map_err(|err| Error::Extract(filename.to_string(), err))?
711709
}
@@ -839,8 +837,6 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
839837
// Create an entry for the HTTP cache.
840838
let http_entry = wheel_entry.with_file(format!("{}.http", filename.cache_key()));
841839

842-
let query_url = &url.clone();
843-
844840
let download = |response: reqwest::Response| {
845841
async {
846842
let size = size.or_else(|| content_length(&response));
@@ -911,7 +907,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
911907

912908
let files = match extension {
913909
WheelExtension::Whl => {
914-
uv_extract::stream::unzip(query_url, &mut hasher, temp_dir.path())
910+
uv_extract::stream::unzip(&mut hasher, temp_dir.path())
915911
.await
916912
.map_err(|err| Error::Extract(filename.to_string(), err))?
917913
}
@@ -1107,11 +1103,9 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
11071103

11081104
// Unzip the wheel to a temporary directory.
11091105
let files = match extension {
1110-
WheelExtension::Whl => {
1111-
uv_extract::stream::unzip(path.display(), &mut hasher, temp_dir.path())
1112-
.await
1113-
.map_err(|err| Error::Extract(filename.to_string(), err))?
1114-
}
1106+
WheelExtension::Whl => uv_extract::stream::unzip(&mut hasher, temp_dir.path())
1107+
.await
1108+
.map_err(|err| Error::Extract(filename.to_string(), err))?,
11151109
WheelExtension::WhlZst => {
11161110
uv_extract::stream::untar_zst(&mut hasher, temp_dir.path())
11171111
.await

crates/uv-distribution/src/source/mod.rs

Lines changed: 4 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -792,8 +792,6 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
792792
};
793793

794794
let download = |response| {
795-
let query_url = url.clone();
796-
797795
async {
798796
// At this point, we're seeing a new or updated source distribution. Initialize a
799797
// new revision, to collect the source and built artifacts.
@@ -804,7 +802,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
804802
let entry = cache_shard.shard(revision.id()).entry(SOURCE);
805803
let algorithms = hashes.algorithms();
806804
let hashes = self
807-
.download_archive(query_url, response, source, ext, entry.path(), &algorithms)
805+
.download_archive(response, source, ext, entry.path(), &algorithms)
808806
.await?;
809807

810808
Ok(revision.with_hashes(HashDigests::from(hashes)))
@@ -2264,8 +2262,6 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
22642262
};
22652263

22662264
let download = |response| {
2267-
let query_url = url.clone();
2268-
22692265
async {
22702266
// Take the union of the requested and existing hash algorithms.
22712267
let algorithms = {
@@ -2279,7 +2275,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
22792275
};
22802276

22812277
let hashes = self
2282-
.download_archive(query_url, response, source, ext, entry.path(), &algorithms)
2278+
.download_archive(response, source, ext, entry.path(), &algorithms)
22832279
.await?;
22842280
for existing in revision.hashes() {
22852281
if !hashes.contains(existing) {
@@ -2313,7 +2309,6 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
23132309
/// Download and unzip a source distribution into the cache from an HTTP response.
23142310
async fn download_archive(
23152311
&self,
2316-
query_url: DisplaySafeUrl,
23172312
response: Response,
23182313
source: &BuildableSource<'_>,
23192314
ext: SourceDistExtension,
@@ -2342,7 +2337,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
23422337

23432338
// Download and unzip the source distribution into a temporary directory.
23442339
let span = info_span!("download_source_dist", source_dist = %source);
2345-
uv_extract::stream::archive(query_url, &mut hasher, ext, temp_dir.path())
2340+
uv_extract::stream::archive(&mut hasher, ext, temp_dir.path())
23462341
.await
23472342
.map_err(|err| Error::Extract(source.to_string(), err))?;
23482343
drop(span);
@@ -2411,7 +2406,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
24112406
let mut hasher = uv_extract::hash::HashReader::new(reader, &mut hashers);
24122407

24132408
// Unzip the archive into a temporary directory.
2414-
uv_extract::stream::archive(path.display(), &mut hasher, ext, &temp_dir.path())
2409+
uv_extract::stream::archive(&mut hasher, ext, &temp_dir.path())
24152410
.await
24162411
.map_err(|err| Error::Extract(temp_dir.path().to_string_lossy().into_owned(), err))?;
24172412

crates/uv-extract/Cargo.toml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@ uv-configuration = { workspace = true }
2020
uv-distribution-filename = { workspace = true }
2121
uv-pypi-types = { workspace = true }
2222
uv-static = { workspace = true }
23-
uv-warnings = { workspace = true }
2423

2524
astral-tokio-tar = { workspace = true }
2625
async-compression = { workspace = true, features = ["bzip2", "gzip", "zstd", "xz"] }

crates/uv-extract/src/error.rs

Lines changed: 27 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,9 @@ pub enum Error {
55
#[error("I/O operation failed during extraction")]
66
Io(#[source] std::io::Error),
77
#[error("Invalid zip file")]
8-
Zip(#[from] zip::result::ZipError),
8+
Zip(#[source] zip::result::ZipError),
99
#[error("Invalid zip file structure")]
10-
AsyncZip(#[from] async_zip::error::ZipError),
10+
AsyncZip(#[source] async_zip::error::ZipError),
1111
#[error("Invalid tar file")]
1212
Tar(#[from] tokio_tar::TarError),
1313
#[error(
@@ -93,6 +93,31 @@ pub enum Error {
9393
EmptyFilename,
9494
#[error("Archive contains unacceptable filename: {filename}")]
9595
UnacceptableFilename { filename: String },
96+
#[error(
97+
"Archive contains a file with an unsupported compression method; files must be compressed with 'stored', 'DEFLATE', or 'zstd'"
98+
)]
99+
UnsupportedCompression,
100+
}
101+
102+
impl From<async_zip::error::ZipError> for Error {
103+
fn from(err: async_zip::error::ZipError) -> Self {
104+
match err {
105+
async_zip::error::ZipError::CompressionNotSupported(_) => Self::UnsupportedCompression,
106+
o => Self::AsyncZip(o),
107+
}
108+
}
109+
}
110+
111+
impl From<zip::result::ZipError> for Error {
112+
fn from(err: zip::result::ZipError) -> Self {
113+
match err {
114+
// NOTE: No structured error from the zip crate, so we need to sniff the message.
115+
zip::result::ZipError::UnsupportedArchive("Compression method not supported") => {
116+
Self::UnsupportedCompression
117+
}
118+
o => Self::Zip(o),
119+
}
120+
}
96121
}
97122

98123
impl Error {

crates/uv-extract/src/lib.rs

Lines changed: 1 addition & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use std::{fmt::Display, sync::LazyLock};
1+
use std::sync::LazyLock;
22

33
pub use error::Error;
44
use regex::Regex;
@@ -14,64 +14,6 @@ mod vendor;
1414
static CONTROL_CHARACTERS_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\p{C}").unwrap());
1515
static REPLACEMENT_CHARACTER: &str = "\u{FFFD}";
1616

17-
/// Compression methods that we consider supported.
18-
///
19-
/// Our underlying ZIP dependencies may support more.
20-
pub(crate) enum CompressionMethod {
21-
Stored,
22-
Deflated,
23-
Zstd,
24-
// NOTE: This will become `Unsupported(...)` in the future.
25-
Deprecated(&'static str),
26-
}
27-
28-
impl CompressionMethod {
29-
/// Returns `true` if this is a well-known compression method that we
30-
/// expect other ZIP implementations to support.
31-
pub(crate) fn is_well_known(&self) -> bool {
32-
matches!(self, Self::Stored | Self::Deflated | Self::Zstd)
33-
}
34-
}
35-
36-
impl Display for CompressionMethod {
37-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
38-
match self {
39-
Self::Stored => write!(f, "stored"),
40-
Self::Deflated => write!(f, "DEFLATE"),
41-
Self::Zstd => write!(f, "zstd"),
42-
Self::Deprecated(name) => write!(f, "{name}"),
43-
}
44-
}
45-
}
46-
47-
impl From<async_zip::Compression> for CompressionMethod {
48-
fn from(value: async_zip::Compression) -> Self {
49-
match value {
50-
async_zip::Compression::Stored => Self::Stored,
51-
async_zip::Compression::Deflate => Self::Deflated,
52-
async_zip::Compression::Zstd => Self::Zstd,
53-
async_zip::Compression::Bz => Self::Deprecated("bzip2"),
54-
async_zip::Compression::Lzma => Self::Deprecated("lzma"),
55-
async_zip::Compression::Xz => Self::Deprecated("xz"),
56-
_ => Self::Deprecated("unknown"),
57-
}
58-
}
59-
}
60-
61-
impl From<zip::CompressionMethod> for CompressionMethod {
62-
fn from(value: zip::CompressionMethod) -> Self {
63-
match value {
64-
zip::CompressionMethod::Stored => Self::Stored,
65-
zip::CompressionMethod::Deflated => Self::Deflated,
66-
zip::CompressionMethod::Zstd => Self::Zstd,
67-
zip::CompressionMethod::Bzip2 => Self::Deprecated("bzip2"),
68-
zip::CompressionMethod::Lzma => Self::Deprecated("lzma"),
69-
zip::CompressionMethod::Xz => Self::Deprecated("xz"),
70-
_ => Self::Deprecated("unknown"),
71-
}
72-
}
73-
}
74-
7517
/// Validate that a given filename (e.g. reported by a ZIP archive's
7618
/// local file entries or central directory entries) is "safe" to use.
7719
///

0 commit comments

Comments (0)