From 6d80c54e6a6de7a85785f3860d0a698a6c9c9d57 Mon Sep 17 00:00:00 2001 From: Christian Haynes <06chaynes@gmail.com> Date: Mon, 16 Feb 2026 19:42:16 -0500 Subject: [PATCH 1/3] migration tests validating bincode->postcard upgrade path --- Cargo.toml | 4 + compat-tests/README.md | 72 ++++++ compat-tests/legacy-writer/Cargo.toml | 11 + compat-tests/legacy-writer/src/main.rs | 59 +++++ compat-tests/read-legacy-cache/Cargo.toml | 9 + compat-tests/read-legacy-cache/src/main.rs | 67 +++++ http-cache-reqwest/Cargo.toml | 4 + http-cache-reqwest/src/test.rs | 150 +++++++++++ http-cache/Cargo.toml | 1 + http-cache/src/lib.rs | 4 +- http-cache/src/managers/cacache.rs | 78 ++++-- http-cache/src/managers/foyer.rs | 13 +- http-cache/src/managers/mod.rs | 2 +- http-cache/src/managers/moka.rs | 153 ++++++++++- http-cache/src/test.rs | 283 +++++++++++++++++++++ 15 files changed, 873 insertions(+), 37 deletions(-) create mode 100644 compat-tests/README.md create mode 100644 compat-tests/legacy-writer/Cargo.toml create mode 100644 compat-tests/legacy-writer/src/main.rs create mode 100644 compat-tests/read-legacy-cache/Cargo.toml create mode 100644 compat-tests/read-legacy-cache/src/main.rs diff --git a/Cargo.toml b/Cargo.toml index 29f90ad..a82ec4d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,4 +8,8 @@ members = [ "http-cache-tower", "http-cache-tower-server", "http-cache-ureq" +] +exclude = [ + "compat-tests/legacy-writer", + "compat-tests/read-legacy-cache", ] \ No newline at end of file diff --git a/compat-tests/README.md b/compat-tests/README.md new file mode 100644 index 0000000..d2206ce --- /dev/null +++ b/compat-tests/README.md @@ -0,0 +1,72 @@ +# Compatibility Tests: 0.16 -> 1.0-alpha Migration + +These tools validate the real-world upgrade path from http-cache-reqwest 0.16.0 +(bincode serialization) to 1.0-alpha.4 (postcard serialization). 
+ +## What's being tested + +| Scenario | Expected Result | +|---|---| +| Old bincode entries read with new code (postcard only) | Graceful cache miss (`Ok(None)`), no crash | +| Old bincode entries read with compat features enabled | Successful deserialization via bincode fallback | +| New postcard entries written and read | Normal cache hit | + +## Prerequisites + +- Rust toolchain +- Internet access (for the legacy writer to make HTTP requests) + +## Step-by-step manual validation + +### 1. Build and run the legacy writer + +This uses http-cache-reqwest 0.16.0 from crates.io to populate a cache directory +with bincode-serialized entries. + +```bash +cd compat-tests/legacy-writer +cargo run -- /tmp/legacy-cache +``` + +### 2. Read the cache with the new code (bincode fallback) + +This uses the current workspace code with `manager-cacache-bincode` and +`http-headers-compat` features to read back the old entries. + +```bash +cd compat-tests/read-legacy-cache +cargo run -- /tmp/legacy-cache +``` + +Expected output: all entries should be HIT (bincode fallback working). + +### 3. Verify graceful degradation without compat features + +You can also test by modifying `read-legacy-cache/Cargo.toml` to remove +`manager-cacache-bincode` and `http-headers-compat`, then rebuild and run. +Old entries should return as cache misses without errors. 
+ +## Automated CI tests + +The equivalent scenarios are also tested in the workspace test suite: + +```bash +# Test bincode->postcard migration (with compat features) +cargo test -p http-cache --features manager-cacache,manager-cacache-bincode,http-headers-compat cacache_bincode_to_postcard + +# Test graceful degradation (postcard only, no bincode feature) +cargo test -p http-cache --features manager-cacache cacache_bincode_entry_read_without_compat + +# Full middleware-level e2e test +cargo test -p http-cache-reqwest --features manager-cacache,manager-cacache-bincode,http-headers-compat migration_from_bincode +``` + +## Key compatibility facts + +| | http-cache-reqwest 0.16.0 | http-cache-reqwest 1.0.0-alpha.4 | +|---|---|---| +| http-cache | 0.21.0 | 1.0.0-alpha.4 | +| Serialization | bincode 1.3.3 | postcard 1.1 (default) | +| cacache | 13.1.0 | 13.1.0 (same!) | +| http-cache-semantics | 2.1.0 | 2.1.0 (same!) | +| Cache key format | `METHOD:URI` | `METHOD:URI` (same!) | diff --git a/compat-tests/legacy-writer/Cargo.toml b/compat-tests/legacy-writer/Cargo.toml new file mode 100644 index 0000000..f1160d3 --- /dev/null +++ b/compat-tests/legacy-writer/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "legacy-writer" +version = "0.1.0" +edition = "2021" + +[dependencies] +cacache = { version = "13.1.0", default-features = false } +http-cache-reqwest = { version = "0.16.0", features = ["manager-cacache"] } +reqwest = { version = "0.12", features = ["rustls-tls"] } +reqwest-middleware = "0.4" +tokio = { version = "1", features = ["macros", "rt-multi-thread"] } diff --git a/compat-tests/legacy-writer/src/main.rs b/compat-tests/legacy-writer/src/main.rs new file mode 100644 index 0000000..c6b0e20 --- /dev/null +++ b/compat-tests/legacy-writer/src/main.rs @@ -0,0 +1,59 @@ +use http_cache_reqwest::{CACacheManager, Cache, CacheMode, HttpCache, HttpCacheOptions}; +use reqwest::Client; +use reqwest_middleware::ClientBuilder; +use std::path::PathBuf; + +/// Writes cache entries 
using http-cache-reqwest 0.16.0 (bincode serialization). +/// Used to validate the upgrade path from 0.16 -> 1.0-alpha. +/// +/// Usage: cargo run -- +#[tokio::main] +async fn main() -> Result<(), Box> { + let cache_dir = std::env::args() + .nth(1) + .map(PathBuf::from) + .unwrap_or_else(|| PathBuf::from("/tmp/legacy-cache")); + + println!("Writing legacy cache entries to: {}", cache_dir.display()); + + let manager = CACacheManager { + path: cache_dir.clone(), + remove_opts: cacache::RemoveOpts::new().remove_fully(true), + }; + + let client = ClientBuilder::new(Client::builder().build()?) + .with(Cache(HttpCache { + mode: CacheMode::Default, + manager, + options: HttpCacheOptions::default(), + })) + .build(); + + let urls = [ + "https://httpbin.org/cache/300", + "https://httpbin.org/response-headers?Cache-Control=public%2Cmax-age%3D3600", + "https://jsonplaceholder.typicode.com/posts/1", + ]; + + for url in &urls { + println!("\nRequesting: {}", url); + match client.get(*url).send().await { + Ok(response) => { + println!(" Status: {}", response.status()); + println!( + " Cache-Control: {:?}", + response.headers().get("cache-control") + ); + let body = response.bytes().await?; + println!(" Body length: {} bytes", body.len()); + } + Err(e) => { + eprintln!(" Error: {}", e); + } + } + } + + println!("\nCache entries written to: {}", cache_dir.display()); + println!("Now run the read-legacy-cache tool to verify migration."); + Ok(()) +} diff --git a/compat-tests/read-legacy-cache/Cargo.toml b/compat-tests/read-legacy-cache/Cargo.toml new file mode 100644 index 0000000..96de089 --- /dev/null +++ b/compat-tests/read-legacy-cache/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "read-legacy-cache" +version = "0.1.0" +edition = "2021" + +[dependencies] +http-cache = { path = "../../http-cache", features = ["manager-cacache", "manager-cacache-bincode", "http-headers-compat"] } +http-cache-semantics = "2.1.0" +tokio = { version = "1", features = ["macros", "rt-multi-thread"] } 
diff --git a/compat-tests/read-legacy-cache/src/main.rs b/compat-tests/read-legacy-cache/src/main.rs new file mode 100644 index 0000000..b5c761f --- /dev/null +++ b/compat-tests/read-legacy-cache/src/main.rs @@ -0,0 +1,67 @@ +use http_cache::{CACacheManager, CacheManager}; +use std::path::PathBuf; + +/// Reads cache entries written by the legacy-writer (http-cache-reqwest 0.16.0) +/// using the current code (1.0-alpha with postcard + bincode fallback). +/// +/// Usage: cargo run -- +#[tokio::main] +async fn main() -> Result<(), Box> { + let cache_dir = std::env::args() + .nth(1) + .map(PathBuf::from) + .unwrap_or_else(|| PathBuf::from("/tmp/legacy-cache")); + + println!("Reading legacy cache entries from: {}", cache_dir.display()); + + let manager = CACacheManager::new(cache_dir.clone(), true); + + let urls = [ + "https://httpbin.org/cache/300", + "https://httpbin.org/response-headers?Cache-Control=public%2Cmax-age%3D3600", + "https://jsonplaceholder.typicode.com/posts/1", + ]; + + let mut success_count = 0; + let mut miss_count = 0; + let mut total = 0; + + for url in &urls { + total += 1; + let cache_key = format!("GET:{}", url); + println!("\nLooking up: {}", cache_key); + + match manager.get(&cache_key).await { + Ok(Some((response, policy))) => { + success_count += 1; + println!(" HIT - bincode fallback worked!"); + println!(" Status: {}", response.status); + println!(" Body length: {} bytes", response.body.len()); + println!(" Headers: {:?}", response.headers); + println!(" Version: {:?}", response.version); + println!(" Policy is_stale: {}", policy.is_stale(std::time::SystemTime::now())); + } + Ok(None) => { + miss_count += 1; + println!(" MISS - entry not found or not deserializable"); + } + Err(e) => { + eprintln!(" ERROR: {} (this should NOT happen!)", e); + } + } + } + + println!("\n--- Summary ---"); + println!("Total: {}, Hits: {}, Misses: {}", total, success_count, miss_count); + + if success_count == total { + println!("All legacy entries were 
successfully read via bincode fallback!"); + } else if miss_count > 0 && success_count == 0 { + println!("No entries were readable. This could mean:"); + println!(" - The legacy-writer hasn't been run yet"); + println!(" - The cache directory is wrong"); + println!(" - The bincode fallback isn't working"); + } + + Ok(()) +} diff --git a/http-cache-reqwest/Cargo.toml b/http-cache-reqwest/Cargo.toml index 73fcb0a..36454a5 100644 --- a/http-cache-reqwest/Cargo.toml +++ b/http-cache-reqwest/Cargo.toml @@ -38,6 +38,10 @@ version = "1.0.0-alpha.4" default-features = false [dev-dependencies] +bincode = "1.3.3" +cacache = { version = "13.1.0", default-features = false, features = ["tokio-runtime"] } +http-cache-semantics = "2.1.0" +serde = { version = "1.0", features = ["derive"] } tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread"] } wiremock = "0.6.0" tempfile = "3.13.0" diff --git a/http-cache-reqwest/src/test.rs b/http-cache-reqwest/src/test.rs index afb64cd..7d90de5 100644 --- a/http-cache-reqwest/src/test.rs +++ b/http-cache-reqwest/src/test.rs @@ -2374,3 +2374,153 @@ async fn test_metadata_retrieval_through_extensions() -> Result<()> { Ok(()) } + +// Full middleware-level e2e test for the bincode->postcard migration path. +// Simulates a user who had http-cache-reqwest 0.16.0 and upgrades to 1.0-alpha.4. +#[cfg(all( + feature = "manager-cacache", + feature = "manager-cacache-bincode", + feature = "http-headers-compat" +))] +mod bincode_migration { + use super::*; + + use http_cache::{HttpVersion, Url}; + use serde::Serialize; + use std::collections::HashMap; + use std::str::FromStr; + + // Local copies matching the exact serde layout from http-cache 0.21.0. + // This is intentional — it's a true black-box test mirroring what old + // code actually wrote via bincode. 
+ #[derive(Serialize)] + struct LegacyStore { + response: LegacyTestResponse, + policy: http_cache_semantics::CachePolicy, + } + + #[derive(Serialize)] + struct LegacyTestResponse { + body: Vec, + headers: HashMap, + status: u16, + url: Url, + version: HttpVersion, + } + + #[tokio::test] + async fn migration_from_bincode_cache() -> Result<()> { + let mock_server = MockServer::start().await; + + // Set up a mock that we expect NOT to be called if the cache hit works. + // Use ForceCache mode so the stale policy doesn't trigger revalidation. + let m = build_mock(CACHEABLE_PUBLIC, b"fresh from server", 200, 0); + let _mock_guard = mock_server.register_as_scoped(m).await; + + let url = format!("{}/legacy-endpoint", &mock_server.uri()); + let parsed_url = Url::from_str(&url)?; + + let cache_dir = tempfile::tempdir().unwrap(); + let cache_path = cache_dir.path().to_path_buf(); + let cache_key = format!("GET:{}", &parsed_url); + + // Construct a legacy bincode payload + let mut headers = HashMap::new(); + headers.insert( + "content-type".to_string(), + "application/json".to_string(), + ); + headers.insert( + "cache-control".to_string(), + CACHEABLE_PUBLIC.to_string(), + ); + + let legacy_body = b"legacy bincode response"; + + let req = http::Request::get(url.as_str()).body(())?; + let res = http::Response::builder() + .status(200) + .header("cache-control", CACHEABLE_PUBLIC) + .body(legacy_body.to_vec())?; + let policy = http_cache_semantics::CachePolicy::new(&req, &res); + + let legacy_store = LegacyStore { + response: LegacyTestResponse { + body: legacy_body.to_vec(), + headers, + status: 200, + url: parsed_url.clone(), + version: HttpVersion::Http11, + }, + policy, + }; + + // Serialize with bincode and write to cacache + let bytes = bincode::serialize(&legacy_store).unwrap(); + cacache::write(&cache_path, &cache_key, bytes).await.unwrap(); + + // Create a reqwest client pointing at the same cache directory + let manager = CACacheManager::new(cache_path.clone(), true); 
+ let client = ClientBuilder::new(Client::new()) + .with(Cache(HttpCache { + mode: CacheMode::ForceCache, + manager: manager.clone(), + options: Default::default(), + })) + .build(); + + // Request the same URL — should be served from the legacy cache + let res = client.get(&url).send().await?; + assert_eq!(res.status(), 200); + let body = res.bytes().await?; + assert_eq!( + body.as_ref(), + legacy_body, + "Response should come from the legacy bincode cache entry" + ); + + // The mock expects 0 calls, so WireMock will assert if it was hit + + Ok(()) + } + + #[tokio::test] + async fn migration_new_entries_work_after_upgrade() -> Result<()> { + let mock_server = MockServer::start().await; + let m = build_mock(CACHEABLE_PUBLIC, b"new postcard data", 200, 1); + let _mock_guard = mock_server.register_as_scoped(m).await; + let url = format!("{}/new-endpoint", &mock_server.uri()); + + let cache_dir = tempfile::tempdir().unwrap(); + let manager = + CACacheManager::new(cache_dir.path().to_path_buf(), true); + + let client = ClientBuilder::new(Client::new()) + .with(Cache(HttpCache { + mode: CacheMode::Default, + manager: manager.clone(), + options: Default::default(), + })) + .build(); + + // First request: cache miss, goes to mock server + let res = client.get(&url).send().await?; + assert_eq!(res.status(), 200); + assert_eq!(res.bytes().await?, &b"new postcard data"[..]); + + // Verify it was cached (written with postcard) + let data = CacheManager::get( + &manager, + &format!("GET:{}", &url_parse(&url)?), + ) + .await?; + assert!(data.is_some(), "New entry should be cached with postcard"); + + // Second request: should come from cache (mock expects only 1 call) + let res = client.get(&url).send().await?; + assert_eq!(res.status(), 200); + assert_eq!(res.bytes().await?, &b"new postcard data"[..]); + + Ok(()) + } +} diff --git a/http-cache/Cargo.toml b/http-cache/Cargo.toml index 80bdbda..f6dd491 100644 --- a/http-cache/Cargo.toml +++ b/http-cache/Cargo.toml @@ -51,6 +51,7 
@@ async-compat = { version = "0.2", optional = true } rand = { version = "0.9.2", optional = true } [dev-dependencies] +bincode = "1.3.3" smol = "2.0.2" http-cache-semantics = "2.1.0" tokio = { version = "1.43.0", features = [ "macros", "rt", "rt-multi-thread" ] } diff --git a/http-cache/src/lib.rs b/http-cache/src/lib.rs index 6f40776..65295e1 100644 --- a/http-cache/src/lib.rs +++ b/http-cache/src/lib.rs @@ -464,7 +464,7 @@ pub use managers::cacache::CACacheManager; #[cfg(feature = "streaming")] pub use managers::streaming_cache::StreamingManager; -#[cfg(feature = "manager-moka")] +#[cfg(any(feature = "manager-moka", feature = "manager-moka-bincode"))] pub use managers::moka::MokaManager; #[cfg(feature = "manager-foyer")] @@ -479,7 +479,7 @@ pub use rate_limiting::{ pub use rate_limiting::Quota; // Exposing the moka cache for convenience, renaming to avoid naming conflicts -#[cfg(feature = "manager-moka")] +#[cfg(any(feature = "manager-moka", feature = "manager-moka-bincode"))] #[cfg_attr(docsrs, doc(cfg(feature = "manager-moka")))] pub use moka::future::{Cache as MokaCache, CacheBuilder as MokaCacheBuilder}; diff --git a/http-cache/src/managers/cacache.rs b/http-cache/src/managers/cacache.rs index 7c73c69..d3188ff 100644 --- a/http-cache/src/managers/cacache.rs +++ b/http-cache/src/managers/cacache.rs @@ -31,17 +31,17 @@ struct Store { // Legacy store format (bincode) - HttpResponse without metadata field // The metadata field was added alongside postcard, so bincode cache // data will never contain it. 
-#[cfg(all(feature = "bincode", not(feature = "postcard")))] +#[cfg(feature = "bincode")] #[derive(Debug, Deserialize, Serialize)] -struct Store { +struct BincodeStore { response: LegacyHttpResponse, policy: CachePolicy, } -#[cfg(all(feature = "bincode", not(feature = "postcard")))] +#[cfg(feature = "bincode")] use crate::{HttpHeaders, HttpVersion, Url}; -#[cfg(all(feature = "bincode", not(feature = "postcard")))] +#[cfg(feature = "bincode")] #[derive(Debug, Clone, Deserialize, Serialize)] struct LegacyHttpResponse { body: Vec, @@ -54,7 +54,7 @@ struct LegacyHttpResponse { version: HttpVersion, } -#[cfg(all(feature = "bincode", not(feature = "postcard")))] +#[cfg(feature = "bincode")] impl From for HttpResponse { fn from(legacy: LegacyHttpResponse) -> Self { #[cfg(feature = "http-headers-compat")] @@ -73,7 +73,7 @@ impl From for HttpResponse { } } -#[cfg(all(feature = "bincode", not(feature = "postcard")))] +#[cfg(feature = "bincode")] impl From for LegacyHttpResponse { fn from(response: HttpResponse) -> Self { #[cfg(feature = "http-headers-compat")] @@ -121,29 +121,65 @@ impl CacheManager for CACacheManager { &self, cache_key: &str, ) -> Result> { - let store: Store = match cacache::read(&self.path, cache_key).await { - Ok(d) => { - #[cfg(feature = "postcard")] - { - postcard::from_bytes(&d)? - } - #[cfg(all(feature = "bincode", not(feature = "postcard")))] - { - bincode::deserialize(&d)? - } - } + let d = match cacache::read(&self.path, cache_key).await { + Ok(d) => d, Err(_e) => { return Ok(None); } }; + // When both postcard and bincode are enabled, try postcard first + // then fall back to bincode (for reading legacy cache entries). + // When only one format is enabled, use that format directly. 
#[cfg(feature = "postcard")] { - Ok(Some((store.response, store.policy))) + match postcard::from_bytes::(&d) { + Ok(store) => return Ok(Some((store.response, store.policy))), + Err(_e) => { + #[cfg(feature = "bincode")] + { + match bincode::deserialize::(&d) { + Ok(store) => { + return Ok(Some(( + store.response.into(), + store.policy, + ))); + } + Err(e) => { + log::warn!( + "Failed to deserialize cache entry for key '{}': {}", + cache_key, + e + ); + return Ok(None); + } + } + } + #[cfg(not(feature = "bincode"))] + { + log::warn!( + "Failed to deserialize cache entry for key '{}': {}", + cache_key, + _e + ); + return Ok(None); + } + } + } } #[cfg(all(feature = "bincode", not(feature = "postcard")))] { - Ok(Some((store.response.into(), store.policy))) + match bincode::deserialize::(&d) { + Ok(store) => Ok(Some((store.response.into(), store.policy))), + Err(e) => { + log::warn!( + "Failed to deserialize cache entry for key '{}': {}", + cache_key, + e + ); + Ok(None) + } + } } } @@ -153,10 +189,12 @@ impl CacheManager for CACacheManager { response: HttpResponse, policy: CachePolicy, ) -> Result { + // Always write with postcard when available (modern format). + // Only use bincode when postcard is not enabled. 
#[cfg(feature = "postcard")] let data = Store { response, policy }; #[cfg(all(feature = "bincode", not(feature = "postcard")))] - let data = Store { response: response.into(), policy }; + let data = BincodeStore { response: response.into(), policy }; #[cfg(feature = "postcard")] let bytes = postcard::to_allocvec(&data)?; diff --git a/http-cache/src/managers/foyer.rs b/http-cache/src/managers/foyer.rs index 7429a78..94efe56 100644 --- a/http-cache/src/managers/foyer.rs +++ b/http-cache/src/managers/foyer.rs @@ -104,8 +104,17 @@ impl CacheManager for FoyerManager { ) -> Result> { match self.cache.get(&cache_key.to_string()).await { Ok(Some(entry)) => { - let store: Store = postcard::from_bytes(entry.value())?; - Ok(Some((store.response, store.policy))) + match postcard::from_bytes::(entry.value()) { + Ok(store) => Ok(Some((store.response, store.policy))), + Err(e) => { + log::warn!( + "Failed to deserialize cache entry for key '{}': {}", + cache_key, + e + ); + Ok(None) + } + } } Ok(None) => Ok(None), Err(_) => Ok(None), diff --git a/http-cache/src/managers/mod.rs b/http-cache/src/managers/mod.rs index 4b3e317..7b5d46b 100644 --- a/http-cache/src/managers/mod.rs +++ b/http-cache/src/managers/mod.rs @@ -4,7 +4,7 @@ pub mod cacache; #[cfg(feature = "manager-foyer")] pub mod foyer; -#[cfg(feature = "manager-moka")] +#[cfg(any(feature = "manager-moka", feature = "manager-moka-bincode"))] pub mod moka; // Streaming cache managers diff --git a/http-cache/src/managers/moka.rs b/http-cache/src/managers/moka.rs index 334aa44..484ccf4 100644 --- a/http-cache/src/managers/moka.rs +++ b/http-cache/src/managers/moka.rs @@ -27,12 +27,82 @@ impl Default for MokaManager { } } +// Modern store format (postcard) - includes metadata field +#[cfg(feature = "postcard")] #[derive(Debug, Deserialize, Serialize)] struct Store { response: HttpResponse, policy: CachePolicy, } +// Legacy store format (bincode) - HttpResponse without metadata field +#[cfg(feature = "bincode")] +#[derive(Debug, 
Deserialize, Serialize)] +struct BincodeStore { + response: LegacyHttpResponse, + policy: CachePolicy, +} + +#[cfg(feature = "bincode")] +use crate::{HttpHeaders, HttpVersion, Url}; + +#[cfg(feature = "bincode")] +#[derive(Debug, Clone, Deserialize, Serialize)] +struct LegacyHttpResponse { + body: Vec, + #[cfg(feature = "http-headers-compat")] + headers: std::collections::HashMap, + #[cfg(not(feature = "http-headers-compat"))] + headers: std::collections::HashMap>, + status: u16, + url: Url, + version: HttpVersion, +} + +#[cfg(feature = "bincode")] +impl From for HttpResponse { + fn from(legacy: LegacyHttpResponse) -> Self { + #[cfg(feature = "http-headers-compat")] + let headers = HttpHeaders::Legacy(legacy.headers); + #[cfg(not(feature = "http-headers-compat"))] + let headers = HttpHeaders::Modern(legacy.headers); + + HttpResponse { + body: legacy.body, + headers, + status: legacy.status, + url: legacy.url, + version: legacy.version, + metadata: None, + } + } +} + +#[cfg(feature = "bincode")] +impl From for LegacyHttpResponse { + fn from(response: HttpResponse) -> Self { + #[cfg(feature = "http-headers-compat")] + let headers = match response.headers { + HttpHeaders::Legacy(h) => h, + HttpHeaders::Modern(h) => { + h.into_iter().map(|(k, v)| (k, v.join(", "))).collect() + } + }; + #[cfg(not(feature = "http-headers-compat"))] + let headers = match response.headers { + HttpHeaders::Modern(h) => h, + }; + + LegacyHttpResponse { + body: response.body, + headers, + status: response.status, + url: response.url, + version: response.version, + } + } +} + impl MokaManager { /// Create a new manager from a pre-configured Cache pub fn new(cache: Cache>>) -> Self { @@ -52,20 +122,64 @@ impl CacheManager for MokaManager { &self, cache_key: &str, ) -> Result> { - let store: Store = match self.cache.get(cache_key).await { - Some(d) => { - #[cfg(feature = "postcard")] - { - postcard::from_bytes(&d)? 
+ let d = match self.cache.get(cache_key).await { + Some(d) => d, + None => return Ok(None), + }; + + // When both postcard and bincode are enabled, try postcard first + // then fall back to bincode (for reading legacy cache entries). + // When only one format is enabled, use that format directly. + #[cfg(feature = "postcard")] + { + match postcard::from_bytes::(&d) { + Ok(store) => return Ok(Some((store.response, store.policy))), + Err(_e) => { + #[cfg(feature = "bincode")] + { + match bincode::deserialize::(&d) { + Ok(store) => { + return Ok(Some(( + store.response.into(), + store.policy, + ))); + } + Err(e) => { + log::warn!( + "Failed to deserialize cache entry for key '{}': {}", + cache_key, + e + ); + return Ok(None); + } + } + } + #[cfg(not(feature = "bincode"))] + { + log::warn!( + "Failed to deserialize cache entry for key '{}': {}", + cache_key, + _e + ); + return Ok(None); + } } - #[cfg(all(feature = "bincode", not(feature = "postcard")))] - { - bincode::deserialize(&d)? + } + } + #[cfg(all(feature = "bincode", not(feature = "postcard")))] + { + match bincode::deserialize::(&d) { + Ok(store) => Ok(Some((store.response.into(), store.policy))), + Err(e) => { + log::warn!( + "Failed to deserialize cache entry for key '{}': {}", + cache_key, + e + ); + Ok(None) } } - None => return Ok(None), - }; - Ok(Some((store.response, store.policy))) + } } async fn put( @@ -74,14 +188,29 @@ impl CacheManager for MokaManager { response: HttpResponse, policy: CachePolicy, ) -> Result { + // Always write with postcard when available (modern format). + // Only use bincode when postcard is not enabled. 
+ #[cfg(feature = "postcard")] let data = Store { response, policy }; + #[cfg(all(feature = "bincode", not(feature = "postcard")))] + let data = BincodeStore { response: response.into(), policy }; + #[cfg(feature = "postcard")] let bytes = postcard::to_allocvec(&data)?; #[cfg(all(feature = "bincode", not(feature = "postcard")))] let bytes = bincode::serialize(&data)?; + self.cache.insert(cache_key, Arc::new(bytes)).await; self.cache.run_pending_tasks().await; - Ok(data.response) + + #[cfg(feature = "postcard")] + { + Ok(data.response) + } + #[cfg(all(feature = "bincode", not(feature = "postcard")))] + { + Ok(data.response.into()) + } } async fn delete(&self, cache_key: &str) -> Result<()> { diff --git a/http-cache/src/test.rs b/http-cache/src/test.rs index 4aafdde..9b37b0a 100644 --- a/http-cache/src/test.rs +++ b/http-cache/src/test.rs @@ -208,6 +208,250 @@ mod with_cacache { assert!(data.is_none()); Ok(()) } + + #[tokio::test] + async fn cacache_corrupt_data_returns_none() -> Result<()> { + let cache_dir = tempfile::tempdir().unwrap(); + let manager = CACacheManager::new(cache_dir.path().to_path_buf(), true); + let key = "GET:http://example.com/corrupt"; + + // Write corrupt data directly to the cache + cacache::write(&cache_dir.path().to_path_buf(), key, b"not valid serialized data").await?; + + // get() should return Ok(None) instead of an error + let result = manager.get(key).await?; + assert!(result.is_none()); + Ok(()) + } +} + +// Migration tests: bincode (0.16/0.21) -> postcard (1.0-alpha) with compat features +#[cfg(all( + feature = "manager-cacache", + feature = "manager-cacache-bincode", + feature = "http-headers-compat" +))] +mod cacache_bincode_migration { + use super::*; + use crate::{CACacheManager, CacheManager, HttpVersion}; + + use http_cache_semantics::CachePolicy; + use serde::Serialize; + use std::collections::HashMap; + use std::str::FromStr; + + // Matches the Store struct from http-cache 0.21.0 (bincode era). 
+ // This is intentionally a local copy of the internal struct to test + // the real-world migration path as a black-box test. + #[derive(Serialize)] + struct LegacyStore { + response: LegacyTestResponse, + policy: CachePolicy, + } + + // Matches HttpResponse from http-cache 0.21.0 (no metadata field, + // headers as HashMap). + #[derive(Serialize)] + struct LegacyTestResponse { + body: Vec, + headers: HashMap, + status: u16, + url: Url, + version: HttpVersion, + } + + #[tokio::test] + async fn cacache_bincode_to_postcard_migration() -> Result<()> { + let url = Url::from_str("http://example.com/legacy")?; + let cache_dir = tempfile::tempdir().unwrap(); + let manager = + CACacheManager::new(cache_dir.path().to_path_buf(), true); + let cache_key = format!("GET:{}", &url); + + // Construct a legacy bincode payload matching what 0.16/0.21 wrote + let mut headers = HashMap::new(); + headers.insert( + "content-type".to_string(), + "application/json".to_string(), + ); + headers.insert( + "cache-control".to_string(), + "max-age=3600".to_string(), + ); + + let legacy_response = LegacyTestResponse { + body: b"legacy cached body".to_vec(), + headers, + status: 200, + url: url.clone(), + version: HttpVersion::Http11, + }; + + let req = + http::Request::get("http://example.com/legacy").body(())?; + let res = http::Response::builder() + .status(200) + .header("cache-control", "max-age=3600") + .body(b"legacy cached body".to_vec())?; + let policy = CachePolicy::new(&req, &res); + + let legacy_store = + LegacyStore { response: legacy_response, policy }; + + // Serialize with bincode (exactly as old code would have) + let bytes = bincode::serialize(&legacy_store).unwrap(); + + // Write directly to cacache (bypassing the manager's put()) + cacache::write(&cache_dir.path().to_path_buf(), &cache_key, bytes) + .await?; + + // Read it back via the manager (postcard first, then bincode fallback) + let data = manager.get(&cache_key).await?; + assert!( + data.is_some(), + "Bincode fallback 
should successfully deserialize legacy entry" + ); + + let (response, _policy) = data.unwrap(); + assert_eq!(response.body, b"legacy cached body"); + assert_eq!(response.status, 200); + assert_eq!(response.url, url); + assert_eq!(response.version, HttpVersion::Http11); + // Legacy entries have no metadata field + assert!(response.metadata.is_none()); + + Ok(()) + } + + #[tokio::test] + async fn cacache_bincode_migration_preserves_headers() -> Result<()> { + let url = Url::from_str("http://example.com/headers")?; + let cache_dir = tempfile::tempdir().unwrap(); + let manager = + CACacheManager::new(cache_dir.path().to_path_buf(), true); + let cache_key = format!("GET:{}", &url); + + let mut headers = HashMap::new(); + headers + .insert("content-type".to_string(), "text/html".to_string()); + headers.insert("x-custom".to_string(), "value123".to_string()); + + let legacy_response = LegacyTestResponse { + body: b"test".to_vec(), + headers, + status: 200, + url: url.clone(), + version: HttpVersion::Http11, + }; + + let req = + http::Request::get("http://example.com/headers").body(())?; + let res = http::Response::builder() + .status(200) + .header("cache-control", "max-age=3600") + .body(b"test".to_vec())?; + let policy = CachePolicy::new(&req, &res); + + let legacy_store = + LegacyStore { response: legacy_response, policy }; + let bytes = bincode::serialize(&legacy_store).unwrap(); + + cacache::write(&cache_dir.path().to_path_buf(), &cache_key, bytes) + .await?; + + let data = manager.get(&cache_key).await?; + assert!(data.is_some()); + + let (response, _) = data.unwrap(); + assert_eq!( + response.headers.get("content-type"), + Some(&"text/html".to_string()) + ); + assert_eq!( + response.headers.get("x-custom"), + Some(&"value123".to_string()) + ); + + Ok(()) + } +} + +// Test graceful degradation: bincode entries are treated as cache misses +// when only postcard is enabled (no bincode compat features). 
+#[cfg(all( + feature = "manager-cacache", + not(feature = "manager-cacache-bincode") +))] +mod cacache_bincode_graceful_miss { + use super::*; + use crate::{CACacheManager, CacheManager}; + + #[tokio::test] + async fn cacache_bincode_entry_read_without_compat_feature() -> Result<()> + { + let cache_dir = tempfile::tempdir().unwrap(); + let manager = + CACacheManager::new(cache_dir.path().to_path_buf(), true); + let key = "GET:http://example.com/legacy-miss"; + + // Write data that looks like a bincode-serialized Store. + // Without the bincode feature, postcard deserialization will fail + // and the manager should return Ok(None). + // + // We use actual bincode serialization via the dev-dependency to + // produce realistic bytes (not just garbage data — the corrupt_data + // test already covers that). + use serde::Serialize; + use std::collections::HashMap; + use std::str::FromStr; + + #[derive(Serialize)] + struct FakeStore { + response: FakeResponse, + policy: http_cache_semantics::CachePolicy, + } + + #[derive(Serialize)] + struct FakeResponse { + body: Vec, + headers: HashMap, + status: u16, + url: Url, + version: HttpVersion, + } + + let url = Url::from_str("http://example.com/legacy-miss")?; + let req = http::Request::get("http://example.com/legacy-miss") + .body(())?; + let res = http::Response::builder() + .status(200) + .header("cache-control", "max-age=3600") + .body(b"old data".to_vec())?; + let policy = http_cache_semantics::CachePolicy::new(&req, &res); + + let fake_store = FakeStore { + response: FakeResponse { + body: b"old data".to_vec(), + headers: HashMap::new(), + status: 200, + url, + version: HttpVersion::Http11, + }, + policy, + }; + + let bytes = bincode::serialize(&fake_store).unwrap(); + cacache::write(&cache_dir.path().to_path_buf(), key, bytes).await?; + + // Should return Ok(None) — graceful miss, no crash + let result = manager.get(key).await?; + assert!( + result.is_none(), + "Bincode entry should be a cache miss without compat 
features"
+        );
+
+        Ok(())
+    }
+}
 
 #[cfg(feature = "manager-moka")]
@@ -261,6 +505,21 @@ mod with_moka {
         assert!(data.is_none());
         Ok(())
     }
+
+    #[tokio::test]
+    async fn moka_corrupt_data_returns_none() -> Result<()> {
+        let manager = MokaManager::default();
+        let key = "GET:http://example.com/corrupt";
+
+        // Write corrupt data directly to the cache
+        manager.cache.insert(key.to_string(), Arc::new(b"not valid serialized data".to_vec())).await;
+        manager.cache.run_pending_tasks().await;
+
+        // get() should return Ok(None) instead of an error
+        let result = manager.get(key).await?;
+        assert!(result.is_none());
+        Ok(())
+    }
 }
 
 #[cfg(feature = "manager-foyer")]
@@ -312,6 +571,30 @@ mod with_foyer {
         // since foyer handles eviction internally
         Ok(())
     }
+
+    #[tokio::test]
+    async fn foyer_corrupt_data_returns_none() -> Result<()> {
+        // Build the cache directly so we can insert corrupt data
+        let cache: foyer::HybridCache<String, Vec<u8>> =
+            foyer::HybridCacheBuilder::new()
+                .memory(100)
+                .storage()
+                .build()
+                .await
+                .unwrap();
+        let key = "GET:http://example.com/corrupt";
+
+        // Write corrupt data directly to the cache
+        cache.insert(key.to_string(), b"not valid serialized data".to_vec());
+
+        // Wrap in FoyerManager
+        let manager = FoyerManager::new(cache);
+
+        // get() should return Ok(None) instead of an error
+        let result = manager.get(key).await?;
+        assert!(result.is_none());
+        Ok(())
+    }
 }
 
 #[cfg(feature = "manager-cacache")]

From b0d5549e1157d73e73cbe6170c1b30df238e26f7 Mon Sep 17 00:00:00 2001
From: Christian Haynes <06chaynes@gmail.com>
Date: Tue, 17 Feb 2026 17:18:42 -0500
Subject: [PATCH 2/3] fmt

---
 http-cache-reqwest/src/test.rs | 23 +++++---------
 http-cache/src/test.rs         | 57 ++++++++++++++++------------------
 2 files changed, 35 insertions(+), 45 deletions(-)

diff --git a/http-cache-reqwest/src/test.rs b/http-cache-reqwest/src/test.rs
index 7d90de5..abc23db 100644
--- a/http-cache-reqwest/src/test.rs
+++ b/http-cache-reqwest/src/test.rs
@@ -2426,14 +2426,10 @@ mod 
bincode_migration { // Construct a legacy bincode payload let mut headers = HashMap::new(); - headers.insert( - "content-type".to_string(), - "application/json".to_string(), - ); - headers.insert( - "cache-control".to_string(), - CACHEABLE_PUBLIC.to_string(), - ); + headers + .insert("content-type".to_string(), "application/json".to_string()); + headers + .insert("cache-control".to_string(), CACHEABLE_PUBLIC.to_string()); let legacy_body = b"legacy bincode response"; @@ -2492,8 +2488,7 @@ mod bincode_migration { let url = format!("{}/new-endpoint", &mock_server.uri()); let cache_dir = tempfile::tempdir().unwrap(); - let manager = - CACacheManager::new(cache_dir.path().to_path_buf(), true); + let manager = CACacheManager::new(cache_dir.path().to_path_buf(), true); let client = ClientBuilder::new(Client::new()) .with(Cache(HttpCache { @@ -2509,11 +2504,9 @@ mod bincode_migration { assert_eq!(res.bytes().await?, &b"new postcard data"[..]); // Verify it was cached (written with postcard) - let data = CacheManager::get( - &manager, - &format!("GET:{}", &url_parse(&url)?), - ) - .await?; + let data = + CacheManager::get(&manager, &format!("GET:{}", &url_parse(&url)?)) + .await?; assert!(data.is_some(), "New entry should be cached with postcard"); // Second request: should come from cache (mock expects only 1 call) diff --git a/http-cache/src/test.rs b/http-cache/src/test.rs index 9b37b0a..85af2c0 100644 --- a/http-cache/src/test.rs +++ b/http-cache/src/test.rs @@ -216,7 +216,12 @@ mod with_cacache { let key = "GET:http://example.com/corrupt"; // Write corrupt data directly to the cache - cacache::write(&cache_dir.path().to_path_buf(), key, b"not valid serialized data").await?; + cacache::write( + &cache_dir.path().to_path_buf(), + key, + b"not valid serialized data", + ) + .await?; // get() should return Ok(None) instead of an error let result = manager.get(key).await?; @@ -264,20 +269,14 @@ mod cacache_bincode_migration { async fn cacache_bincode_to_postcard_migration() 
-> Result<()> { let url = Url::from_str("http://example.com/legacy")?; let cache_dir = tempfile::tempdir().unwrap(); - let manager = - CACacheManager::new(cache_dir.path().to_path_buf(), true); + let manager = CACacheManager::new(cache_dir.path().to_path_buf(), true); let cache_key = format!("GET:{}", &url); // Construct a legacy bincode payload matching what 0.16/0.21 wrote let mut headers = HashMap::new(); - headers.insert( - "content-type".to_string(), - "application/json".to_string(), - ); - headers.insert( - "cache-control".to_string(), - "max-age=3600".to_string(), - ); + headers + .insert("content-type".to_string(), "application/json".to_string()); + headers.insert("cache-control".to_string(), "max-age=3600".to_string()); let legacy_response = LegacyTestResponse { body: b"legacy cached body".to_vec(), @@ -287,16 +286,14 @@ mod cacache_bincode_migration { version: HttpVersion::Http11, }; - let req = - http::Request::get("http://example.com/legacy").body(())?; + let req = http::Request::get("http://example.com/legacy").body(())?; let res = http::Response::builder() .status(200) .header("cache-control", "max-age=3600") .body(b"legacy cached body".to_vec())?; let policy = CachePolicy::new(&req, &res); - let legacy_store = - LegacyStore { response: legacy_response, policy }; + let legacy_store = LegacyStore { response: legacy_response, policy }; // Serialize with bincode (exactly as old code would have) let bytes = bincode::serialize(&legacy_store).unwrap(); @@ -327,13 +324,11 @@ mod cacache_bincode_migration { async fn cacache_bincode_migration_preserves_headers() -> Result<()> { let url = Url::from_str("http://example.com/headers")?; let cache_dir = tempfile::tempdir().unwrap(); - let manager = - CACacheManager::new(cache_dir.path().to_path_buf(), true); + let manager = CACacheManager::new(cache_dir.path().to_path_buf(), true); let cache_key = format!("GET:{}", &url); let mut headers = HashMap::new(); - headers - .insert("content-type".to_string(), 
"text/html".to_string()); + headers.insert("content-type".to_string(), "text/html".to_string()); headers.insert("x-custom".to_string(), "value123".to_string()); let legacy_response = LegacyTestResponse { @@ -344,16 +339,14 @@ mod cacache_bincode_migration { version: HttpVersion::Http11, }; - let req = - http::Request::get("http://example.com/headers").body(())?; + let req = http::Request::get("http://example.com/headers").body(())?; let res = http::Response::builder() .status(200) .header("cache-control", "max-age=3600") .body(b"test".to_vec())?; let policy = CachePolicy::new(&req, &res); - let legacy_store = - LegacyStore { response: legacy_response, policy }; + let legacy_store = LegacyStore { response: legacy_response, policy }; let bytes = bincode::serialize(&legacy_store).unwrap(); cacache::write(&cache_dir.path().to_path_buf(), &cache_key, bytes) @@ -387,11 +380,9 @@ mod cacache_bincode_graceful_miss { use crate::{CACacheManager, CacheManager}; #[tokio::test] - async fn cacache_bincode_entry_read_without_compat_feature() -> Result<()> - { + async fn cacache_bincode_entry_read_without_compat_feature() -> Result<()> { let cache_dir = tempfile::tempdir().unwrap(); - let manager = - CACacheManager::new(cache_dir.path().to_path_buf(), true); + let manager = CACacheManager::new(cache_dir.path().to_path_buf(), true); let key = "GET:http://example.com/legacy-miss"; // Write data that looks like a bincode-serialized Store. 
@@ -421,8 +412,8 @@ mod cacache_bincode_graceful_miss { } let url = Url::from_str("http://example.com/legacy-miss")?; - let req = http::Request::get("http://example.com/legacy-miss") - .body(())?; + let req = + http::Request::get("http://example.com/legacy-miss").body(())?; let res = http::Response::builder() .status(200) .header("cache-control", "max-age=3600") @@ -512,7 +503,13 @@ mod with_moka { let key = "GET:http://example.com/corrupt"; // Write corrupt data directly to the cache - manager.cache.insert(key.to_string(), Arc::new(b"not valid serialized data".to_vec())).await; + manager + .cache + .insert( + key.to_string(), + Arc::new(b"not valid serialized data".to_vec()), + ) + .await; manager.cache.run_pending_tasks().await; // get() should return Ok(None) instead of an error From ec20bbce133dbd3d50eff6309d18c511f33d38ac Mon Sep 17 00:00:00 2001 From: Christian Haynes <06chaynes@gmail.com> Date: Tue, 17 Feb 2026 17:29:14 -0500 Subject: [PATCH 3/3] updated http-cache-semantics and rand --- compat-tests/read-legacy-cache/Cargo.toml | 2 +- http-cache-quickcache/Cargo.toml | 2 +- http-cache-reqwest/Cargo.toml | 4 ++-- http-cache-surf/Cargo.toml | 2 +- http-cache-tower-server/Cargo.toml | 2 +- http-cache-tower/Cargo.toml | 2 +- http-cache-ureq/Cargo.toml | 2 +- http-cache/Cargo.toml | 6 +++--- http-cache/src/managers/streaming_cache.rs | 2 +- 9 files changed, 12 insertions(+), 12 deletions(-) diff --git a/compat-tests/read-legacy-cache/Cargo.toml b/compat-tests/read-legacy-cache/Cargo.toml index 96de089..27fc102 100644 --- a/compat-tests/read-legacy-cache/Cargo.toml +++ b/compat-tests/read-legacy-cache/Cargo.toml @@ -5,5 +5,5 @@ edition = "2021" [dependencies] http-cache = { path = "../../http-cache", features = ["manager-cacache", "manager-cacache-bincode", "http-headers-compat"] } -http-cache-semantics = "2.1.0" +http-cache-semantics = "3.0.0" tokio = { version = "1", features = ["macros", "rt-multi-thread"] } diff --git a/http-cache-quickcache/Cargo.toml 
b/http-cache-quickcache/Cargo.toml index f85f7b4..a7bc92e 100644 --- a/http-cache-quickcache/Cargo.toml +++ b/http-cache-quickcache/Cargo.toml @@ -18,7 +18,7 @@ rust-version = "1.88.0" [dependencies] async-trait = "0.1.85" postcard = { version = "1.1", default-features = false, features = ["alloc"] } -http-cache-semantics = "2.1.0" +http-cache-semantics = "3.0.0" serde = { version = "1.0.217", features = ["derive"] } quick_cache = "0.6.9" http = "1.2.0" diff --git a/http-cache-reqwest/Cargo.toml b/http-cache-reqwest/Cargo.toml index 36454a5..ca3502d 100644 --- a/http-cache-reqwest/Cargo.toml +++ b/http-cache-reqwest/Cargo.toml @@ -22,7 +22,7 @@ bytes = "1.8.0" http = "1.2.0" http-body = "1.0.1" http-body-util = "0.1.2" -http-cache-semantics = "2.1.0" +http-cache-semantics = "3.0.0" reqwest = { version = "0.13.1", default-features = false, features = [ "stream", ] } @@ -40,7 +40,7 @@ default-features = false [dev-dependencies] bincode = "1.3.3" cacache = { version = "13.1.0", default-features = false, features = ["tokio-runtime"] } -http-cache-semantics = "2.1.0" +http-cache-semantics = "3.0.0" serde = { version = "1.0", features = ["derive"] } tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread"] } wiremock = "0.6.0" diff --git a/http-cache-surf/Cargo.toml b/http-cache-surf/Cargo.toml index ac6492f..2b850c1 100644 --- a/http-cache-surf/Cargo.toml +++ b/http-cache-surf/Cargo.toml @@ -18,7 +18,7 @@ rust-version = "1.88.0" [dependencies] async-trait = "0.1.85" http = "1.2.0" -http-cache-semantics = "2.1.0" +http-cache-semantics = "3.0.0" http-types = "2.12.0" surf = { version = "2.3.2", default-features = false } diff --git a/http-cache-tower-server/Cargo.toml b/http-cache-tower-server/Cargo.toml index 3f61adc..eb93984 100644 --- a/http-cache-tower-server/Cargo.toml +++ b/http-cache-tower-server/Cargo.toml @@ -17,7 +17,7 @@ rust-version = "1.88.0" [dependencies] http-cache = { version = "1.0.0-alpha.4", path = "../http-cache", default-features = false 
} -http-cache-semantics = "2.1.0" +http-cache-semantics = "3.0.0" tower = { version = "0.5.2", features = ["util"] } http = "1.2.0" http-body = "1.0.1" diff --git a/http-cache-tower/Cargo.toml b/http-cache-tower/Cargo.toml index 6a51395..4c05469 100644 --- a/http-cache-tower/Cargo.toml +++ b/http-cache-tower/Cargo.toml @@ -17,7 +17,7 @@ rust-version = "1.88.0" [dependencies] http-cache = { version = "1.0.0-alpha.4", path = "../http-cache", default-features = false } -http-cache-semantics = "2.1.0" +http-cache-semantics = "3.0.0" tower = { version = "0.5.2", features = ["util"] } http = "1.2.0" http-body = "1.0.1" diff --git a/http-cache-ureq/Cargo.toml b/http-cache-ureq/Cargo.toml index 0483eea..41bf476 100644 --- a/http-cache-ureq/Cargo.toml +++ b/http-cache-ureq/Cargo.toml @@ -18,7 +18,7 @@ rust-version = "1.88.0" [dependencies] async-trait = "0.1.85" http = "1.2.0" -http-cache-semantics = "2.1.0" +http-cache-semantics = "3.0.0" serde = { version = "1.0.217", features = ["derive"] } serde_json = { version = "1.0", optional = true } smol = "2.0.2" diff --git a/http-cache/Cargo.toml b/http-cache/Cargo.toml index f6dd491..b512514 100644 --- a/http-cache/Cargo.toml +++ b/http-cache/Cargo.toml @@ -28,7 +28,7 @@ log = "0.4.22" http = "1.2.0" http-body = "1.0.1" http-body-util = "0.1.2" -http-cache-semantics = "2.1.0" +http-cache-semantics = "3.0.0" http-types = { version = "2.12.0", default-features = false, optional = true } httpdate = "1.0.3" moka = { version = "0.12.10", features = ["future"], optional = true } @@ -48,12 +48,12 @@ governor = { version = "0.10.1", optional = true } tempfile = { version = "3.13.0", optional = true } async-lock = { version = "3.4.0", optional = true } async-compat = { version = "0.2", optional = true } -rand = { version = "0.9.2", optional = true } +rand = { version = "0.10.0", optional = true } [dev-dependencies] bincode = "1.3.3" smol = "2.0.2" -http-cache-semantics = "2.1.0" +http-cache-semantics = "3.0.0" tokio = { version = 
"1.43.0", features = [ "macros", "rt", "rt-multi-thread" ] } tempfile = "3.13.0" macro_rules_attribute = "0.2.0" diff --git a/http-cache/src/managers/streaming_cache.rs b/http-cache/src/managers/streaming_cache.rs index 0da0755..105e3ac 100644 --- a/http-cache/src/managers/streaming_cache.rs +++ b/http-cache/src/managers/streaming_cache.rs @@ -72,7 +72,7 @@ use http_body::Body; use http_body_util::{BodyExt, Empty}; use http_cache_semantics::CachePolicy; use moka::future::Cache; -use rand::Rng; +use rand::RngExt; use serde::{Deserialize, Serialize}; use tokio::io::AsyncWriteExt; use tokio::sync::mpsc;