diff --git a/Cargo.toml b/Cargo.toml index 06b5f55..81f2267 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ resolver = "2" [workspace.package] edition = "2021" -version = "0.0.22" +version = "0.0.23" authors = ["Jun Kurihara"] homepage = "https://github.com/junkurihara/httpsig-rs" repository = "https://github.com/junkurihara/httpsig-rs" diff --git a/httpsig-hyper/Cargo.toml b/httpsig-hyper/Cargo.toml index de941b9..0b4a8fd 100644 --- a/httpsig-hyper/Cargo.toml +++ b/httpsig-hyper/Cargo.toml @@ -19,7 +19,7 @@ rsa-signature = ["httpsig/rsa-signature"] [dependencies] -httpsig = { path = "../httpsig", version = "0.0.22" } +httpsig = { path = "../httpsig", version = "0.0.23" } thiserror = { version = "2.0.18" } tracing = { version = "0.1.44" } @@ -28,6 +28,7 @@ futures = { version = "0.3.31", default-features = false, features = [ "async-await", ] } indexmap = { version = "2.11.4" } +subtle = { version = "2.6.1", default-features = false } # content digest with rfc8941 structured field values sha2 = { version = "0.10.9", default-features = false } diff --git a/httpsig-hyper/src/hyper_content_digest.rs b/httpsig-hyper/src/hyper_content_digest.rs index 5564289..51eb75c 100644 --- a/httpsig-hyper/src/hyper_content_digest.rs +++ b/httpsig-hyper/src/hyper_content_digest.rs @@ -8,6 +8,7 @@ use http_body_util::{combinators::BoxBody, BodyExt, Full}; use sha2::Digest; use std::future::Future; use std::str::FromStr; +use subtle::ConstantTimeEq; // hyper's http specific extension to generate and verify http signature @@ -133,7 +134,7 @@ where Self: Sized, { let header_map = self.headers(); - let (cd_type, _expected_digest) = extract_content_digest(header_map).await?; + let (cd_type, expected_digest) = extract_content_digest(header_map).await?; let (header, body) = self.into_parts(); let body_bytes = body .into_bytes() @@ -141,7 +142,8 @@ where .map_err(|_e| HyperDigestError::HttpBodyError("Failed to get body bytes".to_string()))?; let digest = derive_digest(&body_bytes, 
&cd_type); - if digest == _expected_digest { + // Use constant time equality check to prevent timing attacks + if is_equal_digest(&digest, &expected_digest) { let new_body = Full::new(body_bytes).map_err(|never| match never {}).boxed(); let res = Request::from_parts(header, new_body); Ok(res) @@ -184,7 +186,7 @@ where Self: Sized, { let header_map = self.headers(); - let (cd_type, _expected_digest) = extract_content_digest(header_map).await?; + let (cd_type, expected_digest) = extract_content_digest(header_map).await?; let (header, body) = self.into_parts(); let body_bytes = body .into_bytes() @@ -192,7 +194,8 @@ where .map_err(|_e| HyperDigestError::HttpBodyError("Failed to get body bytes".to_string()))?; let digest = derive_digest(&body_bytes, &cd_type); - if digest == _expected_digest { + // Use constant time equality check to prevent timing attacks + if is_equal_digest(&digest, &expected_digest) { let new_body = Full::new(body_bytes).map_err(|never| match never {}).boxed(); let res = Response::from_parts(header, new_body); Ok(res) @@ -204,6 +207,16 @@ where } } +// Constant time equality check for digest verification to prevent timing attacks +fn is_equal_digest(digest1: &[u8], digest2: &[u8]) -> bool { + // Early return if the lengths are different to prevent unnecessary computation, + // which is not a security risk in this context since the digest lengths are fixed for each algorithm. 
+ if digest1.len() != digest2.len() { + return false; + } + digest1.ct_eq(digest2).into() + } + async fn extract_content_digest(header_map: &http::HeaderMap) -> HyperDigestResult<(ContentDigestType, Vec<u8>)> { let content_digest_header = header_map .get(CONTENT_DIGEST_HEADER) @@ -301,4 +314,116 @@ mod tests { let verified = res.verify_content_digest().await; assert!(verified.is_ok()); } + + #[tokio::test] + async fn hyper_request_digest_mismatch_by_body_tamper_should_fail() { + // 1) Create a request and set a correct Content-Digest for the original body + let body = Full::new(&b"{\"hello\": \"world\"}"[..]); + let req = Request::builder() + .method("GET") + .uri("https://example.com/") + .header("date", "Sun, 09 May 2021 18:30:00 GMT") + .header("content-type", "application/json") + .body(body) + .unwrap(); + + let req = req.set_content_digest(&ContentDigestType::Sha256).await.unwrap(); + assert!(req.headers().contains_key(CONTENT_DIGEST_HEADER)); + + // 2) Tamper the body while keeping the digest header unchanged + let (parts, _old_body) = req.into_parts(); + let tampered_body = Full::new(&b"{\"hello\": \"pwned\"}"[..]).boxed(); + let tampered_req = Request::from_parts(parts, tampered_body); + + // 3) Verification must fail + let verified = tampered_req.verify_content_digest().await; + assert!(verified.is_err()); + match verified.err().unwrap() { + HyperDigestError::InvalidContentDigest(_) => {} + e => panic!("unexpected error: {e:?}"), + } + } + + #[tokio::test] + async fn hyper_response_digest_mismatch_by_header_tamper_should_fail() { + // 1) Create a response and set a correct Content-Digest + let body = Full::new(&b"{\"hello\": \"world\"}"[..]); + let res = Response::builder() + .status(200) + .header("date", "Sun, 09 May 2021 18:30:00 GMT") + .header("content-type", "application/json") + .body(body) + .unwrap(); + + let res = res.set_content_digest(&ContentDigestType::Sha256).await.unwrap(); + let (mut parts, body) = res.into_parts(); + + // 2) Tamper the 
Content-Digest header (keep it syntactically valid) + // Expected digest is: X48E9qOokqqrvdts8nOJRJN3OWDUoyWxBf7kbu9DBPE= + // Change the first character to another valid base64 character. + parts.headers.insert( + CONTENT_DIGEST_HEADER, + "sha-256=:Y48E9qOokqqrvdts8nOJRJN3OWDUoyWxBf7kbu9DBPE=:".parse().unwrap(), + ); + + let tampered_res = Response::from_parts(parts, body); + + // 3) Verification must fail + let verified = tampered_res.verify_content_digest().await; + assert!(verified.is_err()); + match verified.err().unwrap() { + HyperDigestError::InvalidContentDigest(_) => {} + e => panic!("unexpected error: {e:?}"), + } + } + + #[tokio::test] + async fn hyper_request_missing_content_digest_header_should_fail() { + let body = Full::new(&b"{\"hello\": \"world\"}"[..]); + let req = Request::builder() + .method("GET") + .uri("https://example.com/") + .header("date", "Sun, 09 May 2021 18:30:00 GMT") + .header("content-type", "application/json") + .body(body) + .unwrap(); + + // No set_content_digest() call => header missing + let verified = req.verify_content_digest().await; + assert!(verified.is_err()); + match verified.err().unwrap() { + HyperDigestError::NoDigestHeader(_) => {} + e => panic!("unexpected error: {e:?}"), + } + } + + #[tokio::test] + async fn hyper_request_digest_length_mismatch_should_fail() { + // 1) Create a request and attach a valid Content-Digest header + let body = Full::new(&b"{\"hello\": \"world\"}"[..]); + let req = Request::builder() + .method("GET") + .uri("https://example.com/") + .header("date", "Sun, 09 May 2021 18:30:00 GMT") + .header("content-type", "application/json") + .body(body) + .unwrap(); + + let req = req.set_content_digest(&ContentDigestType::Sha256).await.unwrap(); + + // 2) Extract parts and replace the Content-Digest header + // with a syntactically valid but length-mismatched base64 value. + // This ensures that length mismatches are properly rejected. 
+ let (mut parts, body) = req.into_parts(); + + parts + .headers + .insert(CONTENT_DIGEST_HEADER, "sha-256=:AAAA:".parse().unwrap()); + + let tampered_req = Request::from_parts(parts, body); + + // 3) Verification must fail due to digest length mismatch + let verified = tampered_req.verify_content_digest().await; + assert!(verified.is_err()); + } }