quality: MSRV, tarpaulin config, proptest for parsers (#84)

* fix: proxy dedup, multi-registry GC, TOCTOU and credential hygiene

- Deduplicate proxy_fetch/proxy_fetch_text into generic proxy_fetch_core
  with response extractor closure (removes ~50 lines of copy-paste)
- GC now scans all registry prefixes, not just docker/
- Add tracing::warn to fire-and-forget cache writes in docker proxy
- Mark S3 credentials as skip_serializing to prevent accidental leaks
- Remove TOCTOU race in LocalStorage get/delete (redundant exists check)

* chore: clean up root directory structure

- Move Dockerfile.astra and Dockerfile.redos to deploy/ (niche builds
  should not clutter the project root)
- Harden .gitignore to exclude session files, working notes, and
  internal review scripts

* refactor(metrics): replace 13 atomic fields with CounterMap

Per-registry download/upload counters were 13 individual AtomicU64
fields, each duplicated across new(), with_persistence(), save(),
record_download(), record_upload(), and get_registry_* (6 touch points
per counter). Adding a new registry required changes in 6+ places.

Now uses CounterMap (HashMap<String, AtomicU64>) for per-registry
counters. Adding a new registry = one entry in REGISTRIES const.
Added Go registry to REGISTRIES, gaining go metrics for free.

* quality: add MSRV, tarpaulin config, proptest for parsers

- Set rust-version = 1.75 in workspace Cargo.toml (MSRV policy)
- Add tarpaulin.toml: llvm engine, fail-under=25, json+html output
- Add coverage/ to .gitignore
- Update CI to use tarpaulin.toml instead of inline flags
- Add proptest dev-dependency and property tests:
  - validation.rs: 16 tests (never-panics + invariants for all 4 validators)
  - pypi.rs: 5 tests (extract_filename never-panics + format assertions)

* test: add unit tests for 14 modules, coverage 21% → 30%

Add 149 new tests across auth, backup, gc, metrics, mirror parsers,
docker (manifest detection, session cleanup, metadata serde),
docker_auth (token cache), maven, npm, pypi (normalize, rewrite, extract),
raw (content-type guessing), request_id, and s3 (URI encoding).

Update tarpaulin.toml: raise fail-under to 30, exclude UI/main from
coverage reporting as they require integration tests.

* bench: add criterion benchmarks for validation and manifest parsing

Add parsing benchmark suite with 14 benchmarks covering:
- Storage key, Docker name, digest, and reference validation
- Docker manifest media type detection (v2, OCI index, minimal, invalid)

Run with: cargo bench --package nora-registry --bench parsing

* test: add 48 integration tests via tower oneshot

Add integration tests for all HTTP handlers:
- health (3), raw (7), cargo (4), maven (4), request_id (2)
- pypi (5), npm (5), docker (12), auth (6)

Create test_helpers.rs with TestContext pattern.
Add tower and http-body-util dev-dependencies.
Update tarpaulin fail-under 30 to 40.

Coverage: 29.5% to 43.3% (2089/4825 lines)

* fix: clean clippy warnings in tests, fix flaky audit test

Add #[allow(clippy::unwrap_used)] to 18 test modules.
Fix 3 additional clippy lints: writeln_empty_string, needless_update,
unnecessary_get_then_check.
Fix flaky audit test: replace single sleep(50ms) with retry loop (max 1s).
Prefix unused token variable with underscore.

cargo clippy --all-targets now reports 0 warnings (previously 245 findings)
This commit is contained in:
2026-04-05 10:01:50 +03:00
committed by GitHub
parent 35a9e34a3e
commit ac3a8a7c43
37 changed files with 3452 additions and 130 deletions

View File

@@ -51,7 +51,7 @@ jobs:
- name: Run coverage
run: |
cargo tarpaulin --package nora-registry --out json --output-dir coverage/ 2>&1 | tee /tmp/tarpaulin.log
cargo tarpaulin --config tarpaulin.toml 2>&1 | tee /tmp/tarpaulin.log
COVERAGE=$(python3 -c "import json; d=json.load(open('coverage/tarpaulin-report.json')); print(f\"{d['coverage']:.1f}\")")
echo "COVERAGE=$COVERAGE" >> $GITHUB_ENV
echo "Coverage: $COVERAGE%"

10
.gitignore vendored
View File

@@ -15,3 +15,13 @@ data/
node_modules/
package-lock.json
/tmp/
# Working files (never commit)
SESSION_*.md
TODO.md
FEEDBACK.txt
*.session.txt
*-this-session-*.txt
nora-review.sh
coverage/
target/criterion/

281
Cargo.lock generated
View File

@@ -32,6 +32,12 @@ dependencies = [
"libc",
]
[[package]]
name = "anes"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
[[package]]
name = "anstream"
version = "1.0.0"
@@ -219,6 +225,21 @@ dependencies = [
"zeroize",
]
[[package]]
name = "bit-set"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
dependencies = [
"bit-vec",
]
[[package]]
name = "bit-vec"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
[[package]]
name = "bitflags"
version = "2.10.0"
@@ -280,6 +301,12 @@ version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33"
[[package]]
name = "cast"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "cc"
version = "1.2.55"
@@ -318,6 +345,33 @@ dependencies = [
"windows-link",
]
[[package]]
name = "ciborium"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e"
dependencies = [
"ciborium-io",
"ciborium-ll",
"serde",
]
[[package]]
name = "ciborium-io"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757"
[[package]]
name = "ciborium-ll"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9"
dependencies = [
"ciborium-io",
"half",
]
[[package]]
name = "cipher"
version = "0.4.4"
@@ -432,12 +486,73 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "criterion"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f"
dependencies = [
"anes",
"cast",
"ciborium",
"clap",
"criterion-plot",
"is-terminal",
"itertools",
"num-traits",
"once_cell",
"oorandom",
"plotters",
"rayon",
"regex",
"serde",
"serde_derive",
"serde_json",
"tinytemplate",
"walkdir",
]
[[package]]
name = "criterion-plot"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
dependencies = [
"cast",
"itertools",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
dependencies = [
"crossbeam-epoch",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-epoch"
version = "0.9.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
[[package]]
name = "crunchy"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
name = "crypto-common"
version = "0.1.7"
@@ -543,6 +658,12 @@ dependencies = [
"syn",
]
[[package]]
name = "either"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "encode_unicode"
version = "1.0.0"
@@ -823,6 +944,17 @@ dependencies = [
"tracing",
]
[[package]]
name = "half"
version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b"
dependencies = [
"cfg-if",
"crunchy",
"zerocopy",
]
[[package]]
name = "hashbrown"
version = "0.14.5"
@@ -1189,12 +1321,32 @@ dependencies = [
"serde",
]
[[package]]
name = "is-terminal"
version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46"
dependencies = [
"hermit-abi",
"libc",
"windows-sys 0.61.2",
]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"
[[package]]
name = "itertools"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "1.0.17"
@@ -1382,16 +1534,19 @@ dependencies = [
"bcrypt",
"chrono",
"clap",
"criterion",
"flate2",
"governor",
"hex",
"hmac",
"http-body-util",
"httpdate",
"indicatif",
"lazy_static",
"parking_lot",
"percent-encoding",
"prometheus",
"proptest",
"reqwest",
"serde",
"serde_json",
@@ -1401,6 +1556,7 @@ dependencies = [
"thiserror 2.0.18",
"tokio",
"toml",
"tower",
"tower-http",
"tower_governor",
"tracing",
@@ -1452,6 +1608,12 @@ version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe"
[[package]]
name = "oorandom"
version = "11.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e"
[[package]]
name = "parking_lot"
version = "0.12.5"
@@ -1524,6 +1686,34 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "plotters"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747"
dependencies = [
"num-traits",
"plotters-backend",
"plotters-svg",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "plotters-backend"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a"
[[package]]
name = "plotters-svg"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670"
dependencies = [
"plotters-backend",
]
[[package]]
name = "portable-atomic"
version = "1.13.0"
@@ -1582,6 +1772,25 @@ dependencies = [
"thiserror 2.0.18",
]
[[package]]
name = "proptest"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b45fcc2344c680f5025fe57779faef368840d0bd1f42f216291f0dc4ace4744"
dependencies = [
"bit-set",
"bit-vec",
"bitflags",
"num-traits",
"rand",
"rand_chacha",
"rand_xorshift",
"regex-syntax",
"rusty-fork",
"tempfile",
"unarray",
]
[[package]]
name = "protobuf"
version = "3.7.2"
@@ -1617,6 +1826,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "quick-error"
version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quinn"
version = "0.11.9"
@@ -1725,6 +1940,15 @@ dependencies = [
"getrandom 0.3.4",
]
[[package]]
name = "rand_xorshift"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a"
dependencies = [
"rand_core 0.9.5",
]
[[package]]
name = "raw-cpuid"
version = "11.6.0"
@@ -1734,6 +1958,26 @@ dependencies = [
"bitflags",
]
[[package]]
name = "rayon"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f"
dependencies = [
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91"
dependencies = [
"crossbeam-deque",
"crossbeam-utils",
]
[[package]]
name = "redox_syscall"
version = "0.5.18"
@@ -1929,6 +2173,18 @@ version = "1.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
[[package]]
name = "rusty-fork"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2"
dependencies = [
"fnv",
"quick-error",
"tempfile",
"wait-timeout",
]
[[package]]
name = "ryu"
version = "1.0.22"
@@ -2247,6 +2503,16 @@ dependencies = [
"zerovec",
]
[[package]]
name = "tinytemplate"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc"
dependencies = [
"serde",
"serde_json",
]
[[package]]
name = "tinyvec"
version = "1.10.0"
@@ -2545,6 +2811,12 @@ version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
[[package]]
name = "unarray"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"
[[package]]
name = "unicase"
version = "2.9.0"
@@ -2671,6 +2943,15 @@ version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "wait-timeout"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11"
dependencies = [
"libc",
]
[[package]]
name = "walkdir"
version = "2.5.0"

View File

@@ -8,6 +8,7 @@ members = [
[workspace.package]
version = "0.3.0"
edition = "2021"
rust-version = "1.75"
license = "MIT"
authors = ["DevITWay <devitway@gmail.com>"]
repository = "https://github.com/getnora-io/nora"

View File

@@ -54,5 +54,13 @@ tower-http = { version = "0.6", features = ["set-header"] }
percent-encoding = "2"
[dev-dependencies]
proptest = "1"
tempfile = "3"
wiremock = "0.6"
criterion = { version = "0.5", features = ["html_reports"] }
tower = { version = "0.5", features = ["util"] }
http-body-util = "0.1"
[[bench]]
name = "parsing"
harness = false

View File

@@ -0,0 +1,109 @@
//! Criterion benchmarks for request-input validation and Docker manifest
//! media-type detection.
//!
//! Run with: cargo bench --package nora-registry --bench parsing

use criterion::{criterion_group, criterion_main, Criterion};
// Prefer the standard-library black_box (stable since Rust 1.66; this
// workspace's MSRV is 1.75) over criterion's re-export, which newer
// criterion releases deprecate in its favor.
use std::hint::black_box;

use nora_registry::validation::{
    validate_digest, validate_docker_name, validate_docker_reference, validate_storage_key,
};

/// Accept- and reject-path benchmarks for the four input validators.
fn bench_validation(c: &mut Criterion) {
    let mut group = c.benchmark_group("validation");

    group.bench_function("storage_key_short", |b| {
        b.iter(|| validate_storage_key(black_box("docker/alpine/blobs/sha256:abc123")))
    });
    group.bench_function("storage_key_long", |b| {
        let key = "maven/com/example/deep/nested/path/artifact-1.0.0-SNAPSHOT.jar";
        b.iter(|| validate_storage_key(black_box(key)))
    });
    // Path-traversal input: measures the rejection path.
    group.bench_function("storage_key_reject", |b| {
        b.iter(|| validate_storage_key(black_box("../etc/passwd")))
    });
    group.bench_function("docker_name_simple", |b| {
        b.iter(|| validate_docker_name(black_box("library/alpine")))
    });
    group.bench_function("docker_name_nested", |b| {
        b.iter(|| validate_docker_name(black_box("my-org/sub/repo-name")))
    });
    // Uppercase repository names are invalid; measures the rejection path.
    group.bench_function("docker_name_reject", |b| {
        b.iter(|| validate_docker_name(black_box("INVALID/NAME")))
    });
    group.bench_function("digest_sha256", |b| {
        b.iter(|| {
            validate_digest(black_box(
                "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
            ))
        })
    });
    group.bench_function("digest_reject", |b| {
        b.iter(|| validate_digest(black_box("md5:abc")))
    });
    group.bench_function("reference_tag", |b| {
        b.iter(|| validate_docker_reference(black_box("v1.2.3-alpine")))
    });
    group.bench_function("reference_digest", |b| {
        b.iter(|| {
            validate_docker_reference(black_box(
                "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
            ))
        })
    });

    group.finish();
}

/// Manifest media-type detection against representative payloads: explicit
/// Docker v2, an OCI index (no explicit mediaType), a minimal manifest, and
/// invalid JSON.
fn bench_manifest_detection(c: &mut Criterion) {
    let mut group = c.benchmark_group("manifest_detection");

    // Fixtures are built once, outside the measured closures, so only the
    // detection call itself is timed.
    let docker_v2 = serde_json::json!({
        "mediaType": "application/vnd.docker.distribution.manifest.v2+json",
        "schemaVersion": 2,
        "config": {"mediaType": "application/vnd.docker.container.image.v1+json", "digest": "sha256:abc"},
        "layers": [{"mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip", "digest": "sha256:def", "size": 1000}]
    })
    .to_string();
    let oci_index = serde_json::json!({
        "schemaVersion": 2,
        "manifests": [
            {"digest": "sha256:aaa", "platform": {"os": "linux", "architecture": "amd64"}},
            {"digest": "sha256:bbb", "platform": {"os": "linux", "architecture": "arm64"}}
        ]
    })
    .to_string();
    let minimal = serde_json::json!({"schemaVersion": 2}).to_string();

    group.bench_function("docker_v2_explicit", |b| {
        b.iter(|| {
            nora_registry::docker_fuzz::detect_manifest_media_type(black_box(docker_v2.as_bytes()))
        })
    });
    group.bench_function("oci_index", |b| {
        b.iter(|| {
            nora_registry::docker_fuzz::detect_manifest_media_type(black_box(oci_index.as_bytes()))
        })
    });
    group.bench_function("minimal_json", |b| {
        b.iter(|| {
            nora_registry::docker_fuzz::detect_manifest_media_type(black_box(minimal.as_bytes()))
        })
    });
    group.bench_function("invalid_json", |b| {
        b.iter(|| nora_registry::docker_fuzz::detect_manifest_media_type(black_box(b"not json")))
    });

    group.finish();
}

criterion_group!(benches, bench_validation, bench_manifest_detection);
criterion_main!(benches);

View File

@@ -77,6 +77,7 @@ impl AuditLog {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;
use tempfile::TempDir;
@@ -112,10 +113,17 @@ mod tests {
let entry = AuditEntry::new("pull", "user1", "lodash", "npm", "downloaded");
log.log(entry);
// spawn_blocking is fire-and-forget; give it time to flush
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
// spawn_blocking is fire-and-forget; retry until flushed (max 1s)
let path = log.path().clone();
let mut content = String::new();
for _ in 0..20 {
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
content = std::fs::read_to_string(&path).unwrap_or_default();
if content.contains(r#""action":"pull""#) {
break;
}
}
let content = std::fs::read_to_string(log.path()).unwrap();
assert!(content.contains(r#""action":"pull""#));
assert!(content.contains(r#""actor":"user1""#));
assert!(content.contains(r#""artifact":"lodash""#));
@@ -130,11 +138,20 @@ mod tests {
log.log(AuditEntry::new("pull", "user", "b", "npm", ""));
log.log(AuditEntry::new("delete", "admin", "c", "maven", ""));
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
// Retry until all 3 entries flushed (max 1s)
let path = log.path().clone();
let mut line_count = 0;
for _ in 0..20 {
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
if let Ok(content) = std::fs::read_to_string(&path) {
line_count = content.lines().count();
if line_count >= 3 {
break;
}
}
}
let content = std::fs::read_to_string(log.path()).unwrap();
let lines: Vec<&str> = content.lines().collect();
assert_eq!(lines.len(), 3);
assert_eq!(line_count, 3);
}
#[test]

View File

@@ -366,6 +366,7 @@ pub fn token_routes() -> Router<Arc<AppState>> {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;
use std::io::Write;
@@ -403,7 +404,7 @@ mod tests {
fn test_htpasswd_loading_with_comments() {
let mut file = NamedTempFile::new().unwrap();
writeln!(file, "# This is a comment").unwrap();
writeln!(file, "").unwrap();
writeln!(file).unwrap();
let hash = bcrypt::hash("secret", 4).unwrap();
writeln!(file, "admin:{}", hash).unwrap();
file.flush().unwrap();
@@ -472,4 +473,185 @@ mod tests {
assert!(hash.starts_with("$2"));
assert!(bcrypt::verify("test123", &hash).unwrap());
}
// --- is_public_path: routes that must bypass authentication ---
// Health/readiness/metrics probes are always reachable.
#[test]
fn test_is_public_path_health() {
assert!(is_public_path("/health"));
assert!(is_public_path("/ready"));
assert!(is_public_path("/metrics"));
}
// Docker registry API version check endpoint, with and without trailing slash.
#[test]
fn test_is_public_path_v2() {
assert!(is_public_path("/v2/"));
assert!(is_public_path("/v2"));
}
// Dashboard UI pages are public.
#[test]
fn test_is_public_path_ui() {
assert!(is_public_path("/ui"));
assert!(is_public_path("/ui/dashboard"));
assert!(is_public_path("/ui/repos"));
}
// OpenAPI spec and docs UI are public.
#[test]
fn test_is_public_path_api_docs() {
assert!(is_public_path("/api-docs"));
assert!(is_public_path("/api-docs/openapi.json"));
assert!(is_public_path("/api/ui"));
}
// Token management endpoints are public here; they perform their own
// username/password check internally (see token_routes).
#[test]
fn test_is_public_path_tokens() {
assert!(is_public_path("/api/tokens"));
assert!(is_public_path("/api/tokens/list"));
assert!(is_public_path("/api/tokens/revoke"));
}
#[test]
fn test_is_public_path_root() {
assert!(is_public_path("/"));
}
// --- is_public_path: routes that must require authentication ---
// Actual registry content paths are never public.
#[test]
fn test_is_not_public_path_registry() {
assert!(!is_public_path("/v2/library/alpine/manifests/latest"));
assert!(!is_public_path("/npm/lodash"));
assert!(!is_public_path("/maven/com/example"));
assert!(!is_public_path("/pypi/simple/flask"));
}
// Unknown/arbitrary paths default to protected.
#[test]
fn test_is_not_public_path_random() {
assert!(!is_public_path("/admin"));
assert!(!is_public_path("/secret"));
assert!(!is_public_path("/api/data"));
}
// --- serde defaults for token creation ---
#[test]
fn test_default_role_str() {
assert_eq!(default_role_str(), "read");
}
#[test]
fn test_default_ttl() {
assert_eq!(default_ttl(), 30);
}
// Omitted fields fall back to the defaults above (30-day TTL, read role).
#[test]
fn test_create_token_request_defaults() {
let json = r#"{"username":"admin","password":"pass"}"#;
let req: CreateTokenRequest = serde_json::from_str(json).unwrap();
assert_eq!(req.username, "admin");
assert_eq!(req.password, "pass");
assert_eq!(req.ttl_days, 30);
assert_eq!(req.role, "read");
assert!(req.description.is_none());
}
// Explicit fields override every default.
#[test]
fn test_create_token_request_custom() {
let json = r#"{"username":"admin","password":"pass","ttl_days":90,"role":"write","description":"CI token"}"#;
let req: CreateTokenRequest = serde_json::from_str(json).unwrap();
assert_eq!(req.ttl_days, 90);
assert_eq!(req.role, "write");
assert_eq!(req.description, Some("CI token".to_string()));
}
// Response must surface both the token value and its lifetime.
#[test]
fn test_create_token_response_serialization() {
let resp = CreateTokenResponse {
token: "nora_abc123".to_string(),
expires_in_days: 30,
};
let json = serde_json::to_string(&resp).unwrap();
assert!(json.contains("nora_abc123"));
assert!(json.contains("30"));
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod integration_tests {
//! End-to-end tests for the auth middleware, driving real requests through
//! the router via the `send`/`send_with_headers` helpers in `test_helpers`.
use crate::test_helpers::*;
use axum::http::{Method, StatusCode};
use base64::{engine::general_purpose::STANDARD, Engine};
// With auth disabled (default test context), even writes pass through.
#[tokio::test]
async fn test_auth_disabled_passes_all() {
let ctx = create_test_context();
let response = send(&ctx.app, Method::PUT, "/raw/test.txt", b"data".to_vec()).await;
assert_eq!(response.status(), StatusCode::CREATED);
}
// Health/readiness and the docker /v2/ ping stay reachable without credentials
// even when auth is enabled.
#[tokio::test]
async fn test_auth_public_paths_always_pass() {
let ctx = create_test_context_with_auth(&[("admin", "secret")]);
let response = send(&ctx.app, Method::GET, "/health", "").await;
assert_eq!(response.status(), StatusCode::OK);
let response = send(&ctx.app, Method::GET, "/ready", "").await;
assert_eq!(response.status(), StatusCode::OK);
let response = send(&ctx.app, Method::GET, "/v2/", "").await;
assert_eq!(response.status(), StatusCode::OK);
}
// Missing credentials: 401 plus a WWW-Authenticate challenge so clients know
// how to retry.
#[tokio::test]
async fn test_auth_blocks_without_credentials() {
let ctx = create_test_context_with_auth(&[("admin", "secret")]);
let response = send(&ctx.app, Method::PUT, "/raw/test.txt", b"data".to_vec()).await;
assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
assert!(response.headers().contains_key("www-authenticate"));
}
// Valid Basic credentials allow the write through.
#[tokio::test]
async fn test_auth_basic_works() {
let ctx = create_test_context_with_auth(&[("admin", "secret")]);
let header_val = format!("Basic {}", STANDARD.encode("admin:secret"));
let response = send_with_headers(
&ctx.app,
Method::PUT,
"/raw/test.txt",
vec![("authorization", &header_val)],
b"data".to_vec(),
)
.await;
assert_eq!(response.status(), StatusCode::CREATED);
}
// Wrong password is rejected even for a known user.
#[tokio::test]
async fn test_auth_basic_wrong_password() {
let ctx = create_test_context_with_auth(&[("admin", "secret")]);
let header_val = format!("Basic {}", STANDARD.encode("admin:wrong"));
let response = send_with_headers(
&ctx.app,
Method::PUT,
"/raw/test.txt",
vec![("authorization", &header_val)],
b"data".to_vec(),
)
.await;
assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
}
// Anonymous-read mode: GET is open, PUT still requires credentials.
#[tokio::test]
async fn test_auth_anonymous_read() {
let ctx = create_test_context_with_anonymous_read(&[("admin", "secret")]);
// Upload with auth
let header_val = format!("Basic {}", STANDARD.encode("admin:secret"));
let response = send_with_headers(
&ctx.app,
Method::PUT,
"/raw/test.txt",
vec![("authorization", &header_val)],
b"data".to_vec(),
)
.await;
assert_eq!(response.status(), StatusCode::CREATED);
// Read without auth should work
let response = send(&ctx.app, Method::GET, "/raw/test.txt", "").await;
assert_eq!(response.status(), StatusCode::OK);
// Write without auth should fail
let response = send(&ctx.app, Method::PUT, "/raw/test2.txt", b"data".to_vec()).await;
assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
}
}

View File

@@ -300,3 +300,134 @@ fn format_bytes(bytes: u64) -> String {
format!("{} B", bytes)
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
//! Unit tests for `format_bytes`, `BackupMetadata` serde, and the
//! backup/restore round trip against local storage.
use super::*;
#[test]
fn test_format_bytes_zero() {
assert_eq!(format_bytes(0), "0 B");
}
// Values below 1024 are reported in plain bytes.
#[test]
fn test_format_bytes_bytes() {
assert_eq!(format_bytes(512), "512 B");
assert_eq!(format_bytes(1023), "1023 B");
}
// 1024-based units with two decimal places from KB upward.
#[test]
fn test_format_bytes_kilobytes() {
assert_eq!(format_bytes(1024), "1.00 KB");
assert_eq!(format_bytes(1536), "1.50 KB");
assert_eq!(format_bytes(10240), "10.00 KB");
}
#[test]
fn test_format_bytes_megabytes() {
assert_eq!(format_bytes(1048576), "1.00 MB");
assert_eq!(format_bytes(5 * 1024 * 1024), "5.00 MB");
}
#[test]
fn test_format_bytes_gigabytes() {
assert_eq!(format_bytes(1073741824), "1.00 GB");
assert_eq!(format_bytes(3 * 1024 * 1024 * 1024), "3.00 GB");
}
// Snapshot metadata serializes with snake_case field names.
#[test]
fn test_backup_metadata_serialization() {
let meta = BackupMetadata {
version: "0.3.0".to_string(),
created_at: chrono::Utc::now(),
artifact_count: 42,
total_bytes: 1024000,
storage_backend: "local".to_string(),
};
let json = serde_json::to_string(&meta).unwrap();
assert!(json.contains("\"version\":\"0.3.0\""));
assert!(json.contains("\"artifact_count\":42"));
assert!(json.contains("\"storage_backend\":\"local\""));
}
// RFC 3339 timestamps parse back into created_at.
#[test]
fn test_backup_metadata_deserialization() {
let json = r#"{
"version": "0.3.0",
"created_at": "2026-01-01T00:00:00Z",
"artifact_count": 10,
"total_bytes": 5000,
"storage_backend": "s3"
}"#;
let meta: BackupMetadata = serde_json::from_str(json).unwrap();
assert_eq!(meta.version, "0.3.0");
assert_eq!(meta.artifact_count, 10);
assert_eq!(meta.total_bytes, 5000);
assert_eq!(meta.storage_backend, "s3");
}
// serialize → deserialize must preserve every field we compare
// (created_at is excluded: sub-second precision may differ).
#[test]
fn test_backup_metadata_roundtrip() {
let meta = BackupMetadata {
version: "1.0.0".to_string(),
created_at: chrono::Utc::now(),
artifact_count: 100,
total_bytes: 999999,
storage_backend: "local".to_string(),
};
let json = serde_json::to_value(&meta).unwrap();
let restored: BackupMetadata = serde_json::from_value(json).unwrap();
assert_eq!(meta.version, restored.version);
assert_eq!(meta.artifact_count, restored.artifact_count);
assert_eq!(meta.total_bytes, restored.total_bytes);
}
// An empty store still yields a valid archive containing metadata.json.
#[tokio::test]
async fn test_create_backup_empty_storage() {
let dir = tempfile::tempdir().unwrap();
let storage = Storage::new_local(dir.path().join("data").to_str().unwrap());
let output = dir.path().join("backup.tar.gz");
let stats = create_backup(&storage, &output).await.unwrap();
assert_eq!(stats.artifact_count, 0);
assert_eq!(stats.total_bytes, 0);
assert!(output.exists());
assert!(stats.output_size > 0); // at least metadata.json
}
// Full cycle: put two artifacts, back up, restore into a fresh store,
// and verify counts and byte-for-byte content.
#[tokio::test]
async fn test_backup_restore_roundtrip() {
let dir = tempfile::tempdir().unwrap();
let storage = Storage::new_local(dir.path().join("data").to_str().unwrap());
// Put some test data
storage
.put("maven/com/example/1.0/test.jar", b"test-content")
.await
.unwrap();
storage
.put("docker/test/blobs/sha256:abc123", b"blob-data")
.await
.unwrap();
// Create backup
let backup_file = dir.path().join("backup.tar.gz");
let backup_stats = create_backup(&storage, &backup_file).await.unwrap();
assert_eq!(backup_stats.artifact_count, 2);
// Restore to different storage
let restore_storage = Storage::new_local(dir.path().join("restored").to_str().unwrap());
let restore_stats = restore_backup(&restore_storage, &backup_file)
.await
.unwrap();
assert_eq!(restore_stats.artifact_count, 2);
// Verify data
let data = restore_storage
.get("maven/com/example/1.0/test.jar")
.await
.unwrap();
assert_eq!(&data[..], b"test-content");
}
}

View File

@@ -763,6 +763,7 @@ impl Default for Config {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;

View File

@@ -2,52 +2,83 @@
// SPDX-License-Identifier: MIT
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicU64, Ordering};
use std::time::Instant;
use tracing::{info, warn};
/// Serializable snapshot of metrics for persistence
/// Known registry names for per-registry metrics
const REGISTRIES: &[&str] = &["docker", "maven", "npm", "cargo", "pypi", "raw", "go"];
/// Serializable snapshot of metrics for persistence.
/// Uses HashMap for per-registry counters — adding a new registry only
/// requires adding its name to REGISTRIES (one line).
#[derive(Serialize, Deserialize, Default)]
struct MetricsSnapshot {
downloads: u64,
uploads: u64,
cache_hits: u64,
cache_misses: u64,
docker_downloads: u64,
docker_uploads: u64,
npm_downloads: u64,
maven_downloads: u64,
maven_uploads: u64,
cargo_downloads: u64,
pypi_downloads: u64,
raw_downloads: u64,
raw_uploads: u64,
#[serde(default)]
registry_downloads: HashMap<String, u64>,
#[serde(default)]
registry_uploads: HashMap<String, u64>,
}
/// Dashboard metrics for tracking registry activity
/// Uses atomic counters for thread-safe access without locks
/// Thread-safe atomic counter map for per-registry metrics.
///
/// Keys are fixed at construction; operations on unknown keys are no-ops
/// (reads yield 0), so stale persisted entries can never grow the map.
struct CounterMap(HashMap<String, AtomicU64>);

impl CounterMap {
    /// Build a map holding one zeroed counter per key.
    fn new(keys: &[&str]) -> Self {
        Self(
            keys.iter()
                .map(|&name| (name.to_string(), AtomicU64::new(0)))
                .collect(),
        )
    }

    /// Increment the counter for `key` by one; unknown keys are ignored.
    fn inc(&self, key: &str) {
        if let Some(cell) = self.0.get(key) {
            cell.fetch_add(1, Ordering::Relaxed);
        }
    }

    /// Current value for `key`, or 0 if the key is unknown.
    fn get(&self, key: &str) -> u64 {
        match self.0.get(key) {
            Some(cell) => cell.load(Ordering::Relaxed),
            None => 0,
        }
    }

    /// Plain-integer copy of every counter, for serialization.
    fn snapshot(&self) -> HashMap<String, u64> {
        let mut out = HashMap::with_capacity(self.0.len());
        for (name, cell) in &self.0 {
            out.insert(name.clone(), cell.load(Ordering::Relaxed));
        }
        out
    }

    /// Restore persisted values; entries whose key is no longer known
    /// are silently dropped.
    fn load_from(&self, data: &HashMap<String, u64>) {
        for (name, value) in data {
            if let Some(cell) = self.0.get(name.as_str()) {
                cell.store(*value, Ordering::Relaxed);
            }
        }
    }
}
/// Dashboard metrics for tracking registry activity.
/// Global counters are separate fields; per-registry counters use CounterMap.
pub struct DashboardMetrics {
// Global counters
pub downloads: AtomicU64,
pub uploads: AtomicU64,
pub cache_hits: AtomicU64,
pub cache_misses: AtomicU64,
// Per-registry download counters
pub docker_downloads: AtomicU64,
pub docker_uploads: AtomicU64,
pub npm_downloads: AtomicU64,
pub maven_downloads: AtomicU64,
pub maven_uploads: AtomicU64,
pub cargo_downloads: AtomicU64,
pub pypi_downloads: AtomicU64,
pub raw_downloads: AtomicU64,
pub raw_uploads: AtomicU64,
registry_downloads: CounterMap,
registry_uploads: CounterMap,
pub start_time: Instant,
/// Path to metrics.json for persistence
persist_path: Option<PathBuf>,
}
@@ -58,15 +89,8 @@ impl DashboardMetrics {
uploads: AtomicU64::new(0),
cache_hits: AtomicU64::new(0),
cache_misses: AtomicU64::new(0),
docker_downloads: AtomicU64::new(0),
docker_uploads: AtomicU64::new(0),
npm_downloads: AtomicU64::new(0),
maven_downloads: AtomicU64::new(0),
maven_uploads: AtomicU64::new(0),
cargo_downloads: AtomicU64::new(0),
pypi_downloads: AtomicU64::new(0),
raw_downloads: AtomicU64::new(0),
raw_uploads: AtomicU64::new(0),
registry_downloads: CounterMap::new(REGISTRIES),
registry_uploads: CounterMap::new(REGISTRIES),
start_time: Instant::now(),
persist_path: None,
}
@@ -77,7 +101,6 @@ impl DashboardMetrics {
let path = Path::new(storage_path).join("metrics.json");
let mut metrics = Self::new();
// Load existing metrics if file exists
if path.exists() {
match std::fs::read_to_string(&path) {
Ok(data) => match serde_json::from_str::<MetricsSnapshot>(&data) {
@@ -86,15 +109,10 @@ impl DashboardMetrics {
metrics.uploads = AtomicU64::new(snap.uploads);
metrics.cache_hits = AtomicU64::new(snap.cache_hits);
metrics.cache_misses = AtomicU64::new(snap.cache_misses);
metrics.docker_downloads = AtomicU64::new(snap.docker_downloads);
metrics.docker_uploads = AtomicU64::new(snap.docker_uploads);
metrics.npm_downloads = AtomicU64::new(snap.npm_downloads);
metrics.maven_downloads = AtomicU64::new(snap.maven_downloads);
metrics.maven_uploads = AtomicU64::new(snap.maven_uploads);
metrics.cargo_downloads = AtomicU64::new(snap.cargo_downloads);
metrics.pypi_downloads = AtomicU64::new(snap.pypi_downloads);
metrics.raw_downloads = AtomicU64::new(snap.raw_downloads);
metrics.raw_uploads = AtomicU64::new(snap.raw_uploads);
metrics
.registry_downloads
.load_from(&snap.registry_downloads);
metrics.registry_uploads.load_from(&snap.registry_uploads);
info!(
downloads = snap.downloads,
uploads = snap.uploads,
@@ -121,17 +139,9 @@ impl DashboardMetrics {
uploads: self.uploads.load(Ordering::Relaxed),
cache_hits: self.cache_hits.load(Ordering::Relaxed),
cache_misses: self.cache_misses.load(Ordering::Relaxed),
docker_downloads: self.docker_downloads.load(Ordering::Relaxed),
docker_uploads: self.docker_uploads.load(Ordering::Relaxed),
npm_downloads: self.npm_downloads.load(Ordering::Relaxed),
maven_downloads: self.maven_downloads.load(Ordering::Relaxed),
maven_uploads: self.maven_uploads.load(Ordering::Relaxed),
cargo_downloads: self.cargo_downloads.load(Ordering::Relaxed),
pypi_downloads: self.pypi_downloads.load(Ordering::Relaxed),
raw_downloads: self.raw_downloads.load(Ordering::Relaxed),
raw_uploads: self.raw_uploads.load(Ordering::Relaxed),
registry_downloads: self.registry_downloads.snapshot(),
registry_uploads: self.registry_uploads.snapshot(),
};
// Atomic write: write to tmp then rename
let tmp = path.with_extension("json.tmp");
if let Ok(data) = serde_json::to_string_pretty(&snap) {
if tokio::fs::write(&tmp, &data).await.is_ok() {
@@ -140,42 +150,24 @@ impl DashboardMetrics {
}
}
/// Record a download event for the specified registry.
///
/// Bumps the global download counter unconditionally; the per-registry
/// increment is a no-op for names not present in `REGISTRIES`
/// (see `CounterMap::inc`). The legacy per-field `match` that also
/// incremented the deprecated per-registry atomics has been removed —
/// it double-booked every event into two counter systems.
pub fn record_download(&self, registry: &str) {
    self.downloads.fetch_add(1, Ordering::Relaxed);
    self.registry_downloads.inc(registry);
}
/// Record an upload event for the specified registry.
///
/// Bumps the global upload counter unconditionally; the per-registry
/// increment is a no-op for names not present in `REGISTRIES`.
/// The legacy per-field `match` (docker/maven/raw only) has been removed —
/// it duplicated the CounterMap bookkeeping.
pub fn record_upload(&self, registry: &str) {
    self.uploads.fetch_add(1, Ordering::Relaxed);
    self.registry_uploads.inc(registry);
}
/// Record a cache hit.
pub fn record_cache_hit(&self) {
    // Relaxed is sufficient: the counter is a standalone statistic and
    // imposes no ordering on other memory operations.
    self.cache_hits.fetch_add(1, Ordering::Relaxed);
}
/// Record a cache miss.
pub fn record_cache_miss(&self) {
    self.cache_misses.fetch_add(1, Ordering::Relaxed);
}
/// Calculate the cache hit rate as a percentage
pub fn cache_hit_rate(&self) -> f64 {
let hits = self.cache_hits.load(Ordering::Relaxed);
let misses = self.cache_misses.load(Ordering::Relaxed);
@@ -187,27 +179,12 @@ impl DashboardMetrics {
}
}
/// Get download count for a specific registry
pub fn get_registry_downloads(&self, registry: &str) -> u64 {
match registry {
"docker" => self.docker_downloads.load(Ordering::Relaxed),
"npm" => self.npm_downloads.load(Ordering::Relaxed),
"maven" => self.maven_downloads.load(Ordering::Relaxed),
"cargo" => self.cargo_downloads.load(Ordering::Relaxed),
"pypi" => self.pypi_downloads.load(Ordering::Relaxed),
"raw" => self.raw_downloads.load(Ordering::Relaxed),
_ => 0,
}
self.registry_downloads.get(registry)
}
/// Get upload count for a specific registry
pub fn get_registry_uploads(&self, registry: &str) -> u64 {
match registry {
"docker" => self.docker_uploads.load(Ordering::Relaxed),
"maven" => self.maven_uploads.load(Ordering::Relaxed),
"raw" => self.raw_uploads.load(Ordering::Relaxed),
_ => 0,
}
self.registry_uploads.get(registry)
}
}
@@ -218,6 +195,7 @@ impl Default for DashboardMetrics {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;
use tempfile::TempDir;
@@ -238,12 +216,12 @@ mod tests {
m.record_download(reg);
}
assert_eq!(m.downloads.load(Ordering::Relaxed), 6);
assert_eq!(m.docker_downloads.load(Ordering::Relaxed), 1);
assert_eq!(m.npm_downloads.load(Ordering::Relaxed), 1);
assert_eq!(m.maven_downloads.load(Ordering::Relaxed), 1);
assert_eq!(m.cargo_downloads.load(Ordering::Relaxed), 1);
assert_eq!(m.pypi_downloads.load(Ordering::Relaxed), 1);
assert_eq!(m.raw_downloads.load(Ordering::Relaxed), 1);
assert_eq!(m.get_registry_downloads("docker"), 1);
assert_eq!(m.get_registry_downloads("npm"), 1);
assert_eq!(m.get_registry_downloads("maven"), 1);
assert_eq!(m.get_registry_downloads("cargo"), 1);
assert_eq!(m.get_registry_downloads("pypi"), 1);
assert_eq!(m.get_registry_downloads("raw"), 1);
}
#[test]
@@ -251,8 +229,7 @@ mod tests {
let m = DashboardMetrics::new();
m.record_download("unknown");
assert_eq!(m.downloads.load(Ordering::Relaxed), 1);
// no per-registry counter should increment
assert_eq!(m.docker_downloads.load(Ordering::Relaxed), 0);
assert_eq!(m.get_registry_downloads("docker"), 0);
}
#[test]
@@ -262,15 +239,15 @@ mod tests {
m.record_upload("maven");
m.record_upload("raw");
assert_eq!(m.uploads.load(Ordering::Relaxed), 3);
assert_eq!(m.docker_uploads.load(Ordering::Relaxed), 1);
assert_eq!(m.maven_uploads.load(Ordering::Relaxed), 1);
assert_eq!(m.raw_uploads.load(Ordering::Relaxed), 1);
assert_eq!(m.get_registry_uploads("docker"), 1);
assert_eq!(m.get_registry_uploads("maven"), 1);
assert_eq!(m.get_registry_uploads("raw"), 1);
}
// An upload for a registry missing from REGISTRIES must still bump the
// global counter but no per-registry counter. The previous version called
// record_upload twice (duplicated line left over from an edit), which made
// the `uploads == 1` assertion fail; it also used "npm", which is now a
// tracked registry and therefore not "unknown".
#[test]
fn test_record_upload_unknown_registry() {
    let m = DashboardMetrics::new();
    m.record_upload("unknown");
    assert_eq!(m.uploads.load(Ordering::Relaxed), 1);
    assert_eq!(m.get_registry_uploads("unknown"), 0);
}
@@ -322,7 +299,6 @@ mod tests {
let tmp = TempDir::new().unwrap();
let path = tmp.path().to_str().unwrap();
// Create metrics, record some data, save
{
let m = DashboardMetrics::with_persistence(path);
m.record_download("docker");
@@ -332,13 +308,12 @@ mod tests {
m.save().await;
}
// Load in new instance
{
let m = DashboardMetrics::with_persistence(path);
assert_eq!(m.downloads.load(Ordering::Relaxed), 2);
assert_eq!(m.uploads.load(Ordering::Relaxed), 1);
assert_eq!(m.docker_downloads.load(Ordering::Relaxed), 2);
assert_eq!(m.maven_uploads.load(Ordering::Relaxed), 1);
assert_eq!(m.get_registry_downloads("docker"), 2);
assert_eq!(m.get_registry_uploads("maven"), 1);
assert_eq!(m.cache_hits.load(Ordering::Relaxed), 1);
}
}
@@ -347,8 +322,6 @@ mod tests {
fn test_persistence_missing_file() {
let tmp = TempDir::new().unwrap();
let path = tmp.path().to_str().unwrap();
// Should work even without existing metrics.json
let m = DashboardMetrics::with_persistence(path);
assert_eq!(m.downloads.load(Ordering::Relaxed), 0);
}
@@ -358,4 +331,11 @@ mod tests {
let m = DashboardMetrics::default();
assert_eq!(m.downloads.load(Ordering::Relaxed), 0);
}
// "go" was added to REGISTRIES in the CounterMap refactor; verify it is
// tracked like any other registry without dedicated fields.
#[test]
fn test_go_registry_supported() {
    let m = DashboardMetrics::new();
    m.record_download("go");
    assert_eq!(m.get_registry_downloads("go"), 1);
}
}

View File

@@ -124,3 +124,198 @@ async fn collect_referenced_digests(storage: &Storage) -> HashSet<String> {
referenced
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
    use super::*;

    // GcResult is a plain data carrier; sanity-check literal construction.
    #[test]
    fn test_gc_result_defaults() {
        let result = GcResult {
            total_blobs: 0,
            referenced_blobs: 0,
            orphaned_blobs: 0,
            deleted_blobs: 0,
            orphan_keys: vec![],
        };
        assert_eq!(result.total_blobs, 0);
        assert!(result.orphan_keys.is_empty());
    }

    // GC over an empty storage reports all-zero counts.
    #[tokio::test]
    async fn test_gc_empty_storage() {
        let dir = tempfile::tempdir().unwrap();
        let storage = Storage::new_local(dir.path().join("data").to_str().unwrap());
        let result = run_gc(&storage, true).await;
        assert_eq!(result.total_blobs, 0);
        assert_eq!(result.referenced_blobs, 0);
        assert_eq!(result.orphaned_blobs, 0);
        assert_eq!(result.deleted_blobs, 0);
    }

    // Every stored blob is referenced by the manifest => nothing orphaned.
    #[tokio::test]
    async fn test_gc_no_orphans() {
        let dir = tempfile::tempdir().unwrap();
        let storage = Storage::new_local(dir.path().join("data").to_str().unwrap());
        // Create a manifest that references a blob
        let manifest = serde_json::json!({
            "config": {"digest": "sha256:configabc"},
            "layers": [{"digest": "sha256:layer111", "size": 100}]
        });
        storage
            .put(
                "docker/test/manifests/latest.json",
                manifest.to_string().as_bytes(),
            )
            .await
            .unwrap();
        storage
            .put("docker/test/blobs/sha256:configabc", b"config-data")
            .await
            .unwrap();
        storage
            .put("docker/test/blobs/sha256:layer111", b"layer-data")
            .await
            .unwrap();
        let result = run_gc(&storage, true).await;
        assert_eq!(result.total_blobs, 2);
        assert_eq!(result.orphaned_blobs, 0);
    }

    // Dry run detects the unreferenced blob but must leave it in place.
    #[tokio::test]
    async fn test_gc_finds_orphans_dry_run() {
        let dir = tempfile::tempdir().unwrap();
        let storage = Storage::new_local(dir.path().join("data").to_str().unwrap());
        // Create a manifest referencing only one blob
        let manifest = serde_json::json!({
            "config": {"digest": "sha256:configabc"},
            "layers": [{"digest": "sha256:layer111", "size": 100}]
        });
        storage
            .put(
                "docker/test/manifests/latest.json",
                manifest.to_string().as_bytes(),
            )
            .await
            .unwrap();
        storage
            .put("docker/test/blobs/sha256:configabc", b"config-data")
            .await
            .unwrap();
        storage
            .put("docker/test/blobs/sha256:layer111", b"layer-data")
            .await
            .unwrap();
        // Orphan blob (not referenced)
        storage
            .put("docker/test/blobs/sha256:orphan999", b"orphan-data")
            .await
            .unwrap();
        let result = run_gc(&storage, true).await;
        assert_eq!(result.total_blobs, 3);
        assert_eq!(result.orphaned_blobs, 1);
        assert_eq!(result.deleted_blobs, 0); // dry run
        assert!(result.orphan_keys[0].contains("orphan999"));
        // Verify orphan still exists (dry run)
        assert!(storage
            .get("docker/test/blobs/sha256:orphan999")
            .await
            .is_ok());
    }

    // With dry_run = false the orphan is deleted; referenced blobs survive.
    #[tokio::test]
    async fn test_gc_deletes_orphans() {
        let dir = tempfile::tempdir().unwrap();
        let storage = Storage::new_local(dir.path().join("data").to_str().unwrap());
        let manifest = serde_json::json!({
            "config": {"digest": "sha256:configabc"},
            "layers": []
        });
        storage
            .put(
                "docker/test/manifests/latest.json",
                manifest.to_string().as_bytes(),
            )
            .await
            .unwrap();
        storage
            .put("docker/test/blobs/sha256:configabc", b"config")
            .await
            .unwrap();
        storage
            .put("docker/test/blobs/sha256:orphan1", b"orphan")
            .await
            .unwrap();
        let result = run_gc(&storage, false).await;
        assert_eq!(result.orphaned_blobs, 1);
        assert_eq!(result.deleted_blobs, 1);
        // Verify orphan is gone
        assert!(storage
            .get("docker/test/blobs/sha256:orphan1")
            .await
            .is_err());
        // Referenced blob still exists
        assert!(storage
            .get("docker/test/blobs/sha256:configabc")
            .await
            .is_ok());
    }

    // Digests listed in a multi-arch manifest index count as references.
    #[tokio::test]
    async fn test_gc_manifest_list_references() {
        let dir = tempfile::tempdir().unwrap();
        let storage = Storage::new_local(dir.path().join("data").to_str().unwrap());
        // Multi-arch manifest list
        let manifest = serde_json::json!({
            "manifests": [
                {"digest": "sha256:platformA", "size": 100},
                {"digest": "sha256:platformB", "size": 200}
            ]
        });
        storage
            .put(
                "docker/multi/manifests/latest.json",
                manifest.to_string().as_bytes(),
            )
            .await
            .unwrap();
        storage
            .put("docker/multi/blobs/sha256:platformA", b"arch-a")
            .await
            .unwrap();
        storage
            .put("docker/multi/blobs/sha256:platformB", b"arch-b")
            .await
            .unwrap();
        let result = run_gc(&storage, true).await;
        assert_eq!(result.orphaned_blobs, 0);
    }

    // Multi-registry GC scan: keys outside docker/ are counted too.
    #[tokio::test]
    async fn test_gc_multi_registry_blobs() {
        let dir = tempfile::tempdir().unwrap();
        let storage = Storage::new_local(dir.path().join("data").to_str().unwrap());
        // npm tarball (not referenced by Docker manifests => orphan candidate)
        storage
            .put("npm/lodash/tarballs/lodash-4.17.21.tgz", b"tarball-data")
            .await
            .unwrap();
        let result = run_gc(&storage, true).await;
        // npm tarballs contain "tarballs/" which matches the filter
        assert_eq!(result.total_blobs, 1);
    }
}

View File

@@ -90,3 +90,37 @@ async fn readiness_check(State(state): State<Arc<AppState>>) -> StatusCode {
async fn check_storage_reachable(state: &AppState) -> bool {
state.storage.health_check().await
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
    use crate::test_helpers::{body_bytes, create_test_context, send};
    use axum::http::{Method, StatusCode};

    // /health responds 200 and the body mentions "healthy".
    #[tokio::test]
    async fn test_health_returns_200() {
        let ctx = create_test_context();
        let response = send(&ctx.app, Method::GET, "/health", "").await;
        assert_eq!(response.status(), StatusCode::OK);
        let body = body_bytes(response).await;
        let body_str = std::str::from_utf8(&body).unwrap();
        assert!(body_str.contains("healthy"));
    }

    // /health body is JSON and carries a "version" field.
    #[tokio::test]
    async fn test_health_json_has_version() {
        let ctx = create_test_context();
        let response = send(&ctx.app, Method::GET, "/health", "").await;
        assert_eq!(response.status(), StatusCode::OK);
        let body = body_bytes(response).await;
        let json: serde_json::Value = serde_json::from_slice(&body).unwrap();
        assert!(json.get("version").is_some());
    }

    // /ready responds 200 when the (local test) storage health check passes.
    #[tokio::test]
    async fn test_ready_returns_200() {
        let ctx = create_test_context();
        let response = send(&ctx.app, Method::GET, "/ready", "").await;
        assert_eq!(response.status(), StatusCode::OK);
    }
}

View File

@@ -25,6 +25,9 @@ mod tokens;
mod ui;
mod validation;
#[cfg(test)]
mod test_helpers;
use axum::{extract::DefaultBodyLimit, http::HeaderValue, middleware, Router};
use clap::{Parser, Subcommand};
use std::path::{Path, PathBuf};

View File

@@ -175,7 +175,7 @@ mod tests {
}
// /cargo/... API paths classify as the "cargo" registry.
// The previous text contained two consecutive `fn` signature lines
// (rename residue from test_detect_registry_cargo ->
// test_detect_registry_cargo_path), which nested one fn inside the other
// and broke the brace structure of the module; only the renamed fn is kept.
#[test]
fn test_detect_registry_cargo_path() {
    assert_eq!(detect_registry("/cargo/api/v1/crates"), "cargo");
}
@@ -200,4 +200,33 @@ mod tests {
assert_eq!(detect_registry("/ready"), "other");
assert_eq!(detect_registry("/unknown/path"), "other");
}
// Go module proxy paths currently classify as "other".
// NOTE(review): metrics now track a "go" registry — confirm whether
// detect_registry should learn the /go/ prefix as well.
#[test]
fn test_detect_registry_go_path() {
    assert_eq!(
        detect_registry("/go/github.com/user/repo/@v/v1.0.0.info"),
        "other"
    );
}
// Smoke tests: the recorders must accept any label without panicking.
#[test]
fn test_record_cache_hit() {
    record_cache_hit("docker");
    // Doesn't panic — metric is recorded
}
#[test]
fn test_record_cache_miss() {
    record_cache_miss("npm");
}
#[test]
fn test_record_storage_op_success() {
    record_storage_op("get", true);
}
#[test]
fn test_record_storage_op_error() {
    record_storage_op("put", false);
}
}

View File

@@ -138,6 +138,7 @@ pub async fn migrate(
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;
use tempfile::TempDir;
@@ -201,16 +202,9 @@ mod tests {
src.put("test/file", b"data").await.unwrap();
let stats = migrate(
&src,
&dst,
MigrateOptions {
dry_run: true,
..Default::default()
},
)
.await
.unwrap();
let stats = migrate(&src, &dst, MigrateOptions { dry_run: true })
.await
.unwrap();
assert_eq!(stats.migrated, 1);

View File

@@ -283,6 +283,7 @@ fn parse_maven_deps(content: &str) -> Vec<MirrorTarget> {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;
@@ -322,4 +323,121 @@ version = \"0.1.0\"
assert_eq!(targets[0].name, "org.apache.commons:commons-lang3");
assert_eq!(targets[0].version, "3.12.0");
}
// Empty input yields no targets.
#[test]
fn test_parse_requirements_txt_empty() {
    let targets = parse_requirements_txt("");
    assert!(targets.is_empty());
}
// Comment and blank lines are skipped.
#[test]
fn test_parse_requirements_txt_comments_only() {
    let content = "# This is a comment\n# Another comment\n\n";
    let targets = parse_requirements_txt(content);
    assert!(targets.is_empty());
}
// pip option lines (-r, -i) are ignored; real requirements are kept.
#[test]
fn test_parse_requirements_txt_flags() {
    let content = "-r other-requirements.txt\n-i https://pypi.org/simple\nflask==2.0\n";
    let targets = parse_requirements_txt(content);
    assert_eq!(targets.len(), 1);
    assert_eq!(targets[0].name, "flask");
}
// Non-exact specifiers (>=, <, !=, ~=) map to version "latest"; only ==
// pins a version. Note: the environment marker after ";" is retained in
// the version string — current parser behavior, pinned here.
#[test]
fn test_parse_requirements_txt_version_specifiers() {
    let content =
        "pkg1>=1.0\npkg2<2.0\npkg3!=1.5\npkg4~=1.0\npkg5==1.0 ; python_version>='3.8'\n";
    let targets = parse_requirements_txt(content);
    assert_eq!(targets.len(), 5);
    assert_eq!(targets[0].name, "pkg1");
    assert_eq!(targets[0].version, "latest");
    assert_eq!(targets[4].name, "pkg5");
    assert_eq!(targets[4].version, "1.0 ; python_version>='3.8'");
}
// Trailing "# ..." comments are stripped from requirement lines.
#[test]
fn test_parse_requirements_txt_inline_comments() {
    let content = "flask==2.0 # web framework\n";
    let targets = parse_requirements_txt(content);
    assert_eq!(targets.len(), 1);
    assert_eq!(targets[0].name, "flask");
    assert_eq!(targets[0].version, "2.0");
}
// Empty Cargo.lock parses successfully to zero targets.
#[test]
fn test_parse_cargo_lock_empty() {
    let content = "";
    let result = parse_cargo_lock(content);
    let targets = result.unwrap();
    assert!(targets.is_empty());
}
// A lockfile with sections but no [[package] ] entries yields nothing.
#[test]
fn test_parse_cargo_lock_no_packages() {
    let content = "[metadata]\nsome = \"value\"\n";
    let targets = parse_cargo_lock(content).unwrap();
    assert!(targets.is_empty());
}
// git-sourced dependencies cannot be mirrored from crates.io.
#[test]
fn test_parse_cargo_lock_git_source() {
    let content = r#"
[[package]]
name = "my-dep"
version = "0.1.0"
source = "git+https://github.com/user/repo#abc123"
"#;
    let targets = parse_cargo_lock(content).unwrap();
    assert!(targets.is_empty()); // git sources filtered out
}
// Registry-sourced packages are kept; path-local crates (no source) are not.
#[test]
fn test_parse_cargo_lock_multiple() {
    let content = r#"
[[package]]
name = "serde"
version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "tokio"
version = "1.36.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "local-crate"
version = "0.1.0"
"#;
    let targets = parse_cargo_lock(content).unwrap();
    assert_eq!(targets.len(), 2);
}
// Empty mvn dependency:list output yields nothing.
#[test]
fn test_parse_maven_deps_empty() {
    let targets = parse_maven_deps("");
    assert!(targets.is_empty());
}
// Lines with too few ":"-separated fields are rejected.
#[test]
fn test_parse_maven_deps_short_line() {
    let targets = parse_maven_deps("foo:bar\n");
    assert!(targets.is_empty());
}
// group:artifact becomes the target name; the 4th field is the version.
#[test]
fn test_parse_maven_deps_multiple() {
    let content = "[INFO] org.slf4j:slf4j-api:jar:2.0.9:compile\n[INFO] com.google.guava:guava:jar:33.0.0-jre:compile\n";
    let targets = parse_maven_deps(content);
    assert_eq!(targets.len(), 2);
    assert_eq!(targets[0].name, "org.slf4j:slf4j-api");
    assert_eq!(targets[1].version, "33.0.0-jre");
}
// Progress bar is created with the requested length.
#[test]
fn test_create_progress_bar() {
    let pb = create_progress_bar(100);
    assert_eq!(pb.length(), Some(100));
}
}

View File

@@ -251,6 +251,7 @@ async fn mirror_npm_packages(
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;
@@ -324,4 +325,108 @@ mod tests {
assert_eq!(targets.len(), 1); // deduplicated
assert_eq!(targets[0].name, "debug");
}
// Plain "node_modules/<pkg>" path.
#[test]
fn test_extract_package_name_simple() {
    assert_eq!(extract_package_name("node_modules/lodash"), Some("lodash"));
}
// Scoped packages keep the "@scope/name" form.
#[test]
fn test_extract_package_name_scoped() {
    assert_eq!(
        extract_package_name("node_modules/@babel/core"),
        Some("@babel/core")
    );
}
// Nested node_modules: the innermost package wins.
#[test]
fn test_extract_package_name_nested() {
    assert_eq!(
        extract_package_name("node_modules/foo/node_modules/@scope/bar"),
        Some("@scope/bar")
    );
}
// Paths without a node_modules segment produce no name.
#[test]
fn test_extract_package_name_no_node_modules() {
    assert_eq!(extract_package_name("just/a/path"), None);
}
// A trailing slash with nothing after it produces no name.
#[test]
fn test_extract_package_name_empty_after() {
    assert_eq!(extract_package_name("node_modules/"), None);
}
// v2 lockfile: targets come from "packages"; the root "" entry (no
// version) yields no target.
#[test]
fn test_parse_lockfile_v2() {
    let lockfile = serde_json::json!({
        "lockfileVersion": 2,
        "packages": {
            "": {"name": "root"},
            "node_modules/express": {"version": "4.18.2"},
            "node_modules/@types/node": {"version": "20.11.0"}
        }
    });
    let targets = parse_npm_lockfile(&lockfile.to_string()).unwrap();
    assert_eq!(targets.len(), 2);
}
// An empty "packages" map parses cleanly to zero targets.
#[test]
fn test_parse_lockfile_empty_packages() {
    let lockfile = serde_json::json!({
        "lockfileVersion": 3,
        "packages": {}
    });
    let targets = parse_npm_lockfile(&lockfile.to_string()).unwrap();
    assert!(targets.is_empty());
}
// Malformed input surfaces as Err, not a panic.
#[test]
fn test_parse_lockfile_invalid_json() {
    let result = parse_npm_lockfile("not json at all");
    assert!(result.is_err());
}
// v1 lockfile: nested "dependencies" trees are walked, so the transitive
// dep is counted too.
#[test]
fn test_parse_lockfile_v1_nested() {
    let lockfile = serde_json::json!({
        "lockfileVersion": 1,
        "dependencies": {
            "express": {
                "version": "4.18.2",
                "dependencies": {
                    "accepts": {"version": "1.3.8"}
                }
            }
        }
    });
    let targets = parse_npm_lockfile(&lockfile.to_string()).unwrap();
    assert_eq!(targets.len(), 2);
}
#[test]
fn test_parse_lockfile_v2_falls_back_to_v1() {
    // v2 with empty packages should fall back to v1 dependencies
    let lockfile = serde_json::json!({
        "lockfileVersion": 2,
        "packages": {},
        "dependencies": {
            "lodash": {"version": "4.17.21"}
        }
    });
    let targets = parse_npm_lockfile(&lockfile.to_string()).unwrap();
    assert_eq!(targets.len(), 1);
    assert_eq!(targets[0].name, "lodash");
}
// Entries without a "version" field are skipped.
#[test]
fn test_parse_lockfile_no_version_field() {
    let lockfile = serde_json::json!({
        "packages": {
            "node_modules/something": {"resolved": "https://example.com"}
        }
    });
    let targets = parse_npm_lockfile(&lockfile.to_string()).unwrap();
    assert!(targets.is_empty());
}
}

View File

@@ -68,3 +68,73 @@ async fn download(
Err(_) => StatusCode::NOT_FOUND.into_response(),
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
    use crate::test_helpers::{body_bytes, create_test_context, send};
    use axum::http::{Method, StatusCode};

    // Unknown crate => 404.
    #[tokio::test]
    async fn test_cargo_metadata_not_found() {
        let ctx = create_test_context();
        let resp = send(
            &ctx.app,
            Method::GET,
            "/cargo/api/v1/crates/nonexistent",
            "",
        )
        .await;
        assert_eq!(resp.status(), StatusCode::NOT_FOUND);
    }

    // Metadata stored under cargo/<name>/metadata.json is served verbatim.
    #[tokio::test]
    async fn test_cargo_metadata_from_storage() {
        let ctx = create_test_context();
        let meta = r#"{"name":"test-crate","versions":[]}"#;
        ctx.state
            .storage
            .put("cargo/test-crate/metadata.json", meta.as_bytes())
            .await
            .unwrap();
        let resp = send(&ctx.app, Method::GET, "/cargo/api/v1/crates/test-crate", "").await;
        assert_eq!(resp.status(), StatusCode::OK);
        let body = body_bytes(resp).await;
        assert_eq!(&body[..], meta.as_bytes());
    }

    // Unknown crate version => 404 on the download endpoint.
    #[tokio::test]
    async fn test_cargo_download_not_found() {
        let ctx = create_test_context();
        let resp = send(
            &ctx.app,
            Method::GET,
            "/cargo/api/v1/crates/missing/1.0.0/download",
            "",
        )
        .await;
        assert_eq!(resp.status(), StatusCode::NOT_FOUND);
    }

    // Crate bytes stored under cargo/<name>/<ver>/ are served verbatim.
    #[tokio::test]
    async fn test_cargo_download_from_storage() {
        let ctx = create_test_context();
        ctx.state
            .storage
            .put("cargo/my-crate/1.2.3/my-crate-1.2.3.crate", b"crate-data")
            .await
            .unwrap();
        let resp = send(
            &ctx.app,
            Method::GET,
            "/cargo/api/v1/crates/my-crate/1.2.3/download",
            "",
        )
        .await;
        assert_eq!(resp.status(), StatusCode::OK);
        let body = body_bytes(resp).await;
        assert_eq!(&body[..], b"crate-data");
    }
}

View File

@@ -1322,3 +1322,599 @@ async fn update_metadata_on_pull(storage: Storage, meta_key: String) {
let _ = storage.put(&meta_key, &json).await;
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
    use super::*;

    // Default-constructed metadata is all zeros / empty strings.
    #[test]
    fn test_image_metadata_default() {
        let meta = ImageMetadata::default();
        assert_eq!(meta.push_timestamp, 0);
        assert_eq!(meta.last_pulled, 0);
        assert_eq!(meta.downloads, 0);
        assert_eq!(meta.size_bytes, 0);
        assert_eq!(meta.os, "");
        assert_eq!(meta.arch, "");
        assert!(meta.variant.is_none());
        assert!(meta.layers.is_empty());
    }

    // Serialization includes os/arch and omits a None variant entirely.
    #[test]
    fn test_image_metadata_serialization() {
        let meta = ImageMetadata {
            push_timestamp: 1700000000,
            last_pulled: 1700001000,
            downloads: 42,
            size_bytes: 1024000,
            os: "linux".to_string(),
            arch: "amd64".to_string(),
            variant: None,
            layers: vec![LayerInfo {
                digest: "sha256:abc123".to_string(),
                size: 512000,
            }],
        };
        let json = serde_json::to_string(&meta).unwrap();
        assert!(json.contains("\"os\":\"linux\""));
        assert!(json.contains("\"arch\":\"amd64\""));
        assert!(!json.contains("variant")); // None => skipped
    }

    // Some(variant) is serialized as a normal field.
    #[test]
    fn test_image_metadata_with_variant() {
        let meta = ImageMetadata {
            variant: Some("v8".to_string()),
            ..Default::default()
        };
        let json = serde_json::to_string(&meta).unwrap();
        assert!(json.contains("\"variant\":\"v8\""));
    }

    // Deserialization from a hand-written JSON document.
    #[test]
    fn test_image_metadata_deserialization() {
        let json = r#"{
"push_timestamp": 1700000000,
"last_pulled": 0,
"downloads": 5,
"size_bytes": 2048,
"os": "linux",
"arch": "arm64",
"variant": "v8",
"layers": [
{"digest": "sha256:aaa", "size": 1024},
{"digest": "sha256:bbb", "size": 1024}
]
}"#;
        let meta: ImageMetadata = serde_json::from_str(json).unwrap();
        assert_eq!(meta.os, "linux");
        assert_eq!(meta.arch, "arm64");
        assert_eq!(meta.variant, Some("v8".to_string()));
        assert_eq!(meta.layers.len(), 2);
        assert_eq!(meta.layers[0].digest, "sha256:aaa");
        assert_eq!(meta.layers[1].size, 1024);
    }

    // LayerInfo survives a serde_json::Value round-trip intact.
    #[test]
    fn test_layer_info_serialization_roundtrip() {
        let layer = LayerInfo {
            digest: "sha256:deadbeef".to_string(),
            size: 999999,
        };
        let json = serde_json::to_value(&layer).unwrap();
        let restored: LayerInfo = serde_json::from_value(json).unwrap();
        assert_eq!(layer.digest, restored.digest);
        assert_eq!(layer.size, restored.size);
    }

    // Cleaning an empty session table is a no-op.
    #[test]
    fn test_cleanup_expired_sessions_empty() {
        let sessions: RwLock<HashMap<String, UploadSession>> = RwLock::new(HashMap::new());
        cleanup_expired_sessions(&sessions);
        assert_eq!(sessions.read().len(), 0);
    }

    // A session created just now must not be reaped.
    #[test]
    fn test_cleanup_expired_sessions_fresh() {
        let sessions: RwLock<HashMap<String, UploadSession>> = RwLock::new(HashMap::new());
        sessions.write().insert(
            "uuid-1".to_string(),
            UploadSession {
                data: vec![1, 2, 3],
                name: "test/image".to_string(),
                created_at: std::time::Instant::now(),
            },
        );
        cleanup_expired_sessions(&sessions);
        assert_eq!(sessions.read().len(), 1); // not expired
    }

    // NOTE(review): these two default tests assume the relevant env vars
    // are not set in the test environment — they may be order/env sensitive.
    #[test]
    fn test_max_upload_sessions_default() {
        // Without env var set, should return default
        let max = max_upload_sessions();
        assert!(max > 0);
        assert_eq!(max, DEFAULT_MAX_UPLOAD_SESSIONS);
    }
    #[test]
    fn test_max_session_size_default() {
        let max = max_session_size();
        assert_eq!(max, DEFAULT_MAX_SESSION_SIZE_MB * 1024 * 1024);
    }

    // --- detect_manifest_media_type tests ---
    // An explicit top-level mediaType wins outright.
    #[test]
    fn test_detect_manifest_explicit_media_type() {
        let manifest = serde_json::json!({
            "mediaType": "application/vnd.docker.distribution.manifest.v2+json",
            "schemaVersion": 2
        });
        let result = detect_manifest_media_type(manifest.to_string().as_bytes());
        assert_eq!(
            result,
            "application/vnd.docker.distribution.manifest.v2+json"
        );
    }
    #[test]
    fn test_detect_manifest_oci_media_type() {
        let manifest = serde_json::json!({
            "mediaType": "application/vnd.oci.image.manifest.v1+json",
            "schemaVersion": 2
        });
        let result = detect_manifest_media_type(manifest.to_string().as_bytes());
        assert_eq!(result, "application/vnd.oci.image.manifest.v1+json");
    }
    // schemaVersion 1 maps to the legacy v1 manifest media type.
    #[test]
    fn test_detect_manifest_schema_v1() {
        let manifest = serde_json::json!({
            "schemaVersion": 1,
            "name": "test/image"
        });
        let result = detect_manifest_media_type(manifest.to_string().as_bytes());
        assert_eq!(
            result,
            "application/vnd.docker.distribution.manifest.v1+json"
        );
    }
    // Without a top-level mediaType, the config's mediaType disambiguates
    // Docker v2 vs OCI.
    #[test]
    fn test_detect_manifest_docker_v2_from_config() {
        let manifest = serde_json::json!({
            "schemaVersion": 2,
            "config": {
                "mediaType": "application/vnd.docker.container.image.v1+json",
                "digest": "sha256:abc"
            }
        });
        let result = detect_manifest_media_type(manifest.to_string().as_bytes());
        assert_eq!(
            result,
            "application/vnd.docker.distribution.manifest.v2+json"
        );
    }
    #[test]
    fn test_detect_manifest_oci_from_config() {
        let manifest = serde_json::json!({
            "schemaVersion": 2,
            "config": {
                "mediaType": "application/vnd.oci.image.config.v1+json",
                "digest": "sha256:abc"
            }
        });
        let result = detect_manifest_media_type(manifest.to_string().as_bytes());
        assert_eq!(result, "application/vnd.oci.image.manifest.v1+json");
    }
    // Config with no mediaType falls back to the Docker v2 default.
    #[test]
    fn test_detect_manifest_no_config_media_type() {
        let manifest = serde_json::json!({
            "schemaVersion": 2,
            "config": {
                "digest": "sha256:abc"
            }
        });
        let result = detect_manifest_media_type(manifest.to_string().as_bytes());
        assert_eq!(
            result,
            "application/vnd.docker.distribution.manifest.v2+json"
        );
    }
    // A "manifests" array marks a multi-arch index.
    #[test]
    fn test_detect_manifest_index() {
        let manifest = serde_json::json!({
            "schemaVersion": 2,
            "manifests": [
                {"digest": "sha256:aaa", "platform": {"os": "linux", "architecture": "amd64"}}
            ]
        });
        let result = detect_manifest_media_type(manifest.to_string().as_bytes());
        assert_eq!(result, "application/vnd.oci.image.index.v1+json");
    }
    // Unparseable bytes fall back to the Docker v2 default.
    #[test]
    fn test_detect_manifest_invalid_json() {
        let result = detect_manifest_media_type(b"not json at all");
        assert_eq!(
            result,
            "application/vnd.docker.distribution.manifest.v2+json"
        );
    }
    #[test]
    fn test_detect_manifest_empty() {
        let result = detect_manifest_media_type(b"{}");
        assert_eq!(
            result,
            "application/vnd.docker.distribution.manifest.v2+json"
        );
    }
    // Helm charts are OCI artifacts: non-Docker config mediaType => OCI.
    #[test]
    fn test_detect_manifest_helm_chart() {
        let manifest = serde_json::json!({
            "schemaVersion": 2,
            "config": {
                "mediaType": "application/vnd.cncf.helm.config.v1+json",
                "digest": "sha256:abc"
            }
        });
        let result = detect_manifest_media_type(manifest.to_string().as_bytes());
        assert_eq!(result, "application/vnd.oci.image.manifest.v1+json");
    }
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod integration_tests {
use crate::test_helpers::{body_bytes, create_test_context, send};
use axum::body::Body;
use axum::http::{header, Method, StatusCode};
use sha2::Digest;
#[tokio::test]
async fn test_docker_v2_check() {
let ctx = create_test_context();
let resp = send(&ctx.app, Method::GET, "/v2/", Body::empty()).await;
assert_eq!(resp.status(), StatusCode::OK);
}
#[tokio::test]
async fn test_docker_catalog_empty() {
let ctx = create_test_context();
let resp = send(&ctx.app, Method::GET, "/v2/_catalog", Body::empty()).await;
assert_eq!(resp.status(), StatusCode::OK);
let body = body_bytes(resp).await;
let json: serde_json::Value = serde_json::from_slice(&body).unwrap();
assert!(json["repositories"].as_array().unwrap().is_empty());
}
#[tokio::test]
async fn test_docker_put_get_manifest() {
let ctx = create_test_context();
let manifest = serde_json::json!({
"schemaVersion": 2,
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
"config": {
"mediaType": "application/vnd.docker.container.image.v1+json",
"size": 0,
"digest": "sha256:0000000000000000000000000000000000000000000000000000000000000000"
},
"layers": []
});
let manifest_bytes = serde_json::to_vec(&manifest).unwrap();
let put_resp = send(
&ctx.app,
Method::PUT,
"/v2/alpine/manifests/latest",
Body::from(manifest_bytes.clone()),
)
.await;
assert_eq!(put_resp.status(), StatusCode::CREATED);
let digest_header = put_resp
.headers()
.get("docker-content-digest")
.unwrap()
.to_str()
.unwrap()
.to_string();
assert!(digest_header.starts_with("sha256:"));
let get_resp = send(
&ctx.app,
Method::GET,
"/v2/alpine/manifests/latest",
Body::empty(),
)
.await;
assert_eq!(get_resp.status(), StatusCode::OK);
let get_digest = get_resp
.headers()
.get("docker-content-digest")
.unwrap()
.to_str()
.unwrap()
.to_string();
assert_eq!(get_digest, digest_header);
let body = body_bytes(get_resp).await;
assert_eq!(body.as_ref(), manifest_bytes.as_slice());
}
// Tags listing: after publishing a manifest under the `latest` tag,
// `/v2/<name>/tags/list` must report the repository name and include the tag.
#[tokio::test]
async fn test_docker_list_tags() {
    let ctx = create_test_context();
    let manifest = serde_json::json!({
        "schemaVersion": 2,
        "mediaType": "application/vnd.docker.distribution.manifest.v2+json",
        "config": {
            "mediaType": "application/vnd.docker.container.image.v1+json",
            "size": 0,
            "digest": "sha256:0000000000000000000000000000000000000000000000000000000000000000"
        },
        "layers": []
    });
    // Assert the setup PUT succeeded (as the other manifest tests do) so a
    // publish failure surfaces here instead of as a misleading "tag missing"
    // assertion below.
    let put_resp = send(
        &ctx.app,
        Method::PUT,
        "/v2/alpine/manifests/latest",
        Body::from(serde_json::to_vec(&manifest).unwrap()),
    )
    .await;
    assert_eq!(put_resp.status(), StatusCode::CREATED);
    let list_resp = send(&ctx.app, Method::GET, "/v2/alpine/tags/list", Body::empty()).await;
    assert_eq!(list_resp.status(), StatusCode::OK);
    let body = body_bytes(list_resp).await;
    let json: serde_json::Value = serde_json::from_slice(&body).unwrap();
    assert_eq!(json["name"], "alpine");
    let tags = json["tags"].as_array().unwrap();
    assert!(tags.contains(&serde_json::json!("latest")));
}
// Deleting a manifest by its digest must be acknowledged with 202 Accepted.
#[tokio::test]
async fn test_docker_delete_manifest() {
    let ctx = create_test_context();
    let manifest = serde_json::json!({
        "schemaVersion": 2,
        "mediaType": "application/vnd.docker.distribution.manifest.v2+json",
        "config": {
            "mediaType": "application/vnd.docker.container.image.v1+json",
            "size": 0,
            "digest": "sha256:0000000000000000000000000000000000000000000000000000000000000000"
        },
        "layers": []
    });
    let put_resp = send(
        &ctx.app,
        Method::PUT,
        "/v2/alpine/manifests/latest",
        Body::from(serde_json::to_vec(&manifest).unwrap()),
    )
    .await;
    // Fail fast on a rejected publish; without this, the header unwrap below
    // panics with an opaque "called unwrap on None" message.
    assert_eq!(put_resp.status(), StatusCode::CREATED);
    let digest = put_resp
        .headers()
        .get("docker-content-digest")
        .unwrap()
        .to_str()
        .unwrap()
        .to_string();
    let del = send(
        &ctx.app,
        Method::DELETE,
        &format!("/v2/alpine/manifests/{}", digest),
        Body::empty(),
    )
    .await;
    assert_eq!(del.status(), StatusCode::ACCEPTED);
}
#[tokio::test]
// Monolithic blob upload: POST opens an upload session (202 + Location
// header carrying the session UUID), then a single PUT with ?digest= and the
// full payload completes it (201).
async fn test_docker_monolithic_upload() {
let ctx = create_test_context();
let blob_data = b"test blob data";
let digest = format!("sha256:{}", hex::encode(sha2::Sha256::digest(blob_data)));
let post_resp = send(
&ctx.app,
Method::POST,
"/v2/alpine/blobs/uploads/",
Body::empty(),
)
.await;
assert_eq!(post_resp.status(), StatusCode::ACCEPTED);
let location = post_resp
.headers()
.get("location")
.unwrap()
.to_str()
.unwrap()
.to_string();
// The upload session id is the last path segment of the Location header.
let uuid = location.rsplit('/').next().unwrap();
let put_url = format!("/v2/alpine/blobs/uploads/{}?digest={}", uuid, digest);
let put_resp = send(&ctx.app, Method::PUT, &put_url, Body::from(&blob_data[..])).await;
assert_eq!(put_resp.status(), StatusCode::CREATED);
}
#[tokio::test]
// Chunked blob upload: POST opens a session, PATCH appends a chunk (202),
// and a final PUT with ?digest= and an empty body commits the upload (201).
async fn test_docker_chunked_upload() {
let ctx = create_test_context();
let blob_data = b"test chunked blob";
let digest = format!("sha256:{}", hex::encode(sha2::Sha256::digest(blob_data)));
let post_resp = send(
&ctx.app,
Method::POST,
"/v2/alpine/blobs/uploads/",
Body::empty(),
)
.await;
assert_eq!(post_resp.status(), StatusCode::ACCEPTED);
let location = post_resp
.headers()
.get("location")
.unwrap()
.to_str()
.unwrap()
.to_string();
// The upload session id is the last path segment of the Location header.
let uuid = location.rsplit('/').next().unwrap();
let patch_url = format!("/v2/alpine/blobs/uploads/{}", uuid);
let patch_resp = send(
&ctx.app,
Method::PATCH,
&patch_url,
Body::from(&blob_data[..]),
)
.await;
assert_eq!(patch_resp.status(), StatusCode::ACCEPTED);
// The commit PUT carries no body; all data arrived via PATCH.
let put_url = format!("/v2/alpine/blobs/uploads/{}?digest={}", uuid, digest);
let put_resp = send(&ctx.app, Method::PUT, &put_url, Body::empty()).await;
assert_eq!(put_resp.status(), StatusCode::CREATED);
}
// HEAD on an uploaded blob must return 200 with a Content-Length equal to
// the blob's size.
#[tokio::test]
async fn test_docker_check_blob() {
    let ctx = create_test_context();
    let blob_data = b"test blob for head";
    let digest = format!("sha256:{}", hex::encode(sha2::Sha256::digest(blob_data)));
    let post_resp = send(
        &ctx.app,
        Method::POST,
        "/v2/alpine/blobs/uploads/",
        Body::empty(),
    )
    .await;
    // Assert each setup step (statuses matching test_docker_monolithic_upload)
    // so an upload failure is reported where it happens, not as a confusing
    // HEAD failure further down.
    assert_eq!(post_resp.status(), StatusCode::ACCEPTED);
    let location = post_resp
        .headers()
        .get("location")
        .unwrap()
        .to_str()
        .unwrap()
        .to_string();
    let uuid = location.rsplit('/').next().unwrap();
    let put_url = format!("/v2/alpine/blobs/uploads/{}?digest={}", uuid, digest);
    let put_resp = send(&ctx.app, Method::PUT, &put_url, Body::from(&blob_data[..])).await;
    assert_eq!(put_resp.status(), StatusCode::CREATED);
    let head_url = format!("/v2/alpine/blobs/{}", digest);
    let head_resp = send(&ctx.app, Method::HEAD, &head_url, Body::empty()).await;
    assert_eq!(head_resp.status(), StatusCode::OK);
    let cl = head_resp
        .headers()
        .get(header::CONTENT_LENGTH)
        .unwrap()
        .to_str()
        .unwrap()
        .parse::<usize>()
        .unwrap();
    assert_eq!(cl, blob_data.len());
}
// GET on an uploaded blob must return 200 and the exact stored bytes.
#[tokio::test]
async fn test_docker_download_blob() {
    let ctx = create_test_context();
    let blob_data = b"test blob for download";
    let digest = format!("sha256:{}", hex::encode(sha2::Sha256::digest(blob_data)));
    let post_resp = send(
        &ctx.app,
        Method::POST,
        "/v2/alpine/blobs/uploads/",
        Body::empty(),
    )
    .await;
    // Assert setup steps (same statuses test_docker_monolithic_upload checks)
    // so an upload failure is reported at its source rather than as a
    // missing-blob GET failure below.
    assert_eq!(post_resp.status(), StatusCode::ACCEPTED);
    let location = post_resp
        .headers()
        .get("location")
        .unwrap()
        .to_str()
        .unwrap()
        .to_string();
    let uuid = location.rsplit('/').next().unwrap();
    let put_url = format!("/v2/alpine/blobs/uploads/{}?digest={}", uuid, digest);
    let put_resp = send(&ctx.app, Method::PUT, &put_url, Body::from(&blob_data[..])).await;
    assert_eq!(put_resp.status(), StatusCode::CREATED);
    let get_url = format!("/v2/alpine/blobs/{}", digest);
    let get_resp = send(&ctx.app, Method::GET, &get_url, Body::empty()).await;
    assert_eq!(get_resp.status(), StatusCode::OK);
    let body = body_bytes(get_resp).await;
    assert_eq!(body.as_ref(), &blob_data[..]);
}
// HEAD on a digest that was never uploaded must yield 404.
#[tokio::test]
async fn test_docker_blob_not_found() {
    let ctx = create_test_context();
    let missing = "sha256:0000000000000000000000000000000000000000000000000000000000000000";
    let url = format!("/v2/alpine/blobs/{}", missing);
    let resp = send(&ctx.app, Method::HEAD, &url, Body::empty()).await;
    assert_eq!(resp.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
// Deleting an uploaded blob by digest must be acknowledged with 202 Accepted.
async fn test_docker_delete_blob() {
let ctx = create_test_context();
let blob_data = b"test blob for delete";
let digest = format!("sha256:{}", hex::encode(sha2::Sha256::digest(blob_data)));
let post_resp = send(
&ctx.app,
Method::POST,
"/v2/alpine/blobs/uploads/",
Body::empty(),
)
.await;
let location = post_resp
.headers()
.get("location")
.unwrap()
.to_str()
.unwrap()
.to_string();
// The upload session id is the last path segment of the Location header.
let uuid = location.rsplit('/').next().unwrap();
let put_url = format!("/v2/alpine/blobs/uploads/{}?digest={}", uuid, digest);
send(&ctx.app, Method::PUT, &put_url, Body::from(&blob_data[..])).await;
let delete_url = format!("/v2/alpine/blobs/{}", digest);
let delete_resp = send(&ctx.app, Method::DELETE, &delete_url, Body::empty()).await;
assert_eq!(delete_resp.status(), StatusCode::ACCEPTED);
}
#[tokio::test]
// Two-segment repository names ("library/alpine") must route correctly:
// the PUT is accepted and a sha256 digest header is returned.
async fn test_docker_namespaced_routes() {
let ctx = create_test_context();
let manifest = serde_json::json!({
"schemaVersion": 2,
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
"config": {
"mediaType": "application/vnd.docker.container.image.v1+json",
"size": 0,
"digest": "sha256:0000000000000000000000000000000000000000000000000000000000000000"
},
"layers": []
});
let put_resp = send(
&ctx.app,
Method::PUT,
"/v2/library/alpine/manifests/latest",
Body::from(serde_json::to_vec(&manifest).unwrap()),
)
.await;
assert_eq!(put_resp.status(), StatusCode::CREATED);
assert!(put_resp
.headers()
.get("docker-content-digest")
.unwrap()
.to_str()
.unwrap()
.starts_with("sha256:"));
}
}

View File

@@ -139,6 +139,7 @@ fn parse_www_authenticate(header: &str) -> Option<HashMap<String, String>> {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;
@@ -167,4 +168,86 @@ mod tests {
Some(&"https://ghcr.io/token".to_string())
);
}
// A Basic challenge is not a Bearer challenge, so parsing must yield None.
#[test]
fn test_parse_www_authenticate_no_bearer() {
    let parsed = parse_www_authenticate("Basic realm=\"test\"");
    assert!(parsed.is_none());
}
// An empty header carries no Bearer challenge at all.
#[test]
fn test_parse_www_authenticate_empty() {
    let parsed = parse_www_authenticate("");
    assert!(parsed.is_none());
}
#[test]
// A Bearer challenge may carry only `realm`; the parser must return what is
// present and simply omit missing parameters like `service`.
fn test_parse_www_authenticate_partial() {
let header = r#"Bearer realm="https://example.com/token""#;
let params = parse_www_authenticate(header).unwrap();
assert_eq!(
params.get("realm"),
Some(&"https://example.com/token".to_string())
);
assert!(!params.contains_key("service"));
}
// A default-constructed DockerAuth starts with an empty token cache.
#[test]
fn test_docker_auth_default() {
    let auth = DockerAuth::default();
    let cache = auth.tokens.read();
    assert!(cache.is_empty());
}
// DockerAuth::new must likewise begin with no cached tokens.
#[test]
fn test_docker_auth_new() {
    assert!(DockerAuth::new(30).tokens.read().is_empty());
}
#[tokio::test]
// With no WWW-Authenticate challenge and an empty cache there is nowhere to
// obtain a token from, so get_token returns None.
async fn test_get_token_no_www_authenticate() {
let auth = DockerAuth::default();
let result = auth
.get_token("https://registry.example.com", "library/test", None, None)
.await;
assert!(result.is_none());
}
#[tokio::test]
// A still-valid cached token is returned directly; no challenge header is
// needed on the cache-hit path.
async fn test_get_token_cache_hit() {
let auth = DockerAuth::default();
// Manually insert a cached token
{
let mut tokens = auth.tokens.write();
// Cache key format is "<registry>:<repository>".
tokens.insert(
"https://registry.example.com:library/test".to_string(),
CachedToken {
token: "cached-token-123".to_string(),
// Expiry well in the future, so the entry counts as fresh.
expires_at: Instant::now() + Duration::from_secs(300),
},
);
}
let result = auth
.get_token("https://registry.example.com", "library/test", None, None)
.await;
assert_eq!(result, Some("cached-token-123".to_string()));
}
#[tokio::test]
// An expired cache entry must not be returned; with no challenge header to
// drive a refresh, get_token yields None.
async fn test_get_token_cache_expired() {
let auth = DockerAuth::default();
{
let mut tokens = auth.tokens.write();
tokens.insert(
"https://registry.example.com:library/test".to_string(),
CachedToken {
token: "expired-token".to_string(),
// Already past its expiry instant.
expires_at: Instant::now() - Duration::from_secs(1),
},
);
}
// Without www_authenticate, returns None (can't fetch new token)
let result = auth
.get_token("https://registry.example.com", "library/test", None, None)
.await;
assert!(result.is_none());
}
}

View File

@@ -307,6 +307,7 @@ fn with_content_type(data: Vec<u8>, content_type: &'static str) -> Response {
// ============================================================================
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;

View File

@@ -145,3 +145,148 @@ fn with_content_type(
(StatusCode::OK, [(header::CONTENT_TYPE, content_type)], data)
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Returns the Content-Type `with_content_type` selects for `path`.
    fn content_type_for(path: &str) -> &'static str {
        let (_, headers, _) = with_content_type(path, Bytes::from("data"));
        headers[0].1
    }

    // POM files are XML; the handler must also answer 200.
    #[test]
    fn test_content_type_pom() {
        let (status, _, _) =
            with_content_type("com/example/1.0/example-1.0.pom", Bytes::from("data"));
        assert_eq!(status, StatusCode::OK);
        assert_eq!(
            content_type_for("com/example/1.0/example-1.0.pom"),
            "application/xml"
        );
    }

    #[test]
    fn test_content_type_jar() {
        assert_eq!(
            content_type_for("com/example/1.0/example-1.0.jar"),
            "application/java-archive"
        );
    }

    #[test]
    fn test_content_type_xml() {
        assert_eq!(
            content_type_for("com/example/maven-metadata.xml"),
            "application/xml"
        );
    }

    // Checksum sidecars (.sha1/.md5) are plain text.
    #[test]
    fn test_content_type_sha1() {
        assert_eq!(
            content_type_for("com/example/1.0/example-1.0.jar.sha1"),
            "text/plain"
        );
    }

    #[test]
    fn test_content_type_md5() {
        assert_eq!(
            content_type_for("com/example/1.0/example-1.0.jar.md5"),
            "text/plain"
        );
    }

    // Unrecognized extensions fall back to application/octet-stream.
    #[test]
    fn test_content_type_unknown() {
        assert_eq!(
            content_type_for("some/random/file.bin"),
            "application/octet-stream"
        );
    }

    // The payload must pass through unchanged.
    #[test]
    fn test_content_type_preserves_body() {
        let payload = Bytes::from("test-jar-content");
        let (_, _, returned) = with_content_type("test.jar", payload.clone());
        assert_eq!(returned, payload);
    }
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod integration_tests {
    use crate::test_helpers::{body_bytes, create_test_context, send};
    use axum::body::Body;
    use axum::http::{header, Method, StatusCode};

    // PUT then GET of an artifact must return the exact stored bytes.
    #[tokio::test]
    async fn test_maven_put_get_roundtrip() {
        let ctx = create_test_context();
        let jar_data = b"fake-jar-content";
        let put = send(
            &ctx.app,
            Method::PUT,
            "/maven2/com/example/mylib/1.0/mylib-1.0.jar",
            Body::from(&jar_data[..]),
        )
        .await;
        assert_eq!(put.status(), StatusCode::CREATED);
        let get = send(
            &ctx.app,
            Method::GET,
            "/maven2/com/example/mylib/1.0/mylib-1.0.jar",
            "",
        )
        .await;
        assert_eq!(get.status(), StatusCode::OK);
        let body = body_bytes(get).await;
        assert_eq!(&body[..], jar_data);
    }

    // Without an upstream proxy configured, a missing artifact is a 404.
    #[tokio::test]
    async fn test_maven_not_found_no_proxy() {
        let ctx = create_test_context();
        let resp = send(
            &ctx.app,
            Method::GET,
            "/maven2/missing/artifact/1.0/artifact-1.0.jar",
            "",
        )
        .await;
        assert_eq!(resp.status(), StatusCode::NOT_FOUND);
    }

    // .pom files must come back as application/xml.
    #[tokio::test]
    async fn test_maven_content_type_pom() {
        let ctx = create_test_context();
        // Assert the setup PUT (as the roundtrip test does) so a write
        // failure surfaces here, not as a confusing content-type mismatch.
        let put = send(
            &ctx.app,
            Method::PUT,
            "/maven2/com/ex/1.0/ex-1.0.pom",
            Body::from("<project/>"),
        )
        .await;
        assert_eq!(put.status(), StatusCode::CREATED);
        let get = send(&ctx.app, Method::GET, "/maven2/com/ex/1.0/ex-1.0.pom", "").await;
        assert_eq!(get.status(), StatusCode::OK);
        assert_eq!(
            get.headers().get(header::CONTENT_TYPE).unwrap(),
            "application/xml"
        );
    }

    // .jar files must come back as application/java-archive.
    #[tokio::test]
    async fn test_maven_content_type_jar() {
        let ctx = create_test_context();
        let put = send(
            &ctx.app,
            Method::PUT,
            "/maven2/org/test/app/2.0/app-2.0.jar",
            Body::from("jar-data"),
        )
        .await;
        assert_eq!(put.status(), StatusCode::CREATED);
        let get = send(
            &ctx.app,
            Method::GET,
            "/maven2/org/test/app/2.0/app-2.0.jar",
            "",
        )
        .await;
        assert_eq!(get.status(), StatusCode::OK);
        assert_eq!(
            get.headers().get(header::CONTENT_TYPE).unwrap(),
            "application/java-archive"
        );
    }
}

View File

@@ -432,6 +432,7 @@ fn with_content_type(
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;
@@ -555,4 +556,229 @@ mod tests {
assert!(!is_valid_attachment_name(""));
assert!(!is_valid_attachment_name("foo\0bar.tgz"));
}
#[test]
// Tarball responses (flag = true) are application/octet-stream and the body
// passes through unchanged.
fn test_with_content_type_tarball() {
let data = Bytes::from("tarball-data");
let (status, headers, body) = with_content_type(true, data.clone());
assert_eq!(status, StatusCode::OK);
assert_eq!(headers[0].1, "application/octet-stream");
assert_eq!(body, data);
}
#[test]
// Metadata responses (flag = false) are application/json with the body
// passed through unchanged.
fn test_with_content_type_json() {
let data = Bytes::from("json-data");
let (status, headers, body) = with_content_type(false, data.clone());
assert_eq!(status, StatusCode::OK);
assert_eq!(headers[0].1, "application/json");
assert_eq!(body, data);
}
#[test]
// Rewriting must tolerate trailing slashes on both the local base URL and
// the upstream URL and still produce a clean "/npm/" tarball prefix.
fn test_rewrite_tarball_urls_trailing_slash() {
let metadata = serde_json::json!({
"name": "test",
"versions": {
"1.0.0": {
"dist": {
"tarball": "https://registry.npmjs.org/test/-/test-1.0.0.tgz"
}
}
}
});
let data = serde_json::to_vec(&metadata).unwrap();
let result =
rewrite_tarball_urls(&data, "http://nora:5000/", "https://registry.npmjs.org/")
.unwrap();
let json: serde_json::Value = serde_json::from_slice(&result).unwrap();
let tarball = json["versions"]["1.0.0"]["dist"]["tarball"]
.as_str()
.unwrap();
// Upstream host replaced by the local proxy's /npm/ route.
assert!(tarball.starts_with("http://nora:5000/npm/"));
}
#[test]
// Only dist.tarball URLs are rewritten; all sibling metadata fields
// (description, shasum, dependencies, ...) must survive untouched.
fn test_rewrite_tarball_urls_preserves_other_fields() {
let metadata = serde_json::json!({
"name": "test",
"description": "A test package",
"versions": {
"1.0.0": {
"dist": {
"tarball": "https://registry.npmjs.org/test/-/test-1.0.0.tgz",
"shasum": "abc123"
},
"dependencies": {"lodash": "^4.0.0"}
}
}
});
let data = serde_json::to_vec(&metadata).unwrap();
let result =
rewrite_tarball_urls(&data, "http://nora:5000", "https://registry.npmjs.org").unwrap();
let json: serde_json::Value = serde_json::from_slice(&result).unwrap();
assert_eq!(json["description"], "A test package");
assert_eq!(json["versions"]["1.0.0"]["dist"]["shasum"], "abc123");
}
// Typical tarball names — including scoped-style '@' and underscores — pass.
#[test]
fn test_is_valid_attachment_name_valid() {
    for name in ["package-1.0.0.tgz", "@scope-pkg-2.0.tgz", "my_pkg.tgz"] {
        assert!(is_valid_attachment_name(name), "should accept {}", name);
    }
}
// Path-traversal sequences must be rejected.
#[test]
fn test_is_valid_attachment_name_traversal() {
    for name in ["../etc/passwd", "foo/../bar"] {
        assert!(!is_valid_attachment_name(name), "should reject {}", name);
    }
}
// Both forward and back slashes must be rejected as path separators.
#[test]
fn test_is_valid_attachment_name_slash() {
    for name in ["path/file.tgz", "path\\file.tgz"] {
        assert!(!is_valid_attachment_name(name), "should reject {}", name);
    }
}
// Embedded NUL bytes must be rejected.
#[test]
fn test_is_valid_attachment_name_null_byte() {
    let name = "file\0.tgz";
    assert!(!is_valid_attachment_name(name));
}
// The empty string is not a valid attachment name.
#[test]
fn test_is_valid_attachment_name_empty() {
    let empty = "";
    assert!(!is_valid_attachment_name(empty));
}
// Shell-sensitive characters (space, semicolon) must be rejected.
#[test]
fn test_is_valid_attachment_name_special_chars() {
    for name in ["file name.tgz", "file;cmd.tgz"] {
        assert!(!is_valid_attachment_name(name), "should reject {}", name);
    }
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod integration_tests {
    use crate::test_helpers::{body_bytes, create_test_context, send};
    use axum::body::Body;
    use axum::http::{Method, StatusCode};
    use base64::Engine;

    // Metadata cached at npm/<pkg>/metadata.json is served on GET /npm/<pkg>.
    #[tokio::test]
    async fn test_npm_metadata_from_cache() {
        let ctx = create_test_context();
        let metadata = serde_json::json!({
            "name": "lodash",
            "versions": {
                "4.17.21": { "dist": { "tarball": "http://example.com/lodash.tgz" } }
            }
        });
        let metadata_bytes = serde_json::to_vec(&metadata).unwrap();
        ctx.state
            .storage
            .put("npm/lodash/metadata.json", &metadata_bytes)
            .await
            .unwrap();
        let response = send(&ctx.app, Method::GET, "/npm/lodash", "").await;
        assert_eq!(response.status(), StatusCode::OK);
        let body = body_bytes(response).await;
        let json: serde_json::Value = serde_json::from_slice(&body).unwrap();
        assert_eq!(json["name"], "lodash");
    }

    // A tarball cached under npm/<pkg>/tarballs/ is served byte-for-byte.
    #[tokio::test]
    async fn test_npm_tarball_from_cache() {
        let ctx = create_test_context();
        let tarball_data = b"fake-tarball-bytes";
        ctx.state
            .storage
            .put("npm/lodash/tarballs/lodash-4.17.21.tgz", tarball_data)
            .await
            .unwrap();
        let response = send(
            &ctx.app,
            Method::GET,
            "/npm/lodash/-/lodash-4.17.21.tgz",
            "",
        )
        .await;
        assert_eq!(response.status(), StatusCode::OK);
        let body = body_bytes(response).await;
        assert_eq!(&body[..], tarball_data);
    }

    #[tokio::test]
    async fn test_npm_not_found_no_proxy() {
        let ctx = create_test_context();
        // No proxy configured, no local data
        let response = send(&ctx.app, Method::GET, "/npm/nonexistent", "").await;
        assert_eq!(response.status(), StatusCode::NOT_FOUND);
    }

    // Publishing stores the base64-decoded attachment under tarballs/.
    #[tokio::test]
    async fn test_npm_publish_basic() {
        let ctx = create_test_context();
        let tarball_data = b"fake-tarball";
        let base64_data = base64::engine::general_purpose::STANDARD.encode(tarball_data);
        let payload = serde_json::json!({
            "name": "mypkg",
            "versions": {
                "1.0.0": { "dist": {} }
            },
            "_attachments": {
                "mypkg-1.0.0.tgz": { "data": base64_data }
            },
            "dist-tags": { "latest": "1.0.0" }
        });
        // Named `payload_bytes` (not `body_bytes`) so the imported
        // body_bytes() helper is not shadowed by a local binding.
        let payload_bytes = serde_json::to_vec(&payload).unwrap();
        let response = send(&ctx.app, Method::PUT, "/npm/mypkg", Body::from(payload_bytes)).await;
        assert_eq!(response.status(), StatusCode::CREATED);
        // Verify tarball was stored
        let stored_tarball = ctx
            .state
            .storage
            .get("npm/mypkg/tarballs/mypkg-1.0.0.tgz")
            .await
            .unwrap();
        assert_eq!(&stored_tarball[..], tarball_data);
    }

    // Publishing under a URL package name that differs from the payload's
    // "name" field must be rejected with 400.
    #[tokio::test]
    async fn test_npm_publish_name_mismatch() {
        let ctx = create_test_context();
        let tarball_data = b"fake-tarball";
        let base64_data = base64::engine::general_purpose::STANDARD.encode(tarball_data);
        let payload = serde_json::json!({
            "name": "other",
            "versions": {
                "1.0.0": { "dist": {} }
            },
            "_attachments": {
                "other-1.0.0.tgz": { "data": base64_data }
            },
            "dist-tags": { "latest": "1.0.0" }
        });
        let payload_bytes = serde_json::to_vec(&payload).unwrap();
        let response = send(&ctx.app, Method::PUT, "/npm/mypkg", Body::from(payload_bytes)).await;
        assert_eq!(response.status(), StatusCode::BAD_REQUEST);
    }
}

View File

@@ -305,3 +305,311 @@ fn find_file_url(html: &str, target_filename: &str) -> Option<String> {
None
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;
use proptest::prelude::*;
// Property tests: extract_filename must never panic on arbitrary input and
// must uphold its format invariants for well-formed URLs.
proptest! {
#[test]
fn extract_filename_never_panics(s in "\\PC{0,500}") {
let _ = extract_filename(&s);
}
#[test]
fn extract_filename_valid_tarball(
name in "[a-z][a-z0-9_-]{0,20}",
version in "[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}"
) {
let url = format!("https://files.example.com/packages/{}-{}.tar.gz", name, version);
let result = extract_filename(&url);
prop_assert!(result.is_some());
prop_assert!(result.unwrap().ends_with(".tar.gz"));
}
#[test]
fn extract_filename_valid_wheel(
name in "[a-z][a-z0-9_]{0,20}",
version in "[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}"
) {
let url = format!("https://files.example.com/{}-{}-py3-none-any.whl", name, version);
let result = extract_filename(&url);
prop_assert!(result.is_some());
prop_assert!(result.unwrap().ends_with(".whl"));
}
// The URL fragment ("#sha256=...") must never leak into the filename.
#[test]
fn extract_filename_strips_hash(
name in "[a-z]{1,10}",
hash in "[a-f0-9]{64}"
) {
let url = format!("https://example.com/{}.tar.gz#sha256={}", name, hash);
let result = extract_filename(&url);
prop_assert!(result.is_some());
let fname = result.unwrap();
prop_assert!(!fname.contains('#'));
}
// Non-package extensions must be rejected outright.
#[test]
fn extract_filename_rejects_unknown_ext(
name in "[a-z]{1,10}",
ext in "(exe|dll|so|bin|dat)"
) {
let url = format!("https://example.com/{}.{}", name, ext);
prop_assert!(extract_filename(&url).is_none());
}
}
// normalize_name: PEP-503-style normalization (lowercase, runs of
// '-'/'_'/'.' collapse to a single '-').
#[test]
fn test_normalize_name_lowercase() {
assert_eq!(normalize_name("Flask"), "flask");
assert_eq!(normalize_name("REQUESTS"), "requests");
}
#[test]
fn test_normalize_name_separators() {
assert_eq!(normalize_name("my-package"), "my-package");
assert_eq!(normalize_name("my_package"), "my-package");
assert_eq!(normalize_name("my.package"), "my-package");
}
#[test]
fn test_normalize_name_mixed() {
assert_eq!(
normalize_name("My_Complex.Package-Name"),
"my-complex-package-name"
);
}
#[test]
fn test_normalize_name_empty() {
assert_eq!(normalize_name(""), "");
}
#[test]
fn test_normalize_name_already_normal() {
assert_eq!(normalize_name("simple"), "simple");
}
// extract_filename: concrete examples per supported extension.
#[test]
fn test_extract_filename_tarball() {
assert_eq!(
extract_filename(
"https://files.pythonhosted.org/packages/aa/bb/flask-2.0.0.tar.gz#sha256=abc123"
),
Some("flask-2.0.0.tar.gz")
);
}
#[test]
fn test_extract_filename_wheel() {
assert_eq!(
extract_filename(
"https://files.pythonhosted.org/packages/aa/bb/flask-2.0.0-py3-none-any.whl"
),
Some("flask-2.0.0-py3-none-any.whl")
);
}
#[test]
fn test_extract_filename_tgz() {
assert_eq!(
extract_filename("https://example.com/package-1.0.tgz"),
Some("package-1.0.tgz")
);
}
#[test]
fn test_extract_filename_zip() {
assert_eq!(
extract_filename("https://example.com/package-1.0.zip"),
Some("package-1.0.zip")
);
}
#[test]
fn test_extract_filename_egg() {
assert_eq!(
extract_filename("https://example.com/package-1.0.egg"),
Some("package-1.0.egg")
);
}
#[test]
fn test_extract_filename_unknown_ext() {
assert_eq!(extract_filename("https://example.com/readme.txt"), None);
}
#[test]
fn test_extract_filename_no_path() {
assert_eq!(extract_filename(""), None);
}
// A bare filename with no URL prefix still resolves.
#[test]
fn test_extract_filename_bare() {
assert_eq!(
extract_filename("package-1.0.tar.gz"),
Some("package-1.0.tar.gz")
);
}
// remove_attribute: strips an HTML attribute (and its value) everywhere it
// appears, leaving the rest of the markup intact.
#[test]
fn test_remove_attribute_present() {
let html = r#"<a href="url" data-core-metadata="true">link</a>"#;
let result = remove_attribute(html, "data-core-metadata");
assert_eq!(result, r#"<a href="url">link</a>"#);
}
#[test]
fn test_remove_attribute_absent() {
let html = r#"<a href="url">link</a>"#;
let result = remove_attribute(html, "data-core-metadata");
assert_eq!(result, html);
}
#[test]
fn test_remove_attribute_multiple() {
let html =
r#"<a data-core-metadata="true">one</a><a data-core-metadata="sha256=abc">two</a>"#;
let result = remove_attribute(html, "data-core-metadata");
assert_eq!(result, r#"<a>one</a><a>two</a>"#);
}
// rewrite_pypi_links: upstream file URLs become local /simple/<pkg>/ links;
// unknown extensions are left untouched; metadata attributes are dropped.
#[test]
fn test_rewrite_pypi_links_basic() {
let html = r#"<a href="https://files.pythonhosted.org/packages/aa/bb/flask-2.0.tar.gz#sha256=abc">flask-2.0.tar.gz</a>"#;
let result = rewrite_pypi_links(html, "flask");
assert!(result.contains("/simple/flask/flask-2.0.tar.gz"));
}
#[test]
fn test_rewrite_pypi_links_unknown_ext() {
let html = r#"<a href="https://example.com/readme.txt">readme</a>"#;
let result = rewrite_pypi_links(html, "test");
assert!(result.contains("https://example.com/readme.txt"));
}
#[test]
fn test_rewrite_pypi_links_removes_metadata_attrs() {
let html = r#"<a href="https://example.com/pkg-1.0.whl" data-core-metadata="sha256=abc" data-dist-info-metadata="sha256=def">pkg</a>"#;
let result = rewrite_pypi_links(html, "pkg");
assert!(!result.contains("data-core-metadata"));
assert!(!result.contains("data-dist-info-metadata"));
}
#[test]
fn test_rewrite_pypi_links_empty() {
assert_eq!(rewrite_pypi_links("", "pkg"), "");
}
// find_file_url: locates the upstream URL for a given filename, with the
// "#sha256=" fragment stripped.
#[test]
fn test_find_file_url_found() {
let html = r#"<a href="https://files.pythonhosted.org/packages/aa/bb/flask-2.0.tar.gz#sha256=abc">flask-2.0.tar.gz</a>"#;
let result = find_file_url(html, "flask-2.0.tar.gz");
assert_eq!(
result,
Some("https://files.pythonhosted.org/packages/aa/bb/flask-2.0.tar.gz".to_string())
);
}
#[test]
fn test_find_file_url_not_found() {
let html = r#"<a href="https://example.com/other-1.0.tar.gz">other</a>"#;
let result = find_file_url(html, "flask-2.0.tar.gz");
assert_eq!(result, None);
}
#[test]
fn test_find_file_url_strips_hash() {
let html = r#"<a href="https://example.com/pkg-1.0.whl#sha256=deadbeef">pkg</a>"#;
let result = find_file_url(html, "pkg-1.0.whl");
assert_eq!(result, Some("https://example.com/pkg-1.0.whl".to_string()));
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod integration_tests {
use crate::test_helpers::{body_bytes, create_test_context, send};
use axum::http::{Method, StatusCode};
// With no packages cached, /simple/ still renders the index page.
#[tokio::test]
async fn test_pypi_list_empty() {
let ctx = create_test_context();
let response = send(&ctx.app, Method::GET, "/simple/", "").await;
assert_eq!(response.status(), StatusCode::OK);
let body = body_bytes(response).await;
let html = String::from_utf8_lossy(&body);
assert!(html.contains("Simple Index"));
}
// A package stored under pypi/<name>/ must appear in the /simple/ index.
#[tokio::test]
async fn test_pypi_list_with_packages() {
let ctx = create_test_context();
// Pre-populate storage with a package
ctx.state
.storage
.put("pypi/flask/flask-2.0.tar.gz", b"fake-tarball-data")
.await
.unwrap();
let response = send(&ctx.app, Method::GET, "/simple/", "").await;
assert_eq!(response.status(), StatusCode::OK);
let body = body_bytes(response).await;
let html = String::from_utf8_lossy(&body);
assert!(html.contains("flask"));
}
// The per-package page must list each cached file and link it through the
// local /simple/<pkg>/ route.
#[tokio::test]
async fn test_pypi_versions_local() {
let ctx = create_test_context();
// Pre-populate storage
ctx.state
.storage
.put("pypi/flask/flask-2.0.tar.gz", b"fake-data")
.await
.unwrap();
let response = send(&ctx.app, Method::GET, "/simple/flask/", "").await;
assert_eq!(response.status(), StatusCode::OK);
let body = body_bytes(response).await;
let html = String::from_utf8_lossy(&body);
assert!(html.contains("flask-2.0.tar.gz"));
assert!(html.contains("/simple/flask/flask-2.0.tar.gz"));
}
// Downloading a cached file must return the exact stored bytes.
#[tokio::test]
async fn test_pypi_download_local() {
let ctx = create_test_context();
let tarball_data = b"fake-tarball-content";
ctx.state
.storage
.put("pypi/flask/flask-2.0.tar.gz", tarball_data)
.await
.unwrap();
let response = send(&ctx.app, Method::GET, "/simple/flask/flask-2.0.tar.gz", "").await;
assert_eq!(response.status(), StatusCode::OK);
let body = body_bytes(response).await;
assert_eq!(&body[..], tarball_data);
}
#[tokio::test]
async fn test_pypi_not_found_no_proxy() {
let ctx = create_test_context();
// No proxy configured, no local data
let response = send(&ctx.app, Method::GET, "/simple/nonexistent/", "").await;
assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
}

View File

@@ -141,3 +141,175 @@ fn guess_content_type(path: &str) -> &'static str {
_ => "application/octet-stream",
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Asserts guess_content_type maps each path to the expected MIME type.
    fn expect_ct(cases: &[(&str, &str)]) {
        for &(path, mime) in cases {
            assert_eq!(guess_content_type(path), mime, "path: {}", path);
        }
    }

    #[test]
    fn test_guess_content_type_json() {
        expect_ct(&[("config.json", "application/json")]);
    }
    #[test]
    fn test_guess_content_type_xml() {
        expect_ct(&[("data.xml", "application/xml")]);
    }
    #[test]
    fn test_guess_content_type_html() {
        expect_ct(&[("index.html", "text/html"), ("page.htm", "text/html")]);
    }
    #[test]
    fn test_guess_content_type_css() {
        expect_ct(&[("style.css", "text/css")]);
    }
    #[test]
    fn test_guess_content_type_js() {
        expect_ct(&[("app.js", "application/javascript")]);
    }
    #[test]
    fn test_guess_content_type_text() {
        expect_ct(&[("readme.txt", "text/plain")]);
    }
    #[test]
    fn test_guess_content_type_markdown() {
        expect_ct(&[("README.md", "text/markdown")]);
    }
    #[test]
    fn test_guess_content_type_yaml() {
        expect_ct(&[
            ("config.yaml", "application/x-yaml"),
            ("config.yml", "application/x-yaml"),
        ]);
    }
    #[test]
    fn test_guess_content_type_toml() {
        expect_ct(&[("Cargo.toml", "application/toml")]);
    }
    #[test]
    fn test_guess_content_type_archives() {
        expect_ct(&[
            ("data.tar", "application/x-tar"),
            ("data.gz", "application/gzip"),
            ("data.gzip", "application/gzip"),
            ("data.zip", "application/zip"),
        ]);
    }
    #[test]
    fn test_guess_content_type_images() {
        expect_ct(&[
            ("logo.png", "image/png"),
            ("photo.jpg", "image/jpeg"),
            ("photo.jpeg", "image/jpeg"),
            ("anim.gif", "image/gif"),
            ("icon.svg", "image/svg+xml"),
        ]);
    }
    #[test]
    fn test_guess_content_type_special() {
        expect_ct(&[
            ("doc.pdf", "application/pdf"),
            ("module.wasm", "application/wasm"),
        ]);
    }
    // Unknown or missing extensions fall back to application/octet-stream.
    #[test]
    fn test_guess_content_type_unknown() {
        expect_ct(&[
            ("binary.bin", "application/octet-stream"),
            ("noext", "application/octet-stream"),
        ]);
    }
    // Extension matching must ignore case.
    #[test]
    fn test_guess_content_type_case_insensitive() {
        expect_ct(&[
            ("FILE.JSON", "application/json"),
            ("IMAGE.PNG", "image/png"),
        ]);
    }
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod integration_tests {
use crate::test_helpers::{
body_bytes, create_test_context, create_test_context_with_raw_disabled, send,
};
use axum::http::{Method, StatusCode};
// PUT then GET under /raw/ must return the exact stored bytes.
#[tokio::test]
async fn test_raw_put_get_roundtrip() {
let ctx = create_test_context();
let put_resp = send(&ctx.app, Method::PUT, "/raw/test.txt", b"hello".to_vec()).await;
assert_eq!(put_resp.status(), StatusCode::CREATED);
let get_resp = send(&ctx.app, Method::GET, "/raw/test.txt", "").await;
assert_eq!(get_resp.status(), StatusCode::OK);
let body = body_bytes(get_resp).await;
assert_eq!(&body[..], b"hello");
}
// HEAD reports the stored object's size via Content-Length.
#[tokio::test]
async fn test_raw_head() {
let ctx = create_test_context();
send(
&ctx.app,
Method::PUT,
"/raw/test.txt",
b"hello world".to_vec(),
)
.await;
let head_resp = send(&ctx.app, Method::HEAD, "/raw/test.txt", "").await;
assert_eq!(head_resp.status(), StatusCode::OK);
let cl = head_resp.headers().get("content-length").unwrap();
// "hello world" is 11 bytes.
assert_eq!(cl.to_str().unwrap(), "11");
}
// DELETE answers 204 and a subsequent GET is a 404.
#[tokio::test]
async fn test_raw_delete() {
let ctx = create_test_context();
send(&ctx.app, Method::PUT, "/raw/test.txt", b"data".to_vec()).await;
let del = send(&ctx.app, Method::DELETE, "/raw/test.txt", "").await;
assert_eq!(del.status(), StatusCode::NO_CONTENT);
let get = send(&ctx.app, Method::GET, "/raw/test.txt", "").await;
assert_eq!(get.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
async fn test_raw_not_found() {
let ctx = create_test_context();
let resp = send(&ctx.app, Method::GET, "/raw/missing.txt", "").await;
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
}
// Stored files are served with a Content-Type guessed from the extension.
#[tokio::test]
async fn test_raw_content_type_json() {
let ctx = create_test_context();
send(&ctx.app, Method::PUT, "/raw/file.json", b"{}".to_vec()).await;
let resp = send(&ctx.app, Method::GET, "/raw/file.json", "").await;
assert_eq!(resp.status(), StatusCode::OK);
let ct = resp.headers().get("content-type").unwrap();
assert_eq!(ct.to_str().unwrap(), "application/json");
}
// Uploads over the configured body limit are rejected with 413.
#[tokio::test]
async fn test_raw_payload_too_large() {
let ctx = create_test_context();
let big = vec![0u8; 2 * 1024 * 1024]; // 2 MB > 1 MB limit
let resp = send(&ctx.app, Method::PUT, "/raw/large.bin", big).await;
assert_eq!(resp.status(), StatusCode::PAYLOAD_TOO_LARGE);
}
// With the raw registry disabled, its routes are simply absent (404).
#[tokio::test]
async fn test_raw_disabled() {
let ctx = create_test_context_with_raw_disabled();
let get = send(&ctx.app, Method::GET, "/raw/test.txt", "").await;
assert_eq!(get.status(), StatusCode::NOT_FOUND);
let put = send(&ctx.app, Method::PUT, "/raw/test.txt", b"data".to_vec()).await;
assert_eq!(put.status(), StatusCode::NOT_FOUND);
}
}

View File

@@ -92,4 +92,69 @@ mod tests {
let cloned = id.clone();
assert_eq!(id.0, cloned.0);
}
// Debug output must include the wrapped id string.
#[test]
fn test_request_id_debug() {
    let rendered = format!("{:?}", RequestId("abc-def".to_string()));
    assert!(rendered.contains("abc-def"));
}
// The header constant must stay wire-compatible with "x-request-id".
#[test]
fn test_request_id_header_name() {
    let name = REQUEST_ID_HEADER.as_str();
    assert_eq!(name, "x-request-id");
}
// Deref to str exposes string methods directly on a RequestId.
#[test]
fn test_request_id_deref_string_methods() {
    let id = RequestId(String::from("req-12345"));
    assert_eq!(id.len(), 9);
    assert!(id.starts_with("req-"));
}
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod integration_tests {
use crate::test_helpers::{create_test_context, send, send_with_headers};
use axum::http::{Method, StatusCode};
// Every response must carry a non-empty X-Request-ID header, even when the
// client sent none (the middleware generates one).
#[tokio::test]
async fn test_response_has_request_id() {
let ctx = create_test_context();
let response = send(&ctx.app, Method::GET, "/health", "").await;
assert_eq!(response.status(), StatusCode::OK);
let request_id = response.headers().get("x-request-id");
assert!(
request_id.is_some(),
"Response must have X-Request-ID header"
);
let value = request_id.unwrap().to_str().unwrap();
assert!(!value.is_empty(), "X-Request-ID must not be empty");
}
// A client-supplied X-Request-ID must be echoed back unchanged so callers
// can correlate logs across services.
#[tokio::test]
async fn test_preserves_incoming_request_id() {
let ctx = create_test_context();
let custom_id = "custom-123";
let response = send_with_headers(
&ctx.app,
Method::GET,
"/health",
vec![("x-request-id", custom_id)],
"",
)
.await;
assert_eq!(response.status(), StatusCode::OK);
let returned_id = response
.headers()
.get("x-request-id")
.unwrap()
.to_str()
.unwrap();
assert_eq!(returned_id, custom_id);
}
}

View File

@@ -72,6 +72,7 @@ impl SecretsProvider for EnvProvider {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;

View File

@@ -130,6 +130,7 @@ pub fn create_secrets_provider(
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;

View File

@@ -144,6 +144,7 @@ impl StorageBackend for LocalStorage {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;
use tempfile::TempDir;

View File

@@ -427,4 +427,48 @@ mod tests {
let result = hmac_sha256(b"key", b"data");
assert!(!result.is_empty());
}
#[test]
fn test_uri_encode_safe_chars() {
    // Unreserved characters and the path separator pass through untouched.
    for input in ["hello", "foo/bar", "test-file_v1.0", "a~b"] {
        assert_eq!(uri_encode(input), input);
    }
}
#[test]
fn test_uri_encode_special_chars() {
    // Spaces must be percent-encoded as %20.
    assert_eq!(uri_encode("file name.txt"), "file%20name.txt");
    assert_eq!(uri_encode("hello world"), "hello%20world");
}
#[test]
fn test_uri_encode_query_chars() {
    // Query-significant characters (=, &, +) must be escaped.
    let cases = [
        ("key=value", "key%3Dvalue"),
        ("a&b", "a%26b"),
        ("a+b", "a%2Bb"),
    ];
    for (input, expected) in cases {
        assert_eq!(uri_encode(input), expected);
    }
}
#[test]
fn test_uri_encode_empty() {
    // The empty string encodes to itself.
    let encoded = uri_encode("");
    assert_eq!(encoded, "");
}
#[test]
fn test_uri_encode_all_safe_ranges() {
    // Every safe range stays literal: A-Z, a-z, 0-9, and - _ . ~ /
    for safe in ["ABCXYZ", "abcxyz", "0123456789", "-_.~/"] {
        assert_eq!(uri_encode(safe), safe);
    }
}
#[test]
fn test_uri_encode_percent() {
    // A literal percent sign must itself be escaped to %25.
    assert_eq!(uri_encode("100%done"), "100%25done");
    assert_eq!(uri_encode("%"), "%25");
}
}

View File

@@ -0,0 +1,255 @@
// Copyright (c) 2026 Volkov Pavel | DevITWay
// SPDX-License-Identifier: MIT
//! Shared test infrastructure for integration tests.
//!
//! Provides `TestContext` that builds a full axum Router backed by a
//! tempdir-based local storage with all upstream proxies disabled.
#![allow(clippy::unwrap_used)] // tests may use .unwrap() freely
use axum::{body::Body, extract::DefaultBodyLimit, http::Request, middleware, Router};
use http_body_util::BodyExt;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Instant;
use tempfile::TempDir;
use crate::activity_log::ActivityLog;
use crate::audit::AuditLog;
use crate::auth::HtpasswdAuth;
use crate::config::*;
use crate::dashboard_metrics::DashboardMetrics;
use crate::registry;
use crate::repo_index::RepoIndex;
use crate::storage::Storage;
use crate::tokens::TokenStore;
use crate::AppState;
use parking_lot::RwLock;
/// Everything a test needs: tempdir (must stay alive), shared state, and the router.
pub struct TestContext {
/// Shared application state backing the router.
pub state: Arc<AppState>,
/// The fully wired axum router under test.
pub app: Router,
/// Keeps the tempdir (and the on-disk storage inside it) alive for the
/// test's duration; the directory is deleted when this is dropped.
pub _tempdir: TempDir,
}
/// Context with authentication off and every upstream proxy disabled.
pub fn create_test_context() -> TestContext {
    build_context(false, &[], false, |_| {})
}
/// Context with authentication on; `users` are (username, password) pairs
/// written to an htpasswd file (bcrypt cost 4 to keep tests fast).
pub fn create_test_context_with_auth(users: &[(&str, &str)]) -> TestContext {
    build_context(true, users, false, |_| {})
}
/// Context with authentication on and the anonymous_read flag set.
pub fn create_test_context_with_anonymous_read(users: &[(&str, &str)]) -> TestContext {
    build_context(true, users, true, |_| {})
}
/// Context with the raw storage endpoints turned off.
pub fn create_test_context_with_raw_disabled() -> TestContext {
    build_context(false, &[], false, |cfg| cfg.raw.enabled = false)
}
/// Construct the full test fixture: tempdir-backed local storage, a `Config`
/// with every upstream proxy disabled, optional htpasswd auth, and a router
/// wired like `run_server()` minus the TCP listener and rate limiting.
///
/// * `auth_enabled` — enable the auth middleware and token store.
/// * `users` — (username, password) pairs written to an htpasswd file;
///   only used when `auth_enabled` and the slice is non-empty.
/// * `anonymous_read` — forwarded to `AuthConfig::anonymous_read`.
/// * `customize` — hook to tweak the built `Config` before state is created.
fn build_context(
auth_enabled: bool,
users: &[(&str, &str)],
anonymous_read: bool,
customize: impl FnOnce(&mut Config),
) -> TestContext {
let tempdir = TempDir::new().expect("failed to create tempdir");
let storage_path = tempdir.path().to_str().unwrap().to_string();
// Baseline config: local storage rooted in the tempdir, all proxies off,
// short timeouts so any accidental network call fails fast.
let mut config = Config {
server: ServerConfig {
host: "127.0.0.1".into(),
// Port is never bound: tests drive the router directly via oneshot.
port: 0,
public_url: None,
body_limit_mb: 2048,
},
storage: StorageConfig {
mode: StorageMode::Local,
path: storage_path.clone(),
s3_url: String::new(),
bucket: String::new(),
s3_access_key: None,
s3_secret_key: None,
s3_region: String::new(),
},
maven: MavenConfig {
proxies: vec![],
proxy_timeout: 5,
},
npm: NpmConfig {
proxy: None,
proxy_auth: None,
proxy_timeout: 5,
metadata_ttl: 0,
},
pypi: PypiConfig {
proxy: None,
proxy_auth: None,
proxy_timeout: 5,
},
go: GoConfig {
proxy: None,
proxy_auth: None,
proxy_timeout: 5,
proxy_timeout_zip: 30,
max_zip_size: 10_485_760,
},
docker: DockerConfig {
proxy_timeout: 5,
upstreams: vec![],
},
raw: RawConfig {
enabled: true,
max_file_size: 1_048_576, // 1 MB
},
auth: AuthConfig {
enabled: auth_enabled,
anonymous_read,
// Filled in below once the htpasswd file has been written.
htpasswd_file: String::new(),
token_storage: tempdir.path().join("tokens").to_str().unwrap().to_string(),
},
rate_limit: RateLimitConfig {
enabled: false,
..RateLimitConfig::default()
},
secrets: SecretsConfig::default(),
};
// Apply any custom config tweaks
customize(&mut config);
let storage = Storage::new_local(&storage_path);
// Write an htpasswd file only when auth is on AND users were supplied;
// bcrypt cost 4 keeps hashing fast enough for tests.
let auth = if auth_enabled && !users.is_empty() {
let htpasswd_path = tempdir.path().join("users.htpasswd");
let mut content = String::new();
for (username, password) in users {
let hash = bcrypt::hash(password, 4).expect("bcrypt hash");
content.push_str(&format!("{}:{}\n", username, hash));
}
std::fs::write(&htpasswd_path, &content).expect("write htpasswd");
// Mutate config after `customize` so the htpasswd path always wins.
config.auth.htpasswd_file = htpasswd_path.to_str().unwrap().to_string();
HtpasswdAuth::from_file(&htpasswd_path)
} else {
None
};
// Token store shares the same tempdir path config.auth.token_storage points at.
let tokens = if auth_enabled {
Some(TokenStore::new(tempdir.path().join("tokens").as_path()))
} else {
None
};
let docker_auth = registry::DockerAuth::new(config.docker.proxy_timeout);
let state = Arc::new(AppState {
storage,
config,
start_time: Instant::now(),
auth,
tokens,
metrics: DashboardMetrics::new(),
activity: ActivityLog::new(50),
audit: AuditLog::new(&storage_path),
docker_auth,
repo_index: RepoIndex::new(),
http_client: reqwest::Client::new(),
upload_sessions: Arc::new(RwLock::new(HashMap::new())),
});
// Build router identical to run_server() but without TcpListener / rate-limiting
let registry_routes = Router::new()
.merge(registry::docker_routes())
.merge(registry::maven_routes())
.merge(registry::npm_routes())
.merge(registry::cargo_routes())
.merge(registry::pypi_routes())
.merge(registry::raw_routes())
.merge(registry::go_routes());
let public_routes = Router::new().merge(crate::health::routes());
let app_routes = Router::new()
.merge(crate::auth::token_routes())
.merge(registry_routes);
// Layers run bottom-up on requests: auth first, then request-id, then body limit.
let app = Router::new()
.merge(public_routes)
.merge(app_routes)
.layer(DefaultBodyLimit::max(
state.config.server.body_limit_mb * 1024 * 1024,
))
.layer(middleware::from_fn(
crate::request_id::request_id_middleware,
))
.layer(middleware::from_fn_with_state(
state.clone(),
crate::auth::auth_middleware,
))
.with_state(state.clone());
TestContext {
state,
app,
_tempdir: tempdir,
}
}
// ---------------------------------------------------------------------------
// Convenience helpers
// ---------------------------------------------------------------------------
/// Drive a single request through a clone of the router and return the response.
pub async fn send(
    app: &Router,
    method: axum::http::Method,
    uri: &str,
    body: impl Into<Body>,
) -> axum::http::Response<Body> {
    use tower::ServiceExt;
    let builder = Request::builder().method(method).uri(uri);
    let request = builder.body(body.into()).unwrap();
    app.clone().oneshot(request).await.unwrap()
}
/// Like `send`, but attaches the given (name, value) headers to the request.
pub async fn send_with_headers(
    app: &Router,
    method: axum::http::Method,
    uri: &str,
    headers: Vec<(&str, &str)>,
    body: impl Into<Body>,
) -> axum::http::Response<Body> {
    use tower::ServiceExt;
    // Fold each header pair onto the request builder.
    let builder = headers
        .into_iter()
        .fold(Request::builder().method(method).uri(uri), |b, (name, value)| {
            b.header(name, value)
        });
    let request = builder.body(body.into()).unwrap();
    app.clone().oneshot(request).await.unwrap()
}
/// Collect the entire response body into one contiguous `Bytes` buffer.
pub async fn body_bytes(response: axum::http::Response<Body>) -> axum::body::Bytes {
    let collected = response.into_body().collect().await.expect("failed to read body");
    collected.to_bytes()
}

View File

@@ -393,6 +393,7 @@ pub enum TokenError {
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;
use tempfile::TempDir;
@@ -415,7 +416,7 @@ mod tests {
let temp_dir = TempDir::new().unwrap();
let store = TokenStore::new(temp_dir.path());
let token = store
let _token = store
.create_token("testuser", 30, None, Role::Write)
.unwrap();

View File

@@ -504,3 +504,150 @@ mod tests {
assert!(validate_docker_reference("-dash").is_err());
}
}
#[cfg(test)]
mod proptests {
//! Property-based tests for the four validators: never-panic fuzzing over
//! arbitrary input, plus accept/reject invariants over structured generators.
use super::*;
use proptest::prelude::*;
/// Valid lowercase Docker name component
// NOTE(review): the filter conservatively bans all doubled separators; this is
// stricter than strictly necessary so generated names are never false failures.
fn docker_component() -> impl Strategy<Value = String> {
"[a-z0-9][a-z0-9._-]{0,30}".prop_filter("no consecutive separators", |s| {
!s.contains("..") && !s.contains("//") && !s.contains("--") && !s.contains("__")
})
}
/// Valid sha256 hex string
fn sha256_hex() -> impl Strategy<Value = String> {
"[0-9a-f]{64}"
}
/// Valid Docker tag (no `..` or `/` which trigger path traversal rejection)
fn docker_tag() -> impl Strategy<Value = String> {
"[a-zA-Z0-9][a-zA-Z0-9._-]{0,50}".prop_filter("no path traversal", |s| {
!s.contains("..") && !s.contains('/')
})
}
// === validate_storage_key ===
proptest! {
// `\PC` generates any non-control Unicode character, i.e. arbitrary printable input.
#[test]
fn storage_key_never_panics(s in "\\PC{0,2000}") {
let _ = validate_storage_key(&s);
}
// Any key embedding "/../" must be rejected, regardless of surrounding text.
#[test]
fn storage_key_rejects_path_traversal(
prefix in "[a-z]{0,10}",
suffix in "[a-z]{0,10}"
) {
let key = format!("{}/../{}", prefix, suffix);
prop_assert!(validate_storage_key(&key).is_err());
}
// Leading slash (absolute path) is always invalid.
#[test]
fn storage_key_rejects_absolute(path in "/[a-z/]{1,50}") {
prop_assert!(validate_storage_key(&path).is_err());
}
// 1-4 plain alphanumeric segments joined by '/' must be accepted.
#[test]
fn storage_key_accepts_valid(
segments in prop::collection::vec("[a-z0-9]{1,20}", 1..5)
) {
let key = segments.join("/");
prop_assert!(validate_storage_key(&key).is_ok());
}
}
// === validate_docker_name ===
proptest! {
#[test]
fn docker_name_never_panics(s in "\\PC{0,500}") {
let _ = validate_docker_name(&s);
}
#[test]
fn docker_name_accepts_valid_single(name in docker_component()) {
prop_assert!(validate_docker_name(&name).is_ok());
}
// Multi-component names (e.g. "library/nginx") must also validate.
#[test]
fn docker_name_accepts_valid_path(
components in prop::collection::vec(docker_component(), 1..4)
) {
let name = components.join("/");
prop_assert!(validate_docker_name(&name).is_ok());
}
// Docker names are lowercase-only; any uppercase suffix must fail.
#[test]
fn docker_name_rejects_uppercase(
lower in "[a-z]{1,10}",
upper in "[A-Z]{1,10}"
) {
let name = format!("{}{}", lower, upper);
prop_assert!(validate_docker_name(&name).is_err());
}
}
// === validate_digest ===
proptest! {
#[test]
fn digest_never_panics(s in "\\PC{0,200}") {
let _ = validate_digest(&s);
}
// Well-formed "sha256:<64 hex>" digests are always accepted.
#[test]
fn digest_sha256_roundtrip(hash in sha256_hex()) {
let digest = format!("sha256:{}", hash);
prop_assert!(validate_digest(&digest).is_ok());
}
// Well-formed "sha512:<128 hex>" digests are always accepted.
#[test]
fn digest_sha512_roundtrip(hash in "[0-9a-f]{128}") {
let digest = format!("sha512:{}", hash);
prop_assert!(validate_digest(&digest).is_ok());
}
// Any algorithm prefix other than sha256/sha512 must be rejected;
// prop_assume discards the (rare) generated cases that collide with them.
#[test]
fn digest_wrong_algo_rejected(
algo in "[a-z]{2,8}",
hash in "[0-9a-f]{64}"
) {
prop_assume!(algo != "sha256" && algo != "sha512");
let digest = format!("{}:{}", algo, hash);
prop_assert!(validate_digest(&digest).is_err());
}
}
// === validate_docker_reference ===
proptest! {
#[test]
fn reference_never_panics(s in "\\PC{0,200}") {
let _ = validate_docker_reference(&s);
}
#[test]
fn reference_accepts_valid_tag(tag in docker_tag()) {
prop_assert!(validate_docker_reference(&tag).is_ok());
}
// A reference may also be a digest instead of a tag.
#[test]
fn reference_accepts_valid_digest(hash in sha256_hex()) {
let reference = format!("sha256:{}", hash);
prop_assert!(validate_docker_reference(&reference).is_ok());
}
// Any reference containing "../" must be rejected (path traversal guard).
#[test]
fn reference_rejects_traversal(
prefix in "[a-z]{0,5}",
suffix in "[a-z]{0,5}"
) {
let reference = format!("{}../{}", prefix, suffix);
prop_assert!(validate_docker_reference(&reference).is_err());
}
}
}

8
tarpaulin.toml Normal file
View File

@@ -0,0 +1,8 @@
# cargo-tarpaulin configuration profile for the nora workspace.
[nora]
packages = ["nora-registry"]
engine = "llvm"
# Coverage gate per the stated quality policy: fail CI below 25%.
fail-under = 25
out = ["json", "html"]
output-dir = "coverage"
# Duration values in tarpaulin.toml are humantime strings and need a unit;
# a bare "300" would fail to parse.
timeout = "300s"
exclude-files = ["src/ui/*", "src/main.rs", "src/openapi.rs"]