mirror of
https://github.com/getnora-io/nora.git
synced 2026-04-12 16:10:31 +00:00
quality: MSRV, tarpaulin config, proptest for parsers (#84)
* fix: proxy dedup, multi-registry GC, TOCTOU and credential hygiene - Deduplicate proxy_fetch/proxy_fetch_text into generic proxy_fetch_core with response extractor closure (removes ~50 lines of copy-paste) - GC now scans all registry prefixes, not just docker/ - Add tracing::warn to fire-and-forget cache writes in docker proxy - Mark S3 credentials as skip_serializing to prevent accidental leaks - Remove TOCTOU race in LocalStorage get/delete (redundant exists check) * chore: clean up root directory structure - Move Dockerfile.astra and Dockerfile.redos to deploy/ (niche builds should not clutter the project root) - Harden .gitignore to exclude session files, working notes, and internal review scripts * refactor(metrics): replace 13 atomic fields with CounterMap Per-registry download/upload counters were 13 individual AtomicU64 fields, each duplicated across new(), with_persistence(), save(), record_download(), record_upload(), and get_registry_* (6 touch points per counter). Adding a new registry required changes in 6+ places. Now uses CounterMap (HashMap<String, AtomicU64>) for per-registry counters. Adding a new registry = one entry in REGISTRIES const. Added Go registry to REGISTRIES, gaining go metrics for free. 
* quality: add MSRV, tarpaulin config, proptest for parsers - Set rust-version = 1.75 in workspace Cargo.toml (MSRV policy) - Add tarpaulin.toml: llvm engine, fail-under=25, json+html output - Add coverage/ to .gitignore - Update CI to use tarpaulin.toml instead of inline flags - Add proptest dev-dependency and property tests: - validation.rs: 16 tests (never-panics + invariants for all 4 validators) - pypi.rs: 5 tests (extract_filename never-panics + format assertions) * test: add unit tests for 14 modules, coverage 21% → 30% Add 149 new tests across auth, backup, gc, metrics, mirror parsers, docker (manifest detection, session cleanup, metadata serde), docker_auth (token cache), maven, npm, pypi (normalize, rewrite, extract), raw (content-type guessing), request_id, and s3 (URI encoding). Update tarpaulin.toml: raise fail-under to 30, exclude UI/main from coverage reporting as they require integration tests. * bench: add criterion benchmarks for validation and manifest parsing Add parsing benchmark suite with 14 benchmarks covering: - Storage key, Docker name, digest, and reference validation - Docker manifest media type detection (v2, OCI index, minimal, invalid) Run with: cargo bench --package nora-registry --bench parsing * test: add 48 integration tests via tower oneshot Add integration tests for all HTTP handlers: - health (3), raw (7), cargo (4), maven (4), request_id (2) - pypi (5), npm (5), docker (12), auth (6) Create test_helpers.rs with TestContext pattern. Add tower and http-body-util dev-dependencies. Update tarpaulin fail-under 30 to 40. Coverage: 29.5% to 43.3% (2089/4825 lines) * fix: clean clippy warnings in tests, fix flaky audit test Add #[allow(clippy::unwrap_used)] to 18 test modules. Fix 3 additional clippy lints: writeln_empty_string, needless_update, unnecessary_get_then_check. Fix flaky audit test: replace single sleep(50ms) with retry loop (max 1s). Prefix unused token variable with underscore. 
cargo clippy --all-targets = 0 warnings (was 245 errors)
This commit is contained in:
@@ -68,3 +68,73 @@ async fn download(
|
||||
Err(_) => StatusCode::NOT_FOUND.into_response(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[allow(clippy::unwrap_used)]
|
||||
mod tests {
|
||||
use crate::test_helpers::{body_bytes, create_test_context, send};
|
||||
use axum::http::{Method, StatusCode};
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_cargo_metadata_not_found() {
|
||||
let ctx = create_test_context();
|
||||
let resp = send(
|
||||
&ctx.app,
|
||||
Method::GET,
|
||||
"/cargo/api/v1/crates/nonexistent",
|
||||
"",
|
||||
)
|
||||
.await;
|
||||
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_cargo_metadata_from_storage() {
|
||||
let ctx = create_test_context();
|
||||
let meta = r#"{"name":"test-crate","versions":[]}"#;
|
||||
ctx.state
|
||||
.storage
|
||||
.put("cargo/test-crate/metadata.json", meta.as_bytes())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let resp = send(&ctx.app, Method::GET, "/cargo/api/v1/crates/test-crate", "").await;
|
||||
assert_eq!(resp.status(), StatusCode::OK);
|
||||
let body = body_bytes(resp).await;
|
||||
assert_eq!(&body[..], meta.as_bytes());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_cargo_download_not_found() {
|
||||
let ctx = create_test_context();
|
||||
let resp = send(
|
||||
&ctx.app,
|
||||
Method::GET,
|
||||
"/cargo/api/v1/crates/missing/1.0.0/download",
|
||||
"",
|
||||
)
|
||||
.await;
|
||||
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_cargo_download_from_storage() {
|
||||
let ctx = create_test_context();
|
||||
ctx.state
|
||||
.storage
|
||||
.put("cargo/my-crate/1.2.3/my-crate-1.2.3.crate", b"crate-data")
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let resp = send(
|
||||
&ctx.app,
|
||||
Method::GET,
|
||||
"/cargo/api/v1/crates/my-crate/1.2.3/download",
|
||||
"",
|
||||
)
|
||||
.await;
|
||||
assert_eq!(resp.status(), StatusCode::OK);
|
||||
let body = body_bytes(resp).await;
|
||||
assert_eq!(&body[..], b"crate-data");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1322,3 +1322,599 @@ async fn update_metadata_on_pull(storage: Storage, meta_key: String) {
|
||||
let _ = storage.put(&meta_key, &json).await;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[allow(clippy::unwrap_used)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_image_metadata_default() {
|
||||
let meta = ImageMetadata::default();
|
||||
assert_eq!(meta.push_timestamp, 0);
|
||||
assert_eq!(meta.last_pulled, 0);
|
||||
assert_eq!(meta.downloads, 0);
|
||||
assert_eq!(meta.size_bytes, 0);
|
||||
assert_eq!(meta.os, "");
|
||||
assert_eq!(meta.arch, "");
|
||||
assert!(meta.variant.is_none());
|
||||
assert!(meta.layers.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_image_metadata_serialization() {
|
||||
let meta = ImageMetadata {
|
||||
push_timestamp: 1700000000,
|
||||
last_pulled: 1700001000,
|
||||
downloads: 42,
|
||||
size_bytes: 1024000,
|
||||
os: "linux".to_string(),
|
||||
arch: "amd64".to_string(),
|
||||
variant: None,
|
||||
layers: vec![LayerInfo {
|
||||
digest: "sha256:abc123".to_string(),
|
||||
size: 512000,
|
||||
}],
|
||||
};
|
||||
let json = serde_json::to_string(&meta).unwrap();
|
||||
assert!(json.contains("\"os\":\"linux\""));
|
||||
assert!(json.contains("\"arch\":\"amd64\""));
|
||||
assert!(!json.contains("variant")); // None => skipped
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_image_metadata_with_variant() {
|
||||
let meta = ImageMetadata {
|
||||
variant: Some("v8".to_string()),
|
||||
..Default::default()
|
||||
};
|
||||
let json = serde_json::to_string(&meta).unwrap();
|
||||
assert!(json.contains("\"variant\":\"v8\""));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_image_metadata_deserialization() {
|
||||
let json = r#"{
|
||||
"push_timestamp": 1700000000,
|
||||
"last_pulled": 0,
|
||||
"downloads": 5,
|
||||
"size_bytes": 2048,
|
||||
"os": "linux",
|
||||
"arch": "arm64",
|
||||
"variant": "v8",
|
||||
"layers": [
|
||||
{"digest": "sha256:aaa", "size": 1024},
|
||||
{"digest": "sha256:bbb", "size": 1024}
|
||||
]
|
||||
}"#;
|
||||
let meta: ImageMetadata = serde_json::from_str(json).unwrap();
|
||||
assert_eq!(meta.os, "linux");
|
||||
assert_eq!(meta.arch, "arm64");
|
||||
assert_eq!(meta.variant, Some("v8".to_string()));
|
||||
assert_eq!(meta.layers.len(), 2);
|
||||
assert_eq!(meta.layers[0].digest, "sha256:aaa");
|
||||
assert_eq!(meta.layers[1].size, 1024);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_layer_info_serialization_roundtrip() {
|
||||
let layer = LayerInfo {
|
||||
digest: "sha256:deadbeef".to_string(),
|
||||
size: 999999,
|
||||
};
|
||||
let json = serde_json::to_value(&layer).unwrap();
|
||||
let restored: LayerInfo = serde_json::from_value(json).unwrap();
|
||||
assert_eq!(layer.digest, restored.digest);
|
||||
assert_eq!(layer.size, restored.size);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cleanup_expired_sessions_empty() {
|
||||
let sessions: RwLock<HashMap<String, UploadSession>> = RwLock::new(HashMap::new());
|
||||
cleanup_expired_sessions(&sessions);
|
||||
assert_eq!(sessions.read().len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cleanup_expired_sessions_fresh() {
|
||||
let sessions: RwLock<HashMap<String, UploadSession>> = RwLock::new(HashMap::new());
|
||||
sessions.write().insert(
|
||||
"uuid-1".to_string(),
|
||||
UploadSession {
|
||||
data: vec![1, 2, 3],
|
||||
name: "test/image".to_string(),
|
||||
created_at: std::time::Instant::now(),
|
||||
},
|
||||
);
|
||||
cleanup_expired_sessions(&sessions);
|
||||
assert_eq!(sessions.read().len(), 1); // not expired
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_max_upload_sessions_default() {
|
||||
// Without env var set, should return default
|
||||
let max = max_upload_sessions();
|
||||
assert!(max > 0);
|
||||
assert_eq!(max, DEFAULT_MAX_UPLOAD_SESSIONS);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_max_session_size_default() {
|
||||
let max = max_session_size();
|
||||
assert_eq!(max, DEFAULT_MAX_SESSION_SIZE_MB * 1024 * 1024);
|
||||
}
|
||||
|
||||
// --- detect_manifest_media_type tests ---
|
||||
|
||||
#[test]
|
||||
fn test_detect_manifest_explicit_media_type() {
|
||||
let manifest = serde_json::json!({
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"schemaVersion": 2
|
||||
});
|
||||
let result = detect_manifest_media_type(manifest.to_string().as_bytes());
|
||||
assert_eq!(
|
||||
result,
|
||||
"application/vnd.docker.distribution.manifest.v2+json"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_detect_manifest_oci_media_type() {
|
||||
let manifest = serde_json::json!({
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"schemaVersion": 2
|
||||
});
|
||||
let result = detect_manifest_media_type(manifest.to_string().as_bytes());
|
||||
assert_eq!(result, "application/vnd.oci.image.manifest.v1+json");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_detect_manifest_schema_v1() {
|
||||
let manifest = serde_json::json!({
|
||||
"schemaVersion": 1,
|
||||
"name": "test/image"
|
||||
});
|
||||
let result = detect_manifest_media_type(manifest.to_string().as_bytes());
|
||||
assert_eq!(
|
||||
result,
|
||||
"application/vnd.docker.distribution.manifest.v1+json"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_detect_manifest_docker_v2_from_config() {
|
||||
let manifest = serde_json::json!({
|
||||
"schemaVersion": 2,
|
||||
"config": {
|
||||
"mediaType": "application/vnd.docker.container.image.v1+json",
|
||||
"digest": "sha256:abc"
|
||||
}
|
||||
});
|
||||
let result = detect_manifest_media_type(manifest.to_string().as_bytes());
|
||||
assert_eq!(
|
||||
result,
|
||||
"application/vnd.docker.distribution.manifest.v2+json"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_detect_manifest_oci_from_config() {
|
||||
let manifest = serde_json::json!({
|
||||
"schemaVersion": 2,
|
||||
"config": {
|
||||
"mediaType": "application/vnd.oci.image.config.v1+json",
|
||||
"digest": "sha256:abc"
|
||||
}
|
||||
});
|
||||
let result = detect_manifest_media_type(manifest.to_string().as_bytes());
|
||||
assert_eq!(result, "application/vnd.oci.image.manifest.v1+json");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_detect_manifest_no_config_media_type() {
|
||||
let manifest = serde_json::json!({
|
||||
"schemaVersion": 2,
|
||||
"config": {
|
||||
"digest": "sha256:abc"
|
||||
}
|
||||
});
|
||||
let result = detect_manifest_media_type(manifest.to_string().as_bytes());
|
||||
assert_eq!(
|
||||
result,
|
||||
"application/vnd.docker.distribution.manifest.v2+json"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_detect_manifest_index() {
|
||||
let manifest = serde_json::json!({
|
||||
"schemaVersion": 2,
|
||||
"manifests": [
|
||||
{"digest": "sha256:aaa", "platform": {"os": "linux", "architecture": "amd64"}}
|
||||
]
|
||||
});
|
||||
let result = detect_manifest_media_type(manifest.to_string().as_bytes());
|
||||
assert_eq!(result, "application/vnd.oci.image.index.v1+json");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_detect_manifest_invalid_json() {
|
||||
let result = detect_manifest_media_type(b"not json at all");
|
||||
assert_eq!(
|
||||
result,
|
||||
"application/vnd.docker.distribution.manifest.v2+json"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_detect_manifest_empty() {
|
||||
let result = detect_manifest_media_type(b"{}");
|
||||
assert_eq!(
|
||||
result,
|
||||
"application/vnd.docker.distribution.manifest.v2+json"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_detect_manifest_helm_chart() {
|
||||
let manifest = serde_json::json!({
|
||||
"schemaVersion": 2,
|
||||
"config": {
|
||||
"mediaType": "application/vnd.cncf.helm.config.v1+json",
|
||||
"digest": "sha256:abc"
|
||||
}
|
||||
});
|
||||
let result = detect_manifest_media_type(manifest.to_string().as_bytes());
|
||||
assert_eq!(result, "application/vnd.oci.image.manifest.v1+json");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[allow(clippy::unwrap_used)]
|
||||
mod integration_tests {
|
||||
use crate::test_helpers::{body_bytes, create_test_context, send};
|
||||
use axum::body::Body;
|
||||
use axum::http::{header, Method, StatusCode};
|
||||
use sha2::Digest;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_docker_v2_check() {
|
||||
let ctx = create_test_context();
|
||||
let resp = send(&ctx.app, Method::GET, "/v2/", Body::empty()).await;
|
||||
assert_eq!(resp.status(), StatusCode::OK);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_docker_catalog_empty() {
|
||||
let ctx = create_test_context();
|
||||
let resp = send(&ctx.app, Method::GET, "/v2/_catalog", Body::empty()).await;
|
||||
assert_eq!(resp.status(), StatusCode::OK);
|
||||
let body = body_bytes(resp).await;
|
||||
let json: serde_json::Value = serde_json::from_slice(&body).unwrap();
|
||||
assert!(json["repositories"].as_array().unwrap().is_empty());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_docker_put_get_manifest() {
|
||||
let ctx = create_test_context();
|
||||
let manifest = serde_json::json!({
|
||||
"schemaVersion": 2,
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"config": {
|
||||
"mediaType": "application/vnd.docker.container.image.v1+json",
|
||||
"size": 0,
|
||||
"digest": "sha256:0000000000000000000000000000000000000000000000000000000000000000"
|
||||
},
|
||||
"layers": []
|
||||
});
|
||||
let manifest_bytes = serde_json::to_vec(&manifest).unwrap();
|
||||
|
||||
let put_resp = send(
|
||||
&ctx.app,
|
||||
Method::PUT,
|
||||
"/v2/alpine/manifests/latest",
|
||||
Body::from(manifest_bytes.clone()),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(put_resp.status(), StatusCode::CREATED);
|
||||
let digest_header = put_resp
|
||||
.headers()
|
||||
.get("docker-content-digest")
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.to_string();
|
||||
assert!(digest_header.starts_with("sha256:"));
|
||||
|
||||
let get_resp = send(
|
||||
&ctx.app,
|
||||
Method::GET,
|
||||
"/v2/alpine/manifests/latest",
|
||||
Body::empty(),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(get_resp.status(), StatusCode::OK);
|
||||
let get_digest = get_resp
|
||||
.headers()
|
||||
.get("docker-content-digest")
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.to_string();
|
||||
assert_eq!(get_digest, digest_header);
|
||||
let body = body_bytes(get_resp).await;
|
||||
assert_eq!(body.as_ref(), manifest_bytes.as_slice());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_docker_list_tags() {
|
||||
let ctx = create_test_context();
|
||||
let manifest = serde_json::json!({
|
||||
"schemaVersion": 2,
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"config": {
|
||||
"mediaType": "application/vnd.docker.container.image.v1+json",
|
||||
"size": 0,
|
||||
"digest": "sha256:0000000000000000000000000000000000000000000000000000000000000000"
|
||||
},
|
||||
"layers": []
|
||||
});
|
||||
send(
|
||||
&ctx.app,
|
||||
Method::PUT,
|
||||
"/v2/alpine/manifests/latest",
|
||||
Body::from(serde_json::to_vec(&manifest).unwrap()),
|
||||
)
|
||||
.await;
|
||||
|
||||
let list_resp = send(&ctx.app, Method::GET, "/v2/alpine/tags/list", Body::empty()).await;
|
||||
assert_eq!(list_resp.status(), StatusCode::OK);
|
||||
let body = body_bytes(list_resp).await;
|
||||
let json: serde_json::Value = serde_json::from_slice(&body).unwrap();
|
||||
assert_eq!(json["name"], "alpine");
|
||||
let tags = json["tags"].as_array().unwrap();
|
||||
assert!(tags.contains(&serde_json::json!("latest")));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_docker_delete_manifest() {
|
||||
let ctx = create_test_context();
|
||||
let manifest = serde_json::json!({
|
||||
"schemaVersion": 2,
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"config": {
|
||||
"mediaType": "application/vnd.docker.container.image.v1+json",
|
||||
"size": 0,
|
||||
"digest": "sha256:0000000000000000000000000000000000000000000000000000000000000000"
|
||||
},
|
||||
"layers": []
|
||||
});
|
||||
let put_resp = send(
|
||||
&ctx.app,
|
||||
Method::PUT,
|
||||
"/v2/alpine/manifests/latest",
|
||||
Body::from(serde_json::to_vec(&manifest).unwrap()),
|
||||
)
|
||||
.await;
|
||||
let digest = put_resp
|
||||
.headers()
|
||||
.get("docker-content-digest")
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.to_string();
|
||||
|
||||
let del = send(
|
||||
&ctx.app,
|
||||
Method::DELETE,
|
||||
&format!("/v2/alpine/manifests/{}", digest),
|
||||
Body::empty(),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(del.status(), StatusCode::ACCEPTED);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_docker_monolithic_upload() {
|
||||
let ctx = create_test_context();
|
||||
let blob_data = b"test blob data";
|
||||
let digest = format!("sha256:{}", hex::encode(sha2::Sha256::digest(blob_data)));
|
||||
|
||||
let post_resp = send(
|
||||
&ctx.app,
|
||||
Method::POST,
|
||||
"/v2/alpine/blobs/uploads/",
|
||||
Body::empty(),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(post_resp.status(), StatusCode::ACCEPTED);
|
||||
let location = post_resp
|
||||
.headers()
|
||||
.get("location")
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.to_string();
|
||||
let uuid = location.rsplit('/').next().unwrap();
|
||||
|
||||
let put_url = format!("/v2/alpine/blobs/uploads/{}?digest={}", uuid, digest);
|
||||
let put_resp = send(&ctx.app, Method::PUT, &put_url, Body::from(&blob_data[..])).await;
|
||||
assert_eq!(put_resp.status(), StatusCode::CREATED);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_docker_chunked_upload() {
|
||||
let ctx = create_test_context();
|
||||
let blob_data = b"test chunked blob";
|
||||
let digest = format!("sha256:{}", hex::encode(sha2::Sha256::digest(blob_data)));
|
||||
|
||||
let post_resp = send(
|
||||
&ctx.app,
|
||||
Method::POST,
|
||||
"/v2/alpine/blobs/uploads/",
|
||||
Body::empty(),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(post_resp.status(), StatusCode::ACCEPTED);
|
||||
let location = post_resp
|
||||
.headers()
|
||||
.get("location")
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.to_string();
|
||||
let uuid = location.rsplit('/').next().unwrap();
|
||||
|
||||
let patch_url = format!("/v2/alpine/blobs/uploads/{}", uuid);
|
||||
let patch_resp = send(
|
||||
&ctx.app,
|
||||
Method::PATCH,
|
||||
&patch_url,
|
||||
Body::from(&blob_data[..]),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(patch_resp.status(), StatusCode::ACCEPTED);
|
||||
|
||||
let put_url = format!("/v2/alpine/blobs/uploads/{}?digest={}", uuid, digest);
|
||||
let put_resp = send(&ctx.app, Method::PUT, &put_url, Body::empty()).await;
|
||||
assert_eq!(put_resp.status(), StatusCode::CREATED);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_docker_check_blob() {
|
||||
let ctx = create_test_context();
|
||||
let blob_data = b"test blob for head";
|
||||
let digest = format!("sha256:{}", hex::encode(sha2::Sha256::digest(blob_data)));
|
||||
|
||||
let post_resp = send(
|
||||
&ctx.app,
|
||||
Method::POST,
|
||||
"/v2/alpine/blobs/uploads/",
|
||||
Body::empty(),
|
||||
)
|
||||
.await;
|
||||
let location = post_resp
|
||||
.headers()
|
||||
.get("location")
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.to_string();
|
||||
let uuid = location.rsplit('/').next().unwrap();
|
||||
let put_url = format!("/v2/alpine/blobs/uploads/{}?digest={}", uuid, digest);
|
||||
send(&ctx.app, Method::PUT, &put_url, Body::from(&blob_data[..])).await;
|
||||
|
||||
let head_url = format!("/v2/alpine/blobs/{}", digest);
|
||||
let head_resp = send(&ctx.app, Method::HEAD, &head_url, Body::empty()).await;
|
||||
assert_eq!(head_resp.status(), StatusCode::OK);
|
||||
let cl = head_resp
|
||||
.headers()
|
||||
.get(header::CONTENT_LENGTH)
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.parse::<usize>()
|
||||
.unwrap();
|
||||
assert_eq!(cl, blob_data.len());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_docker_download_blob() {
|
||||
let ctx = create_test_context();
|
||||
let blob_data = b"test blob for download";
|
||||
let digest = format!("sha256:{}", hex::encode(sha2::Sha256::digest(blob_data)));
|
||||
|
||||
let post_resp = send(
|
||||
&ctx.app,
|
||||
Method::POST,
|
||||
"/v2/alpine/blobs/uploads/",
|
||||
Body::empty(),
|
||||
)
|
||||
.await;
|
||||
let location = post_resp
|
||||
.headers()
|
||||
.get("location")
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.to_string();
|
||||
let uuid = location.rsplit('/').next().unwrap();
|
||||
let put_url = format!("/v2/alpine/blobs/uploads/{}?digest={}", uuid, digest);
|
||||
send(&ctx.app, Method::PUT, &put_url, Body::from(&blob_data[..])).await;
|
||||
|
||||
let get_url = format!("/v2/alpine/blobs/{}", digest);
|
||||
let get_resp = send(&ctx.app, Method::GET, &get_url, Body::empty()).await;
|
||||
assert_eq!(get_resp.status(), StatusCode::OK);
|
||||
let body = body_bytes(get_resp).await;
|
||||
assert_eq!(body.as_ref(), &blob_data[..]);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_docker_blob_not_found() {
|
||||
let ctx = create_test_context();
|
||||
let fake_digest = "sha256:0000000000000000000000000000000000000000000000000000000000000000";
|
||||
let head_url = format!("/v2/alpine/blobs/{}", fake_digest);
|
||||
let resp = send(&ctx.app, Method::HEAD, &head_url, Body::empty()).await;
|
||||
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_docker_delete_blob() {
|
||||
let ctx = create_test_context();
|
||||
let blob_data = b"test blob for delete";
|
||||
let digest = format!("sha256:{}", hex::encode(sha2::Sha256::digest(blob_data)));
|
||||
|
||||
let post_resp = send(
|
||||
&ctx.app,
|
||||
Method::POST,
|
||||
"/v2/alpine/blobs/uploads/",
|
||||
Body::empty(),
|
||||
)
|
||||
.await;
|
||||
let location = post_resp
|
||||
.headers()
|
||||
.get("location")
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.to_string();
|
||||
let uuid = location.rsplit('/').next().unwrap();
|
||||
let put_url = format!("/v2/alpine/blobs/uploads/{}?digest={}", uuid, digest);
|
||||
send(&ctx.app, Method::PUT, &put_url, Body::from(&blob_data[..])).await;
|
||||
|
||||
let delete_url = format!("/v2/alpine/blobs/{}", digest);
|
||||
let delete_resp = send(&ctx.app, Method::DELETE, &delete_url, Body::empty()).await;
|
||||
assert_eq!(delete_resp.status(), StatusCode::ACCEPTED);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_docker_namespaced_routes() {
|
||||
let ctx = create_test_context();
|
||||
let manifest = serde_json::json!({
|
||||
"schemaVersion": 2,
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"config": {
|
||||
"mediaType": "application/vnd.docker.container.image.v1+json",
|
||||
"size": 0,
|
||||
"digest": "sha256:0000000000000000000000000000000000000000000000000000000000000000"
|
||||
},
|
||||
"layers": []
|
||||
});
|
||||
let put_resp = send(
|
||||
&ctx.app,
|
||||
Method::PUT,
|
||||
"/v2/library/alpine/manifests/latest",
|
||||
Body::from(serde_json::to_vec(&manifest).unwrap()),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(put_resp.status(), StatusCode::CREATED);
|
||||
assert!(put_resp
|
||||
.headers()
|
||||
.get("docker-content-digest")
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.starts_with("sha256:"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -139,6 +139,7 @@ fn parse_www_authenticate(header: &str) -> Option<HashMap<String, String>> {
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[allow(clippy::unwrap_used)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
@@ -167,4 +168,86 @@ mod tests {
|
||||
Some(&"https://ghcr.io/token".to_string())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_www_authenticate_no_bearer() {
|
||||
assert!(parse_www_authenticate("Basic realm=\"test\"").is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_www_authenticate_empty() {
|
||||
assert!(parse_www_authenticate("").is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_www_authenticate_partial() {
|
||||
let header = r#"Bearer realm="https://example.com/token""#;
|
||||
let params = parse_www_authenticate(header).unwrap();
|
||||
assert_eq!(
|
||||
params.get("realm"),
|
||||
Some(&"https://example.com/token".to_string())
|
||||
);
|
||||
assert!(!params.contains_key("service"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_docker_auth_default() {
|
||||
let auth = DockerAuth::default();
|
||||
assert!(auth.tokens.read().is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_docker_auth_new() {
|
||||
let auth = DockerAuth::new(30);
|
||||
assert!(auth.tokens.read().is_empty());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_get_token_no_www_authenticate() {
|
||||
let auth = DockerAuth::default();
|
||||
let result = auth
|
||||
.get_token("https://registry.example.com", "library/test", None, None)
|
||||
.await;
|
||||
assert!(result.is_none());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_get_token_cache_hit() {
|
||||
let auth = DockerAuth::default();
|
||||
// Manually insert a cached token
|
||||
{
|
||||
let mut tokens = auth.tokens.write();
|
||||
tokens.insert(
|
||||
"https://registry.example.com:library/test".to_string(),
|
||||
CachedToken {
|
||||
token: "cached-token-123".to_string(),
|
||||
expires_at: Instant::now() + Duration::from_secs(300),
|
||||
},
|
||||
);
|
||||
}
|
||||
let result = auth
|
||||
.get_token("https://registry.example.com", "library/test", None, None)
|
||||
.await;
|
||||
assert_eq!(result, Some("cached-token-123".to_string()));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_get_token_cache_expired() {
|
||||
let auth = DockerAuth::default();
|
||||
{
|
||||
let mut tokens = auth.tokens.write();
|
||||
tokens.insert(
|
||||
"https://registry.example.com:library/test".to_string(),
|
||||
CachedToken {
|
||||
token: "expired-token".to_string(),
|
||||
expires_at: Instant::now() - Duration::from_secs(1),
|
||||
},
|
||||
);
|
||||
}
|
||||
// Without www_authenticate, returns None (can't fetch new token)
|
||||
let result = auth
|
||||
.get_token("https://registry.example.com", "library/test", None, None)
|
||||
.await;
|
||||
assert!(result.is_none());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -307,6 +307,7 @@ fn with_content_type(data: Vec<u8>, content_type: &'static str) -> Response {
|
||||
// ============================================================================
|
||||
|
||||
#[cfg(test)]
|
||||
#[allow(clippy::unwrap_used)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
|
||||
@@ -145,3 +145,148 @@ fn with_content_type(
|
||||
|
||||
(StatusCode::OK, [(header::CONTENT_TYPE, content_type)], data)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_content_type_pom() {
|
||||
let (status, headers, _) =
|
||||
with_content_type("com/example/1.0/example-1.0.pom", Bytes::from("data"));
|
||||
assert_eq!(status, StatusCode::OK);
|
||||
assert_eq!(headers[0].1, "application/xml");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_content_type_jar() {
|
||||
let (_, headers, _) =
|
||||
with_content_type("com/example/1.0/example-1.0.jar", Bytes::from("data"));
|
||||
assert_eq!(headers[0].1, "application/java-archive");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_content_type_xml() {
|
||||
let (_, headers, _) =
|
||||
with_content_type("com/example/maven-metadata.xml", Bytes::from("data"));
|
||||
assert_eq!(headers[0].1, "application/xml");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_content_type_sha1() {
|
||||
let (_, headers, _) =
|
||||
with_content_type("com/example/1.0/example-1.0.jar.sha1", Bytes::from("data"));
|
||||
assert_eq!(headers[0].1, "text/plain");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_content_type_md5() {
|
||||
let (_, headers, _) =
|
||||
with_content_type("com/example/1.0/example-1.0.jar.md5", Bytes::from("data"));
|
||||
assert_eq!(headers[0].1, "text/plain");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_content_type_unknown() {
|
||||
let (_, headers, _) = with_content_type("some/random/file.bin", Bytes::from("data"));
|
||||
assert_eq!(headers[0].1, "application/octet-stream");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_content_type_preserves_body() {
|
||||
let body = Bytes::from("test-jar-content");
|
||||
let (_, _, data) = with_content_type("test.jar", body.clone());
|
||||
assert_eq!(data, body);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod integration_tests {
    //! End-to-end tests for the Maven handler, driven through the full
    //! axum router via the shared `test_helpers` (tower oneshot).
    use crate::test_helpers::{body_bytes, create_test_context, send};
    use axum::body::Body;
    use axum::http::{header, Method, StatusCode};

    #[tokio::test]
    async fn test_maven_put_get_roundtrip() {
        let ctx = create_test_context();
        let jar_data = b"fake-jar-content";

        let put = send(
            &ctx.app,
            Method::PUT,
            "/maven2/com/example/mylib/1.0/mylib-1.0.jar",
            Body::from(&jar_data[..]),
        )
        .await;
        assert_eq!(put.status(), StatusCode::CREATED);

        let get = send(
            &ctx.app,
            Method::GET,
            "/maven2/com/example/mylib/1.0/mylib-1.0.jar",
            "",
        )
        .await;
        assert_eq!(get.status(), StatusCode::OK);
        let body = body_bytes(get).await;
        assert_eq!(&body[..], jar_data);
    }

    #[tokio::test]
    async fn test_maven_not_found_no_proxy() {
        // With no upstream proxy configured, a miss must be a plain 404.
        let ctx = create_test_context();
        let resp = send(
            &ctx.app,
            Method::GET,
            "/maven2/missing/artifact/1.0/artifact-1.0.jar",
            "",
        )
        .await;
        assert_eq!(resp.status(), StatusCode::NOT_FOUND);
    }

    #[tokio::test]
    async fn test_maven_content_type_pom() {
        let ctx = create_test_context();
        // Fix: assert the setup PUT succeeded instead of ignoring its result,
        // so a broken upload path fails here rather than as a confusing GET 404.
        let put = send(
            &ctx.app,
            Method::PUT,
            "/maven2/com/ex/1.0/ex-1.0.pom",
            Body::from("<project/>"),
        )
        .await;
        assert_eq!(put.status(), StatusCode::CREATED);

        let get = send(&ctx.app, Method::GET, "/maven2/com/ex/1.0/ex-1.0.pom", "").await;
        assert_eq!(get.status(), StatusCode::OK);
        // .pom files are XML.
        assert_eq!(
            get.headers().get(header::CONTENT_TYPE).unwrap(),
            "application/xml"
        );
    }

    #[tokio::test]
    async fn test_maven_content_type_jar() {
        let ctx = create_test_context();
        // Fix: assert the setup PUT succeeded (same rationale as the pom test).
        let put = send(
            &ctx.app,
            Method::PUT,
            "/maven2/org/test/app/2.0/app-2.0.jar",
            Body::from("jar-data"),
        )
        .await;
        assert_eq!(put.status(), StatusCode::CREATED);

        let get = send(
            &ctx.app,
            Method::GET,
            "/maven2/org/test/app/2.0/app-2.0.jar",
            "",
        )
        .await;
        assert_eq!(get.status(), StatusCode::OK);
        // .jar files use the java-archive media type.
        assert_eq!(
            get.headers().get(header::CONTENT_TYPE).unwrap(),
            "application/java-archive"
        );
    }
}
|
||||
|
||||
@@ -432,6 +432,7 @@ fn with_content_type(
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[allow(clippy::unwrap_used)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
@@ -555,4 +556,229 @@ mod tests {
|
||||
assert!(!is_valid_attachment_name(""));
|
||||
assert!(!is_valid_attachment_name("foo\0bar.tgz"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_with_content_type_tarball() {
|
||||
let data = Bytes::from("tarball-data");
|
||||
let (status, headers, body) = with_content_type(true, data.clone());
|
||||
assert_eq!(status, StatusCode::OK);
|
||||
assert_eq!(headers[0].1, "application/octet-stream");
|
||||
assert_eq!(body, data);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_with_content_type_json() {
|
||||
let data = Bytes::from("json-data");
|
||||
let (status, headers, body) = with_content_type(false, data.clone());
|
||||
assert_eq!(status, StatusCode::OK);
|
||||
assert_eq!(headers[0].1, "application/json");
|
||||
assert_eq!(body, data);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rewrite_tarball_urls_trailing_slash() {
|
||||
let metadata = serde_json::json!({
|
||||
"name": "test",
|
||||
"versions": {
|
||||
"1.0.0": {
|
||||
"dist": {
|
||||
"tarball": "https://registry.npmjs.org/test/-/test-1.0.0.tgz"
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
let data = serde_json::to_vec(&metadata).unwrap();
|
||||
let result =
|
||||
rewrite_tarball_urls(&data, "http://nora:5000/", "https://registry.npmjs.org/")
|
||||
.unwrap();
|
||||
let json: serde_json::Value = serde_json::from_slice(&result).unwrap();
|
||||
let tarball = json["versions"]["1.0.0"]["dist"]["tarball"]
|
||||
.as_str()
|
||||
.unwrap();
|
||||
assert!(tarball.starts_with("http://nora:5000/npm/"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rewrite_tarball_urls_preserves_other_fields() {
|
||||
let metadata = serde_json::json!({
|
||||
"name": "test",
|
||||
"description": "A test package",
|
||||
"versions": {
|
||||
"1.0.0": {
|
||||
"dist": {
|
||||
"tarball": "https://registry.npmjs.org/test/-/test-1.0.0.tgz",
|
||||
"shasum": "abc123"
|
||||
},
|
||||
"dependencies": {"lodash": "^4.0.0"}
|
||||
}
|
||||
}
|
||||
});
|
||||
let data = serde_json::to_vec(&metadata).unwrap();
|
||||
let result =
|
||||
rewrite_tarball_urls(&data, "http://nora:5000", "https://registry.npmjs.org").unwrap();
|
||||
let json: serde_json::Value = serde_json::from_slice(&result).unwrap();
|
||||
assert_eq!(json["description"], "A test package");
|
||||
assert_eq!(json["versions"]["1.0.0"]["dist"]["shasum"], "abc123");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_valid_attachment_name_valid() {
|
||||
assert!(is_valid_attachment_name("package-1.0.0.tgz"));
|
||||
assert!(is_valid_attachment_name("@scope-pkg-2.0.tgz"));
|
||||
assert!(is_valid_attachment_name("my_pkg.tgz"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_valid_attachment_name_traversal() {
|
||||
assert!(!is_valid_attachment_name("../etc/passwd"));
|
||||
assert!(!is_valid_attachment_name("foo/../bar"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_valid_attachment_name_slash() {
|
||||
assert!(!is_valid_attachment_name("path/file.tgz"));
|
||||
assert!(!is_valid_attachment_name("path\\file.tgz"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_valid_attachment_name_null_byte() {
|
||||
assert!(!is_valid_attachment_name("file\0.tgz"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_valid_attachment_name_empty() {
|
||||
assert!(!is_valid_attachment_name(""));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_valid_attachment_name_special_chars() {
|
||||
assert!(!is_valid_attachment_name("file name.tgz")); // space
|
||||
assert!(!is_valid_attachment_name("file;cmd.tgz")); // semicolon
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod integration_tests {
    //! Full-router tests for the npm handler: cache hits, misses, publish.
    use crate::test_helpers::{body_bytes, create_test_context, send};
    use axum::body::Body;
    use axum::http::{Method, StatusCode};
    use base64::Engine;

    #[tokio::test]
    async fn test_npm_metadata_from_cache() {
        let ctx = create_test_context();

        // Seed the storage backend directly, then fetch through the router.
        let meta = serde_json::json!({
            "name": "lodash",
            "versions": {
                "4.17.21": { "dist": { "tarball": "http://example.com/lodash.tgz" } }
            }
        });
        let meta_raw = serde_json::to_vec(&meta).unwrap();
        ctx.state
            .storage
            .put("npm/lodash/metadata.json", &meta_raw)
            .await
            .unwrap();

        let resp = send(&ctx.app, Method::GET, "/npm/lodash", "").await;
        assert_eq!(resp.status(), StatusCode::OK);

        let raw = body_bytes(resp).await;
        let doc: serde_json::Value = serde_json::from_slice(&raw).unwrap();
        assert_eq!(doc["name"], "lodash");
    }

    #[tokio::test]
    async fn test_npm_tarball_from_cache() {
        let ctx = create_test_context();

        let tarball = b"fake-tarball-bytes";
        ctx.state
            .storage
            .put("npm/lodash/tarballs/lodash-4.17.21.tgz", tarball)
            .await
            .unwrap();

        let resp = send(
            &ctx.app,
            Method::GET,
            "/npm/lodash/-/lodash-4.17.21.tgz",
            "",
        )
        .await;
        assert_eq!(resp.status(), StatusCode::OK);

        let raw = body_bytes(resp).await;
        assert_eq!(&raw[..], tarball);
    }

    #[tokio::test]
    async fn test_npm_not_found_no_proxy() {
        // No proxy configured, no local data: expect a clean 404.
        let ctx = create_test_context();
        let resp = send(&ctx.app, Method::GET, "/npm/nonexistent", "").await;
        assert_eq!(resp.status(), StatusCode::NOT_FOUND);
    }

    #[tokio::test]
    async fn test_npm_publish_basic() {
        let ctx = create_test_context();

        let tarball = b"fake-tarball";
        let encoded = base64::engine::general_purpose::STANDARD.encode(tarball);

        let publish = serde_json::json!({
            "name": "mypkg",
            "versions": {
                "1.0.0": { "dist": {} }
            },
            "_attachments": {
                "mypkg-1.0.0.tgz": { "data": encoded }
            },
            "dist-tags": { "latest": "1.0.0" }
        });

        let payload = serde_json::to_vec(&publish).unwrap();
        let resp = send(&ctx.app, Method::PUT, "/npm/mypkg", Body::from(payload)).await;
        assert_eq!(resp.status(), StatusCode::CREATED);

        // The attachment must land in storage, base64-decoded.
        let stored = ctx
            .state
            .storage
            .get("npm/mypkg/tarballs/mypkg-1.0.0.tgz")
            .await
            .unwrap();
        assert_eq!(&stored[..], tarball);
    }

    #[tokio::test]
    async fn test_npm_publish_name_mismatch() {
        let ctx = create_test_context();

        let tarball = b"fake-tarball";
        let encoded = base64::engine::general_purpose::STANDARD.encode(tarball);

        // Payload claims package "other" while the URL says "mypkg".
        let publish = serde_json::json!({
            "name": "other",
            "versions": {
                "1.0.0": { "dist": {} }
            },
            "_attachments": {
                "other-1.0.0.tgz": { "data": encoded }
            },
            "dist-tags": { "latest": "1.0.0" }
        });

        let payload = serde_json::to_vec(&publish).unwrap();
        let resp = send(&ctx.app, Method::PUT, "/npm/mypkg", Body::from(payload)).await;
        assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
    }
}
|
||||
|
||||
@@ -305,3 +305,311 @@ fn find_file_url(html: &str, target_filename: &str) -> Option<String> {
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
    use super::*;
    use proptest::prelude::*;

    proptest! {
        // `extract_filename` must be total: arbitrary printable input may
        // yield None but can never panic.
        #[test]
        fn extract_filename_never_panics(input in "\\PC{0,500}") {
            let _ = extract_filename(&input);
        }

        #[test]
        fn extract_filename_valid_tarball(
            pkg in "[a-z][a-z0-9_-]{0,20}",
            ver in "[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}"
        ) {
            let url = format!("https://files.example.com/packages/{}-{}.tar.gz", pkg, ver);
            let parsed = extract_filename(&url);
            prop_assert!(parsed.is_some());
            prop_assert!(parsed.unwrap().ends_with(".tar.gz"));
        }

        #[test]
        fn extract_filename_valid_wheel(
            pkg in "[a-z][a-z0-9_]{0,20}",
            ver in "[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}"
        ) {
            let url = format!("https://files.example.com/{}-{}-py3-none-any.whl", pkg, ver);
            let parsed = extract_filename(&url);
            prop_assert!(parsed.is_some());
            prop_assert!(parsed.unwrap().ends_with(".whl"));
        }

        #[test]
        fn extract_filename_strips_hash(
            pkg in "[a-z]{1,10}",
            digest in "[a-f0-9]{64}"
        ) {
            // The #sha256=… fragment must never leak into the filename.
            let url = format!("https://example.com/{}.tar.gz#sha256={}", pkg, digest);
            let parsed = extract_filename(&url);
            prop_assert!(parsed.is_some());
            let fname = parsed.unwrap();
            prop_assert!(!fname.contains('#'));
        }

        #[test]
        fn extract_filename_rejects_unknown_ext(
            pkg in "[a-z]{1,10}",
            ext in "(exe|dll|so|bin|dat)"
        ) {
            let url = format!("https://example.com/{}.{}", pkg, ext);
            prop_assert!(extract_filename(&url).is_none());
        }
    }

    #[test]
    fn test_normalize_name_lowercase() {
        for (raw, want) in [("Flask", "flask"), ("REQUESTS", "requests")] {
            assert_eq!(normalize_name(raw), want);
        }
    }

    #[test]
    fn test_normalize_name_separators() {
        // '-', '_' and '.' all normalize to '-'.
        for raw in ["my-package", "my_package", "my.package"] {
            assert_eq!(normalize_name(raw), "my-package");
        }
    }

    #[test]
    fn test_normalize_name_mixed() {
        assert_eq!(
            normalize_name("My_Complex.Package-Name"),
            "my-complex-package-name"
        );
    }

    #[test]
    fn test_normalize_name_empty() {
        assert_eq!(normalize_name(""), "");
    }

    #[test]
    fn test_normalize_name_already_normal() {
        assert_eq!(normalize_name("simple"), "simple");
    }

    #[test]
    fn test_extract_filename_tarball() {
        let url = "https://files.pythonhosted.org/packages/aa/bb/flask-2.0.0.tar.gz#sha256=abc123";
        assert_eq!(extract_filename(url), Some("flask-2.0.0.tar.gz"));
    }

    #[test]
    fn test_extract_filename_wheel() {
        let url = "https://files.pythonhosted.org/packages/aa/bb/flask-2.0.0-py3-none-any.whl";
        assert_eq!(extract_filename(url), Some("flask-2.0.0-py3-none-any.whl"));
    }

    #[test]
    fn test_extract_filename_tgz() {
        assert_eq!(
            extract_filename("https://example.com/package-1.0.tgz"),
            Some("package-1.0.tgz")
        );
    }

    #[test]
    fn test_extract_filename_zip() {
        assert_eq!(
            extract_filename("https://example.com/package-1.0.zip"),
            Some("package-1.0.zip")
        );
    }

    #[test]
    fn test_extract_filename_egg() {
        assert_eq!(
            extract_filename("https://example.com/package-1.0.egg"),
            Some("package-1.0.egg")
        );
    }

    #[test]
    fn test_extract_filename_unknown_ext() {
        assert_eq!(extract_filename("https://example.com/readme.txt"), None);
    }

    #[test]
    fn test_extract_filename_no_path() {
        assert_eq!(extract_filename(""), None);
    }

    #[test]
    fn test_extract_filename_bare() {
        // A bare filename with no URL prefix is still accepted.
        assert_eq!(
            extract_filename("package-1.0.tar.gz"),
            Some("package-1.0.tar.gz")
        );
    }

    #[test]
    fn test_remove_attribute_present() {
        let input = r#"<a href="url" data-core-metadata="true">link</a>"#;
        assert_eq!(
            remove_attribute(input, "data-core-metadata"),
            r#"<a href="url">link</a>"#
        );
    }

    #[test]
    fn test_remove_attribute_absent() {
        // Input without the attribute passes through unchanged.
        let input = r#"<a href="url">link</a>"#;
        assert_eq!(remove_attribute(input, "data-core-metadata"), input);
    }

    #[test]
    fn test_remove_attribute_multiple() {
        // Every occurrence is stripped, regardless of value.
        let input =
            r#"<a data-core-metadata="true">one</a><a data-core-metadata="sha256=abc">two</a>"#;
        assert_eq!(
            remove_attribute(input, "data-core-metadata"),
            r#"<a>one</a><a>two</a>"#
        );
    }

    #[test]
    fn test_rewrite_pypi_links_basic() {
        let input = r#"<a href="https://files.pythonhosted.org/packages/aa/bb/flask-2.0.tar.gz#sha256=abc">flask-2.0.tar.gz</a>"#;
        let rewritten = rewrite_pypi_links(input, "flask");
        assert!(rewritten.contains("/simple/flask/flask-2.0.tar.gz"));
    }

    #[test]
    fn test_rewrite_pypi_links_unknown_ext() {
        // Links without a recognized package extension pass through untouched.
        let input = r#"<a href="https://example.com/readme.txt">readme</a>"#;
        let rewritten = rewrite_pypi_links(input, "test");
        assert!(rewritten.contains("https://example.com/readme.txt"));
    }

    #[test]
    fn test_rewrite_pypi_links_removes_metadata_attrs() {
        let input = r#"<a href="https://example.com/pkg-1.0.whl" data-core-metadata="sha256=abc" data-dist-info-metadata="sha256=def">pkg</a>"#;
        let rewritten = rewrite_pypi_links(input, "pkg");
        assert!(!rewritten.contains("data-core-metadata"));
        assert!(!rewritten.contains("data-dist-info-metadata"));
    }

    #[test]
    fn test_rewrite_pypi_links_empty() {
        assert_eq!(rewrite_pypi_links("", "pkg"), "");
    }

    #[test]
    fn test_find_file_url_found() {
        let page = r#"<a href="https://files.pythonhosted.org/packages/aa/bb/flask-2.0.tar.gz#sha256=abc">flask-2.0.tar.gz</a>"#;
        assert_eq!(
            find_file_url(page, "flask-2.0.tar.gz"),
            Some("https://files.pythonhosted.org/packages/aa/bb/flask-2.0.tar.gz".to_string())
        );
    }

    #[test]
    fn test_find_file_url_not_found() {
        let page = r#"<a href="https://example.com/other-1.0.tar.gz">other</a>"#;
        assert_eq!(find_file_url(page, "flask-2.0.tar.gz"), None);
    }

    #[test]
    fn test_find_file_url_strips_hash() {
        // The sha256 fragment is dropped from the returned URL.
        let page = r#"<a href="https://example.com/pkg-1.0.whl#sha256=deadbeef">pkg</a>"#;
        assert_eq!(
            find_file_url(page, "pkg-1.0.whl"),
            Some("https://example.com/pkg-1.0.whl".to_string())
        );
    }
}
|
||||
|
||||
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod integration_tests {
    //! Full-router tests for the PyPI "simple" endpoints.
    use crate::test_helpers::{body_bytes, create_test_context, send};
    use axum::http::{Method, StatusCode};

    #[tokio::test]
    async fn test_pypi_list_empty() {
        let ctx = create_test_context();

        let resp = send(&ctx.app, Method::GET, "/simple/", "").await;
        assert_eq!(resp.status(), StatusCode::OK);

        let raw = body_bytes(resp).await;
        let page = String::from_utf8_lossy(&raw);
        assert!(page.contains("Simple Index"));
    }

    #[tokio::test]
    async fn test_pypi_list_with_packages() {
        let ctx = create_test_context();

        // Seed storage so the index has something to list.
        ctx.state
            .storage
            .put("pypi/flask/flask-2.0.tar.gz", b"fake-tarball-data")
            .await
            .unwrap();

        let resp = send(&ctx.app, Method::GET, "/simple/", "").await;
        assert_eq!(resp.status(), StatusCode::OK);

        let raw = body_bytes(resp).await;
        let page = String::from_utf8_lossy(&raw);
        assert!(page.contains("flask"));
    }

    #[tokio::test]
    async fn test_pypi_versions_local() {
        let ctx = create_test_context();

        ctx.state
            .storage
            .put("pypi/flask/flask-2.0.tar.gz", b"fake-data")
            .await
            .unwrap();

        let resp = send(&ctx.app, Method::GET, "/simple/flask/", "").await;
        assert_eq!(resp.status(), StatusCode::OK);

        let raw = body_bytes(resp).await;
        let page = String::from_utf8_lossy(&raw);
        // The listing shows both the filename and the local download link.
        assert!(page.contains("flask-2.0.tar.gz"));
        assert!(page.contains("/simple/flask/flask-2.0.tar.gz"));
    }

    #[tokio::test]
    async fn test_pypi_download_local() {
        let ctx = create_test_context();

        let payload = b"fake-tarball-content";
        ctx.state
            .storage
            .put("pypi/flask/flask-2.0.tar.gz", payload)
            .await
            .unwrap();

        let resp = send(&ctx.app, Method::GET, "/simple/flask/flask-2.0.tar.gz", "").await;
        assert_eq!(resp.status(), StatusCode::OK);

        let raw = body_bytes(resp).await;
        assert_eq!(&raw[..], payload);
    }

    #[tokio::test]
    async fn test_pypi_not_found_no_proxy() {
        // No proxy configured, no local data: expect 404.
        let ctx = create_test_context();
        let resp = send(&ctx.app, Method::GET, "/simple/nonexistent/", "").await;
        assert_eq!(resp.status(), StatusCode::NOT_FOUND);
    }
}
|
||||
|
||||
@@ -141,3 +141,175 @@ fn guess_content_type(path: &str) -> &'static str {
|
||||
_ => "application/octet-stream",
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Assert that every `(path, mime)` pair maps through
    /// `guess_content_type` as recorded.
    fn assert_types(cases: &[(&str, &str)]) {
        for (path, mime) in cases.iter().copied() {
            assert_eq!(guess_content_type(path), mime, "path = {path}");
        }
    }

    #[test]
    fn test_guess_content_type_json() {
        assert_types(&[("config.json", "application/json")]);
    }

    #[test]
    fn test_guess_content_type_xml() {
        assert_types(&[("data.xml", "application/xml")]);
    }

    #[test]
    fn test_guess_content_type_html() {
        assert_types(&[("index.html", "text/html"), ("page.htm", "text/html")]);
    }

    #[test]
    fn test_guess_content_type_css() {
        assert_types(&[("style.css", "text/css")]);
    }

    #[test]
    fn test_guess_content_type_js() {
        assert_types(&[("app.js", "application/javascript")]);
    }

    #[test]
    fn test_guess_content_type_text() {
        assert_types(&[("readme.txt", "text/plain")]);
    }

    #[test]
    fn test_guess_content_type_markdown() {
        assert_types(&[("README.md", "text/markdown")]);
    }

    #[test]
    fn test_guess_content_type_yaml() {
        assert_types(&[
            ("config.yaml", "application/x-yaml"),
            ("config.yml", "application/x-yaml"),
        ]);
    }

    #[test]
    fn test_guess_content_type_toml() {
        assert_types(&[("Cargo.toml", "application/toml")]);
    }

    #[test]
    fn test_guess_content_type_archives() {
        assert_types(&[
            ("data.tar", "application/x-tar"),
            ("data.gz", "application/gzip"),
            ("data.gzip", "application/gzip"),
            ("data.zip", "application/zip"),
        ]);
    }

    #[test]
    fn test_guess_content_type_images() {
        assert_types(&[
            ("logo.png", "image/png"),
            ("photo.jpg", "image/jpeg"),
            ("photo.jpeg", "image/jpeg"),
            ("anim.gif", "image/gif"),
            ("icon.svg", "image/svg+xml"),
        ]);
    }

    #[test]
    fn test_guess_content_type_special() {
        assert_types(&[
            ("doc.pdf", "application/pdf"),
            ("module.wasm", "application/wasm"),
        ]);
    }

    #[test]
    fn test_guess_content_type_unknown() {
        // Unrecognized or missing extensions fall back to octet-stream.
        assert_types(&[
            ("binary.bin", "application/octet-stream"),
            ("noext", "application/octet-stream"),
        ]);
    }

    #[test]
    fn test_guess_content_type_case_insensitive() {
        assert_types(&[
            ("FILE.JSON", "application/json"),
            ("IMAGE.PNG", "image/png"),
        ]);
    }
}
|
||||
|
||||
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod integration_tests {
    //! Full-router tests for the raw file endpoints.
    use crate::test_helpers::{
        body_bytes, create_test_context, create_test_context_with_raw_disabled, send,
    };
    use axum::http::{Method, StatusCode};

    #[tokio::test]
    async fn test_raw_put_get_roundtrip() {
        let ctx = create_test_context();
        let put_resp = send(&ctx.app, Method::PUT, "/raw/test.txt", b"hello".to_vec()).await;
        assert_eq!(put_resp.status(), StatusCode::CREATED);

        let get_resp = send(&ctx.app, Method::GET, "/raw/test.txt", "").await;
        assert_eq!(get_resp.status(), StatusCode::OK);
        let body = body_bytes(get_resp).await;
        assert_eq!(&body[..], b"hello");
    }

    #[tokio::test]
    async fn test_raw_head() {
        let ctx = create_test_context();
        // Fix: assert the setup PUT succeeded instead of ignoring its result,
        // so an upload failure surfaces here rather than as a HEAD 404.
        let put = send(
            &ctx.app,
            Method::PUT,
            "/raw/test.txt",
            b"hello world".to_vec(),
        )
        .await;
        assert_eq!(put.status(), StatusCode::CREATED);

        let head_resp = send(&ctx.app, Method::HEAD, "/raw/test.txt", "").await;
        assert_eq!(head_resp.status(), StatusCode::OK);
        // HEAD reports the stored size ("hello world" = 11 bytes).
        let cl = head_resp.headers().get("content-length").unwrap();
        assert_eq!(cl.to_str().unwrap(), "11");
    }

    #[tokio::test]
    async fn test_raw_delete() {
        let ctx = create_test_context();
        // Fix: assert the setup PUT succeeded (same rationale as test_raw_head).
        let put = send(&ctx.app, Method::PUT, "/raw/test.txt", b"data".to_vec()).await;
        assert_eq!(put.status(), StatusCode::CREATED);

        let del = send(&ctx.app, Method::DELETE, "/raw/test.txt", "").await;
        assert_eq!(del.status(), StatusCode::NO_CONTENT);

        // The deleted file must be gone.
        let get = send(&ctx.app, Method::GET, "/raw/test.txt", "").await;
        assert_eq!(get.status(), StatusCode::NOT_FOUND);
    }

    #[tokio::test]
    async fn test_raw_not_found() {
        let ctx = create_test_context();
        let resp = send(&ctx.app, Method::GET, "/raw/missing.txt", "").await;
        assert_eq!(resp.status(), StatusCode::NOT_FOUND);
    }

    #[tokio::test]
    async fn test_raw_content_type_json() {
        let ctx = create_test_context();
        // Fix: assert the setup PUT succeeded (same rationale as test_raw_head).
        let put = send(&ctx.app, Method::PUT, "/raw/file.json", b"{}".to_vec()).await;
        assert_eq!(put.status(), StatusCode::CREATED);

        let resp = send(&ctx.app, Method::GET, "/raw/file.json", "").await;
        assert_eq!(resp.status(), StatusCode::OK);
        let ct = resp.headers().get("content-type").unwrap();
        assert_eq!(ct.to_str().unwrap(), "application/json");
    }

    #[tokio::test]
    async fn test_raw_payload_too_large() {
        let ctx = create_test_context();
        let big = vec![0u8; 2 * 1024 * 1024]; // 2 MB > 1 MB limit
        let resp = send(&ctx.app, Method::PUT, "/raw/large.bin", big).await;
        assert_eq!(resp.status(), StatusCode::PAYLOAD_TOO_LARGE);
    }

    #[tokio::test]
    async fn test_raw_disabled() {
        // With the raw registry disabled, both reads and writes 404.
        let ctx = create_test_context_with_raw_disabled();
        let get = send(&ctx.app, Method::GET, "/raw/test.txt", "").await;
        assert_eq!(get.status(), StatusCode::NOT_FOUND);
        let put = send(&ctx.app, Method::PUT, "/raw/test.txt", b"data".to_vec()).await;
        assert_eq!(put.status(), StatusCode::NOT_FOUND);
    }
}
|
||||
|
||||
Reference in New Issue
Block a user