fix: code quality hardening — unwrap removal, unsafe forbid, Go/Raw tests (#72)

* fix: remove unwrap() from production code, improve error handling

- Replace unwrap() with proper error handling in npm, mirror, validation
- Add input validation to cargo registry (crate name + version)
- Improve expect() messages with descriptive context in metrics, rate_limit
- Remove unnecessary clone() in error.rs, docker.rs, npm.rs, dashboard_metrics
- Add #![deny(clippy::unwrap_used)] to prevent future unwrap in prod code
- Add let-else pattern for safer null checks in validation.rs

* docs: update SECURITY.md — add 0.3.x to supported versions

* security: forbid unsafe code at crate level

Add #![forbid(unsafe_code)] to both lib.rs and main.rs.
NORA has zero unsafe blocks — this guarantees no unsafe code can be
added in the future without first removing the forbid attribute
(a stronger guarantee than deny, which can be overridden locally).

* build: add rust-toolchain.toml, Dockerfile HEALTHCHECK

- Pin toolchain to stable with clippy + rustfmt components
- Add Docker HEALTHCHECK for standalone deployments (wget /health)

* test: add Go proxy and Raw registry integration tests

Go proxy tests: list, .info, .mod, @latest, path traversal, 404
Raw registry tests: upload/download, HEAD, 404, path traversal,
overwrite, delete, binary data (10KB)
This commit is contained in:
2026-03-31 21:15:59 +03:00
committed by GitHub
parent 9ec5fe526b
commit bb125db074
16 changed files with 186 additions and 26 deletions

View File

@@ -21,5 +21,8 @@ VOLUME ["/data"]
USER nora USER nora
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
CMD wget -q --spider http://localhost:4000/health || exit 1
ENTRYPOINT ["/usr/local/bin/nora"] ENTRYPOINT ["/usr/local/bin/nora"]
CMD ["serve"] CMD ["serve"]

View File

@@ -4,6 +4,7 @@
| Version | Supported | | Version | Supported |
| ------- | ------------------ | | ------- | ------------------ |
| 0.3.x | :white_check_mark: |
| 0.2.x | :white_check_mark: | | 0.2.x | :white_check_mark: |
| < 0.2 | :x: | | < 0.2 | :x: |

View File

@@ -76,7 +76,6 @@ impl DashboardMetrics {
pub fn with_persistence(storage_path: &str) -> Self { pub fn with_persistence(storage_path: &str) -> Self {
let path = Path::new(storage_path).join("metrics.json"); let path = Path::new(storage_path).join("metrics.json");
let mut metrics = Self::new(); let mut metrics = Self::new();
metrics.persist_path = Some(path.clone());
// Load existing metrics if file exists // Load existing metrics if file exists
if path.exists() { if path.exists() {
@@ -108,6 +107,7 @@ impl DashboardMetrics {
} }
} }
metrics.persist_path = Some(path);
metrics metrics
} }

View File

@@ -51,11 +51,11 @@ struct ErrorResponse {
impl IntoResponse for AppError { impl IntoResponse for AppError {
fn into_response(self) -> Response { fn into_response(self) -> Response {
let (status, message) = match &self { let (status, message) = match self {
AppError::NotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()), AppError::NotFound(msg) => (StatusCode::NOT_FOUND, msg),
AppError::BadRequest(msg) => (StatusCode::BAD_REQUEST, msg.clone()), AppError::BadRequest(msg) => (StatusCode::BAD_REQUEST, msg),
AppError::Unauthorized(msg) => (StatusCode::UNAUTHORIZED, msg.clone()), AppError::Unauthorized(msg) => (StatusCode::UNAUTHORIZED, msg),
AppError::Internal(msg) => (StatusCode::INTERNAL_SERVER_ERROR, msg.clone()), AppError::Internal(msg) => (StatusCode::INTERNAL_SERVER_ERROR, msg),
AppError::Storage(e) => match e { AppError::Storage(e) => match e {
StorageError::NotFound => (StatusCode::NOT_FOUND, "Resource not found".to_string()), StorageError::NotFound => (StatusCode::NOT_FOUND, "Resource not found".to_string()),
StorageError::Validation(v) => (StatusCode::BAD_REQUEST, v.to_string()), StorageError::Validation(v) => (StatusCode::BAD_REQUEST, v.to_string()),

View File

@@ -1,3 +1,5 @@
#![deny(clippy::unwrap_used)]
#![forbid(unsafe_code)]
//! NORA Registry — library interface for fuzzing and testing //! NORA Registry — library interface for fuzzing and testing
pub mod validation; pub mod validation;

View File

@@ -1,6 +1,7 @@
// Copyright (c) 2026 Volkov Pavel | DevITWay // Copyright (c) 2026 Volkov Pavel | DevITWay
// SPDX-License-Identifier: MIT // SPDX-License-Identifier: MIT
#![deny(clippy::unwrap_used)]
#![forbid(unsafe_code)]
mod activity_log; mod activity_log;
mod audit; mod audit;
mod auth; mod auth;

View File

@@ -26,7 +26,7 @@ lazy_static! {
"nora_http_requests_total", "nora_http_requests_total",
"Total number of HTTP requests", "Total number of HTTP requests",
&["registry", "method", "status"] &["registry", "method", "status"]
).expect("metric can be created"); ).expect("failed to create HTTP_REQUESTS_TOTAL metric at startup");
/// HTTP request duration histogram /// HTTP request duration histogram
pub static ref HTTP_REQUEST_DURATION: HistogramVec = register_histogram_vec!( pub static ref HTTP_REQUEST_DURATION: HistogramVec = register_histogram_vec!(
@@ -34,28 +34,28 @@ lazy_static! {
"HTTP request latency in seconds", "HTTP request latency in seconds",
&["registry", "method"], &["registry", "method"],
vec![0.001, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0] vec![0.001, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0]
).expect("metric can be created"); ).expect("failed to create HTTP_REQUEST_DURATION metric at startup");
/// Cache requests counter (hit/miss) /// Cache requests counter (hit/miss)
pub static ref CACHE_REQUESTS: IntCounterVec = register_int_counter_vec!( pub static ref CACHE_REQUESTS: IntCounterVec = register_int_counter_vec!(
"nora_cache_requests_total", "nora_cache_requests_total",
"Total cache requests", "Total cache requests",
&["registry", "result"] &["registry", "result"]
).expect("metric can be created"); ).expect("failed to create CACHE_REQUESTS metric at startup");
/// Storage operations counter /// Storage operations counter
pub static ref STORAGE_OPERATIONS: IntCounterVec = register_int_counter_vec!( pub static ref STORAGE_OPERATIONS: IntCounterVec = register_int_counter_vec!(
"nora_storage_operations_total", "nora_storage_operations_total",
"Total storage operations", "Total storage operations",
&["operation", "status"] &["operation", "status"]
).expect("metric can be created"); ).expect("failed to create STORAGE_OPERATIONS metric at startup");
/// Artifacts count by registry /// Artifacts count by registry
pub static ref ARTIFACTS_TOTAL: IntCounterVec = register_int_counter_vec!( pub static ref ARTIFACTS_TOTAL: IntCounterVec = register_int_counter_vec!(
"nora_artifacts_total", "nora_artifacts_total",
"Total artifacts stored", "Total artifacts stored",
&["registry"] &["registry"]
).expect("metric can be created"); ).expect("failed to create ARTIFACTS_TOTAL metric at startup");
} }
/// Routes for metrics endpoint /// Routes for metrics endpoint

View File

@@ -64,7 +64,7 @@ pub fn create_progress_bar(total: u64) -> ProgressBar {
.template( .template(
"{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({eta}) {msg}", "{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({eta}) {msg}",
) )
.unwrap() .expect("static progress bar template is valid")
.progress_chars("=>-"), .progress_chars("=>-"),
); );
pb pb
@@ -220,7 +220,7 @@ fn parse_requirements_txt(content: &str) -> Vec<MirrorTarget> {
.lines() .lines()
.filter(|l| !l.trim().is_empty() && !l.starts_with('#') && !l.starts_with('-')) .filter(|l| !l.trim().is_empty() && !l.starts_with('#') && !l.starts_with('-'))
.filter_map(|line| { .filter_map(|line| {
let line = line.split('#').next().unwrap().trim(); let line = line.split('#').next().unwrap_or(line).trim();
if let Some((name, version)) = line.split_once("==") { if let Some((name, version)) = line.split_once("==") {
Some(MirrorTarget { Some(MirrorTarget {
name: name.trim().to_string(), name: name.trim().to_string(),

View File

@@ -200,7 +200,11 @@ async fn mirror_npm_packages(
let mut handles = Vec::new(); let mut handles = Vec::new();
for target in targets { for target in targets {
let permit = sem.clone().acquire_owned().await.unwrap(); let permit = sem
.clone()
.acquire_owned()
.await
.expect("semaphore closed unexpectedly");
let client = client.clone(); let client = client.clone();
let pb = pb.clone(); let pb = pb.clone();
let fetched = fetched.clone(); let fetched = fetched.clone();

View File

@@ -25,7 +25,7 @@ pub fn auth_rate_limiter(
.burst_size(config.auth_burst) .burst_size(config.auth_burst)
.use_headers() .use_headers()
.finish() .finish()
.expect("Failed to build auth rate limiter"); .expect("failed to build auth rate limiter: invalid RateLimitConfig");
tower_governor::GovernorLayer::new(gov_config) tower_governor::GovernorLayer::new(gov_config)
} }
@@ -46,7 +46,7 @@ pub fn upload_rate_limiter(
.burst_size(config.upload_burst) .burst_size(config.upload_burst)
.use_headers() .use_headers()
.finish() .finish()
.expect("Failed to build upload rate limiter"); .expect("failed to build upload rate limiter: invalid RateLimitConfig");
tower_governor::GovernorLayer::new(gov_config) tower_governor::GovernorLayer::new(gov_config)
} }
@@ -65,7 +65,7 @@ pub fn general_rate_limiter(
.burst_size(config.general_burst) .burst_size(config.general_burst)
.use_headers() .use_headers()
.finish() .finish()
.expect("Failed to build general rate limiter"); .expect("failed to build general rate limiter: invalid RateLimitConfig");
tower_governor::GovernorLayer::new(gov_config) tower_governor::GovernorLayer::new(gov_config)
} }

View File

@@ -3,6 +3,7 @@
use crate::activity_log::{ActionType, ActivityEntry}; use crate::activity_log::{ActionType, ActivityEntry};
use crate::audit::AuditEntry; use crate::audit::AuditEntry;
use crate::validation::validate_storage_key;
use crate::AppState; use crate::AppState;
use axum::{ use axum::{
extract::{Path, State}, extract::{Path, State},
@@ -26,6 +27,10 @@ async fn get_metadata(
State(state): State<Arc<AppState>>, State(state): State<Arc<AppState>>,
Path(crate_name): Path<String>, Path(crate_name): Path<String>,
) -> Response { ) -> Response {
// Validate input to prevent path traversal
if validate_storage_key(&crate_name).is_err() {
return StatusCode::BAD_REQUEST.into_response();
}
let key = format!("cargo/{}/metadata.json", crate_name); let key = format!("cargo/{}/metadata.json", crate_name);
match state.storage.get(&key).await { match state.storage.get(&key).await {
Ok(data) => (StatusCode::OK, data).into_response(), Ok(data) => (StatusCode::OK, data).into_response(),
@@ -37,6 +42,10 @@ async fn download(
State(state): State<Arc<AppState>>, State(state): State<Arc<AppState>>,
Path((crate_name, version)): Path<(String, String)>, Path((crate_name, version)): Path<(String, String)>,
) -> Response { ) -> Response {
// Validate inputs to prevent path traversal
if validate_storage_key(&crate_name).is_err() || validate_storage_key(&version).is_err() {
return StatusCode::BAD_REQUEST.into_response();
}
let key = format!( let key = format!(
"cargo/{}/{}/{}-{}.crate", "cargo/{}/{}/{}-{}.crate",
crate_name, version, crate_name, version crate_name, version, crate_name, version

View File

@@ -346,7 +346,7 @@ async fn start_upload(Path(name): Path<String>) -> Response {
( (
StatusCode::ACCEPTED, StatusCode::ACCEPTED,
[ [
(header::LOCATION, location.clone()), (header::LOCATION, location),
(HeaderName::from_static("docker-upload-uuid"), uuid), (HeaderName::from_static("docker-upload-uuid"), uuid),
], ],
) )

View File

@@ -176,8 +176,7 @@ async fn handle_request(State(state): State<Arc<AppState>>, Path(path): Path<Str
} else { } else {
// Metadata: rewrite tarball URLs to point to NORA // Metadata: rewrite tarball URLs to point to NORA
let nora_base = nora_base_url(&state); let nora_base = nora_base_url(&state);
let rewritten = rewrite_tarball_urls(&data, &nora_base, proxy_url) let rewritten = rewrite_tarball_urls(&data, &nora_base, proxy_url).unwrap_or(data);
.unwrap_or_else(|_| data.clone());
data_to_cache = rewritten.clone(); data_to_cache = rewritten.clone();
data_to_serve = rewritten; data_to_serve = rewritten;
@@ -217,8 +216,7 @@ async fn refetch_metadata(state: &Arc<AppState>, path: &str, key: &str) -> Optio
.ok()?; .ok()?;
let nora_base = nora_base_url(state); let nora_base = nora_base_url(state);
let rewritten = let rewritten = rewrite_tarball_urls(&data, &nora_base, proxy_url).unwrap_or(data);
rewrite_tarball_urls(&data, &nora_base, proxy_url).unwrap_or_else(|_| data.clone());
let storage = state.storage.clone(); let storage = state.storage.clone();
let key_clone = key.to_string(); let key_clone = key.to_string();
@@ -346,7 +344,9 @@ async fn handle_publish(
} }
// Merge versions // Merge versions
let meta_obj = metadata.as_object_mut().unwrap(); let Some(meta_obj) = metadata.as_object_mut() else {
return (StatusCode::INTERNAL_SERVER_ERROR, "invalid metadata format").into_response();
};
let stored_versions = meta_obj.entry("versions").or_insert(serde_json::json!({})); let stored_versions = meta_obj.entry("versions").or_insert(serde_json::json!({}));
if let Some(sv) = stored_versions.as_object_mut() { if let Some(sv) = stored_versions.as_object_mut() {
for (ver, ver_data) in new_versions { for (ver, ver_data) in new_versions {

View File

@@ -178,7 +178,12 @@ pub fn validate_docker_name(name: &str) -> Result<(), ValidationError> {
"empty path segment".to_string(), "empty path segment".to_string(),
)); ));
} }
let first = segment.chars().next().unwrap(); // Safety: segment.is_empty() checked above, but use let-else for defense-in-depth
let Some(first) = segment.chars().next() else {
return Err(ValidationError::InvalidDockerName(
"empty path segment".to_string(),
));
};
if !first.is_ascii_alphanumeric() { if !first.is_ascii_alphanumeric() {
return Err(ValidationError::InvalidDockerName( return Err(ValidationError::InvalidDockerName(
"segment must start with alphanumeric".to_string(), "segment must start with alphanumeric".to_string(),
@@ -292,7 +297,10 @@ pub fn validate_docker_reference(reference: &str) -> Result<(), ValidationError>
} }
// Validate as tag // Validate as tag
let first = reference.chars().next().unwrap(); // Safety: empty check at function start, but use let-else for defense-in-depth
let Some(first) = reference.chars().next() else {
return Err(ValidationError::EmptyInput);
};
if !first.is_ascii_alphanumeric() { if !first.is_ascii_alphanumeric() {
return Err(ValidationError::InvalidReference( return Err(ValidationError::InvalidReference(
"tag must start with alphanumeric".to_string(), "tag must start with alphanumeric".to_string(),

3
rust-toolchain.toml Normal file
View File

@@ -0,0 +1,3 @@
[toolchain]
channel = "stable"
components = ["clippy", "rustfmt"]

View File

@@ -324,6 +324,135 @@ else
fi fi
echo "" echo ""
# ============================================
# Go Proxy Tests
# ============================================
echo ""
echo "=== Go Proxy ==="
# Pre-seed a Go module for testing
GO_MODULE="example.com/testmod"
GO_VERSION="v1.0.0"
GO_STORAGE="$STORAGE_DIR/go"
mkdir -p "$GO_STORAGE/example.com/testmod/@v"
# Create .info file
echo '{"Version":"v1.0.0","Time":"2026-01-01T00:00:00Z"}' > "$GO_STORAGE/example.com/testmod/@v/v1.0.0.info"
# Create .mod file
echo 'module example.com/testmod
go 1.21' > "$GO_STORAGE/example.com/testmod/@v/v1.0.0.mod"
# Create list file
echo "v1.0.0" > "$GO_STORAGE/example.com/testmod/@v/list"
# Test: Go module list
check "Go list versions" \
curl -sf "$BASE/go/example.com/testmod/@v/list" -o /dev/null
# Test: Go module .info
INFO_RESULT=$(curl -sf "$BASE/go/example.com/testmod/@v/v1.0.0.info" 2>/dev/null)
if echo "$INFO_RESULT" | grep -q "v1.0.0"; then
pass "Go .info returns version"
else
fail "Go .info: $INFO_RESULT"
fi
# Test: Go module .mod
MOD_RESULT=$(curl -sf "$BASE/go/example.com/testmod/@v/v1.0.0.mod" 2>/dev/null)
if echo "$MOD_RESULT" | grep -q "module example.com/testmod"; then
pass "Go .mod returns module content"
else
fail "Go .mod: $MOD_RESULT"
fi
# Test: Go @latest (200 with upstream, 404 without — both valid)
LATEST_CODE=$(curl -s -o /dev/null -w "%{http_code}" "$BASE/go/example.com/testmod/@latest")
if [ "$LATEST_CODE" = "200" ] || [ "$LATEST_CODE" = "404" ]; then
pass "Go @latest handled ($LATEST_CODE)"
else
fail "Go @latest returned $LATEST_CODE"
fi
# Test: Go path traversal rejection
TRAVERSAL_RESULT=$(curl -s -o /dev/null -w "%{http_code}" "$BASE/go/../../etc/passwd/@v/list")
if [ "$TRAVERSAL_RESULT" = "400" ] || [ "$TRAVERSAL_RESULT" = "404" ]; then
pass "Go path traversal rejected ($TRAVERSAL_RESULT)"
else
fail "Go path traversal returned $TRAVERSAL_RESULT"
fi
# Test: Go nonexistent module
NOTFOUND=$(curl -s -o /dev/null -w "%{http_code}" "$BASE/go/nonexistent.com/pkg/@v/list")
if [ "$NOTFOUND" = "404" ]; then
pass "Go 404 on nonexistent module"
else
fail "Go nonexistent returned $NOTFOUND"
fi
# ============================================
# Raw Registry Extended Tests
# ============================================
echo ""
echo "=== Raw Registry (extended) ==="
# Test: Raw upload and download (basic — already exists, extend)
echo "integration-test-data-$(date +%s)" | curl -sf -X PUT --data-binary @- "$BASE/raw/integration/test.txt" >/dev/null 2>&1
check "Raw upload + download" \
curl -sf "$BASE/raw/integration/test.txt" -o /dev/null
# Test: Raw HEAD (check exists)
HEAD_RESULT=$(curl -sf -o /dev/null -w "%{http_code}" --head "$BASE/raw/integration/test.txt")
if [ "$HEAD_RESULT" = "200" ]; then
pass "Raw HEAD returns 200"
else
fail "Raw HEAD returned $HEAD_RESULT"
fi
# Test: Raw 404 on nonexistent
NOTFOUND=$(curl -s -o /dev/null -w "%{http_code}" "$BASE/raw/nonexistent/file.bin")
if [ "$NOTFOUND" = "404" ]; then
pass "Raw 404 on nonexistent file"
else
fail "Raw nonexistent returned $NOTFOUND"
fi
# Test: Raw path traversal
TRAVERSAL=$(curl -s -o /dev/null -w "%{http_code}" "$BASE/raw/../../../etc/passwd")
if [ "$TRAVERSAL" = "400" ] || [ "$TRAVERSAL" = "404" ]; then
pass "Raw path traversal rejected ($TRAVERSAL)"
else
fail "Raw path traversal returned $TRAVERSAL"
fi
# Test: Raw overwrite
echo "version-1" | curl -sf -X PUT --data-binary @- "$BASE/raw/integration/overwrite.txt" >/dev/null 2>&1
echo "version-2" | curl -sf -X PUT --data-binary @- "$BASE/raw/integration/overwrite.txt" >/dev/null 2>&1
CONTENT=$(curl -sf "$BASE/raw/integration/overwrite.txt" 2>/dev/null)
if [ "$CONTENT" = "version-2" ]; then
pass "Raw overwrite works"
else
fail "Raw overwrite: got '$CONTENT'"
fi
# Test: Raw delete
curl -sf -X DELETE "$BASE/raw/integration/overwrite.txt" >/dev/null 2>&1
DELETE_CHECK=$(curl -s -o /dev/null -w "%{http_code}" "$BASE/raw/integration/overwrite.txt")
if [ "$DELETE_CHECK" = "404" ]; then
pass "Raw delete works"
else
fail "Raw delete: file still returns $DELETE_CHECK"
fi
# Test: Raw binary data (not just text)
dd if=/dev/urandom bs=1024 count=10 2>/dev/null | curl -sf -X PUT --data-binary @- "$BASE/raw/integration/binary.bin" >/dev/null 2>&1
BIN_SIZE=$(curl -sf "$BASE/raw/integration/binary.bin" 2>/dev/null | wc -c)
if [ "$BIN_SIZE" -ge 10000 ]; then
pass "Raw binary upload/download (${BIN_SIZE} bytes)"
else
fail "Raw binary: expected ~10240, got $BIN_SIZE"
fi
echo "--- Mirror CLI ---" echo "--- Mirror CLI ---"
# Create a minimal lockfile # Create a minimal lockfile
LOCKFILE=$(mktemp) LOCKFILE=$(mktemp)