mirror of
https://github.com/getnora-io/nora.git
synced 2026-04-13 12:00:31 +00:00
Compare commits
117 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| e415f0f1ce | |||
| aa86633a04 | |||
| 31afa1f70b | |||
| f36abd82ef | |||
| ea6a86b0f1 | |||
| 638f99d8dc | |||
| c55307a3af | |||
| cc416f3adf | |||
| 30aedac238 | |||
| 34e85acd6e | |||
|
|
41eefdd90d | ||
|
|
94ca418155 | ||
|
|
e72648a6c4 | ||
| 18e93d23a9 | |||
| db05adb060 | |||
| a57de6690e | |||
| d3439ae33d | |||
| b3b74b8b2d | |||
| d41b55fa3a | |||
| 5a68bfd695 | |||
| 9c8fee5a5d | |||
| bbff337b4c | |||
| a73335c549 | |||
| ad6aba46b2 | |||
| 095270d113 | |||
| 769f5fb01d | |||
| 53884e143b | |||
| 0eb26f24f7 | |||
| fa962b2d6e | |||
| a1da4fff1e | |||
| 868c4feca7 | |||
| 5b4cba1392 | |||
| ad890be56a | |||
| 3b9ea37b0e | |||
| 233b83f902 | |||
| d886426957 | |||
| 52c2443543 | |||
| 26d30b622d | |||
| 272898f43c | |||
| 61de6c6ddd | |||
| b80c7c5160 | |||
| 68089b2bbf | |||
| af411a2bf4 | |||
| 96ccd16879 | |||
| 6582000789 | |||
| 070774ac94 | |||
| 058fc41f1c | |||
| 7f5a3c7c8a | |||
| 5b57cc5913 | |||
| aa844d851d | |||
| 8569de23d5 | |||
|
|
9349b93757 | ||
|
|
69080dfd90 | ||
|
|
ae799aed94 | ||
|
|
95c6e403a8 | ||
|
|
2c886040d7 | ||
|
|
9ab6ccc594 | ||
|
|
679b36b986 | ||
|
|
da8c473e02 | ||
|
|
3dc8b81261 | ||
| 7502c583d0 | |||
| a9455c35b9 | |||
| 8278297b4a | |||
| 8da4c4278a | |||
| 99c1f9b5ec | |||
| 07de85d4f8 | |||
| 4c3a9f6bd5 | |||
| 402d2321ef | |||
| f560e5f76b | |||
| e34032d08f | |||
| 03a3bf9197 | |||
| 6c5f0dda30 | |||
| fb058302c8 | |||
| 79565aec47 | |||
| 58a484d805 | |||
| 45c3e276dc | |||
|
|
f4e53b85dd | ||
| 05d89d5153 | |||
| b149f7ebd4 | |||
| 5254e2a54a | |||
| 8783d1dc4b | |||
|
|
4c05df2359 | ||
| 7f8e3cfe68 | |||
|
|
13f33e8919 | ||
|
|
7454ff2e03 | ||
|
|
5ffb5a9be3 | ||
|
|
c8793a4b60 | ||
|
|
fd4a7b0b0f | ||
|
|
7af1e7462c | ||
|
|
de1a188fa7 | ||
|
|
36d0749bb3 | ||
| fb0f80ac5a | |||
| 161d7f706a | |||
| e4e38e3aab | |||
| b153bc0c5b | |||
| d76383c701 | |||
| d161c2f645 | |||
| c7f9d5c036 | |||
| b41bfd9a88 | |||
| 3e3070a401 | |||
| 3868b16ea4 | |||
| 3a6d3eeb9a | |||
| dd29707395 | |||
| e7a6a652af | |||
| 4ad802ce2f | |||
|
|
04c806b659 | ||
|
|
50a5395a87 | ||
|
|
bcd172f23f | ||
|
|
a5a7c4f8be | ||
|
|
2c7c497c30 | ||
|
|
6b6f88ab9c | ||
|
|
1255e3227b | ||
|
|
aabd0b76fb | ||
| ac14405af3 | |||
| 5f385dce45 | |||
| 761e08f168 | |||
| eb4f82df07 |
9
.clusterfuzzlite/Dockerfile
Normal file
9
.clusterfuzzlite/Dockerfile
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
FROM rust:1.87-slim
|
||||||
|
|
||||||
|
RUN apt-get update && apt-get install -y build-essential pkg-config && rm -rf /var/lib/apt/lists/*
|
||||||
|
RUN cargo install cargo-fuzz
|
||||||
|
|
||||||
|
COPY . /src
|
||||||
|
WORKDIR /src
|
||||||
|
|
||||||
|
RUN cd fuzz && cargo fuzz build 2>/dev/null || true
|
||||||
5
.clusterfuzzlite/project.yaml
Normal file
5
.clusterfuzzlite/project.yaml
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
language: rust
|
||||||
|
fuzzing_engines:
|
||||||
|
- libfuzzer
|
||||||
|
sanitizers:
|
||||||
|
- address
|
||||||
16
.github/dependabot.yml
vendored
Normal file
16
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
# GitHub Actions — обновляет версии actions в workflows
|
||||||
|
- package-ecosystem: github-actions
|
||||||
|
directory: /
|
||||||
|
schedule:
|
||||||
|
interval: weekly
|
||||||
|
labels: [dependencies, ci]
|
||||||
|
|
||||||
|
# Cargo — только security-апдейты, без шума от minor/patch
|
||||||
|
- package-ecosystem: cargo
|
||||||
|
directory: /
|
||||||
|
schedule:
|
||||||
|
interval: weekly
|
||||||
|
open-pull-requests-limit: 5
|
||||||
|
labels: [dependencies, rust]
|
||||||
117
.github/workflows/ci.yml
vendored
117
.github/workflows/ci.yml
vendored
@@ -6,18 +6,20 @@ on:
|
|||||||
pull_request:
|
pull_request:
|
||||||
branches: [main]
|
branches: [main]
|
||||||
|
|
||||||
|
permissions: read-all
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test:
|
test:
|
||||||
name: Test
|
name: Test
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@stable
|
uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||||
|
|
||||||
- name: Cache cargo
|
- name: Cache cargo
|
||||||
uses: Swatinem/rust-cache@v2
|
uses: Swatinem/rust-cache@42dc69e1aa15d09112580998cf2ef0119e2e91ae # v2
|
||||||
|
|
||||||
- name: Check formatting
|
- name: Check formatting
|
||||||
run: cargo fmt --check
|
run: cargo fmt --check
|
||||||
@@ -33,38 +35,36 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
security-events: write # for uploading SARIF to GitHub Security tab
|
security-events: write
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0 # full history required for gitleaks
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@stable
|
uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||||
|
|
||||||
- name: Cache cargo
|
- name: Cache cargo
|
||||||
uses: Swatinem/rust-cache@v2
|
uses: Swatinem/rust-cache@42dc69e1aa15d09112580998cf2ef0119e2e91ae # v2
|
||||||
|
|
||||||
# ── Secrets ────────────────────────────────────────────────────────────
|
# ── Secrets ────────────────────────────────────────────────────────────
|
||||||
- name: Gitleaks — scan for hardcoded secrets
|
- name: Gitleaks — scan for hardcoded secrets
|
||||||
run: |
|
run: |
|
||||||
curl -sL https://github.com/gitleaks/gitleaks/releases/download/v8.21.2/gitleaks_8.21.2_linux_x64.tar.gz \
|
curl -sL https://github.com/gitleaks/gitleaks/releases/download/v8.21.2/gitleaks_8.21.2_linux_x64.tar.gz \
|
||||||
| tar xz -C /usr/local/bin gitleaks
|
| tar xz -C /usr/local/bin gitleaks
|
||||||
gitleaks detect --source . --exit-code 1 --report-format sarif --report-path gitleaks.sarif || true
|
gitleaks detect --source . --exit-code 1 --report-format sarif --report-path gitleaks.sarif
|
||||||
continue-on-error: true # findings are reported, do not block the pipeline
|
|
||||||
|
|
||||||
# ── CVE in Rust dependencies ────────────────────────────────────────────
|
# ── CVE in Rust dependencies ────────────────────────────────────────────
|
||||||
- name: Install cargo-audit
|
- name: Install cargo-audit
|
||||||
run: cargo install cargo-audit --locked
|
run: cargo install cargo-audit --locked
|
||||||
|
|
||||||
- name: cargo audit — RustSec advisory database
|
- name: cargo audit — RustSec advisory database
|
||||||
run: cargo audit
|
run: cargo audit --ignore RUSTSEC-2025-0119
|
||||||
continue-on-error: true # warn only; known CVEs should not block CI until triaged
|
|
||||||
|
|
||||||
# ── Licenses, banned crates, supply chain policy ────────────────────────
|
# ── Licenses, banned crates, supply chain policy ────────────────────────
|
||||||
- name: cargo deny — licenses and banned crates
|
- name: cargo deny — licenses and banned crates
|
||||||
uses: EmbarkStudios/cargo-deny-action@v2
|
uses: EmbarkStudios/cargo-deny-action@82eb9f621fbc699dd0918f3ea06864c14cc84246 # v2
|
||||||
with:
|
with:
|
||||||
command: check
|
command: check
|
||||||
arguments: --all-features
|
arguments: --all-features
|
||||||
@@ -72,18 +72,103 @@ jobs:
|
|||||||
# ── CVE scan of source tree and Cargo.lock ──────────────────────────────
|
# ── CVE scan of source tree and Cargo.lock ──────────────────────────────
|
||||||
- name: Trivy — filesystem scan (Cargo.lock + source)
|
- name: Trivy — filesystem scan (Cargo.lock + source)
|
||||||
if: always()
|
if: always()
|
||||||
uses: aquasecurity/trivy-action@master
|
uses: aquasecurity/trivy-action@57a97c7e7821a5776cebc9bb87c984fa69cba8f1 # 0.35.0
|
||||||
with:
|
with:
|
||||||
scan-type: fs
|
scan-type: fs
|
||||||
scan-ref: .
|
scan-ref: .
|
||||||
format: sarif
|
format: sarif
|
||||||
output: trivy-fs.sarif
|
output: trivy-fs.sarif
|
||||||
severity: HIGH,CRITICAL
|
severity: HIGH,CRITICAL
|
||||||
exit-code: 0 # warn only; change to 1 to block the pipeline
|
exit-code: 1
|
||||||
|
|
||||||
- name: Upload Trivy fs results to GitHub Security tab
|
- name: Upload Trivy fs results to GitHub Security tab
|
||||||
uses: github/codeql-action/upload-sarif@v3
|
uses: github/codeql-action/upload-sarif@a60c4df7a135c7317c1e9ddf9b5a9b07a910dda9 # v4
|
||||||
if: always()
|
if: always()
|
||||||
with:
|
with:
|
||||||
sarif_file: trivy-fs.sarif
|
sarif_file: trivy-fs.sarif
|
||||||
category: trivy-fs
|
category: trivy-fs
|
||||||
|
|
||||||
|
integration:
|
||||||
|
name: Integration
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: test
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||||
|
|
||||||
|
- name: Install Rust
|
||||||
|
uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||||
|
|
||||||
|
- name: Cache cargo
|
||||||
|
uses: Swatinem/rust-cache@42dc69e1aa15d09112580998cf2ef0119e2e91ae # v2
|
||||||
|
|
||||||
|
- name: Build NORA
|
||||||
|
run: cargo build --release --package nora-registry
|
||||||
|
|
||||||
|
- name: Start NORA
|
||||||
|
run: |
|
||||||
|
NORA_STORAGE_PATH=/tmp/nora-data ./target/release/nora &
|
||||||
|
for i in $(seq 1 15); do
|
||||||
|
curl -sf http://localhost:4000/health && break || sleep 2
|
||||||
|
done
|
||||||
|
curl -sf http://localhost:4000/health | jq .
|
||||||
|
|
||||||
|
- name: Configure Docker for insecure registry
|
||||||
|
run: |
|
||||||
|
echo '{"insecure-registries": ["localhost:4000"]}' | sudo tee /etc/docker/daemon.json
|
||||||
|
sudo systemctl restart docker
|
||||||
|
sleep 2
|
||||||
|
|
||||||
|
- name: Docker — push and pull image
|
||||||
|
run: |
|
||||||
|
docker pull alpine:3.20
|
||||||
|
docker tag alpine:3.20 localhost:4000/test/alpine:integration
|
||||||
|
docker push localhost:4000/test/alpine:integration
|
||||||
|
docker rmi localhost:4000/test/alpine:integration
|
||||||
|
docker pull localhost:4000/test/alpine:integration
|
||||||
|
echo "Docker push/pull OK"
|
||||||
|
|
||||||
|
- name: Docker — verify catalog and tags
|
||||||
|
run: |
|
||||||
|
curl -sf http://localhost:4000/v2/_catalog | jq .
|
||||||
|
curl -sf http://localhost:4000/v2/test/alpine/tags/list | jq .
|
||||||
|
|
||||||
|
- name: npm — verify registry endpoint
|
||||||
|
run: |
|
||||||
|
STATUS=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:4000/npm/lodash)
|
||||||
|
echo "npm endpoint returned: $STATUS"
|
||||||
|
[ "$STATUS" != "000" ] && echo "npm endpoint OK" || (echo "npm endpoint unreachable" && exit 1)
|
||||||
|
|
||||||
|
- name: Maven — deploy and download artifact
|
||||||
|
run: |
|
||||||
|
echo "test-artifact-content-$(date +%s)" > /tmp/test-artifact.jar
|
||||||
|
CHECKSUM=$(sha256sum /tmp/test-artifact.jar | cut -d' ' -f1)
|
||||||
|
curl -sf -X PUT --data-binary @/tmp/test-artifact.jar \
|
||||||
|
http://localhost:4000/maven2/com/example/test-lib/1.0.0/test-lib-1.0.0.jar
|
||||||
|
curl -sf -o /tmp/downloaded.jar \
|
||||||
|
http://localhost:4000/maven2/com/example/test-lib/1.0.0/test-lib-1.0.0.jar
|
||||||
|
DOWNLOAD_CHECKSUM=$(sha256sum /tmp/downloaded.jar | cut -d' ' -f1)
|
||||||
|
[ "$CHECKSUM" = "$DOWNLOAD_CHECKSUM" ] && echo "Maven deploy/download OK" || (echo "Checksum mismatch!" && exit 1)
|
||||||
|
|
||||||
|
- name: PyPI — verify simple index
|
||||||
|
run: |
|
||||||
|
STATUS=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:4000/simple/)
|
||||||
|
echo "PyPI simple index returned: $STATUS"
|
||||||
|
[ "$STATUS" = "200" ] && echo "PyPI endpoint OK" || (echo "Expected 200, got $STATUS" && exit 1)
|
||||||
|
|
||||||
|
- name: Cargo — verify registry API responds
|
||||||
|
run: |
|
||||||
|
STATUS=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:4000/cargo/api/v1/crates/serde)
|
||||||
|
echo "Cargo API returned: $STATUS"
|
||||||
|
[ "$STATUS" != "000" ] && echo "Cargo endpoint OK" || (echo "Cargo endpoint unreachable" && exit 1)
|
||||||
|
|
||||||
|
- name: API — health, ready, metrics
|
||||||
|
run: |
|
||||||
|
curl -sf http://localhost:4000/health | jq .status
|
||||||
|
curl -sf http://localhost:4000/ready
|
||||||
|
curl -sf http://localhost:4000/metrics | head -5
|
||||||
|
echo "API checks OK"
|
||||||
|
|
||||||
|
- name: Stop NORA
|
||||||
|
if: always()
|
||||||
|
run: pkill nora || true
|
||||||
|
|||||||
174
.github/workflows/release.yml
vendored
174
.github/workflows/release.yml
vendored
@@ -4,20 +4,23 @@ on:
|
|||||||
push:
|
push:
|
||||||
tags: ['v*']
|
tags: ['v*']
|
||||||
|
|
||||||
|
permissions: read-all
|
||||||
|
|
||||||
env:
|
env:
|
||||||
REGISTRY: ghcr.io
|
REGISTRY: ghcr.io
|
||||||
|
NORA: localhost:5000
|
||||||
IMAGE_NAME: ${{ github.repository }}
|
IMAGE_NAME: ${{ github.repository }}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
name: Build & Push
|
name: Build & Push
|
||||||
runs-on: self-hosted
|
runs-on: [self-hosted, nora]
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
packages: write
|
packages: write
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||||
|
|
||||||
- name: Set up Rust
|
- name: Set up Rust
|
||||||
run: |
|
run: |
|
||||||
@@ -30,29 +33,40 @@ jobs:
|
|||||||
cargo build --release --target x86_64-unknown-linux-musl --package nora-registry
|
cargo build --release --target x86_64-unknown-linux-musl --package nora-registry
|
||||||
cp target/x86_64-unknown-linux-musl/release/nora ./nora
|
cp target/x86_64-unknown-linux-musl/release/nora ./nora
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Upload binary artifact
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||||
|
with:
|
||||||
|
name: nora-binary-${{ github.run_id }}
|
||||||
|
path: ./nora
|
||||||
|
retention-days: 1
|
||||||
|
|
||||||
- name: Log in to Container Registry
|
- name: Set up Docker Buildx
|
||||||
uses: docker/login-action@v3
|
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4
|
||||||
|
with:
|
||||||
|
driver-opts: network=host
|
||||||
|
|
||||||
|
- name: Log in to GitHub Container Registry
|
||||||
|
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4
|
||||||
with:
|
with:
|
||||||
registry: ${{ env.REGISTRY }}
|
registry: ${{ env.REGISTRY }}
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
# ── Alpine (standard) ────────────────────────────────────────────────────
|
# ── Alpine ───────────────────────────────────────────────────────────────
|
||||||
- name: Extract metadata (alpine)
|
- name: Extract metadata (alpine)
|
||||||
id: meta-alpine
|
id: meta-alpine
|
||||||
uses: docker/metadata-action@v5
|
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6
|
||||||
with:
|
with:
|
||||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
images: |
|
||||||
|
${{ env.NORA }}/${{ env.IMAGE_NAME }}
|
||||||
|
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||||
tags: |
|
tags: |
|
||||||
type=semver,pattern={{version}}
|
type=semver,pattern={{version}}
|
||||||
type=semver,pattern={{major}}.{{minor}}
|
type=semver,pattern={{major}}.{{minor}}
|
||||||
type=raw,value=latest
|
type=raw,value=latest
|
||||||
|
|
||||||
- name: Build and push (alpine)
|
- name: Build and push (alpine)
|
||||||
uses: docker/build-push-action@v5
|
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
file: Dockerfile
|
file: Dockerfile
|
||||||
@@ -60,15 +74,17 @@ jobs:
|
|||||||
push: true
|
push: true
|
||||||
tags: ${{ steps.meta-alpine.outputs.tags }}
|
tags: ${{ steps.meta-alpine.outputs.tags }}
|
||||||
labels: ${{ steps.meta-alpine.outputs.labels }}
|
labels: ${{ steps.meta-alpine.outputs.labels }}
|
||||||
cache-from: type=gha,scope=alpine
|
cache-from: type=registry,ref=${{ env.NORA }}/${{ env.IMAGE_NAME }}-cache:alpine,ignore-error=true
|
||||||
cache-to: type=gha,mode=max,scope=alpine
|
cache-to: type=registry,ref=${{ env.NORA }}/${{ env.IMAGE_NAME }}-cache:alpine,mode=max
|
||||||
|
|
||||||
# ── RED OS ───────────────────────────────────────────────────────────────
|
# ── RED OS ───────────────────────────────────────────────────────────────
|
||||||
- name: Extract metadata (redos)
|
- name: Extract metadata (redos)
|
||||||
id: meta-redos
|
id: meta-redos
|
||||||
uses: docker/metadata-action@v5
|
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6
|
||||||
with:
|
with:
|
||||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
images: |
|
||||||
|
${{ env.NORA }}/${{ env.IMAGE_NAME }}
|
||||||
|
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||||
flavor: suffix=-redos,onlatest=true
|
flavor: suffix=-redos,onlatest=true
|
||||||
tags: |
|
tags: |
|
||||||
type=semver,pattern={{version}}
|
type=semver,pattern={{version}}
|
||||||
@@ -76,7 +92,7 @@ jobs:
|
|||||||
type=raw,value=redos
|
type=raw,value=redos
|
||||||
|
|
||||||
- name: Build and push (redos)
|
- name: Build and push (redos)
|
||||||
uses: docker/build-push-action@v5
|
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
file: Dockerfile.redos
|
file: Dockerfile.redos
|
||||||
@@ -84,12 +100,50 @@ jobs:
|
|||||||
push: true
|
push: true
|
||||||
tags: ${{ steps.meta-redos.outputs.tags }}
|
tags: ${{ steps.meta-redos.outputs.tags }}
|
||||||
labels: ${{ steps.meta-redos.outputs.labels }}
|
labels: ${{ steps.meta-redos.outputs.labels }}
|
||||||
cache-from: type=gha,scope=redos
|
cache-from: type=registry,ref=${{ env.NORA }}/${{ env.IMAGE_NAME }}-cache:redos,ignore-error=true
|
||||||
cache-to: type=gha,mode=max,scope=redos
|
cache-to: type=registry,ref=${{ env.NORA }}/${{ env.IMAGE_NAME }}-cache:redos,mode=max
|
||||||
|
|
||||||
|
# ── Astra Linux SE ───────────────────────────────────────────────────────
|
||||||
|
- name: Extract metadata (astra)
|
||||||
|
id: meta-astra
|
||||||
|
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6
|
||||||
|
with:
|
||||||
|
images: |
|
||||||
|
${{ env.NORA }}/${{ env.IMAGE_NAME }}
|
||||||
|
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||||
|
flavor: suffix=-astra,onlatest=true
|
||||||
|
tags: |
|
||||||
|
type=semver,pattern={{version}}
|
||||||
|
type=semver,pattern={{major}}.{{minor}}
|
||||||
|
type=raw,value=astra
|
||||||
|
|
||||||
|
- name: Build and push (astra)
|
||||||
|
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: Dockerfile.astra
|
||||||
|
platforms: linux/amd64
|
||||||
|
push: true
|
||||||
|
tags: ${{ steps.meta-astra.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta-astra.outputs.labels }}
|
||||||
|
cache-from: type=registry,ref=${{ env.NORA }}/${{ env.IMAGE_NAME }}-cache:astra,ignore-error=true
|
||||||
|
cache-to: type=registry,ref=${{ env.NORA }}/${{ env.IMAGE_NAME }}-cache:astra,mode=max
|
||||||
|
|
||||||
|
# ── Smoke test ──────────────────────────────────────────────────────────
|
||||||
|
- name: Smoke test — verify alpine image starts and responds
|
||||||
|
run: |
|
||||||
|
docker rm -f nora-smoke 2>/dev/null || true
|
||||||
|
docker run --rm -d --name nora-smoke -p 5555:4000 -e NORA_HOST=0.0.0.0 \
|
||||||
|
${{ env.NORA }}/${{ env.IMAGE_NAME }}:latest
|
||||||
|
for i in $(seq 1 10); do
|
||||||
|
curl -sf http://localhost:5555/health && break || sleep 2
|
||||||
|
done
|
||||||
|
curl -sf http://localhost:5555/health
|
||||||
|
docker stop nora-smoke
|
||||||
|
|
||||||
scan:
|
scan:
|
||||||
name: Scan (${{ matrix.name }})
|
name: Scan (${{ matrix.name }})
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, nora]
|
||||||
needs: build
|
needs: build
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
@@ -104,33 +158,26 @@ jobs:
|
|||||||
suffix: ""
|
suffix: ""
|
||||||
- name: redos
|
- name: redos
|
||||||
suffix: "-redos"
|
suffix: "-redos"
|
||||||
|
- name: astra
|
||||||
|
suffix: "-astra"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Log in to Container Registry
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ${{ env.REGISTRY }}
|
|
||||||
username: ${{ github.actor }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Set version tag (strip leading v)
|
- name: Set version tag (strip leading v)
|
||||||
id: ver
|
id: ver
|
||||||
run: echo "tag=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT
|
run: echo "tag=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
# ── CVE scan of the pushed image ────────────────────────────────────────
|
|
||||||
# Images are FROM scratch — no OS packages, only binary CVE scan
|
|
||||||
- name: Trivy — image scan (${{ matrix.name }})
|
- name: Trivy — image scan (${{ matrix.name }})
|
||||||
uses: aquasecurity/trivy-action@master
|
uses: aquasecurity/trivy-action@57a97c7e7821a5776cebc9bb87c984fa69cba8f1 # 0.35.0
|
||||||
with:
|
with:
|
||||||
scan-type: image
|
scan-type: image
|
||||||
image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.ver.outputs.tag }}${{ matrix.suffix }}
|
image-ref: ${{ env.NORA }}/${{ env.IMAGE_NAME }}:${{ steps.ver.outputs.tag }}${{ matrix.suffix }}
|
||||||
format: sarif
|
format: sarif
|
||||||
output: trivy-image-${{ matrix.name }}.sarif
|
output: trivy-image-${{ matrix.name }}.sarif
|
||||||
severity: HIGH,CRITICAL
|
severity: HIGH,CRITICAL
|
||||||
exit-code: 0 # warn only; change to 1 to block on vulnerabilities
|
exit-code: 1
|
||||||
|
|
||||||
- name: Upload Trivy image results to GitHub Security tab
|
- name: Upload Trivy image results to GitHub Security tab
|
||||||
uses: github/codeql-action/upload-sarif@v3
|
uses: github/codeql-action/upload-sarif@a60c4df7a135c7317c1e9ddf9b5a9b07a910dda9 # v4
|
||||||
if: always()
|
if: always()
|
||||||
with:
|
with:
|
||||||
sarif_file: trivy-image-${{ matrix.name }}.sarif
|
sarif_file: trivy-image-${{ matrix.name }}.sarif
|
||||||
@@ -138,53 +185,71 @@ jobs:
|
|||||||
|
|
||||||
release:
|
release:
|
||||||
name: GitHub Release
|
name: GitHub Release
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, nora]
|
||||||
needs: [build, scan]
|
needs: [build, scan]
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
packages: read # to pull image for SBOM generation
|
packages: read
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||||
|
|
||||||
- name: Log in to Container Registry
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ${{ env.REGISTRY }}
|
|
||||||
username: ${{ github.actor }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Set version tag (strip leading v)
|
- name: Set version tag (strip leading v)
|
||||||
id: ver
|
id: ver
|
||||||
run: echo "tag=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT
|
run: echo "tag=${GITHUB_REF_NAME#v}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
# ── SBOM — Software Bill of Materials ───────────────────────────────────
|
- name: Download binary artifact
|
||||||
- name: Generate SBOM (SPDX)
|
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
|
||||||
uses: anchore/sbom-action@v0
|
|
||||||
with:
|
with:
|
||||||
image: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.ver.outputs.tag }}
|
name: nora-binary-${{ github.run_id }}
|
||||||
|
path: ./artifacts
|
||||||
|
|
||||||
|
- name: Prepare binary
|
||||||
|
run: |
|
||||||
|
cp ./artifacts/nora ./nora-linux-amd64
|
||||||
|
chmod +x ./nora-linux-amd64
|
||||||
|
sha256sum ./nora-linux-amd64 > nora-linux-amd64.sha256
|
||||||
|
echo "Binary size: $(du -sh nora-linux-amd64 | cut -f1)"
|
||||||
|
cat nora-linux-amd64.sha256
|
||||||
|
|
||||||
|
- name: Generate SBOM (SPDX)
|
||||||
|
uses: anchore/sbom-action@57aae528053a48a3f6235f2d9461b05fbcb7366d # v0
|
||||||
|
with:
|
||||||
|
image: ${{ env.NORA }}/${{ env.IMAGE_NAME }}:${{ steps.ver.outputs.tag }}
|
||||||
format: spdx-json
|
format: spdx-json
|
||||||
output-file: nora-${{ github.ref_name }}.sbom.spdx.json
|
output-file: nora-${{ github.ref_name }}.sbom.spdx.json
|
||||||
registry-username: ${{ github.actor }}
|
|
||||||
registry-password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Generate SBOM (CycloneDX)
|
- name: Generate SBOM (CycloneDX)
|
||||||
uses: anchore/sbom-action@v0
|
uses: anchore/sbom-action@57aae528053a48a3f6235f2d9461b05fbcb7366d # v0
|
||||||
with:
|
with:
|
||||||
image: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.ver.outputs.tag }}
|
image: ${{ env.NORA }}/${{ env.IMAGE_NAME }}:${{ steps.ver.outputs.tag }}
|
||||||
format: cyclonedx-json
|
format: cyclonedx-json
|
||||||
output-file: nora-${{ github.ref_name }}.sbom.cdx.json
|
output-file: nora-${{ github.ref_name }}.sbom.cdx.json
|
||||||
registry-username: ${{ github.actor }}
|
|
||||||
registry-password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Create Release
|
- name: Create Release
|
||||||
uses: softprops/action-gh-release@v1
|
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2
|
||||||
with:
|
with:
|
||||||
generate_release_notes: true
|
generate_release_notes: true
|
||||||
files: |
|
files: |
|
||||||
|
nora-linux-amd64
|
||||||
|
nora-linux-amd64.sha256
|
||||||
nora-${{ github.ref_name }}.sbom.spdx.json
|
nora-${{ github.ref_name }}.sbom.spdx.json
|
||||||
nora-${{ github.ref_name }}.sbom.cdx.json
|
nora-${{ github.ref_name }}.sbom.cdx.json
|
||||||
body: |
|
body: |
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -fsSL https://getnora.io/install.sh | sh
|
||||||
|
```
|
||||||
|
|
||||||
|
Or download the binary directly:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -LO https://github.com/${{ github.repository }}/releases/download/${{ github.ref_name }}/nora-linux-amd64
|
||||||
|
chmod +x nora-linux-amd64
|
||||||
|
sudo mv nora-linux-amd64 /usr/local/bin/nora
|
||||||
|
```
|
||||||
|
|
||||||
## Docker
|
## Docker
|
||||||
|
|
||||||
**Alpine (standard):**
|
**Alpine (standard):**
|
||||||
@@ -197,6 +262,11 @@ jobs:
|
|||||||
docker pull ghcr.io/${{ github.repository }}:${{ github.ref_name }}-redos
|
docker pull ghcr.io/${{ github.repository }}:${{ github.ref_name }}-redos
|
||||||
```
|
```
|
||||||
|
|
||||||
|
**Astra Linux SE:**
|
||||||
|
```bash
|
||||||
|
docker pull ghcr.io/${{ github.repository }}:${{ github.ref_name }}-astra
|
||||||
|
```
|
||||||
|
|
||||||
## Changelog
|
## Changelog
|
||||||
|
|
||||||
See [CHANGELOG.md](https://github.com/${{ github.repository }}/blob/main/CHANGELOG.md)
|
See [CHANGELOG.md](https://github.com/${{ github.repository }}/blob/main/CHANGELOG.md)
|
||||||
|
|||||||
35
.github/workflows/scorecard.yml
vendored
Normal file
35
.github/workflows/scorecard.yml
vendored
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
name: OpenSSF Scorecard
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
schedule:
|
||||||
|
- cron: '0 6 * * 1' # every Monday at 06:00 UTC
|
||||||
|
|
||||||
|
permissions: read-all
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
name: Scorecard analysis
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
security-events: write
|
||||||
|
id-token: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
|
- name: Run OpenSSF Scorecard
|
||||||
|
uses: ossf/scorecard-action@v2.4.3
|
||||||
|
with:
|
||||||
|
results_file: results.sarif
|
||||||
|
results_format: sarif
|
||||||
|
publish_results: true
|
||||||
|
|
||||||
|
- name: Upload Scorecard results to GitHub Security tab
|
||||||
|
uses: github/codeql-action/upload-sarif@v4
|
||||||
|
with:
|
||||||
|
sarif_file: results.sarif
|
||||||
|
category: scorecard
|
||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -6,6 +6,9 @@ data/
|
|||||||
*.log
|
*.log
|
||||||
internal config
|
internal config
|
||||||
|
|
||||||
|
# Backup files
|
||||||
|
*.bak
|
||||||
|
|
||||||
# Internal files
|
# Internal files
|
||||||
SESSION*.md
|
SESSION*.md
|
||||||
TODO.md
|
TODO.md
|
||||||
|
|||||||
8
.gitleaks.toml
Normal file
8
.gitleaks.toml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
# Gitleaks configuration
|
||||||
|
# https://github.com/gitleaks/gitleaks
|
||||||
|
|
||||||
|
[allowlist]
|
||||||
|
description = "Allowlist for false positives"
|
||||||
|
|
||||||
|
# Documentation examples with placeholder credentials
|
||||||
|
commits = ["92155cf6574d89f93ee68503a7b68455ceaa19af"]
|
||||||
634
CHANGELOG.md
634
CHANGELOG.md
@@ -1,9 +1,363 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
|
## [0.2.31] - 2026-03-16
|
||||||
|
|
||||||
|
### Added / Добавлено
|
||||||
|
- **npm URL rewriting**: Tarball URLs in proxied metadata now rewritten to point to NORA (previously tarballs bypassed NORA and downloaded directly from npmjs.org)
|
||||||
|
- **npm scoped packages**: Full support for `@scope/package` in proxy handler and repository index
|
||||||
|
- **npm publish**: `PUT /npm/{package}` accepts standard npm publish payload with base64-encoded tarballs
|
||||||
|
- **npm metadata TTL**: Configurable cache TTL (`NORA_NPM_METADATA_TTL`, default 300s) with stale-while-revalidate fallback
|
||||||
|
- **Immutable cache**: SHA256 integrity verification on cached npm tarballs — detects tampering on cache hit
|
||||||
|
- **npm URL rewriting**: Tarball URL в проксированных метаданных теперь переписываются на NORA (ранее тарболы шли напрямую из npmjs.org)
|
||||||
|
- **npm scoped packages**: Полная поддержка `@scope/package` в прокси-хендлере и индексе репозитория
|
||||||
|
- **npm publish**: `PUT /npm/{package}` принимает стандартный npm publish payload с base64-тарболами
|
||||||
|
- **npm metadata TTL**: Настраиваемый TTL кеша (`NORA_NPM_METADATA_TTL`, default 300s) с stale-while-revalidate
|
||||||
|
- **Immutable cache**: SHA256 проверка целостности npm-тарболов — обнаружение подмены при отдаче из кеша
|
||||||
|
|
||||||
|
### Security / Безопасность
|
||||||
|
- **Path traversal protection**: Attachment filename validation in npm publish (rejects `../`, `/`, `\`)
|
||||||
|
- **Package name mismatch**: npm publish rejects payloads where URL path doesn't match `name` field (anti-spoofing)
|
||||||
|
- **Version immutability**: npm publish returns 409 Conflict on duplicate version
|
||||||
|
- **Защита от path traversal**: Валидация имён файлов в npm publish (отклоняет `../`, `/`, `\`)
|
||||||
|
- **Проверка имени пакета**: npm publish отклоняет payload если имя в URL не совпадает с полем `name` (anti-spoofing)
|
||||||
|
- **Иммутабельность версий**: npm publish возвращает 409 Conflict при попытке перезаписать версию
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **npm proxy_auth**: `proxy_auth` field was configured but not wired into `fetch_from_proxy` — now sends Basic Auth header to upstream
|
||||||
|
- **npm proxy_auth**: Поле `proxy_auth` было в конфиге, но не передавалось в `fetch_from_proxy` — теперь отправляет Basic Auth в upstream
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
All notable changes to NORA will be documented in this file.
|
All notable changes to NORA will be documented in this file.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
|
## [0.2.29] - 2026-03-15
|
||||||
|
|
||||||
|
### Added / Добавлено
|
||||||
|
- **Upstream Authentication**: All registry proxies now support Basic Auth credentials for private upstream registries
|
||||||
|
- **Аутентификация upstream**: Все прокси реестров теперь поддерживают Basic Auth для приватных upstream-реестров
|
||||||
|
- Docker: `NORA_DOCKER_UPSTREAMS="https://registry.corp.com|user:pass"`
|
||||||
|
- Maven: `NORA_MAVEN_PROXIES="https://nexus.corp.com/maven2|user:pass"`
|
||||||
|
- npm: `NORA_NPM_PROXY_AUTH="user:pass"`
|
||||||
|
- PyPI: `NORA_PYPI_PROXY_AUTH="user:pass"`
|
||||||
|
- **Plaintext credential warning**: NORA logs a warning at startup if credentials are stored in config.toml instead of env vars
|
||||||
|
- **Предупреждение о plaintext credentials**: NORA логирует предупреждение при старте, если credentials хранятся в config.toml вместо переменных окружения
|
||||||
|
|
||||||
|
### Changed / Изменено
|
||||||
|
- Extracted `basic_auth_header()` helper for consistent auth across all protocols
|
||||||
|
- Вынесен хелпер `basic_auth_header()` для единообразной авторизации всех протоколов
|
||||||
|
|
||||||
|
### Removed / Удалено
|
||||||
|
- Removed unused `DockerAuth::fetch_with_auth()` method (dead code cleanup)
|
||||||
|
- Удалён неиспользуемый метод `DockerAuth::fetch_with_auth()` (очистка мёртвого кода)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
|
## [0.2.28] - 2026-03-13
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **docker-compose.yml**: Fixed image reference from `getnora/nora:latest` to `ghcr.io/getnora-io/nora:latest`
|
||||||
|
- **docker-compose.yml**: Исправлена ссылка на образ с `getnora/nora:latest` на `ghcr.io/getnora-io/nora:latest`
|
||||||
|
|
||||||
|
### Documentation / Документация
|
||||||
|
- **Authentication Guide**: Added complete auth setup guide in README — htpasswd, API tokens, RBAC roles, curl examples
|
||||||
|
- **Руководство по аутентификации**: Добавлено полное руководство по настройке auth в README — htpasswd, API-токены, RBAC-роли, примеры curl
|
||||||
|
- **FSTEC builds**: Documented `Dockerfile.astra` and `Dockerfile.redos` purpose in README
|
||||||
|
- **Сборки ФСТЭК**: Документировано назначение `Dockerfile.astra` и `Dockerfile.redos` в README
|
||||||
|
- **TLS / HTTPS**: Added reverse proxy setup guide (Caddy, Nginx) and `insecure-registries` Docker config for internal deployments
|
||||||
|
- **TLS / HTTPS**: Добавлено руководство по настройке reverse proxy (Caddy, Nginx) и конфигурация `insecure-registries` Docker для внутренних инсталляций
|
||||||
|
|
||||||
|
### Removed / Удалено
|
||||||
|
- Removed stale `CHANGELOG.md.bak` from repository
|
||||||
|
- Удалён устаревший `CHANGELOG.md.bak` из репозитория
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
|
## [0.2.27] - 2026-03-03
|
||||||
|
|
||||||
|
### Added / Добавлено
|
||||||
|
- **Configurable body limit**: `NORA_BODY_LIMIT_MB` env var (default: `2048` = 2GB) — replaces hardcoded 100MB limit that caused `413 Payload Too Large` on large Docker image push
|
||||||
|
- **Настраиваемый лимит тела запроса**: переменная `NORA_BODY_LIMIT_MB` (по умолчанию: `2048` = 2GB) — заменяет захардкоженный лимит 100MB, вызывавший `413 Payload Too Large` при push больших Docker-образов
|
||||||
|
- **Docker Delete API**: `DELETE /v2/{name}/manifests/{reference}` and `DELETE /v2/{name}/blobs/{digest}` per Docker Registry V2 spec (returns 202 Accepted)
|
||||||
|
- **Docker Delete API**: `DELETE /v2/{name}/manifests/{reference}` и `DELETE /v2/{name}/blobs/{digest}` по спецификации Docker Registry V2 (возвращает 202 Accepted)
|
||||||
|
- Namespace-qualified DELETE variants (`/v2/{ns}/{name}/...`)
|
||||||
|
- Audit log integration for delete operations
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- Docker push of images >100MB no longer fails with 413 error
|
||||||
|
- Push Docker-образов >100MB больше не падает с ошибкой 413
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
|
## [0.2.26] - 2026-03-03
|
||||||
|
|
||||||
|
### Added / Добавлено
|
||||||
|
- **Helm OCI support**: `helm push` / `helm pull` now works out of the box via OCI protocol
|
||||||
|
- **Поддержка Helm OCI**: `helm push` / `helm pull` теперь работают из коробки через OCI протокол
|
||||||
|
- **RBAC**: Token-based role system with three roles — `read`, `write`, `admin` (default: `read`)
|
||||||
|
- **RBAC**: Ролевая система на основе токенов — `read`, `write`, `admin` (по умолчанию: `read`)
|
||||||
|
- **Audit log**: Persistent append-only JSONL audit trail for all registry operations (`{storage}/audit.jsonl`)
|
||||||
|
- **Аудит**: Персистентный append-only JSONL лог всех операций реестра (`{storage}/audit.jsonl`)
|
||||||
|
- **GC command**: `nora gc --dry-run` — garbage collection for orphaned blobs (mark-and-sweep)
|
||||||
|
- **Команда GC**: `nora gc --dry-run` — сборка мусора для осиротевших блобов (mark-and-sweep)
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Helm OCI pull**: Fixed OCI manifest media type detection — manifests with non-Docker `config.mediaType` now correctly return `application/vnd.oci.image.manifest.v1+json`
|
||||||
|
- **Helm OCI pull**: Исправлено определение media type OCI манифестов — манифесты с не-Docker `config.mediaType` теперь корректно возвращают `application/vnd.oci.image.manifest.v1+json`
|
||||||
|
- **Docker-Content-Digest**: Added missing header in blob upload response (required by Helm OCI client)
|
||||||
|
- **Docker-Content-Digest**: Добавлен отсутствующий заголовок в ответе на загрузку blob (требуется клиентом Helm OCI)
|
||||||
|
|
||||||
|
### Security / Безопасность
|
||||||
|
- Read-only tokens (`role: read`) are now blocked from PUT/POST/DELETE/PATCH operations with HTTP 403
|
||||||
|
- Токены только для чтения (`role: read`) теперь блокируются при PUT/POST/DELETE/PATCH с HTTP 403
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
|
## [0.2.25] - 2026-03-03
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Rate limiter fix**: Added `NORA_RATE_LIMIT_ENABLED` env var (default: `true`) to disable rate limiting on internal deployments
|
||||||
|
- **Исправление rate limiter**: Добавлена переменная `NORA_RATE_LIMIT_ENABLED` (по умолчанию: `true`) для отключения rate limiting на внутренних инсталляциях
|
||||||
|
- **SmartIpKeyExtractor**: Upload and general routes now use `SmartIpKeyExtractor` (reads `X-Forwarded-For`) instead of `PeerIpKeyExtractor` — fixes 429 errors behind reverse proxy / Docker bridge
|
||||||
|
- **SmartIpKeyExtractor**: Маршруты upload и general теперь используют `SmartIpKeyExtractor` (читает `X-Forwarded-For`) вместо `PeerIpKeyExtractor` — устраняет ошибки 429 за reverse proxy / Docker bridge
|
||||||
|
|
||||||
|
### Dependencies / Зависимости
|
||||||
|
- `clap` 4.5.56 → 4.5.60
|
||||||
|
- `uuid` 1.20.0 → 1.21.0
|
||||||
|
- `tempfile` 3.24.0 → 3.26.0
|
||||||
|
- `bcrypt` 0.17.1 → 0.18.0
|
||||||
|
- `indicatif` 0.17.11 → 0.18.4
|
||||||
|
|
||||||
|
### CI/CD
|
||||||
|
- `actions/checkout` 4 → 6
|
||||||
|
- `actions/upload-artifact` 4 → 7
|
||||||
|
- `softprops/action-gh-release` 1 → 2
|
||||||
|
- `aquasecurity/trivy-action` 0.30.0 → 0.34.2
|
||||||
|
- `docker/build-push-action` 5 → 6
|
||||||
|
- Move scan/release to self-hosted runner with NORA cache
|
||||||
|
- Сканирование/релиз перенесены на self-hosted runner с кэшем через NORA
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
|
## [0.2.24] - 2026-02-24
|
||||||
|
|
||||||
|
### Added / Добавлено
|
||||||
|
- `install.sh` installer script live at <https://getnora.io/install.sh> — `curl -fsSL https://getnora.io/install.sh | sh`
|
||||||
|
- Скрипт установки `install.sh` доступен на <https://getnora.io/install.sh>
|
||||||
|
|
||||||
|
### CI/CD
|
||||||
|
- Restore Astra Linux SE Docker image build, Trivy scan, and release artifact (`-astra` tag)
|
||||||
|
- Восстановлена сборка Docker-образа для Astra Linux SE, сканирование Trivy и артефакт релиза (тег `-astra`)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
|
## [0.2.23] - 2026-02-24
|
||||||
|
|
||||||
|
### Added / Добавлено
|
||||||
|
- Binary (`nora`) + SHA-256 checksum attached to every GitHub Release
|
||||||
|
- Бинарник (`nora`) и SHA-256 контрольная сумма прикреплены к каждому релизу GitHub
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- Security: bump `prometheus` 0.13 → 0.14 (CVE-2025-53605) and `bytes` 1.11.0 → 1.11.1 (CVE-2026-25541)
|
||||||
|
- Безопасность: обновлены `prometheus` 0.13 → 0.14 (CVE-2025-53605) и `bytes` 1.11.0 → 1.11.1 (CVE-2026-25541)
|
||||||
|
|
||||||
|
### CI/CD
|
||||||
|
- Add Dependabot for automated dependency updates / Добавлен Dependabot для автоматического обновления зависимостей
|
||||||
|
- Pin `aquasecurity/trivy-action` to `0.30.0`, bump to `0.34.1`; scan gate blocks release on HIGH/CRITICAL CVE
|
||||||
|
- Закреплён `trivy-action@0.30.0`, обновлён до `0.34.1`; сканирование блокирует релиз при HIGH/CRITICAL CVE
|
||||||
|
- Upgrade `codeql-action` v3 → v4 / Обновлён `codeql-action` v3 → v4
|
||||||
|
- Fix `deny.toml` deprecated keys (`copyleft`, `unlicensed` removed in `cargo-deny`) / Исправлены устаревшие ключи в `deny.toml`
|
||||||
|
- Fix binary path in Docker image (`/usr/local/bin/nora`) / Исправлен путь бинарника в Docker-образе
|
||||||
|
- Pin build job to `nora` runner label / Джоб сборки закреплён за runner'ом с меткой `nora`
|
||||||
|
- Allow `CDLA-Permissive-2.0` license (`webpki-roots`) / Разрешена лицензия `CDLA-Permissive-2.0`
|
||||||
|
- Ignore `RUSTSEC-2025-0119` (unmaintained transitive dep `number_prefix` via `indicatif`)
|
||||||
|
|
||||||
|
### Dependencies / Зависимости
|
||||||
|
- `chrono` 0.4.43 → 0.4.44
|
||||||
|
- `quick-xml` 0.31.0 → 0.39.2
|
||||||
|
- `toml` 0.8.23 → 1.0.3+spec-1.1.0
|
||||||
|
- `flate2` 1.1.8 → 1.1.9
|
||||||
|
- `softprops/action-gh-release` 1 → 2
|
||||||
|
- `actions/checkout` 4 → 6
|
||||||
|
- `docker/build-push-action` 5 → 6
|
||||||
|
|
||||||
|
### Documentation / Документация
|
||||||
|
- Replace text title with SVG logo; `O` styled in blue-600 / Заголовок заменён SVG-логотипом; буква `O` стилизована в blue-600
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
|
## [0.2.22] - 2026-02-24
|
||||||
|
|
||||||
|
### Changed / Изменено
|
||||||
|
- First stable release with Docker images published to container registry
|
||||||
|
- Первый стабильный релиз с Docker-образами, опубликованными в container registry
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
|
## [0.2.21] - 2026-02-24
|
||||||
|
|
||||||
|
### CI/CD
|
||||||
|
- Consolidate all Docker builds into a single job to fix runner network issues / Все Docker-сборки объединены в один job для устранения сетевых проблем runner'а
|
||||||
|
- Build musl static binary for maximum portability / Сборка musl-бинарника для максимальной переносимости
|
||||||
|
- Add security scanning (Trivy) + SBOM generation to release pipeline / Добавлено сканирование безопасности (Trivy) и генерация SBOM в pipeline релиза
|
||||||
|
- Add Cargo cache to speed up builds / Добавлен кэш Cargo для ускорения сборок
|
||||||
|
- Replace `gitleaks` GitHub Action with CLI (no license requirement) / `gitleaks` Action заменён CLI-вызовом (лицензия не требуется)
|
||||||
|
- Use GitHub-runner's own Rust toolchain (avoid path conflicts) / Используется Rust toolchain самого GitHub-runner'а
|
||||||
|
- Use shared runner filesystem instead of artifact API (avoids network upload latency) / Общая файловая система runner'а вместо artifact API
|
||||||
|
- Remove Astra Linux build temporarily / Сборка для Astra Linux временно удалена
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
|
## [0.2.20] - 2026-02-23
|
||||||
|
|
||||||
|
### Added / Добавлено
|
||||||
|
- Parallel CI builds for Astra Linux and RedOS / Параллельная сборка в CI для Astra Linux и RedOS
|
||||||
|
|
||||||
|
### Changed / Изменено
|
||||||
|
- Use `FROM scratch` base image for Astra Linux and RedOS Docker builds / Базовый образ `FROM scratch` для Docker-сборок Astra Linux и RedOS
|
||||||
|
- Shared `reqwest::Client` across all registry handlers / Общий `reqwest::Client` для всех registry-обработчиков
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- Auth: replace `starts_with` with explicit `matches!` for token path checks / Аутентификация: `starts_with` заменён явной проверкой `matches!` для путей с токенами
|
||||||
|
- Remove unnecessary QEMU step for amd64-only builds / Удалён лишний шаг QEMU для amd64-сборок
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
|
## [0.2.19] - 2026-01-31
|
||||||
|
|
||||||
|
### Added / Добавлено
|
||||||
|
- Pre-commit hook to prevent accidental commits of sensitive files / Pre-commit хук для защиты от случайного коммита чувствительных файлов
|
||||||
|
- README badges: build status, version, license / Бейджи в README: статус сборки, версия, лицензия
|
||||||
|
|
||||||
|
### Performance / Производительность
|
||||||
|
- In-memory repository index with pagination for faster dashboard load / Индекс репозитория в памяти с пагинацией для ускорения загрузки дашборда
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- Use `div_ceil` instead of manual ceiling division / Использован `div_ceil` вместо ручной реализации деления с округлением вверх
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.18] - 2026-01-31
|
## [0.2.18] - 2026-01-31
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
@@ -11,6 +365,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.17] - 2026-01-31
|
## [0.2.17] - 2026-01-31
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
@@ -19,6 +383,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.16] - 2026-01-31
|
## [0.2.16] - 2026-01-31
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
@@ -27,6 +401,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.15] - 2026-01-31
|
## [0.2.15] - 2026-01-31
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
@@ -34,6 +418,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.14] - 2026-01-31
|
## [0.2.14] - 2026-01-31
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
@@ -42,6 +436,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.13] - 2026-01-31
|
## [0.2.13] - 2026-01-31
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
@@ -51,6 +455,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.12] - 2026-01-30
|
## [0.2.12] - 2026-01-30
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
@@ -75,6 +489,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.11] - 2026-01-26
|
## [0.2.11] - 2026-01-26
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
@@ -84,6 +508,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.10] - 2026-01-26
|
## [0.2.10] - 2026-01-26
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
@@ -91,6 +525,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.9] - 2026-01-26
|
## [0.2.9] - 2026-01-26
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
@@ -98,6 +542,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.8] - 2026-01-26
|
## [0.2.8] - 2026-01-26
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
@@ -105,6 +559,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.7] - 2026-01-26
|
## [0.2.7] - 2026-01-26
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
@@ -112,6 +576,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.6] - 2026-01-26
|
## [0.2.6] - 2026-01-26
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
@@ -126,6 +600,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.5] - 2026-01-26
|
## [0.2.5] - 2026-01-26
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
@@ -133,6 +617,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.4] - 2026-01-26
|
## [0.2.4] - 2026-01-26
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
@@ -141,6 +635,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.0] - 2026-01-25
|
## [0.2.0] - 2026-01-25
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
@@ -211,6 +715,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.1.0] - 2026-01-24
|
## [0.1.0] - 2026-01-24
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
@@ -230,12 +744,32 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
# Журнал изменений (RU)
|
# Журнал изменений (RU)
|
||||||
|
|
||||||
Все значимые изменения NORA документируются в этом файле.
|
Все значимые изменения NORA документируются в этом файле.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.12] - 2026-01-30
|
## [0.2.12] - 2026-01-30
|
||||||
|
|
||||||
### Добавлено
|
### Добавлено
|
||||||
@@ -260,6 +794,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.11] - 2026-01-26
|
## [0.2.11] - 2026-01-26
|
||||||
|
|
||||||
### Добавлено
|
### Добавлено
|
||||||
@@ -269,6 +813,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.10] - 2026-01-26
|
## [0.2.10] - 2026-01-26
|
||||||
|
|
||||||
### Изменено
|
### Изменено
|
||||||
@@ -276,6 +830,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.9] - 2026-01-26
|
## [0.2.9] - 2026-01-26
|
||||||
|
|
||||||
### Изменено
|
### Изменено
|
||||||
@@ -283,6 +847,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.8] - 2026-01-26
|
## [0.2.8] - 2026-01-26
|
||||||
|
|
||||||
### Добавлено
|
### Добавлено
|
||||||
@@ -290,6 +864,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.7] - 2026-01-26
|
## [0.2.7] - 2026-01-26
|
||||||
|
|
||||||
### Добавлено
|
### Добавлено
|
||||||
@@ -297,6 +881,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.6] - 2026-01-26
|
## [0.2.6] - 2026-01-26
|
||||||
|
|
||||||
### Добавлено
|
### Добавлено
|
||||||
@@ -311,6 +905,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.5] - 2026-01-26
|
## [0.2.5] - 2026-01-26
|
||||||
|
|
||||||
### Исправлено
|
### Исправлено
|
||||||
@@ -318,6 +922,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.4] - 2026-01-26
|
## [0.2.4] - 2026-01-26
|
||||||
|
|
||||||
### Исправлено
|
### Исправлено
|
||||||
@@ -326,6 +940,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.2.0] - 2026-01-25
|
## [0.2.0] - 2026-01-25
|
||||||
|
|
||||||
### Добавлено
|
### Добавлено
|
||||||
@@ -396,6 +1020,16 @@ All notable changes to NORA will be documented in this file.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.2.30] - 2026-03-16
|
||||||
|
|
||||||
|
### Fixed / Исправлено
|
||||||
|
- **Dashboard**: Docker upstream now shown in mount points table (was null)
|
||||||
|
- **Dashboard**: Docker namespaced repositories (library/alpine, grafana/grafana) now visible in UI
|
||||||
|
- **Dashboard**: npm proxy-cached packages now appear in package list
|
||||||
|
- **Dashboard**: Отображение Docker upstream в таблице точек монтирования (было null)
|
||||||
|
- **Dashboard**: Namespaced Docker-репозитории (library/alpine, grafana/grafana) теперь видны в UI
|
||||||
|
- **Dashboard**: npm-пакеты из прокси-кеша теперь отображаются в списке пакетов
|
||||||
|
|
||||||
## [0.1.0] - 2026-01-24
|
## [0.1.0] - 2026-01-24
|
||||||
|
|
||||||
### Добавлено
|
### Добавлено
|
||||||
|
|||||||
447
Cargo.lock
generated
447
Cargo.lock
generated
@@ -34,9 +34,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "anstream"
|
name = "anstream"
|
||||||
version = "0.6.21"
|
version = "1.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a"
|
checksum = "824a212faf96e9acacdbd09febd34438f8f711fb84e09a8916013cd7815ca28d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anstyle",
|
"anstyle",
|
||||||
"anstyle-parse",
|
"anstyle-parse",
|
||||||
@@ -55,9 +55,9 @@ checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "anstyle-parse"
|
name = "anstyle-parse"
|
||||||
version = "0.2.7"
|
version = "1.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
|
checksum = "52ce7f38b242319f7cabaa6813055467063ecdc9d355bbb4ce0c68908cd8130e"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"utf8parse",
|
"utf8parse",
|
||||||
]
|
]
|
||||||
@@ -82,6 +82,12 @@ dependencies = [
|
|||||||
"windows-sys 0.61.2",
|
"windows-sys 0.61.2",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "anyhow"
|
||||||
|
version = "1.0.102"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arbitrary"
|
name = "arbitrary"
|
||||||
version = "1.4.2"
|
version = "1.4.2"
|
||||||
@@ -184,13 +190,13 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bcrypt"
|
name = "bcrypt"
|
||||||
version = "0.17.1"
|
version = "0.19.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "abaf6da45c74385272ddf00e1ac074c7d8a6c1a1dda376902bd6a427522a8b2c"
|
checksum = "523ab528ce3a7ada6597f8ccf5bd8d85ebe26d5edf311cad4d1d3cfb2d357ac6"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"base64",
|
"base64",
|
||||||
"blowfish",
|
"blowfish",
|
||||||
"getrandom 0.3.4",
|
"getrandom 0.4.1",
|
||||||
"subtle",
|
"subtle",
|
||||||
"zeroize",
|
"zeroize",
|
||||||
]
|
]
|
||||||
@@ -234,9 +240,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bytes"
|
name = "bytes"
|
||||||
version = "1.11.0"
|
version = "1.11.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"
|
checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cc"
|
name = "cc"
|
||||||
@@ -245,6 +251,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "47b26a0954ae34af09b50f0de26458fa95369a0d478d8236d3f93082b219bd29"
|
checksum = "47b26a0954ae34af09b50f0de26458fa95369a0d478d8236d3f93082b219bd29"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"find-msvc-tools",
|
"find-msvc-tools",
|
||||||
|
"jobserver",
|
||||||
|
"libc",
|
||||||
"shlex",
|
"shlex",
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -262,9 +270,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "chrono"
|
name = "chrono"
|
||||||
version = "0.4.43"
|
version = "0.4.44"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118"
|
checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"iana-time-zone",
|
"iana-time-zone",
|
||||||
"js-sys",
|
"js-sys",
|
||||||
@@ -286,9 +294,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap"
|
name = "clap"
|
||||||
version = "4.5.56"
|
version = "4.6.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a75ca66430e33a14957acc24c5077b503e7d374151b2b4b3a10c83b4ceb4be0e"
|
checksum = "b193af5b67834b676abd72466a96c1024e6a6ad978a1f484bd90b85c94041351"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"clap_builder",
|
"clap_builder",
|
||||||
"clap_derive",
|
"clap_derive",
|
||||||
@@ -296,9 +304,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap_builder"
|
name = "clap_builder"
|
||||||
version = "4.5.56"
|
version = "4.6.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "793207c7fa6300a0608d1080b858e5fdbe713cdc1c8db9fb17777d8a13e63df0"
|
checksum = "714a53001bf66416adb0e2ef5ac857140e7dc3a0c48fb28b2f10762fc4b5069f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anstream",
|
"anstream",
|
||||||
"anstyle",
|
"anstyle",
|
||||||
@@ -308,9 +316,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap_derive"
|
name = "clap_derive"
|
||||||
version = "4.5.55"
|
version = "4.6.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5"
|
checksum = "1110bd8a634a1ab8cb04345d8d878267d57c3cf1b38d91b71af6686408bbca6a"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"heck",
|
"heck",
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
@@ -320,9 +328,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap_lex"
|
name = "clap_lex"
|
||||||
version = "0.7.7"
|
version = "1.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32"
|
checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "colorchoice"
|
name = "colorchoice"
|
||||||
@@ -332,15 +340,15 @@ checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "console"
|
name = "console"
|
||||||
version = "0.15.11"
|
version = "0.16.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8"
|
checksum = "03e45a4a8926227e4197636ba97a9fc9b00477e9f4bd711395687c5f0734bec4"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"encode_unicode",
|
"encode_unicode",
|
||||||
"libc",
|
"libc",
|
||||||
"once_cell",
|
"once_cell",
|
||||||
"unicode-width",
|
"unicode-width",
|
||||||
"windows-sys 0.59.0",
|
"windows-sys 0.61.2",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -495,9 +503,9 @@ checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "flate2"
|
name = "flate2"
|
||||||
version = "1.1.8"
|
version = "1.1.9"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b375d6465b98090a5f25b1c7703f3859783755aa9a80433b36e0379a3ec2f369"
|
checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"crc32fast",
|
"crc32fast",
|
||||||
"miniz_oxide",
|
"miniz_oxide",
|
||||||
@@ -510,6 +518,12 @@ version = "1.0.7"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
|
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "foldhash"
|
||||||
|
version = "0.1.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "foldhash"
|
name = "foldhash"
|
||||||
version = "0.2.0"
|
version = "0.2.0"
|
||||||
@@ -667,6 +681,19 @@ dependencies = [
|
|||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "getrandom"
|
||||||
|
version = "0.4.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"libc",
|
||||||
|
"r-efi",
|
||||||
|
"wasip2",
|
||||||
|
"wasip3",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "governor"
|
name = "governor"
|
||||||
version = "0.10.4"
|
version = "0.10.4"
|
||||||
@@ -715,6 +742,15 @@ version = "0.14.5"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
|
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "hashbrown"
|
||||||
|
version = "0.15.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
|
||||||
|
dependencies = [
|
||||||
|
"foldhash 0.1.5",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "hashbrown"
|
name = "hashbrown"
|
||||||
version = "0.16.1"
|
version = "0.16.1"
|
||||||
@@ -723,7 +759,7 @@ checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"allocator-api2",
|
"allocator-api2",
|
||||||
"equivalent",
|
"equivalent",
|
||||||
"foldhash",
|
"foldhash 0.2.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -980,6 +1016,12 @@ dependencies = [
|
|||||||
"zerovec",
|
"zerovec",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "id-arena"
|
||||||
|
version = "2.3.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "idna"
|
name = "idna"
|
||||||
version = "1.1.0"
|
version = "1.1.0"
|
||||||
@@ -1015,14 +1057,14 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "indicatif"
|
name = "indicatif"
|
||||||
version = "0.17.11"
|
version = "0.18.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235"
|
checksum = "25470f23803092da7d239834776d653104d551bc4d7eacaf31e6837854b8e9eb"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"console",
|
"console",
|
||||||
"number_prefix",
|
|
||||||
"portable-atomic",
|
"portable-atomic",
|
||||||
"unicode-width",
|
"unicode-width",
|
||||||
|
"unit-prefix",
|
||||||
"web-time",
|
"web-time",
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -1063,6 +1105,16 @@ version = "1.0.17"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2"
|
checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "jobserver"
|
||||||
|
version = "0.1.34"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
|
||||||
|
dependencies = [
|
||||||
|
"getrandom 0.3.4",
|
||||||
|
"libc",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "js-sys"
|
name = "js-sys"
|
||||||
version = "0.3.85"
|
version = "0.3.85"
|
||||||
@@ -1080,10 +1132,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libc"
|
name = "leb128fmt"
|
||||||
version = "0.2.180"
|
version = "0.1.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc"
|
checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "libc"
|
||||||
|
version = "0.2.182"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "libfuzzer-sys"
|
||||||
|
version = "0.4.12"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "f12a681b7dd8ce12bff52488013ba614b869148d54dd79836ab85aafdd53f08d"
|
||||||
|
dependencies = [
|
||||||
|
"arbitrary",
|
||||||
|
"cc",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libredox"
|
name = "libredox"
|
||||||
@@ -1098,9 +1166,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "linux-raw-sys"
|
name = "linux-raw-sys"
|
||||||
version = "0.11.0"
|
version = "0.12.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
|
checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "litemap"
|
name = "litemap"
|
||||||
@@ -1201,7 +1269,7 @@ checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "nora-cli"
|
name = "nora-cli"
|
||||||
version = "0.2.20"
|
version = "0.2.31"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"clap",
|
"clap",
|
||||||
"flate2",
|
"flate2",
|
||||||
@@ -1213,9 +1281,17 @@ dependencies = [
|
|||||||
"tokio",
|
"tokio",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nora-fuzz"
|
||||||
|
version = "0.0.0"
|
||||||
|
dependencies = [
|
||||||
|
"libfuzzer-sys",
|
||||||
|
"nora-registry",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "nora-registry"
|
name = "nora-registry"
|
||||||
version = "0.2.20"
|
version = "0.2.31"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"async-trait",
|
"async-trait",
|
||||||
"axum",
|
"axum",
|
||||||
@@ -1253,7 +1329,7 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "nora-storage"
|
name = "nora-storage"
|
||||||
version = "0.2.20"
|
version = "0.2.31"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"axum",
|
"axum",
|
||||||
"base64",
|
"base64",
|
||||||
@@ -1298,12 +1374,6 @@ dependencies = [
|
|||||||
"libc",
|
"libc",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "number_prefix"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "once_cell"
|
name = "once_cell"
|
||||||
version = "1.21.3"
|
version = "1.21.3"
|
||||||
@@ -1401,6 +1471,16 @@ dependencies = [
|
|||||||
"zerocopy",
|
"zerocopy",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "prettyplease"
|
||||||
|
version = "0.2.37"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"syn",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "proc-macro2"
|
name = "proc-macro2"
|
||||||
version = "1.0.106"
|
version = "1.0.106"
|
||||||
@@ -1412,9 +1492,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "prometheus"
|
name = "prometheus"
|
||||||
version = "0.13.4"
|
version = "0.14.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3d33c28a30771f7f96db69893f78b857f7450d7e0237e9c8fc6427a81bae7ed1"
|
checksum = "3ca5326d8d0b950a9acd87e6a3f94745394f62e4dae1b1ee22b2bc0c394af43a"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
"fnv",
|
"fnv",
|
||||||
@@ -1422,14 +1502,28 @@ dependencies = [
|
|||||||
"memchr",
|
"memchr",
|
||||||
"parking_lot",
|
"parking_lot",
|
||||||
"protobuf",
|
"protobuf",
|
||||||
"thiserror 1.0.69",
|
"thiserror 2.0.18",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "protobuf"
|
name = "protobuf"
|
||||||
version = "2.28.0"
|
version = "3.7.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94"
|
checksum = "d65a1d4ddae7d8b5de68153b48f6aa3bba8cb002b243dbdbc55a5afbc98f99f4"
|
||||||
|
dependencies = [
|
||||||
|
"once_cell",
|
||||||
|
"protobuf-support",
|
||||||
|
"thiserror 1.0.69",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "protobuf-support"
|
||||||
|
version = "3.7.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3e36c2f31e0a47f9280fb347ef5e461ffcd2c52dd520d8e216b52f93b0b0d7d6"
|
||||||
|
dependencies = [
|
||||||
|
"thiserror 1.0.69",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "quanta"
|
name = "quanta"
|
||||||
@@ -1448,9 +1542,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "quick-xml"
|
name = "quick-xml"
|
||||||
version = "0.31.0"
|
version = "0.39.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33"
|
checksum = "958f21e8e7ceb5a1aa7fa87fab28e7c75976e0bfe7e23ff069e0a260f894067d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"memchr",
|
"memchr",
|
||||||
"serde",
|
"serde",
|
||||||
@@ -1478,9 +1572,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "quinn-proto"
|
name = "quinn-proto"
|
||||||
version = "0.11.13"
|
version = "0.11.14"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31"
|
checksum = "434b42fec591c96ef50e21e886936e66d3cc3f737104fdb9b737c40ffb94c098"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bytes",
|
"bytes",
|
||||||
"getrandom 0.3.4",
|
"getrandom 0.3.4",
|
||||||
@@ -1513,9 +1607,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "quote"
|
name = "quote"
|
||||||
version = "1.0.44"
|
version = "1.0.45"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4"
|
checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
]
|
]
|
||||||
@@ -1707,9 +1801,9 @@ checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rustix"
|
name = "rustix"
|
||||||
version = "1.1.3"
|
version = "1.1.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34"
|
checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags",
|
||||||
"errno",
|
"errno",
|
||||||
@@ -1780,6 +1874,12 @@ version = "1.2.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
|
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "semver"
|
||||||
|
version = "1.0.27"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde"
|
name = "serde"
|
||||||
version = "1.0.228"
|
version = "1.0.228"
|
||||||
@@ -1836,11 +1936,11 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_spanned"
|
name = "serde_spanned"
|
||||||
version = "0.6.9"
|
version = "1.0.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3"
|
checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"serde",
|
"serde_core",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -1948,9 +2048,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn"
|
name = "syn"
|
||||||
version = "2.0.114"
|
version = "2.0.117"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a"
|
checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
@@ -1990,12 +2090,12 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tempfile"
|
name = "tempfile"
|
||||||
version = "3.24.0"
|
version = "3.27.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c"
|
checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"fastrand",
|
"fastrand",
|
||||||
"getrandom 0.3.4",
|
"getrandom 0.4.1",
|
||||||
"once_cell",
|
"once_cell",
|
||||||
"rustix",
|
"rustix",
|
||||||
"windows-sys 0.61.2",
|
"windows-sys 0.61.2",
|
||||||
@@ -2077,9 +2177,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tokio"
|
name = "tokio"
|
||||||
version = "1.49.0"
|
version = "1.50.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86"
|
checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bytes",
|
"bytes",
|
||||||
"libc",
|
"libc",
|
||||||
@@ -2139,44 +2239,42 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "toml"
|
name = "toml"
|
||||||
version = "0.8.23"
|
version = "1.0.6+spec-1.1.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
|
checksum = "399b1124a3c9e16766831c6bba21e50192572cdd98706ea114f9502509686ffc"
|
||||||
dependencies = [
|
|
||||||
"serde",
|
|
||||||
"serde_spanned",
|
|
||||||
"toml_datetime",
|
|
||||||
"toml_edit",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "toml_datetime"
|
|
||||||
version = "0.6.11"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c"
|
|
||||||
dependencies = [
|
|
||||||
"serde",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "toml_edit"
|
|
||||||
version = "0.22.27"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"indexmap",
|
"indexmap",
|
||||||
"serde",
|
"serde_core",
|
||||||
"serde_spanned",
|
"serde_spanned",
|
||||||
"toml_datetime",
|
"toml_datetime",
|
||||||
"toml_write",
|
"toml_parser",
|
||||||
|
"toml_writer",
|
||||||
"winnow",
|
"winnow",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "toml_write"
|
name = "toml_datetime"
|
||||||
version = "0.1.2"
|
version = "1.0.0+spec-1.1.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
|
checksum = "32c2555c699578a4f59f0cc68e5116c8d7cabbd45e1409b989d4be085b53f13e"
|
||||||
|
dependencies = [
|
||||||
|
"serde_core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "toml_parser"
|
||||||
|
version = "1.0.9+spec-1.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4"
|
||||||
|
dependencies = [
|
||||||
|
"winnow",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "toml_writer"
|
||||||
|
version = "1.0.6+spec-1.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tonic"
|
name = "tonic"
|
||||||
@@ -2329,9 +2427,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tracing-subscriber"
|
name = "tracing-subscriber"
|
||||||
version = "0.3.22"
|
version = "0.3.23"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
|
checksum = "cb7f578e5945fb242538965c2d0b04418d38ec25c79d160cd279bf0731c8d319"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"matchers",
|
"matchers",
|
||||||
"nu-ansi-term",
|
"nu-ansi-term",
|
||||||
@@ -2378,6 +2476,18 @@ version = "0.2.2"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254"
|
checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-xid"
|
||||||
|
version = "0.2.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unit-prefix"
|
||||||
|
version = "0.5.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "81e544489bf3d8ef66c953931f56617f423cd4b5494be343d9b9d3dda037b9a3"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "untrusted"
|
name = "untrusted"
|
||||||
version = "0.9.0"
|
version = "0.9.0"
|
||||||
@@ -2453,11 +2563,11 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "uuid"
|
name = "uuid"
|
||||||
version = "1.20.0"
|
version = "1.22.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ee48d38b119b0cd71fe4141b30f5ba9c7c5d9f4e7a3a8b4a674e4b6ef789976f"
|
checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"getrandom 0.3.4",
|
"getrandom 0.4.1",
|
||||||
"js-sys",
|
"js-sys",
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
]
|
]
|
||||||
@@ -2508,6 +2618,15 @@ dependencies = [
|
|||||||
"wit-bindgen",
|
"wit-bindgen",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasip3"
|
||||||
|
version = "0.4.0+wasi-0.3.0-rc-2026-01-06"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5"
|
||||||
|
dependencies = [
|
||||||
|
"wit-bindgen",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen"
|
name = "wasm-bindgen"
|
||||||
version = "0.2.108"
|
version = "0.2.108"
|
||||||
@@ -2567,6 +2686,40 @@ dependencies = [
|
|||||||
"unicode-ident",
|
"unicode-ident",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasm-encoder"
|
||||||
|
version = "0.244.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319"
|
||||||
|
dependencies = [
|
||||||
|
"leb128fmt",
|
||||||
|
"wasmparser",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasm-metadata"
|
||||||
|
version = "0.244.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"indexmap",
|
||||||
|
"wasm-encoder",
|
||||||
|
"wasmparser",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasmparser"
|
||||||
|
version = "0.244.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe"
|
||||||
|
dependencies = [
|
||||||
|
"bitflags",
|
||||||
|
"hashbrown 0.15.5",
|
||||||
|
"indexmap",
|
||||||
|
"semver",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "web-sys"
|
name = "web-sys"
|
||||||
version = "0.3.85"
|
version = "0.3.85"
|
||||||
@@ -2695,15 +2848,6 @@ dependencies = [
|
|||||||
"windows-targets 0.52.6",
|
"windows-targets 0.52.6",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows-sys"
|
|
||||||
version = "0.59.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
|
|
||||||
dependencies = [
|
|
||||||
"windows-targets 0.52.6",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows-sys"
|
name = "windows-sys"
|
||||||
version = "0.60.2"
|
version = "0.60.2"
|
||||||
@@ -2856,9 +3000,6 @@ name = "winnow"
|
|||||||
version = "0.7.14"
|
version = "0.7.14"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829"
|
checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829"
|
||||||
dependencies = [
|
|
||||||
"memchr",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wiremock"
|
name = "wiremock"
|
||||||
@@ -2888,6 +3029,88 @@ name = "wit-bindgen"
|
|||||||
version = "0.51.0"
|
version = "0.51.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5"
|
checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5"
|
||||||
|
dependencies = [
|
||||||
|
"wit-bindgen-rust-macro",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wit-bindgen-core"
|
||||||
|
version = "0.51.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"heck",
|
||||||
|
"wit-parser",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wit-bindgen-rust"
|
||||||
|
version = "0.51.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"heck",
|
||||||
|
"indexmap",
|
||||||
|
"prettyplease",
|
||||||
|
"syn",
|
||||||
|
"wasm-metadata",
|
||||||
|
"wit-bindgen-core",
|
||||||
|
"wit-component",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wit-bindgen-rust-macro"
|
||||||
|
version = "0.51.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"prettyplease",
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn",
|
||||||
|
"wit-bindgen-core",
|
||||||
|
"wit-bindgen-rust",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wit-component"
|
||||||
|
version = "0.244.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"bitflags",
|
||||||
|
"indexmap",
|
||||||
|
"log",
|
||||||
|
"serde",
|
||||||
|
"serde_derive",
|
||||||
|
"serde_json",
|
||||||
|
"wasm-encoder",
|
||||||
|
"wasm-metadata",
|
||||||
|
"wasmparser",
|
||||||
|
"wit-parser",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wit-parser"
|
||||||
|
version = "0.244.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"id-arena",
|
||||||
|
"indexmap",
|
||||||
|
"log",
|
||||||
|
"semver",
|
||||||
|
"serde",
|
||||||
|
"serde_derive",
|
||||||
|
"serde_json",
|
||||||
|
"unicode-xid",
|
||||||
|
"wasmparser",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "writeable"
|
name = "writeable"
|
||||||
@@ -3038,9 +3261,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "zlib-rs"
|
name = "zlib-rs"
|
||||||
version = "0.5.5"
|
version = "0.6.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "40990edd51aae2c2b6907af74ffb635029d5788228222c4bb811e9351c0caad3"
|
checksum = "c745c48e1007337ed136dc99df34128b9faa6ed542d80a1c673cf55a6d7236c8"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "zmij"
|
name = "zmij"
|
||||||
|
|||||||
@@ -4,10 +4,11 @@ members = [
|
|||||||
"nora-registry",
|
"nora-registry",
|
||||||
"nora-storage",
|
"nora-storage",
|
||||||
"nora-cli",
|
"nora-cli",
|
||||||
|
"fuzz",
|
||||||
]
|
]
|
||||||
|
|
||||||
[workspace.package]
|
[workspace.package]
|
||||||
version = "0.2.22"
|
version = "0.2.32"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
authors = ["DevITWay <devitway@gmail.com>"]
|
authors = ["DevITWay <devitway@gmail.com>"]
|
||||||
|
|||||||
125
README.md
125
README.md
@@ -1,16 +1,18 @@
|
|||||||
# 🐿️ N○RA
|
|
||||||
|
|
||||||
[](LICENSE)
|
[](LICENSE)
|
||||||
[](https://github.com/getnora-io/nora/releases)
|
[](https://github.com/getnora-io/nora/releases)
|
||||||
[](https://github.com/getnora-io/nora/actions)
|
[](https://github.com/getnora-io/nora/actions)
|
||||||
|
[](https://github.com/getnora-io/nora/pkgs/container/nora)
|
||||||
|
[](https://github.com/getnora-io/nora/stargazers)
|
||||||
[](https://www.rust-lang.org/)
|
[](https://www.rust-lang.org/)
|
||||||
[](https://t.me/DevITWay)
|
[](https://getnora.dev)
|
||||||
|
[](https://t.me/getnora)
|
||||||
|
[](https://scorecard.dev/viewer/?uri=github.com/getnora-io/nora)
|
||||||
|
|
||||||
> **Your Cloud-Native Artifact Registry**
|
> **Multi-protocol artifact registry that doesn't suck.**
|
||||||
|
>
|
||||||
|
> One binary. All protocols. Stupidly fast.
|
||||||
|
|
||||||
Fast. Organized. Feel at Home.
|
**32 MB** binary | **< 100 MB** RAM | **3s** startup | **5** protocols
|
||||||
|
|
||||||
**10x faster** than Nexus | **< 100 MB RAM** | **32 MB Docker image**
|
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
@@ -35,8 +37,10 @@ Fast. Organized. Feel at Home.
|
|||||||
|
|
||||||
- **Security**
|
- **Security**
|
||||||
- Basic Auth (htpasswd + bcrypt)
|
- Basic Auth (htpasswd + bcrypt)
|
||||||
- Revocable API tokens
|
- Revocable API tokens with RBAC
|
||||||
- ENV-based configuration (12-Factor)
|
- ENV-based configuration (12-Factor)
|
||||||
|
- SBOM (SPDX + CycloneDX) in every release
|
||||||
|
- See [SECURITY.md](SECURITY.md) for vulnerability reporting
|
||||||
|
|
||||||
## Quick Start
|
## Quick Start
|
||||||
|
|
||||||
@@ -85,6 +89,39 @@ npm config set registry http://localhost:4000/npm/
|
|||||||
npm publish
|
npm publish
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Authentication
|
||||||
|
|
||||||
|
NORA supports Basic Auth (htpasswd) and revocable API tokens with RBAC.
|
||||||
|
|
||||||
|
### Quick Setup
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# 1. Create htpasswd file with bcrypt
|
||||||
|
htpasswd -cbB users.htpasswd admin yourpassword
|
||||||
|
# Add more users:
|
||||||
|
htpasswd -bB users.htpasswd ci-user ci-secret
|
||||||
|
|
||||||
|
# 2. Start NORA with auth enabled
|
||||||
|
docker run -d -p 4000:4000 \
|
||||||
|
-v nora-data:/data \
|
||||||
|
-v ./users.htpasswd:/data/users.htpasswd \
|
||||||
|
-e NORA_AUTH_ENABLED=true \
|
||||||
|
ghcr.io/getnora-io/nora:latest
|
||||||
|
|
||||||
|
# 3. Verify
|
||||||
|
curl -u admin:yourpassword http://localhost:4000/v2/_catalog
|
||||||
|
```
|
||||||
|
|
||||||
|
### API Tokens (RBAC)
|
||||||
|
|
||||||
|
| Role | Pull/Read | Push/Write | Delete/Admin |
|
||||||
|
|------|-----------|------------|--------------|
|
||||||
|
| `read` | Yes | No | No |
|
||||||
|
| `write` | Yes | Yes | No |
|
||||||
|
| `admin` | Yes | Yes | Yes |
|
||||||
|
|
||||||
|
See [Authentication guide](https://getnora.dev/configuration/authentication/) for token management, Docker login, and CI/CD integration.
|
||||||
|
|
||||||
## CLI Commands
|
## CLI Commands
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -104,18 +141,10 @@ nora migrate --from local --to s3
|
|||||||
| `NORA_HOST` | 127.0.0.1 | Bind address |
|
| `NORA_HOST` | 127.0.0.1 | Bind address |
|
||||||
| `NORA_PORT` | 4000 | Port |
|
| `NORA_PORT` | 4000 | Port |
|
||||||
| `NORA_STORAGE_MODE` | local | `local` or `s3` |
|
| `NORA_STORAGE_MODE` | local | `local` or `s3` |
|
||||||
| `NORA_STORAGE_PATH` | data/storage | Local storage path |
|
|
||||||
| `NORA_STORAGE_S3_URL` | - | S3 endpoint URL |
|
|
||||||
| `NORA_STORAGE_BUCKET` | registry | S3 bucket name |
|
|
||||||
| `NORA_AUTH_ENABLED` | false | Enable authentication |
|
| `NORA_AUTH_ENABLED` | false | Enable authentication |
|
||||||
| `NORA_RATE_LIMIT_AUTH_RPS` | 1 | Auth requests per second |
|
| `NORA_DOCKER_UPSTREAMS` | `https://registry-1.docker.io` | Docker upstreams (`url\|user:pass,...`) |
|
||||||
| `NORA_RATE_LIMIT_AUTH_BURST` | 5 | Auth burst size |
|
|
||||||
| `NORA_RATE_LIMIT_UPLOAD_RPS` | 200 | Upload requests per second |
|
See [full configuration reference](https://getnora.dev/configuration/settings/) for all environment variables including storage, rate limiting, proxy auth, and secrets.
|
||||||
| `NORA_RATE_LIMIT_UPLOAD_BURST` | 500 | Upload burst size |
|
|
||||||
| `NORA_RATE_LIMIT_GENERAL_RPS` | 100 | General requests per second |
|
|
||||||
| `NORA_RATE_LIMIT_GENERAL_BURST` | 200 | General burst size |
|
|
||||||
| `NORA_SECRETS_PROVIDER` | env | Secrets provider (`env`) |
|
|
||||||
| `NORA_SECRETS_CLEAR_ENV` | false | Clear env vars after reading |
|
|
||||||
|
|
||||||
### config.toml
|
### config.toml
|
||||||
|
|
||||||
@@ -132,24 +161,15 @@ path = "data/storage"
|
|||||||
enabled = false
|
enabled = false
|
||||||
htpasswd_file = "users.htpasswd"
|
htpasswd_file = "users.htpasswd"
|
||||||
|
|
||||||
[rate_limit]
|
[docker]
|
||||||
# Strict limits for authentication (brute-force protection)
|
proxy_timeout = 60
|
||||||
auth_rps = 1
|
|
||||||
auth_burst = 5
|
|
||||||
# High limits for CI/CD upload workloads
|
|
||||||
upload_rps = 200
|
|
||||||
upload_burst = 500
|
|
||||||
# Balanced limits for general API endpoints
|
|
||||||
general_rps = 100
|
|
||||||
general_burst = 200
|
|
||||||
|
|
||||||
[secrets]
|
[[docker.upstreams]]
|
||||||
# Provider: env (default), aws-secrets, vault, k8s (coming soon)
|
url = "https://registry-1.docker.io"
|
||||||
provider = "env"
|
|
||||||
# Clear environment variables after reading (security hardening)
|
|
||||||
clear_env = false
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
See [full config reference](https://getnora.dev/configuration/settings/) for rate limiting, secrets, proxy auth, and all options.
|
||||||
|
|
||||||
## Endpoints
|
## Endpoints
|
||||||
|
|
||||||
| URL | Description |
|
| URL | Description |
|
||||||
@@ -165,6 +185,31 @@ clear_env = false
|
|||||||
| `/cargo/` | Cargo |
|
| `/cargo/` | Cargo |
|
||||||
| `/simple/` | PyPI |
|
| `/simple/` | PyPI |
|
||||||
|
|
||||||
|
## TLS / HTTPS
|
||||||
|
|
||||||
|
NORA serves plain HTTP. Use a reverse proxy for TLS:
|
||||||
|
|
||||||
|
```
|
||||||
|
registry.example.com {
|
||||||
|
reverse_proxy localhost:4000
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
For internal networks without TLS, configure Docker:
|
||||||
|
|
||||||
|
```json
|
||||||
|
// /etc/docker/daemon.json
|
||||||
|
{
|
||||||
|
"insecure-registries": ["192.168.1.100:4000"]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
See [TLS / HTTPS guide](https://getnora.dev/configuration/tls/) for Nginx, Traefik, and custom CA setup.
|
||||||
|
|
||||||
|
## FSTEC-Certified OS Builds
|
||||||
|
|
||||||
|
Dedicated builds for Astra Linux SE and RED OS are published as `-astra` and `-redos` tagged images in every [GitHub Release](https://github.com/getnora-io/nora/releases). Both use `scratch` base with statically-linked binary.
|
||||||
|
|
||||||
## Performance
|
## Performance
|
||||||
|
|
||||||
| Metric | NORA | Nexus | JFrog |
|
| Metric | NORA | Nexus | JFrog |
|
||||||
@@ -173,11 +218,21 @@ clear_env = false
|
|||||||
| Memory | < 100 MB | 2-4 GB | 2-4 GB |
|
| Memory | < 100 MB | 2-4 GB | 2-4 GB |
|
||||||
| Image Size | 32 MB | 600+ MB | 1+ GB |
|
| Image Size | 32 MB | 600+ MB | 1+ GB |
|
||||||
|
|
||||||
|
## Roadmap
|
||||||
|
|
||||||
|
- **OIDC / Workload Identity** — zero-secret auth for GitHub Actions, GitLab CI
|
||||||
|
- **Online Garbage Collection** — non-blocking cleanup without registry downtime
|
||||||
|
- **Retention Policies** — declarative rules: keep last N tags, delete older than X days
|
||||||
|
- **Image Signing** — cosign/notation verification and policy enforcement
|
||||||
|
- **Replication** — push/pull sync between NORA instances
|
||||||
|
|
||||||
|
See [CHANGELOG.md](CHANGELOG.md) for release history.
|
||||||
|
|
||||||
## Author
|
## Author
|
||||||
|
|
||||||
**Created and maintained by [DevITWay](https://github.com/devitway)**
|
**Created and maintained by [DevITWay](https://github.com/devitway)**
|
||||||
|
|
||||||
- Website: [devopsway.ru](https://devopsway.ru)
|
- Website: [getnora.dev](https://getnora.dev)
|
||||||
- Telegram: [@DevITWay](https://t.me/DevITWay)
|
- Telegram: [@DevITWay](https://t.me/DevITWay)
|
||||||
- GitHub: [@devitway](https://github.com/devitway)
|
- GitHub: [@devitway](https://github.com/devitway)
|
||||||
- Email: devitway@gmail.com
|
- Email: devitway@gmail.com
|
||||||
|
|||||||
11
deny.toml
11
deny.toml
@@ -4,7 +4,9 @@
|
|||||||
[advisories]
|
[advisories]
|
||||||
# Vulnerability database (RustSec)
|
# Vulnerability database (RustSec)
|
||||||
db-urls = ["https://github.com/rustsec/advisory-db"]
|
db-urls = ["https://github.com/rustsec/advisory-db"]
|
||||||
ignore = []
|
ignore = [
|
||||||
|
"RUSTSEC-2025-0119", # number_prefix unmaintained via indicatif; no fix available. Review by 2026-06-15
|
||||||
|
]
|
||||||
|
|
||||||
[licenses]
|
[licenses]
|
||||||
# Allowed open-source licenses
|
# Allowed open-source licenses
|
||||||
@@ -20,15 +22,13 @@ allow = [
|
|||||||
"CC0-1.0",
|
"CC0-1.0",
|
||||||
"OpenSSL",
|
"OpenSSL",
|
||||||
"Zlib",
|
"Zlib",
|
||||||
"MPL-2.0", # Mozilla Public License — ok for binary linking
|
"CDLA-Permissive-2.0", # webpki-roots (CA certificates bundle)
|
||||||
|
"MPL-2.0",
|
||||||
]
|
]
|
||||||
copyleft = "warn" # GPL etc — warn, don't block
|
|
||||||
unlicensed = "deny"
|
|
||||||
|
|
||||||
[bans]
|
[bans]
|
||||||
multiple-versions = "warn"
|
multiple-versions = "warn"
|
||||||
deny = [
|
deny = [
|
||||||
# Prefer rustls over openssl for static builds and supply chain cleanliness
|
|
||||||
{ name = "openssl-sys" },
|
{ name = "openssl-sys" },
|
||||||
{ name = "openssl" },
|
{ name = "openssl" },
|
||||||
]
|
]
|
||||||
@@ -37,5 +37,4 @@ skip = []
|
|||||||
[sources]
|
[sources]
|
||||||
unknown-registry = "warn"
|
unknown-registry = "warn"
|
||||||
unknown-git = "warn"
|
unknown-git = "warn"
|
||||||
# Allow only the official crates.io index
|
|
||||||
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
|
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
|
||||||
|
|||||||
111
dist/install.sh
vendored
Executable file
111
dist/install.sh
vendored
Executable file
@@ -0,0 +1,111 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# NORA Artifact Registry — install script
|
||||||
|
# Usage: curl -fsSL https://getnora.io/install.sh | bash
|
||||||
|
|
||||||
|
VERSION="${NORA_VERSION:-latest}"
|
||||||
|
ARCH=$(uname -m)
|
||||||
|
OS=$(uname -s | tr '[:upper:]' '[:lower:]')
|
||||||
|
INSTALL_DIR="/usr/local/bin"
|
||||||
|
CONFIG_DIR="/etc/nora"
|
||||||
|
DATA_DIR="/var/lib/nora"
|
||||||
|
LOG_DIR="/var/log/nora"
|
||||||
|
|
||||||
|
case "$ARCH" in
|
||||||
|
x86_64|amd64) ARCH="x86_64" ;;
|
||||||
|
aarch64|arm64) ARCH="aarch64" ;;
|
||||||
|
*) echo "Unsupported architecture: $ARCH"; exit 1 ;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
echo "Installing NORA ($OS/$ARCH)..."
|
||||||
|
|
||||||
|
# Download binary
|
||||||
|
if [ "$VERSION" = "latest" ]; then
|
||||||
|
DOWNLOAD_URL="https://github.com/getnora-io/nora/releases/latest/download/nora-${OS}-${ARCH}"
|
||||||
|
else
|
||||||
|
DOWNLOAD_URL="https://github.com/getnora-io/nora/releases/download/${VERSION}/nora-${OS}-${ARCH}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Downloading from $DOWNLOAD_URL..."
|
||||||
|
if command -v curl &>/dev/null; then
|
||||||
|
curl -fsSL -o /tmp/nora "$DOWNLOAD_URL"
|
||||||
|
elif command -v wget &>/dev/null; then
|
||||||
|
wget -qO /tmp/nora "$DOWNLOAD_URL"
|
||||||
|
else
|
||||||
|
echo "Error: curl or wget required"; exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
chmod +x /tmp/nora
|
||||||
|
sudo mv /tmp/nora "$INSTALL_DIR/nora"
|
||||||
|
|
||||||
|
# Create system user
|
||||||
|
if ! id nora &>/dev/null; then
|
||||||
|
sudo useradd --system --shell /usr/sbin/nologin --home-dir "$DATA_DIR" --create-home nora
|
||||||
|
echo "Created system user: nora"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create directories
|
||||||
|
sudo mkdir -p "$CONFIG_DIR" "$DATA_DIR" "$LOG_DIR"
|
||||||
|
sudo chown nora:nora "$DATA_DIR" "$LOG_DIR"
|
||||||
|
|
||||||
|
# Install default config if not exists
|
||||||
|
if [ ! -f "$CONFIG_DIR/nora.env" ]; then
|
||||||
|
cat > /tmp/nora.env << 'ENVEOF'
|
||||||
|
NORA_HOST=0.0.0.0
|
||||||
|
NORA_PORT=4000
|
||||||
|
NORA_STORAGE_PATH=/var/lib/nora
|
||||||
|
ENVEOF
|
||||||
|
sudo mv /tmp/nora.env "$CONFIG_DIR/nora.env"
|
||||||
|
sudo chmod 600 "$CONFIG_DIR/nora.env"
|
||||||
|
sudo chown nora:nora "$CONFIG_DIR/nora.env"
|
||||||
|
echo "Created default config: $CONFIG_DIR/nora.env"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Install systemd service
|
||||||
|
if [ -d /etc/systemd/system ]; then
|
||||||
|
cat > /tmp/nora.service << 'SVCEOF'
|
||||||
|
[Unit]
|
||||||
|
Description=NORA Artifact Registry
|
||||||
|
Documentation=https://getnora.dev
|
||||||
|
After=network-online.target
|
||||||
|
Wants=network-online.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=nora
|
||||||
|
Group=nora
|
||||||
|
ExecStart=/usr/local/bin/nora serve
|
||||||
|
WorkingDirectory=/etc/nora
|
||||||
|
Restart=on-failure
|
||||||
|
RestartSec=5
|
||||||
|
LimitNOFILE=65535
|
||||||
|
NoNewPrivileges=true
|
||||||
|
ProtectSystem=strict
|
||||||
|
ProtectHome=true
|
||||||
|
ReadWritePaths=/var/lib/nora /var/log/nora
|
||||||
|
PrivateTmp=true
|
||||||
|
EnvironmentFile=-/etc/nora/nora.env
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
SVCEOF
|
||||||
|
sudo mv /tmp/nora.service /etc/systemd/system/nora.service
|
||||||
|
sudo systemctl daemon-reload
|
||||||
|
sudo systemctl enable nora
|
||||||
|
echo "Installed systemd service: nora"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "NORA installed successfully!"
|
||||||
|
echo ""
|
||||||
|
echo " Binary: $INSTALL_DIR/nora"
|
||||||
|
echo " Config: $CONFIG_DIR/nora.env"
|
||||||
|
echo " Data: $DATA_DIR"
|
||||||
|
echo " Version: $(nora --version 2>/dev/null || echo 'unknown')"
|
||||||
|
echo ""
|
||||||
|
echo "Next steps:"
|
||||||
|
echo " 1. Edit $CONFIG_DIR/nora.env"
|
||||||
|
echo " 2. sudo systemctl start nora"
|
||||||
|
echo " 3. curl http://localhost:4000/health"
|
||||||
|
echo ""
|
||||||
9031
dist/nora.cdx.json
vendored
Normal file
9031
dist/nora.cdx.json
vendored
Normal file
File diff suppressed because it is too large
Load Diff
31
dist/nora.env.example
vendored
Normal file
31
dist/nora.env.example
vendored
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# NORA configuration — environment variables
|
||||||
|
# Copy to /etc/nora/nora.env and adjust
|
||||||
|
|
||||||
|
# Server
|
||||||
|
NORA_HOST=0.0.0.0
|
||||||
|
NORA_PORT=4000
|
||||||
|
# NORA_PUBLIC_URL=https://registry.example.com
|
||||||
|
|
||||||
|
# Storage
|
||||||
|
NORA_STORAGE_PATH=/var/lib/nora
|
||||||
|
# NORA_STORAGE_MODE=s3
|
||||||
|
# NORA_STORAGE_S3_URL=http://minio:9000
|
||||||
|
# NORA_STORAGE_BUCKET=registry
|
||||||
|
|
||||||
|
# Auth (optional)
|
||||||
|
# NORA_AUTH_ENABLED=true
|
||||||
|
# NORA_AUTH_HTPASSWD_FILE=/etc/nora/users.htpasswd
|
||||||
|
|
||||||
|
# Rate limiting
|
||||||
|
# NORA_RATE_LIMIT_ENABLED=true
|
||||||
|
|
||||||
|
# npm proxy
|
||||||
|
# NORA_NPM_PROXY=https://registry.npmjs.org
|
||||||
|
# NORA_NPM_METADATA_TTL=300
|
||||||
|
# NORA_NPM_PROXY_AUTH=user:pass
|
||||||
|
|
||||||
|
# PyPI proxy
|
||||||
|
# NORA_PYPI_PROXY=https://pypi.org/simple/
|
||||||
|
|
||||||
|
# Docker upstreams
|
||||||
|
# NORA_DOCKER_UPSTREAMS=https://registry-1.docker.io
|
||||||
28
dist/nora.service
vendored
Normal file
28
dist/nora.service
vendored
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=NORA Artifact Registry
|
||||||
|
Documentation=https://getnora.dev
|
||||||
|
After=network-online.target
|
||||||
|
Wants=network-online.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=nora
|
||||||
|
Group=nora
|
||||||
|
ExecStart=/usr/local/bin/nora serve
|
||||||
|
WorkingDirectory=/etc/nora
|
||||||
|
Restart=on-failure
|
||||||
|
RestartSec=5
|
||||||
|
LimitNOFILE=65535
|
||||||
|
|
||||||
|
# Security hardening
|
||||||
|
NoNewPrivileges=true
|
||||||
|
ProtectSystem=strict
|
||||||
|
ProtectHome=true
|
||||||
|
ReadWritePaths=/var/lib/nora /var/log/nora
|
||||||
|
PrivateTmp=true
|
||||||
|
|
||||||
|
# Environment
|
||||||
|
EnvironmentFile=-/etc/nora/nora.env
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
services:
|
services:
|
||||||
nora:
|
nora:
|
||||||
build: .
|
build: .
|
||||||
image: getnora/nora:latest
|
image: ghcr.io/getnora-io/nora:latest
|
||||||
ports:
|
ports:
|
||||||
- "4000:4000"
|
- "4000:4000"
|
||||||
volumes:
|
volumes:
|
||||||
|
|||||||
13
docs-ru/README.md
Normal file
13
docs-ru/README.md
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Документация NORA для Росреестра
|
||||||
|
|
||||||
|
## Структура
|
||||||
|
|
||||||
|
- `ТУ.md` — Технические условия
|
||||||
|
- `Руководство.md` — Руководство пользователя
|
||||||
|
- `Руководство_администратора.md` — Руководство администратора
|
||||||
|
- `SBOM.md` — Перечень компонентов (Software Bill of Materials)
|
||||||
|
|
||||||
|
## Статус
|
||||||
|
|
||||||
|
Подготовка документации для включения в Единый реестр российских программ
|
||||||
|
для электронных вычислительных машин и баз данных (Минцифры РФ).
|
||||||
301
docs-ru/admin-guide.md
Normal file
301
docs-ru/admin-guide.md
Normal file
@@ -0,0 +1,301 @@
|
|||||||
|
# Руководство администратора NORA
|
||||||
|
|
||||||
|
**Версия:** 1.0
|
||||||
|
**Дата:** 2026-03-16
|
||||||
|
**Правообладатель:** ООО «ТАИАРС» (торговая марка АРТАИС)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Общие сведения
|
||||||
|
|
||||||
|
NORA — многопротокольный реестр артефактов, предназначенный для хранения, кэширования и распространения программных компонентов. Программа обеспечивает централизованное управление зависимостями при разработке и сборке программного обеспечения.
|
||||||
|
|
||||||
|
### 1.1. Назначение
|
||||||
|
|
||||||
|
- Хранение и раздача артефактов по протоколам Docker (OCI), npm, Maven, PyPI, Cargo, Helm OCI и Raw.
|
||||||
|
- Проксирование и кэширование внешних репозиториев для ускорения сборок и обеспечения доступности при отсутствии соединения с сетью Интернет.
|
||||||
|
- Контроль целостности артефактов посредством верификации SHA-256.
|
||||||
|
- Аудит и протоколирование всех операций.
|
||||||
|
|
||||||
|
### 1.2. Системные требования
|
||||||
|
|
||||||
|
| Параметр | Минимальные | Рекомендуемые |
|
||||||
|
|----------|-------------|---------------|
|
||||||
|
| ОС | Linux (amd64, arm64) | Ubuntu 22.04+, RHEL 8+ |
|
||||||
|
| ЦПУ | 1 ядро | 2+ ядра |
|
||||||
|
| ОЗУ | 256 МБ | 1+ ГБ |
|
||||||
|
| Диск | 1 ГБ | 50+ ГБ (зависит от объёма хранимых артефактов) |
|
||||||
|
| Сеть | TCP-порт (по умолчанию 4000) | — |
|
||||||
|
|
||||||
|
### 1.3. Зависимости
|
||||||
|
|
||||||
|
Программа поставляется как единый статически слинкованный исполняемый файл. Внешние зависимости отсутствуют. Перечень библиотек, включённых в состав программы, приведён в файле `nora.cdx.json` (формат CycloneDX).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Установка
|
||||||
|
|
||||||
|
### 2.1. Автоматическая установка
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -fsSL https://getnora.io/install.sh | bash
|
||||||
|
```
|
||||||
|
|
||||||
|
Скрипт выполняет следующие действия:
|
||||||
|
|
||||||
|
1. Определяет архитектуру процессора (amd64 или arm64).
|
||||||
|
2. Загружает исполняемый файл с GitHub Releases.
|
||||||
|
3. Создаёт системного пользователя `nora`.
|
||||||
|
4. Создаёт каталоги: `/etc/nora/`, `/var/lib/nora/`, `/var/log/nora/`.
|
||||||
|
5. Устанавливает файл конфигурации `/etc/nora/nora.env`.
|
||||||
|
6. Устанавливает и активирует systemd-сервис.
|
||||||
|
|
||||||
|
### 2.2. Ручная установка
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Загрузка
|
||||||
|
wget https://github.com/getnora-io/nora/releases/download/v1.0.0/nora-linux-x86_64
|
||||||
|
chmod +x nora-linux-x86_64
|
||||||
|
mv nora-linux-x86_64 /usr/local/bin/nora
|
||||||
|
|
||||||
|
# Создание пользователя
|
||||||
|
useradd --system --shell /usr/sbin/nologin --home-dir /var/lib/nora --create-home nora
|
||||||
|
|
||||||
|
# Создание каталогов
|
||||||
|
mkdir -p /etc/nora /var/lib/nora /var/log/nora
|
||||||
|
chown nora:nora /var/lib/nora /var/log/nora
|
||||||
|
|
||||||
|
# Установка systemd-сервиса
|
||||||
|
cp dist/nora.service /etc/systemd/system/
|
||||||
|
systemctl daemon-reload
|
||||||
|
systemctl enable nora
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.3. Установка из Docker-образа
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker run -d \
|
||||||
|
--name nora \
|
||||||
|
-p 4000:4000 \
|
||||||
|
-v nora-data:/data \
|
||||||
|
ghcr.io/getnora-io/nora:latest
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Конфигурация
|
||||||
|
|
||||||
|
Конфигурация задаётся через переменные окружения, файл `config.toml` или их комбинацию. Приоритет: переменные окружения > config.toml > значения по умолчанию.
|
||||||
|
|
||||||
|
### 3.1. Основные параметры
|
||||||
|
|
||||||
|
| Переменная | Описание | По умолчанию |
|
||||||
|
|-----------|----------|--------------|
|
||||||
|
| `NORA_HOST` | Адрес привязки | `127.0.0.1` |
|
||||||
|
| `NORA_PORT` | Порт | `4000` |
|
||||||
|
| `NORA_PUBLIC_URL` | Внешний URL (для генерации ссылок) | — |
|
||||||
|
| `NORA_STORAGE_PATH` | Путь к каталогу хранилища | `data/storage` |
|
||||||
|
| `NORA_STORAGE_MODE` | Тип хранилища: `local` или `s3` | `local` |
|
||||||
|
| `NORA_BODY_LIMIT_MB` | Максимальный размер тела запроса (МБ) | `2048` |
|
||||||
|
|
||||||
|
### 3.2. Аутентификация
|
||||||
|
|
||||||
|
| Переменная | Описание | По умолчанию |
|
||||||
|
|-----------|----------|--------------|
|
||||||
|
| `NORA_AUTH_ENABLED` | Включить аутентификацию | `false` |
|
||||||
|
| `NORA_AUTH_HTPASSWD_FILE` | Путь к файлу htpasswd | `users.htpasswd` |
|
||||||
|
|
||||||
|
Создание пользователя:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
htpasswd -Bc /etc/nora/users.htpasswd admin
|
||||||
|
```
|
||||||
|
|
||||||
|
Роли: `admin` (полный доступ), `write` (чтение и запись), `read` (только чтение, по умолчанию).
|
||||||
|
|
||||||
|
### 3.3. Проксирование внешних репозиториев
|
||||||
|
|
||||||
|
| Переменная | Описание | По умолчанию |
|
||||||
|
|-----------|----------|--------------|
|
||||||
|
| `NORA_NPM_PROXY` | URL npm-реестра | `https://registry.npmjs.org` |
|
||||||
|
| `NORA_NPM_PROXY_AUTH` | Учётные данные (`user:pass`) | — |
|
||||||
|
| `NORA_NPM_METADATA_TTL` | TTL кэша метаданных (секунды) | `300` |
|
||||||
|
| `NORA_PYPI_PROXY` | URL PyPI-реестра | `https://pypi.org/simple/` |
|
||||||
|
| `NORA_MAVEN_PROXIES` | Список Maven-репозиториев через запятую | `https://repo1.maven.org/maven2` |
|
||||||
|
| `NORA_DOCKER_UPSTREAMS` | Docker-реестры, формат: `url\|auth,url2` | `https://registry-1.docker.io` |
|
||||||
|
|
||||||
|
### 3.4. Ограничение частоты запросов
|
||||||
|
|
||||||
|
| Переменная | Описание | По умолчанию |
|
||||||
|
|-----------|----------|--------------|
|
||||||
|
| `NORA_RATE_LIMIT_ENABLED` | Включить ограничение | `true` |
|
||||||
|
| `NORA_RATE_LIMIT_GENERAL_RPS` | Запросов в секунду (общие) | `100` |
|
||||||
|
| `NORA_RATE_LIMIT_AUTH_RPS` | Запросов в секунду (аутентификация) | `1` |
|
||||||
|
| `NORA_RATE_LIMIT_UPLOAD_RPS` | Запросов в секунду (загрузка) | `200` |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Управление сервисом
|
||||||
|
|
||||||
|
### 4.1. Запуск и остановка
|
||||||
|
|
||||||
|
```bash
|
||||||
|
systemctl start nora # Запуск
|
||||||
|
systemctl stop nora # Остановка
|
||||||
|
systemctl restart nora # Перезапуск
|
||||||
|
systemctl status nora # Статус
|
||||||
|
journalctl -u nora -f # Просмотр журнала
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.2. Проверка работоспособности
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl http://localhost:4000/health
|
||||||
|
```
|
||||||
|
|
||||||
|
Ответ при нормальной работе:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"status": "healthy",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"storage": { "backend": "local", "reachable": true },
|
||||||
|
"registries": { "docker": "ok", "npm": "ok", "maven": "ok", "cargo": "ok", "pypi": "ok" }
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.3. Метрики (Prometheus)
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /metrics
|
||||||
|
```
|
||||||
|
|
||||||
|
Экспортируются: количество запросов, латентность, загрузки и выгрузки по протоколам.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Резервное копирование и восстановление
|
||||||
|
|
||||||
|
### 5.1. Создание резервной копии
|
||||||
|
|
||||||
|
```bash
|
||||||
|
nora backup --output /backup/nora-$(date +%Y%m%d).tar.gz
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.2. Восстановление
|
||||||
|
|
||||||
|
```bash
|
||||||
|
nora restore --input /backup/nora-20260316.tar.gz
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.3. Сборка мусора
|
||||||
|
|
||||||
|
```bash
|
||||||
|
nora gc --dry-run # Просмотр (без удаления)
|
||||||
|
nora gc # Удаление осиротевших блобов
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Предварительное кэширование (nora mirror)
|
||||||
|
|
||||||
|
Команда `nora mirror` позволяет заранее загрузить зависимости через прокси-кэш NORA. Это обеспечивает доступность артефактов при работе в изолированных средах без доступа к сети Интернет.
|
||||||
|
|
||||||
|
### 6.1. Кэширование по lockfile
|
||||||
|
|
||||||
|
```bash
|
||||||
|
nora mirror npm --lockfile package-lock.json --registry http://localhost:4000
|
||||||
|
nora mirror pip --lockfile requirements.txt --registry http://localhost:4000
|
||||||
|
nora mirror cargo --lockfile Cargo.lock --registry http://localhost:4000
|
||||||
|
```
|
||||||
|
|
||||||
|
### 6.2. Кэширование по списку пакетов
|
||||||
|
|
||||||
|
```bash
|
||||||
|
nora mirror npm --packages lodash,express --registry http://localhost:4000
|
||||||
|
nora mirror npm --packages lodash --all-versions --registry http://localhost:4000
|
||||||
|
```
|
||||||
|
|
||||||
|
### 6.3. Параметры
|
||||||
|
|
||||||
|
| Флаг | Описание | По умолчанию |
|
||||||
|
|------|----------|--------------|
|
||||||
|
| `--registry` | URL экземпляра NORA | `http://localhost:4000` |
|
||||||
|
| `--concurrency` | Количество параллельных загрузок | `8` |
|
||||||
|
| `--all-versions` | Загрузить все версии (только с `--packages`) | — |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Миграция хранилища
|
||||||
|
|
||||||
|
Перенос артефактов между локальным хранилищем и S3:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
nora migrate --from local --to s3 --dry-run # Просмотр
|
||||||
|
nora migrate --from local --to s3 # Выполнение
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. Безопасность
|
||||||
|
|
||||||
|
### 8.1. Контроль целостности
|
||||||
|
|
||||||
|
При проксировании npm-пакетов NORA вычисляет и сохраняет контрольную сумму SHA-256 для каждого тарбола. При повторной выдаче из кэша контрольная сумма проверяется. В случае расхождения запрос отклоняется, а в журнал записывается предупреждение уровня SECURITY.
|
||||||
|
|
||||||
|
### 8.2. Защита от подмены пакетов
|
||||||
|
|
||||||
|
- Валидация имён файлов при публикации (защита от обхода каталогов).
|
||||||
|
- Проверка соответствия имени пакета в URL и теле запроса.
|
||||||
|
- Иммутабельность версий: повторная публикация той же версии запрещена.
|
||||||
|
|
||||||
|
### 8.3. Аудит
|
||||||
|
|
||||||
|
Все операции (загрузка, выгрузка, обращения к кэшу, ошибки) фиксируются в файле `audit.jsonl` в каталоге хранилища. Формат — JSON Lines, одна запись на строку.
|
||||||
|
|
||||||
|
### 8.4. Усиление systemd
|
||||||
|
|
||||||
|
Файл сервиса содержит параметры безопасности:
|
||||||
|
|
||||||
|
- `NoNewPrivileges=true` — запрет повышения привилегий.
|
||||||
|
- `ProtectSystem=strict` — файловая система только для чтения, кроме указанных каталогов.
|
||||||
|
- `ProtectHome=true` — запрет доступа к домашним каталогам.
|
||||||
|
- `PrivateTmp=true` — изолированный каталог временных файлов.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 9. Точки подключения (endpoints)
|
||||||
|
|
||||||
|
| Протокол | Endpoint | Описание |
|
||||||
|
|----------|----------|----------|
|
||||||
|
| Docker / OCI | `/v2/` | Docker Registry V2 API |
|
||||||
|
| npm | `/npm/` | npm-реестр (прокси + публикация) |
|
||||||
|
| Maven | `/maven2/` | Maven-репозиторий |
|
||||||
|
| PyPI | `/simple/` | Python Simple API (PEP 503) |
|
||||||
|
| Cargo | `/cargo/` | Cargo-реестр |
|
||||||
|
| Helm | `/v2/` (OCI) | Helm-чарты через OCI-протокол |
|
||||||
|
| Raw | `/raw/` | Произвольные файлы |
|
||||||
|
| Мониторинг | `/health`, `/ready`, `/metrics` | Проверка и метрики |
|
||||||
|
| Интерфейс | `/ui/` | Веб-интерфейс управления |
|
||||||
|
| Документация API | `/api-docs` | OpenAPI (Swagger UI) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 10. Устранение неполадок
|
||||||
|
|
||||||
|
### Сервис не запускается
|
||||||
|
|
||||||
|
```bash
|
||||||
|
journalctl -u nora --no-pager -n 50
|
||||||
|
```
|
||||||
|
|
||||||
|
Частые причины: занят порт, недоступен каталог хранилища, ошибка в конфигурации.
|
||||||
|
|
||||||
|
### Прокси-кэш не работает
|
||||||
|
|
||||||
|
1. Проверьте доступность внешнего реестра: `curl https://registry.npmjs.org/lodash`.
|
||||||
|
2. Убедитесь, что переменная `NORA_NPM_PROXY` задана корректно.
|
||||||
|
3. При использовании приватного реестра укажите `NORA_NPM_PROXY_AUTH`.
|
||||||
|
|
||||||
|
### Ошибка целостности (Integrity check failed)
|
||||||
|
|
||||||
|
Контрольная сумма кэшированного тарбола не совпадает с сохранённой. Возможные причины: повреждение файловой системы или несанкционированное изменение файла. Удалите повреждённый файл из каталога хранилища — NORA загрузит его заново из внешнего реестра.
|
||||||
165
docs-ru/technical-spec.md
Normal file
165
docs-ru/technical-spec.md
Normal file
@@ -0,0 +1,165 @@
|
|||||||
|
# Технические условия
|
||||||
|
|
||||||
|
## Программа «NORA — Реестр артефактов»
|
||||||
|
|
||||||
|
**Версия документа:** 1.0
|
||||||
|
**Дата:** 2026-03-16
|
||||||
|
**Правообладатель:** ООО «ТАИАРС» (торговая марка АРТАИС)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Наименование и обозначение
|
||||||
|
|
||||||
|
**Полное наименование:** NORA — многопротокольный реестр артефактов.
|
||||||
|
|
||||||
|
**Краткое наименование:** NORA.
|
||||||
|
|
||||||
|
**Обозначение:** nora-registry.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Назначение
|
||||||
|
|
||||||
|
Программа предназначена для хранения, кэширования и распространения программных компонентов (артефактов), используемых при разработке, сборке и развёртывании программного обеспечения.
|
||||||
|
|
||||||
|
### 2.1. Область применения
|
||||||
|
|
||||||
|
- Организация внутренних репозиториев программных компонентов.
|
||||||
|
- Проксирование и кэширование общедоступных репозиториев (npmjs.org, PyPI, Maven Central, Docker Hub, crates.io).
|
||||||
|
- Обеспечение доступности зависимостей в изолированных средах без доступа к сети Интернет (air-gapped).
|
||||||
|
- Контроль целостности и безопасности цепочки поставки программного обеспечения.
|
||||||
|
|
||||||
|
### 2.2. Класс программного обеспечения
|
||||||
|
|
||||||
|
Инструментальное программное обеспечение для разработки и DevOps.
|
||||||
|
|
||||||
|
Код ОКПД2: 62.01 — Разработка компьютерного программного обеспечения.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Функциональные характеристики
|
||||||
|
|
||||||
|
### 3.1. Поддерживаемые протоколы
|
||||||
|
|
||||||
|
| Протокол | Стандарт | Назначение |
|
||||||
|
|----------|----------|------------|
|
||||||
|
| Docker / OCI | OCI Distribution Spec v1.0 | Контейнерные образы, Helm-чарты |
|
||||||
|
| npm | npm Registry API | Библиотеки JavaScript / TypeScript |
|
||||||
|
| Maven | Maven Repository Layout | Библиотеки Java / Kotlin |
|
||||||
|
| PyPI | PEP 503 (Simple API) | Библиотеки Python |
|
||||||
|
| Cargo | Cargo Registry Protocol | Библиотеки Rust |
|
||||||
|
| Raw | HTTP PUT/GET | Произвольные файлы |
|
||||||
|
|
||||||
|
### 3.2. Режимы работы
|
||||||
|
|
||||||
|
1. **Хранилище (hosted):** приём и хранение артефактов, опубликованных пользователями.
|
||||||
|
2. **Прокси-кэш (proxy):** прозрачное проксирование запросов к внешним репозиториям с локальным кэшированием.
|
||||||
|
3. **Комбинированный:** одновременная работа в режимах хранилища и прокси-кэша (поиск сначала в локальном хранилище, затем во внешнем репозитории).
|
||||||
|
|
||||||
|
### 3.3. Управление доступом
|
||||||
|
|
||||||
|
- Аутентификация на основе htpasswd (bcrypt).
|
||||||
|
- Ролевая модель: `read` (чтение), `write` (чтение и запись), `admin` (полный доступ).
|
||||||
|
- Токены доступа с ограниченным сроком действия.
|
||||||
|
|
||||||
|
### 3.4. Безопасность
|
||||||
|
|
||||||
|
- Контроль целостности кэшированных артефактов (SHA-256).
|
||||||
|
- Защита от обхода каталогов (path traversal) при публикации.
|
||||||
|
- Проверка соответствия имени пакета в URL и теле запроса.
|
||||||
|
- Иммутабельность опубликованных версий.
|
||||||
|
- Аудит всех операций в формате JSON Lines.
|
||||||
|
- Поддержка TLS при размещении за обратным прокси-сервером.
|
||||||
|
|
||||||
|
### 3.5. Эксплуатация
|
||||||
|
|
||||||
|
- Предварительное кэширование зависимостей (`nora mirror`) по файлам фиксации версий (lockfile).
|
||||||
|
- Сборка мусора (`nora gc`) — удаление осиротевших блобов.
|
||||||
|
- Резервное копирование и восстановление (`nora backup`, `nora restore`).
|
||||||
|
- Миграция между локальным хранилищем и S3-совместимым объектным хранилищем.
|
||||||
|
- Мониторинг: эндпоинты `/health`, `/ready`, `/metrics` (формат Prometheus).
|
||||||
|
- Веб-интерфейс для просмотра содержимого реестра.
|
||||||
|
- Документация API в формате OpenAPI 3.0.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Технические характеристики
|
||||||
|
|
||||||
|
### 4.1. Среда исполнения
|
||||||
|
|
||||||
|
| Параметр | Значение |
|
||||||
|
|----------|----------|
|
||||||
|
| Язык реализации | Rust |
|
||||||
|
| Формат поставки | Единый исполняемый файл (статическая линковка) |
|
||||||
|
| Поддерживаемые ОС | Linux (ядро 4.15+) |
|
||||||
|
| Архитектуры | x86_64 (amd64), aarch64 (arm64) |
|
||||||
|
| Контейнеризация | Docker-образ на базе `scratch` |
|
||||||
|
| Системная интеграция | systemd (файл сервиса в комплекте) |
|
||||||
|
|
||||||
|
### 4.2. Хранение данных
|
||||||
|
|
||||||
|
| Параметр | Значение |
|
||||||
|
|----------|----------|
|
||||||
|
| Локальное хранилище | Файловая система (ext4, XFS, ZFS) |
|
||||||
|
| Объектное хранилище | S3-совместимое API (MinIO, Yandex Object Storage, Selectel S3) |
|
||||||
|
| Структура | Иерархическая: `{protocol}/{package}/{artifact}` |
|
||||||
|
| Аудит | Append-only JSONL файл |
|
||||||
|
|
||||||
|
### 4.3. Конфигурация
|
||||||
|
|
||||||
|
| Источник | Приоритет |
|
||||||
|
|----------|-----------|
|
||||||
|
| Переменные окружения (`NORA_*`) | Высший |
|
||||||
|
| Файл `config.toml` | Средний |
|
||||||
|
| Значения по умолчанию | Низший |
|
||||||
|
|
||||||
|
### 4.4. Производительность
|
||||||
|
|
||||||
|
| Параметр | Значение |
|
||||||
|
|----------|----------|
|
||||||
|
| Время запуска | < 100 мс |
|
||||||
|
| Обслуживание из кэша | < 2 мс (метаданные), < 10 мс (артефакты до 1 МБ) |
|
||||||
|
| Параллельная обработка | Асинхронный ввод-вывод (tokio runtime) |
|
||||||
|
| Ограничение частоты | Настраиваемое (по умолчанию 100 запросов/сек) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Лицензирование
|
||||||
|
|
||||||
|
| Компонент | Лицензия |
|
||||||
|
|-----------|----------|
|
||||||
|
| NORA (core) | MIT License |
|
||||||
|
| NORA Enterprise | Проприетарная |
|
||||||
|
|
||||||
|
Полный перечень лицензий включённых библиотек приведён в файле SBOM (формат CycloneDX).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Комплектность
|
||||||
|
|
||||||
|
| Компонент | Описание |
|
||||||
|
|-----------|----------|
|
||||||
|
| `nora` | Исполняемый файл |
|
||||||
|
| `nora.service` | Файл systemd-сервиса |
|
||||||
|
| `nora.env.example` | Шаблон конфигурации |
|
||||||
|
| `install.sh` | Скрипт установки |
|
||||||
|
| `nora.cdx.json` | SBOM в формате CycloneDX |
|
||||||
|
| Руководство администратора | Настоящий комплект документации |
|
||||||
|
| Руководство пользователя | Настоящий комплект документации |
|
||||||
|
| Технические условия | Настоящий документ |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Контактная информация
|
||||||
|
|
||||||
|
**Правообладатель:** ООО «ТАИАРС»
|
||||||
|
|
||||||
|
**Торговая марка:** АРТАИС
|
||||||
|
|
||||||
|
**Сайт продукта:** https://getnora.io
|
||||||
|
|
||||||
|
**Документация:** https://getnora.dev
|
||||||
|
|
||||||
|
**Исходный код:** https://github.com/getnora-io/nora
|
||||||
|
|
||||||
|
**Поддержка:** https://t.me/getnora
|
||||||
221
docs-ru/user-guide.md
Normal file
221
docs-ru/user-guide.md
Normal file
@@ -0,0 +1,221 @@
|
|||||||
|
# Руководство пользователя NORA
|
||||||
|
|
||||||
|
**Версия:** 1.0
|
||||||
|
**Дата:** 2026-03-16
|
||||||
|
**Правообладатель:** ООО «ТАИАРС» (торговая марка АРТАИС)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Общие сведения
|
||||||
|
|
||||||
|
NORA — реестр артефактов для команд разработки. Программа обеспечивает хранение и кэширование библиотек, Docker-образов и иных программных компонентов, используемых при сборке приложений.
|
||||||
|
|
||||||
|
Данное руководство предназначено для разработчиков, которые используют NORA в качестве источника зависимостей.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Настройка рабочего окружения
|
||||||
|
|
||||||
|
### 2.1. npm / Node.js
|
||||||
|
|
||||||
|
Укажите NORA в качестве реестра:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm config set registry http://nora.example.com:4000/npm
|
||||||
|
```
|
||||||
|
|
||||||
|
Или создайте файл `.npmrc` в корне проекта:
|
||||||
|
|
||||||
|
```
|
||||||
|
registry=http://nora.example.com:4000/npm
|
||||||
|
```
|
||||||
|
|
||||||
|
После этого все команды `npm install` будут загружать пакеты через NORA. При первом обращении NORA загрузит пакет из внешнего реестра (npmjs.org) и сохранит его в кэш. Последующие обращения обслуживаются из кэша.
|
||||||
|
|
||||||
|
### 2.2. Docker
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker login nora.example.com:4000
|
||||||
|
docker pull nora.example.com:4000/library/nginx:latest
|
||||||
|
docker push nora.example.com:4000/myteam/myapp:1.0.0
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.3. Maven
|
||||||
|
|
||||||
|
Добавьте репозиторий в `settings.xml`:
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<mirrors>
|
||||||
|
<mirror>
|
||||||
|
<id>nora</id>
|
||||||
|
<mirrorOf>central</mirrorOf>
|
||||||
|
<url>http://nora.example.com:4000/maven2</url>
|
||||||
|
</mirror>
|
||||||
|
</mirrors>
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.4. Python / pip
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pip install --index-url http://nora.example.com:4000/simple flask
|
||||||
|
```
|
||||||
|
|
||||||
|
Или в `pip.conf`:
|
||||||
|
|
||||||
|
```ini
|
||||||
|
[global]
|
||||||
|
index-url = http://nora.example.com:4000/simple
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.5. Cargo / Rust
|
||||||
|
|
||||||
|
Настройка в `~/.cargo/config.toml`:
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[registries.nora]
|
||||||
|
index = "sparse+http://nora.example.com:4000/cargo/"
|
||||||
|
|
||||||
|
[source.crates-io]
|
||||||
|
replace-with = "nora"
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.6. Helm
|
||||||
|
|
||||||
|
Helm использует OCI-протокол через Docker Registry API:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
helm push mychart-0.1.0.tgz oci://nora.example.com:4000/helm
|
||||||
|
helm pull oci://nora.example.com:4000/helm/mychart --version 0.1.0
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Публикация пакетов
|
||||||
|
|
||||||
|
### 3.1. npm
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm publish --registry http://nora.example.com:4000/npm
|
||||||
|
```
|
||||||
|
|
||||||
|
Требования:
|
||||||
|
- Файл `package.json` с полями `name` и `version`.
|
||||||
|
- Каждая версия публикуется однократно. Повторная публикация той же версии запрещена.
|
||||||
|
|
||||||
|
### 3.2. Docker
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker tag myapp:latest nora.example.com:4000/myteam/myapp:1.0.0
|
||||||
|
docker push nora.example.com:4000/myteam/myapp:1.0.0
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.3. Maven
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mvn deploy -DaltDeploymentRepository=nora::default::http://nora.example.com:4000/maven2
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.4. Raw (произвольные файлы)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Загрузка
|
||||||
|
curl -X PUT --data-binary @release.tar.gz http://nora.example.com:4000/raw/builds/release-1.0.tar.gz
|
||||||
|
|
||||||
|
# Скачивание
|
||||||
|
curl -O http://nora.example.com:4000/raw/builds/release-1.0.tar.gz
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Работа в изолированной среде
|
||||||
|
|
||||||
|
Если сборочный сервер не имеет доступа к сети Интернет, используйте предварительное кэширование.
|
||||||
|
|
||||||
|
### 4.1. Кэширование зависимостей проекта
|
||||||
|
|
||||||
|
На машине с доступом к Интернету и NORA выполните:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
nora mirror npm --lockfile package-lock.json --registry http://nora.example.com:4000
|
||||||
|
```
|
||||||
|
|
||||||
|
После этого все зависимости из lockfile будут доступны через NORA, даже если связь с внешними реестрами отсутствует.
|
||||||
|
|
||||||
|
### 4.2. Кэширование всех версий пакета
|
||||||
|
|
||||||
|
```bash
|
||||||
|
nora mirror npm --packages lodash,express --all-versions --registry http://nora.example.com:4000
|
||||||
|
```
|
||||||
|
|
||||||
|
Эта команда загрузит все опубликованные версии указанных пакетов.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Веб-интерфейс
|
||||||
|
|
||||||
|
NORA предоставляет веб-интерфейс для просмотра содержимого реестра:
|
||||||
|
|
||||||
|
```
|
||||||
|
http://nora.example.com:4000/ui/
|
||||||
|
```
|
||||||
|
|
||||||
|
Доступные функции:
|
||||||
|
- Просмотр списка артефактов по протоколам.
|
||||||
|
- Количество версий и размер каждого пакета.
|
||||||
|
- Журнал последних операций.
|
||||||
|
- Метрики загрузок.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Документация API
|
||||||
|
|
||||||
|
Интерактивная документация API доступна по адресу:
|
||||||
|
|
||||||
|
```
|
||||||
|
http://nora.example.com:4000/api-docs
|
||||||
|
```
|
||||||
|
|
||||||
|
Формат: OpenAPI 3.0 (Swagger UI).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Аутентификация
|
||||||
|
|
||||||
|
Если администратор включил аутентификацию, для операций записи требуется токен.
|
||||||
|
|
||||||
|
### 7.1. Получение токена
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -u admin:password http://nora.example.com:4000/auth/token
|
||||||
|
```
|
||||||
|
|
||||||
|
### 7.2. Использование токена
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# npm
|
||||||
|
npm config set //nora.example.com:4000/npm/:_authToken TOKEN
|
||||||
|
|
||||||
|
# Docker
|
||||||
|
docker login nora.example.com:4000
|
||||||
|
|
||||||
|
# curl
|
||||||
|
curl -H "Authorization: Bearer TOKEN" http://nora.example.com:4000/npm/my-package
|
||||||
|
```
|
||||||
|
|
||||||
|
Операции чтения по умолчанию не требуют аутентификации (роль `read` назначается автоматически).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. Часто задаваемые вопросы
|
||||||
|
|
||||||
|
**В: Что произойдёт, если внешний реестр (npmjs.org) станет недоступен?**
|
||||||
|
О: NORA продолжит обслуживать запросы из кэша. Пакеты, которые ранее не запрашивались, будут недоступны до восстановления связи. Для предотвращения такой ситуации используйте `nora mirror`.
|
||||||
|
|
||||||
|
**В: Можно ли публиковать приватные пакеты?**
|
||||||
|
О: Да. Пакеты, опубликованные через `npm publish` или `docker push`, сохраняются в локальном хранилище NORA и доступны всем пользователям данного экземпляра.
|
||||||
|
|
||||||
|
**В: Как обновить кэш метаданных?**
|
||||||
|
О: Кэш метаданных npm обновляется автоматически по истечении TTL (по умолчанию 5 минут). Для немедленного обновления удалите файл `metadata.json` из каталога хранилища.
|
||||||
|
|
||||||
|
**В: Поддерживаются ли scoped-пакеты npm (@scope/package)?**
|
||||||
|
О: Да, полностью. Например: `npm install @babel/core --registry http://nora.example.com:4000/npm`.
|
||||||
22
fuzz/Cargo.toml
Normal file
22
fuzz/Cargo.toml
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
[package]
|
||||||
|
name = "nora-fuzz"
|
||||||
|
version = "0.0.0"
|
||||||
|
publish = false
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
[package.metadata]
|
||||||
|
cargo-fuzz = true
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
libfuzzer-sys = "0.4"
|
||||||
|
nora-registry = { path = "../nora-registry" }
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "fuzz_validation"
|
||||||
|
path = "fuzz_targets/fuzz_validation.rs"
|
||||||
|
doc = false
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "fuzz_docker_manifest"
|
||||||
|
path = "fuzz_targets/fuzz_docker_manifest.rs"
|
||||||
|
doc = false
|
||||||
8
fuzz/fuzz_targets/fuzz_docker_manifest.rs
Normal file
8
fuzz/fuzz_targets/fuzz_docker_manifest.rs
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
#![no_main]
|
||||||
|
use libfuzzer_sys::fuzz_target;
|
||||||
|
use nora_registry::docker_fuzz::detect_manifest_media_type;
|
||||||
|
|
||||||
|
fuzz_target!(|data: &[u8]| {
|
||||||
|
// Fuzz Docker manifest parser — must never panic on any input
|
||||||
|
let _ = detect_manifest_media_type(data);
|
||||||
|
});
|
||||||
13
fuzz/fuzz_targets/fuzz_validation.rs
Normal file
13
fuzz/fuzz_targets/fuzz_validation.rs
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
#![no_main]
|
||||||
|
use libfuzzer_sys::fuzz_target;
|
||||||
|
use nora_registry::validation::{
|
||||||
|
validate_digest, validate_docker_name, validate_docker_reference, validate_storage_key,
|
||||||
|
};
|
||||||
|
|
||||||
|
fuzz_target!(|data: &str| {
|
||||||
|
// Fuzz all validators — they must never panic on any input
|
||||||
|
let _ = validate_storage_key(data);
|
||||||
|
let _ = validate_docker_name(data);
|
||||||
|
let _ = validate_digest(data);
|
||||||
|
let _ = validate_docker_reference(data);
|
||||||
|
});
|
||||||
98
install.sh
Normal file
98
install.sh
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
#!/usr/bin/env sh
|
||||||
|
# NORA installer — https://getnora.io/install.sh
|
||||||
|
# Usage: curl -fsSL https://getnora.io/install.sh | sh
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
REPO="getnora-io/nora"
|
||||||
|
BINARY="nora"
|
||||||
|
INSTALL_DIR="/usr/local/bin"
|
||||||
|
|
||||||
|
# ── Detect OS and architecture ──────────────────────────────────────────────
|
||||||
|
|
||||||
|
OS="$(uname -s)"
|
||||||
|
ARCH="$(uname -m)"
|
||||||
|
|
||||||
|
case "$OS" in
|
||||||
|
Linux) os="linux" ;;
|
||||||
|
Darwin) os="darwin" ;;
|
||||||
|
*)
|
||||||
|
echo "Unsupported OS: $OS"
|
||||||
|
echo "Please download manually: https://github.com/$REPO/releases/latest"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
case "$ARCH" in
|
||||||
|
x86_64 | amd64) arch="amd64" ;;
|
||||||
|
aarch64 | arm64) arch="arm64" ;;
|
||||||
|
*)
|
||||||
|
echo "Unsupported architecture: $ARCH"
|
||||||
|
echo "Please download manually: https://github.com/$REPO/releases/latest"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
ASSET="${BINARY}-${os}-${arch}"
|
||||||
|
|
||||||
|
# ── Get latest release version ──────────────────────────────────────────────
|
||||||
|
|
||||||
|
VERSION="$(curl -fsSL "https://api.github.com/repos/$REPO/releases/latest" \
|
||||||
|
| grep '"tag_name"' \
|
||||||
|
| sed 's/.*"tag_name": *"\([^"]*\)".*/\1/')"
|
||||||
|
|
||||||
|
if [ -z "$VERSION" ]; then
|
||||||
|
echo "Failed to get latest version"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Installing NORA $VERSION ($os/$arch)..."
|
||||||
|
|
||||||
|
# ── Download binary and checksum ────────────────────────────────────────────
|
||||||
|
|
||||||
|
BASE_URL="https://github.com/$REPO/releases/download/$VERSION"
|
||||||
|
TMP_DIR="$(mktemp -d)"
|
||||||
|
trap 'rm -rf "$TMP_DIR"' EXIT
|
||||||
|
|
||||||
|
echo "Downloading $ASSET..."
|
||||||
|
curl -fsSL "$BASE_URL/$ASSET" -o "$TMP_DIR/$BINARY"
|
||||||
|
curl -fsSL "$BASE_URL/$ASSET.sha256" -o "$TMP_DIR/$ASSET.sha256"
|
||||||
|
|
||||||
|
# ── Verify checksum ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
echo "Verifying checksum..."
|
||||||
|
EXPECTED="$(awk '{print $1}' "$TMP_DIR/$ASSET.sha256")"
|
||||||
|
ACTUAL="$(sha256sum "$TMP_DIR/$BINARY" | awk '{print $1}')"
|
||||||
|
|
||||||
|
if [ "$EXPECTED" != "$ACTUAL" ]; then
|
||||||
|
echo "Checksum mismatch!"
|
||||||
|
echo " Expected: $EXPECTED"
|
||||||
|
echo " Actual: $ACTUAL"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Checksum OK"
|
||||||
|
|
||||||
|
# ── Install ─────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
chmod +x "$TMP_DIR/$BINARY"
|
||||||
|
|
||||||
|
if [ -w "$INSTALL_DIR" ]; then
|
||||||
|
mv "$TMP_DIR/$BINARY" "$INSTALL_DIR/$BINARY"
|
||||||
|
elif command -v sudo >/dev/null 2>&1; then
|
||||||
|
sudo mv "$TMP_DIR/$BINARY" "$INSTALL_DIR/$BINARY"
|
||||||
|
else
|
||||||
|
# Fallback to ~/.local/bin
|
||||||
|
INSTALL_DIR="$HOME/.local/bin"
|
||||||
|
mkdir -p "$INSTALL_DIR"
|
||||||
|
mv "$TMP_DIR/$BINARY" "$INSTALL_DIR/$BINARY"
|
||||||
|
echo "Installed to $INSTALL_DIR/$BINARY"
|
||||||
|
echo "Make sure $INSTALL_DIR is in your PATH"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ── Done ────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "NORA $VERSION installed to $INSTALL_DIR/$BINARY"
|
||||||
|
echo ""
|
||||||
|
nora --version 2>/dev/null || true
|
||||||
5
logo.svg
Normal file
5
logo.svg
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 300 72" width="300" height="72">
|
||||||
|
<text font-family="'SF Mono', 'Fira Code', 'Cascadia Code', monospace" font-weight="800" fill="#0f172a" letter-spacing="1">
|
||||||
|
<tspan x="8" y="58" font-size="52">N</tspan><tspan font-size="68" dy="-10" fill="#2563EB">O</tspan><tspan font-size="52" dy="10">RA</tspan>
|
||||||
|
</text>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 373 B |
@@ -18,6 +18,6 @@ reqwest.workspace = true
|
|||||||
serde.workspace = true
|
serde.workspace = true
|
||||||
serde_json.workspace = true
|
serde_json.workspace = true
|
||||||
clap = { version = "4", features = ["derive"] }
|
clap = { version = "4", features = ["derive"] }
|
||||||
indicatif = "0.17"
|
indicatif = "0.18"
|
||||||
tar = "0.4"
|
tar = "0.4"
|
||||||
flate2 = "1.0"
|
flate2 = "1.1"
|
||||||
|
|||||||
5902
nora-cli/nora-cli.cdx.json
Normal file
5902
nora-cli/nora-cli.cdx.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -10,6 +10,10 @@ description = "Cloud-Native Artifact Registry - Fast, lightweight, multi-protoco
|
|||||||
keywords = ["registry", "docker", "artifacts", "cloud-native", "devops"]
|
keywords = ["registry", "docker", "artifacts", "cloud-native", "devops"]
|
||||||
categories = ["command-line-utilities", "development-tools", "web-programming"]
|
categories = ["command-line-utilities", "development-tools", "web-programming"]
|
||||||
|
|
||||||
|
[lib]
|
||||||
|
name = "nora_registry"
|
||||||
|
path = "src/lib.rs"
|
||||||
|
|
||||||
[[bin]]
|
[[bin]]
|
||||||
name = "nora"
|
name = "nora"
|
||||||
path = "src/main.rs"
|
path = "src/main.rs"
|
||||||
@@ -26,19 +30,19 @@ sha2.workspace = true
|
|||||||
async-trait.workspace = true
|
async-trait.workspace = true
|
||||||
hmac.workspace = true
|
hmac.workspace = true
|
||||||
hex.workspace = true
|
hex.workspace = true
|
||||||
toml = "0.8"
|
toml = "1.0"
|
||||||
uuid = { version = "1", features = ["v4"] }
|
uuid = { version = "1", features = ["v4"] }
|
||||||
bcrypt = "0.17"
|
bcrypt = "0.19"
|
||||||
base64 = "0.22"
|
base64 = "0.22"
|
||||||
prometheus = "0.13"
|
prometheus = "0.14"
|
||||||
lazy_static = "1.5"
|
lazy_static = "1.5"
|
||||||
httpdate = "1"
|
httpdate = "1"
|
||||||
utoipa = { version = "5", features = ["axum_extras"] }
|
utoipa = { version = "5", features = ["axum_extras"] }
|
||||||
utoipa-swagger-ui = { version = "9", features = ["axum", "reqwest"] }
|
utoipa-swagger-ui = { version = "9", features = ["axum", "reqwest"] }
|
||||||
clap = { version = "4", features = ["derive"] }
|
clap = { version = "4", features = ["derive"] }
|
||||||
tar = "0.4"
|
tar = "0.4"
|
||||||
flate2 = "1.0"
|
flate2 = "1.1"
|
||||||
indicatif = "0.17"
|
indicatif = "0.18"
|
||||||
chrono = { version = "0.4", features = ["serde"] }
|
chrono = { version = "0.4", features = ["serde"] }
|
||||||
thiserror = "2"
|
thiserror = "2"
|
||||||
tower_governor = "0.8"
|
tower_governor = "0.8"
|
||||||
|
|||||||
9031
nora-registry/nora-registry.cdx.json
Normal file
9031
nora-registry/nora-registry.cdx.json
Normal file
File diff suppressed because it is too large
Load Diff
73
nora-registry/src/audit.rs
Normal file
73
nora-registry/src/audit.rs
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
||||||
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
//! Persistent audit log — append-only JSONL file
|
||||||
|
//!
|
||||||
|
//! Records who/when/what for every registry operation.
|
||||||
|
//! File: {storage_path}/audit.jsonl
|
||||||
|
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use parking_lot::Mutex;
|
||||||
|
use serde::Serialize;
|
||||||
|
use std::fs::{self, OpenOptions};
|
||||||
|
use std::io::Write;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use tracing::{info, warn};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize)]
|
||||||
|
pub struct AuditEntry {
|
||||||
|
pub ts: DateTime<Utc>,
|
||||||
|
pub action: String,
|
||||||
|
pub actor: String,
|
||||||
|
pub artifact: String,
|
||||||
|
pub registry: String,
|
||||||
|
pub detail: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AuditEntry {
|
||||||
|
pub fn new(action: &str, actor: &str, artifact: &str, registry: &str, detail: &str) -> Self {
|
||||||
|
Self {
|
||||||
|
ts: Utc::now(),
|
||||||
|
action: action.to_string(),
|
||||||
|
actor: actor.to_string(),
|
||||||
|
artifact: artifact.to_string(),
|
||||||
|
registry: registry.to_string(),
|
||||||
|
detail: detail.to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct AuditLog {
|
||||||
|
path: PathBuf,
|
||||||
|
writer: Mutex<Option<fs::File>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AuditLog {
|
||||||
|
pub fn new(storage_path: &str) -> Self {
|
||||||
|
let path = PathBuf::from(storage_path).join("audit.jsonl");
|
||||||
|
let writer = match OpenOptions::new().create(true).append(true).open(&path) {
|
||||||
|
Ok(f) => {
|
||||||
|
info!(path = %path.display(), "Audit log initialized");
|
||||||
|
Mutex::new(Some(f))
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn!(path = %path.display(), error = %e, "Failed to open audit log, auditing disabled");
|
||||||
|
Mutex::new(None)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Self { path, writer }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn log(&self, entry: AuditEntry) {
|
||||||
|
if let Some(ref mut file) = *self.writer.lock() {
|
||||||
|
if let Ok(json) = serde_json::to_string(&entry) {
|
||||||
|
let _ = writeln!(file, "{}", json);
|
||||||
|
let _ = file.flush();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn path(&self) -> &PathBuf {
|
||||||
|
&self.path
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -13,6 +13,7 @@ use std::collections::HashMap;
|
|||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use crate::tokens::Role;
|
||||||
use crate::AppState;
|
use crate::AppState;
|
||||||
|
|
||||||
/// Htpasswd-based authentication
|
/// Htpasswd-based authentication
|
||||||
@@ -108,7 +109,18 @@ pub async fn auth_middleware(
|
|||||||
if let Some(token) = auth_header.strip_prefix("Bearer ") {
|
if let Some(token) = auth_header.strip_prefix("Bearer ") {
|
||||||
if let Some(ref token_store) = state.tokens {
|
if let Some(ref token_store) = state.tokens {
|
||||||
match token_store.verify_token(token) {
|
match token_store.verify_token(token) {
|
||||||
Ok(_user) => return next.run(request).await,
|
Ok((_user, role)) => {
|
||||||
|
let method = request.method().clone();
|
||||||
|
if (method == axum::http::Method::PUT
|
||||||
|
|| method == axum::http::Method::POST
|
||||||
|
|| method == axum::http::Method::DELETE
|
||||||
|
|| method == axum::http::Method::PATCH)
|
||||||
|
&& !role.can_write()
|
||||||
|
{
|
||||||
|
return (StatusCode::FORBIDDEN, "Read-only token").into_response();
|
||||||
|
}
|
||||||
|
return next.run(request).await;
|
||||||
|
}
|
||||||
Err(_) => return unauthorized_response("Invalid or expired token"),
|
Err(_) => return unauthorized_response("Invalid or expired token"),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
@@ -175,6 +187,12 @@ pub struct CreateTokenRequest {
|
|||||||
#[serde(default = "default_ttl")]
|
#[serde(default = "default_ttl")]
|
||||||
pub ttl_days: u64,
|
pub ttl_days: u64,
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
|
#[serde(default = "default_role_str")]
|
||||||
|
pub role: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_role_str() -> String {
|
||||||
|
"read".to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn default_ttl() -> u64 {
|
fn default_ttl() -> u64 {
|
||||||
@@ -194,6 +212,7 @@ pub struct TokenListItem {
|
|||||||
pub expires_at: u64,
|
pub expires_at: u64,
|
||||||
pub last_used: Option<u64>,
|
pub last_used: Option<u64>,
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
|
pub role: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
@@ -227,7 +246,19 @@ async fn create_token(
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
match token_store.create_token(&req.username, req.ttl_days, req.description) {
|
let role = match req.role.as_str() {
|
||||||
|
"read" => Role::Read,
|
||||||
|
"write" => Role::Write,
|
||||||
|
"admin" => Role::Admin,
|
||||||
|
_ => {
|
||||||
|
return (
|
||||||
|
StatusCode::BAD_REQUEST,
|
||||||
|
"Invalid role. Use: read, write, admin",
|
||||||
|
)
|
||||||
|
.into_response()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
match token_store.create_token(&req.username, req.ttl_days, req.description, role) {
|
||||||
Ok(token) => Json(CreateTokenResponse {
|
Ok(token) => Json(CreateTokenResponse {
|
||||||
token,
|
token,
|
||||||
expires_in_days: req.ttl_days,
|
expires_in_days: req.ttl_days,
|
||||||
@@ -271,6 +302,7 @@ async fn list_tokens(
|
|||||||
expires_at: t.expires_at,
|
expires_at: t.expires_at,
|
||||||
last_used: t.last_used,
|
last_used: t.last_used,
|
||||||
description: t.description,
|
description: t.description,
|
||||||
|
role: t.role.to_string(),
|
||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
|
|||||||
@@ -1,12 +1,18 @@
|
|||||||
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
use base64::{engine::general_purpose::STANDARD, Engine};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
|
|
||||||
pub use crate::secrets::SecretsConfig;
|
pub use crate::secrets::SecretsConfig;
|
||||||
|
|
||||||
|
/// Encode "user:pass" into a Basic Auth header value, e.g. "Basic dXNlcjpwYXNz".
|
||||||
|
pub fn basic_auth_header(credentials: &str) -> String {
|
||||||
|
format!("Basic {}", STANDARD.encode(credentials))
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct Config {
|
pub struct Config {
|
||||||
pub server: ServerConfig,
|
pub server: ServerConfig,
|
||||||
@@ -36,6 +42,13 @@ pub struct ServerConfig {
|
|||||||
/// Public URL for generating pull commands (e.g., "registry.example.com")
|
/// Public URL for generating pull commands (e.g., "registry.example.com")
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub public_url: Option<String>,
|
pub public_url: Option<String>,
|
||||||
|
/// Maximum request body size in MB (default: 2048 = 2GB)
|
||||||
|
#[serde(default = "default_body_limit_mb")]
|
||||||
|
pub body_limit_mb: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_body_limit_mb() -> usize {
|
||||||
|
2048 // 2GB - enough for any Docker image
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)]
|
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)]
|
||||||
@@ -86,7 +99,7 @@ fn default_bucket() -> String {
|
|||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct MavenConfig {
|
pub struct MavenConfig {
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub proxies: Vec<String>,
|
pub proxies: Vec<MavenProxyEntry>,
|
||||||
#[serde(default = "default_timeout")]
|
#[serde(default = "default_timeout")]
|
||||||
pub proxy_timeout: u64,
|
pub proxy_timeout: u64,
|
||||||
}
|
}
|
||||||
@@ -95,14 +108,21 @@ pub struct MavenConfig {
|
|||||||
pub struct NpmConfig {
|
pub struct NpmConfig {
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub proxy: Option<String>,
|
pub proxy: Option<String>,
|
||||||
|
#[serde(default)]
|
||||||
|
pub proxy_auth: Option<String>, // "user:pass" for basic auth
|
||||||
#[serde(default = "default_timeout")]
|
#[serde(default = "default_timeout")]
|
||||||
pub proxy_timeout: u64,
|
pub proxy_timeout: u64,
|
||||||
|
/// Metadata cache TTL in seconds (default: 300 = 5 min). Set to 0 to cache forever.
|
||||||
|
#[serde(default = "default_metadata_ttl")]
|
||||||
|
pub metadata_ttl: u64,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct PypiConfig {
|
pub struct PypiConfig {
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub proxy: Option<String>,
|
pub proxy: Option<String>,
|
||||||
|
#[serde(default)]
|
||||||
|
pub proxy_auth: Option<String>, // "user:pass" for basic auth
|
||||||
#[serde(default = "default_timeout")]
|
#[serde(default = "default_timeout")]
|
||||||
pub proxy_timeout: u64,
|
pub proxy_timeout: u64,
|
||||||
}
|
}
|
||||||
@@ -124,6 +144,37 @@ pub struct DockerUpstream {
|
|||||||
pub auth: Option<String>, // "user:pass" for basic auth
|
pub auth: Option<String>, // "user:pass" for basic auth
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Maven upstream proxy configuration
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
#[serde(untagged)]
|
||||||
|
pub enum MavenProxyEntry {
|
||||||
|
Simple(String),
|
||||||
|
Full(MavenProxy),
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Maven upstream proxy with optional auth
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct MavenProxy {
|
||||||
|
pub url: String,
|
||||||
|
#[serde(default)]
|
||||||
|
pub auth: Option<String>, // "user:pass" for basic auth
|
||||||
|
}
|
||||||
|
|
||||||
|
impl MavenProxyEntry {
|
||||||
|
pub fn url(&self) -> &str {
|
||||||
|
match self {
|
||||||
|
MavenProxyEntry::Simple(s) => s,
|
||||||
|
MavenProxyEntry::Full(p) => &p.url,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn auth(&self) -> Option<&str> {
|
||||||
|
match self {
|
||||||
|
MavenProxyEntry::Simple(_) => None,
|
||||||
|
MavenProxyEntry::Full(p) => p.auth.as_deref(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Raw repository configuration for simple file storage
|
/// Raw repository configuration for simple file storage
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct RawConfig {
|
pub struct RawConfig {
|
||||||
@@ -167,10 +218,16 @@ fn default_timeout() -> u64 {
|
|||||||
30
|
30
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn default_metadata_ttl() -> u64 {
|
||||||
|
300 // 5 minutes
|
||||||
|
}
|
||||||
|
|
||||||
impl Default for MavenConfig {
|
impl Default for MavenConfig {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self {
|
Self {
|
||||||
proxies: vec!["https://repo1.maven.org/maven2".to_string()],
|
proxies: vec![MavenProxyEntry::Simple(
|
||||||
|
"https://repo1.maven.org/maven2".to_string(),
|
||||||
|
)],
|
||||||
proxy_timeout: 30,
|
proxy_timeout: 30,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -180,7 +237,9 @@ impl Default for NpmConfig {
|
|||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self {
|
Self {
|
||||||
proxy: Some("https://registry.npmjs.org".to_string()),
|
proxy: Some("https://registry.npmjs.org".to_string()),
|
||||||
|
proxy_auth: None,
|
||||||
proxy_timeout: 30,
|
proxy_timeout: 30,
|
||||||
|
metadata_ttl: 300,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -189,6 +248,7 @@ impl Default for PypiConfig {
|
|||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self {
|
Self {
|
||||||
proxy: Some("https://pypi.org/simple/".to_string()),
|
proxy: Some("https://pypi.org/simple/".to_string()),
|
||||||
|
proxy_auth: None,
|
||||||
proxy_timeout: 30,
|
proxy_timeout: 30,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -249,6 +309,8 @@ impl Default for AuthConfig {
|
|||||||
/// - `NORA_RATE_LIMIT_GENERAL_BURST` - General burst size
|
/// - `NORA_RATE_LIMIT_GENERAL_BURST` - General burst size
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct RateLimitConfig {
|
pub struct RateLimitConfig {
|
||||||
|
#[serde(default = "default_rate_limit_enabled")]
|
||||||
|
pub enabled: bool,
|
||||||
#[serde(default = "default_auth_rps")]
|
#[serde(default = "default_auth_rps")]
|
||||||
pub auth_rps: u64,
|
pub auth_rps: u64,
|
||||||
#[serde(default = "default_auth_burst")]
|
#[serde(default = "default_auth_burst")]
|
||||||
@@ -263,6 +325,9 @@ pub struct RateLimitConfig {
|
|||||||
pub general_burst: u32,
|
pub general_burst: u32,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn default_rate_limit_enabled() -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
fn default_auth_rps() -> u64 {
|
fn default_auth_rps() -> u64 {
|
||||||
1
|
1
|
||||||
}
|
}
|
||||||
@@ -285,6 +350,7 @@ fn default_general_burst() -> u32 {
|
|||||||
impl Default for RateLimitConfig {
|
impl Default for RateLimitConfig {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self {
|
Self {
|
||||||
|
enabled: default_rate_limit_enabled(),
|
||||||
auth_rps: default_auth_rps(),
|
auth_rps: default_auth_rps(),
|
||||||
auth_burst: default_auth_burst(),
|
auth_burst: default_auth_burst(),
|
||||||
upload_rps: default_upload_rps(),
|
upload_rps: default_upload_rps(),
|
||||||
@@ -296,6 +362,37 @@ impl Default for RateLimitConfig {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl Config {
|
impl Config {
|
||||||
|
/// Warn if credentials are configured via config.toml (not env vars)
|
||||||
|
pub fn warn_plaintext_credentials(&self) {
|
||||||
|
// Docker upstreams
|
||||||
|
for (i, upstream) in self.docker.upstreams.iter().enumerate() {
|
||||||
|
if upstream.auth.is_some() && std::env::var("NORA_DOCKER_UPSTREAMS").is_err() {
|
||||||
|
tracing::warn!(
|
||||||
|
upstream_index = i,
|
||||||
|
url = %upstream.url,
|
||||||
|
"Docker upstream credentials in config.toml are plaintext — consider NORA_DOCKER_UPSTREAMS env var"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Maven proxies
|
||||||
|
for proxy in &self.maven.proxies {
|
||||||
|
if proxy.auth().is_some() && std::env::var("NORA_MAVEN_PROXIES").is_err() {
|
||||||
|
tracing::warn!(
|
||||||
|
url = %proxy.url(),
|
||||||
|
"Maven proxy credentials in config.toml are plaintext — consider NORA_MAVEN_PROXIES env var"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// npm
|
||||||
|
if self.npm.proxy_auth.is_some() && std::env::var("NORA_NPM_PROXY_AUTH").is_err() {
|
||||||
|
tracing::warn!("npm proxy credentials in config.toml are plaintext — consider NORA_NPM_PROXY_AUTH env var");
|
||||||
|
}
|
||||||
|
// PyPI
|
||||||
|
if self.pypi.proxy_auth.is_some() && std::env::var("NORA_PYPI_PROXY_AUTH").is_err() {
|
||||||
|
tracing::warn!("PyPI proxy credentials in config.toml are plaintext — consider NORA_PYPI_PROXY_AUTH env var");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Load configuration with priority: ENV > config.toml > defaults
|
/// Load configuration with priority: ENV > config.toml > defaults
|
||||||
pub fn load() -> Self {
|
pub fn load() -> Self {
|
||||||
// 1. Start with defaults
|
// 1. Start with defaults
|
||||||
@@ -324,6 +421,11 @@ impl Config {
|
|||||||
if let Ok(val) = env::var("NORA_PUBLIC_URL") {
|
if let Ok(val) = env::var("NORA_PUBLIC_URL") {
|
||||||
self.server.public_url = if val.is_empty() { None } else { Some(val) };
|
self.server.public_url = if val.is_empty() { None } else { Some(val) };
|
||||||
}
|
}
|
||||||
|
if let Ok(val) = env::var("NORA_BODY_LIMIT_MB") {
|
||||||
|
if let Ok(mb) = val.parse() {
|
||||||
|
self.server.body_limit_mb = mb;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Storage config
|
// Storage config
|
||||||
if let Ok(val) = env::var("NORA_STORAGE_MODE") {
|
if let Ok(val) = env::var("NORA_STORAGE_MODE") {
|
||||||
@@ -359,9 +461,23 @@ impl Config {
|
|||||||
self.auth.htpasswd_file = val;
|
self.auth.htpasswd_file = val;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Maven config
|
// Maven config — supports "url1,url2" or "url1|auth1,url2|auth2"
|
||||||
if let Ok(val) = env::var("NORA_MAVEN_PROXIES") {
|
if let Ok(val) = env::var("NORA_MAVEN_PROXIES") {
|
||||||
self.maven.proxies = val.split(',').map(|s| s.trim().to_string()).collect();
|
self.maven.proxies = val
|
||||||
|
.split(',')
|
||||||
|
.filter(|s| !s.is_empty())
|
||||||
|
.map(|s| {
|
||||||
|
let parts: Vec<&str> = s.trim().splitn(2, '|').collect();
|
||||||
|
if parts.len() > 1 {
|
||||||
|
MavenProxyEntry::Full(MavenProxy {
|
||||||
|
url: parts[0].to_string(),
|
||||||
|
auth: Some(parts[1].to_string()),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
MavenProxyEntry::Simple(parts[0].to_string())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
}
|
}
|
||||||
if let Ok(val) = env::var("NORA_MAVEN_PROXY_TIMEOUT") {
|
if let Ok(val) = env::var("NORA_MAVEN_PROXY_TIMEOUT") {
|
||||||
if let Ok(timeout) = val.parse() {
|
if let Ok(timeout) = val.parse() {
|
||||||
@@ -378,6 +494,16 @@ impl Config {
|
|||||||
self.npm.proxy_timeout = timeout;
|
self.npm.proxy_timeout = timeout;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if let Ok(val) = env::var("NORA_NPM_METADATA_TTL") {
|
||||||
|
if let Ok(ttl) = val.parse() {
|
||||||
|
self.npm.metadata_ttl = ttl;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// npm proxy auth
|
||||||
|
if let Ok(val) = env::var("NORA_NPM_PROXY_AUTH") {
|
||||||
|
self.npm.proxy_auth = if val.is_empty() { None } else { Some(val) };
|
||||||
|
}
|
||||||
|
|
||||||
// PyPI config
|
// PyPI config
|
||||||
if let Ok(val) = env::var("NORA_PYPI_PROXY") {
|
if let Ok(val) = env::var("NORA_PYPI_PROXY") {
|
||||||
@@ -389,6 +515,11 @@ impl Config {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// PyPI proxy auth
|
||||||
|
if let Ok(val) = env::var("NORA_PYPI_PROXY_AUTH") {
|
||||||
|
self.pypi.proxy_auth = if val.is_empty() { None } else { Some(val) };
|
||||||
|
}
|
||||||
|
|
||||||
// Docker config
|
// Docker config
|
||||||
if let Ok(val) = env::var("NORA_DOCKER_PROXY_TIMEOUT") {
|
if let Ok(val) = env::var("NORA_DOCKER_PROXY_TIMEOUT") {
|
||||||
if let Ok(timeout) = val.parse() {
|
if let Ok(timeout) = val.parse() {
|
||||||
@@ -426,6 +557,9 @@ impl Config {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Rate limit config
|
// Rate limit config
|
||||||
|
if let Ok(val) = env::var("NORA_RATE_LIMIT_ENABLED") {
|
||||||
|
self.rate_limit.enabled = val.to_lowercase() == "true" || val == "1";
|
||||||
|
}
|
||||||
if let Ok(val) = env::var("NORA_RATE_LIMIT_AUTH_RPS") {
|
if let Ok(val) = env::var("NORA_RATE_LIMIT_AUTH_RPS") {
|
||||||
if let Ok(v) = val.parse::<u64>() {
|
if let Ok(v) = val.parse::<u64>() {
|
||||||
self.rate_limit.auth_rps = v;
|
self.rate_limit.auth_rps = v;
|
||||||
@@ -474,6 +608,7 @@ impl Default for Config {
|
|||||||
host: String::from("127.0.0.1"),
|
host: String::from("127.0.0.1"),
|
||||||
port: 4000,
|
port: 4000,
|
||||||
public_url: None,
|
public_url: None,
|
||||||
|
body_limit_mb: 2048,
|
||||||
},
|
},
|
||||||
storage: StorageConfig {
|
storage: StorageConfig {
|
||||||
mode: StorageMode::Local,
|
mode: StorageMode::Local,
|
||||||
|
|||||||
@@ -1,8 +1,29 @@
|
|||||||
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
use std::sync::atomic::{AtomicU64, Ordering};
|
use std::sync::atomic::{AtomicU64, Ordering};
|
||||||
use std::time::Instant;
|
use std::time::Instant;
|
||||||
|
use tracing::{info, warn};
|
||||||
|
|
||||||
|
/// Serializable snapshot of metrics for persistence
|
||||||
|
#[derive(Serialize, Deserialize, Default)]
|
||||||
|
struct MetricsSnapshot {
|
||||||
|
downloads: u64,
|
||||||
|
uploads: u64,
|
||||||
|
cache_hits: u64,
|
||||||
|
cache_misses: u64,
|
||||||
|
docker_downloads: u64,
|
||||||
|
docker_uploads: u64,
|
||||||
|
npm_downloads: u64,
|
||||||
|
maven_downloads: u64,
|
||||||
|
maven_uploads: u64,
|
||||||
|
cargo_downloads: u64,
|
||||||
|
pypi_downloads: u64,
|
||||||
|
raw_downloads: u64,
|
||||||
|
raw_uploads: u64,
|
||||||
|
}
|
||||||
|
|
||||||
/// Dashboard metrics for tracking registry activity
|
/// Dashboard metrics for tracking registry activity
|
||||||
/// Uses atomic counters for thread-safe access without locks
|
/// Uses atomic counters for thread-safe access without locks
|
||||||
@@ -25,6 +46,9 @@ pub struct DashboardMetrics {
|
|||||||
pub raw_uploads: AtomicU64,
|
pub raw_uploads: AtomicU64,
|
||||||
|
|
||||||
pub start_time: Instant,
|
pub start_time: Instant,
|
||||||
|
|
||||||
|
/// Path to metrics.json for persistence
|
||||||
|
persist_path: Option<PathBuf>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DashboardMetrics {
|
impl DashboardMetrics {
|
||||||
@@ -44,6 +68,75 @@ impl DashboardMetrics {
|
|||||||
raw_downloads: AtomicU64::new(0),
|
raw_downloads: AtomicU64::new(0),
|
||||||
raw_uploads: AtomicU64::new(0),
|
raw_uploads: AtomicU64::new(0),
|
||||||
start_time: Instant::now(),
|
start_time: Instant::now(),
|
||||||
|
persist_path: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create metrics with persistence — loads existing data from metrics.json
|
||||||
|
pub fn with_persistence(storage_path: &str) -> Self {
|
||||||
|
let path = Path::new(storage_path).join("metrics.json");
|
||||||
|
let mut metrics = Self::new();
|
||||||
|
metrics.persist_path = Some(path.clone());
|
||||||
|
|
||||||
|
// Load existing metrics if file exists
|
||||||
|
if path.exists() {
|
||||||
|
match std::fs::read_to_string(&path) {
|
||||||
|
Ok(data) => match serde_json::from_str::<MetricsSnapshot>(&data) {
|
||||||
|
Ok(snap) => {
|
||||||
|
metrics.downloads = AtomicU64::new(snap.downloads);
|
||||||
|
metrics.uploads = AtomicU64::new(snap.uploads);
|
||||||
|
metrics.cache_hits = AtomicU64::new(snap.cache_hits);
|
||||||
|
metrics.cache_misses = AtomicU64::new(snap.cache_misses);
|
||||||
|
metrics.docker_downloads = AtomicU64::new(snap.docker_downloads);
|
||||||
|
metrics.docker_uploads = AtomicU64::new(snap.docker_uploads);
|
||||||
|
metrics.npm_downloads = AtomicU64::new(snap.npm_downloads);
|
||||||
|
metrics.maven_downloads = AtomicU64::new(snap.maven_downloads);
|
||||||
|
metrics.maven_uploads = AtomicU64::new(snap.maven_uploads);
|
||||||
|
metrics.cargo_downloads = AtomicU64::new(snap.cargo_downloads);
|
||||||
|
metrics.pypi_downloads = AtomicU64::new(snap.pypi_downloads);
|
||||||
|
metrics.raw_downloads = AtomicU64::new(snap.raw_downloads);
|
||||||
|
metrics.raw_uploads = AtomicU64::new(snap.raw_uploads);
|
||||||
|
info!(
|
||||||
|
downloads = snap.downloads,
|
||||||
|
uploads = snap.uploads,
|
||||||
|
"Loaded persisted metrics"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
Err(e) => warn!("Failed to parse metrics.json: {}", e),
|
||||||
|
},
|
||||||
|
Err(e) => warn!("Failed to read metrics.json: {}", e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
metrics
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Save current metrics to disk
|
||||||
|
pub fn save(&self) {
|
||||||
|
let Some(path) = &self.persist_path else {
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
let snap = MetricsSnapshot {
|
||||||
|
downloads: self.downloads.load(Ordering::Relaxed),
|
||||||
|
uploads: self.uploads.load(Ordering::Relaxed),
|
||||||
|
cache_hits: self.cache_hits.load(Ordering::Relaxed),
|
||||||
|
cache_misses: self.cache_misses.load(Ordering::Relaxed),
|
||||||
|
docker_downloads: self.docker_downloads.load(Ordering::Relaxed),
|
||||||
|
docker_uploads: self.docker_uploads.load(Ordering::Relaxed),
|
||||||
|
npm_downloads: self.npm_downloads.load(Ordering::Relaxed),
|
||||||
|
maven_downloads: self.maven_downloads.load(Ordering::Relaxed),
|
||||||
|
maven_uploads: self.maven_uploads.load(Ordering::Relaxed),
|
||||||
|
cargo_downloads: self.cargo_downloads.load(Ordering::Relaxed),
|
||||||
|
pypi_downloads: self.pypi_downloads.load(Ordering::Relaxed),
|
||||||
|
raw_downloads: self.raw_downloads.load(Ordering::Relaxed),
|
||||||
|
raw_uploads: self.raw_uploads.load(Ordering::Relaxed),
|
||||||
|
};
|
||||||
|
// Atomic write: write to tmp then rename
|
||||||
|
let tmp = path.with_extension("json.tmp");
|
||||||
|
if let Ok(data) = serde_json::to_string_pretty(&snap) {
|
||||||
|
if std::fs::write(&tmp, &data).is_ok() {
|
||||||
|
let _ = std::fs::rename(&tmp, path);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
#![allow(dead_code)]
|
|
||||||
//! Application error handling with HTTP response conversion
|
//! Application error handling with HTTP response conversion
|
||||||
//!
|
//!
|
||||||
//! Provides a unified error type that can be converted to HTTP responses
|
//! Provides a unified error type that can be converted to HTTP responses
|
||||||
@@ -18,6 +17,7 @@ use thiserror::Error;
|
|||||||
use crate::storage::StorageError;
|
use crate::storage::StorageError;
|
||||||
use crate::validation::ValidationError;
|
use crate::validation::ValidationError;
|
||||||
|
|
||||||
|
#[allow(dead_code)] // Wiring into handlers planned for v0.3
|
||||||
/// Application-level errors with HTTP response conversion
|
/// Application-level errors with HTTP response conversion
|
||||||
#[derive(Debug, Error)]
|
#[derive(Debug, Error)]
|
||||||
pub enum AppError {
|
pub enum AppError {
|
||||||
@@ -40,6 +40,7 @@ pub enum AppError {
|
|||||||
Validation(#[from] ValidationError),
|
Validation(#[from] ValidationError),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
/// JSON error response body
|
/// JSON error response body
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
struct ErrorResponse {
|
struct ErrorResponse {
|
||||||
@@ -74,6 +75,7 @@ impl IntoResponse for AppError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
impl AppError {
|
impl AppError {
|
||||||
/// Create a not found error
|
/// Create a not found error
|
||||||
pub fn not_found(msg: impl Into<String>) -> Self {
|
pub fn not_found(msg: impl Into<String>) -> Self {
|
||||||
|
|||||||
121
nora-registry/src/gc.rs
Normal file
121
nora-registry/src/gc.rs
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
//! Garbage Collection for orphaned blobs
|
||||||
|
//!
|
||||||
|
//! Mark-and-sweep approach:
|
||||||
|
//! 1. List all blobs across registries
|
||||||
|
//! 2. Parse all manifests to find referenced blobs
|
||||||
|
//! 3. Blobs not referenced by any manifest = orphans
|
||||||
|
//! 4. Delete orphans (with --dry-run support)
|
||||||
|
|
||||||
|
use std::collections::HashSet;
|
||||||
|
|
||||||
|
use tracing::info;
|
||||||
|
|
||||||
|
use crate::storage::Storage;
|
||||||
|
|
||||||
|
pub struct GcResult {
|
||||||
|
pub total_blobs: usize,
|
||||||
|
pub referenced_blobs: usize,
|
||||||
|
pub orphaned_blobs: usize,
|
||||||
|
pub deleted_blobs: usize,
|
||||||
|
pub orphan_keys: Vec<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn run_gc(storage: &Storage, dry_run: bool) -> GcResult {
|
||||||
|
info!("Starting garbage collection (dry_run={})", dry_run);
|
||||||
|
|
||||||
|
// 1. Collect all blob keys
|
||||||
|
let all_blobs = collect_all_blobs(storage).await;
|
||||||
|
info!("Found {} total blobs", all_blobs.len());
|
||||||
|
|
||||||
|
// 2. Collect all referenced digests from manifests
|
||||||
|
let referenced = collect_referenced_digests(storage).await;
|
||||||
|
info!(
|
||||||
|
"Found {} referenced digests from manifests",
|
||||||
|
referenced.len()
|
||||||
|
);
|
||||||
|
|
||||||
|
// 3. Find orphans
|
||||||
|
let mut orphan_keys: Vec<String> = Vec::new();
|
||||||
|
for key in &all_blobs {
|
||||||
|
if let Some(digest) = key.rsplit('/').next() {
|
||||||
|
if !referenced.contains(digest) {
|
||||||
|
orphan_keys.push(key.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
info!("Found {} orphaned blobs", orphan_keys.len());
|
||||||
|
|
||||||
|
let mut deleted = 0;
|
||||||
|
if !dry_run {
|
||||||
|
for key in &orphan_keys {
|
||||||
|
if storage.delete(key).await.is_ok() {
|
||||||
|
deleted += 1;
|
||||||
|
info!("Deleted: {}", key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
info!("Deleted {} orphaned blobs", deleted);
|
||||||
|
} else {
|
||||||
|
for key in &orphan_keys {
|
||||||
|
info!("[dry-run] Would delete: {}", key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
GcResult {
|
||||||
|
total_blobs: all_blobs.len(),
|
||||||
|
referenced_blobs: referenced.len(),
|
||||||
|
orphaned_blobs: orphan_keys.len(),
|
||||||
|
deleted_blobs: deleted,
|
||||||
|
orphan_keys,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn collect_all_blobs(storage: &Storage) -> Vec<String> {
|
||||||
|
let mut blobs = Vec::new();
|
||||||
|
let docker_blobs = storage.list("docker/").await;
|
||||||
|
for key in docker_blobs {
|
||||||
|
if key.contains("/blobs/") {
|
||||||
|
blobs.push(key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
blobs
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn collect_referenced_digests(storage: &Storage) -> HashSet<String> {
|
||||||
|
let mut referenced = HashSet::new();
|
||||||
|
|
||||||
|
let all_keys = storage.list("docker/").await;
|
||||||
|
for key in &all_keys {
|
||||||
|
if !key.contains("/manifests/") || !key.ends_with(".json") || key.ends_with(".meta.json") {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Ok(data) = storage.get(key).await {
|
||||||
|
if let Ok(json) = serde_json::from_slice::<serde_json::Value>(&data) {
|
||||||
|
if let Some(config) = json.get("config") {
|
||||||
|
if let Some(digest) = config.get("digest").and_then(|v| v.as_str()) {
|
||||||
|
referenced.insert(digest.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(layers) = json.get("layers").and_then(|v| v.as_array()) {
|
||||||
|
for layer in layers {
|
||||||
|
if let Some(digest) = layer.get("digest").and_then(|v| v.as_str()) {
|
||||||
|
referenced.insert(digest.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(manifests) = json.get("manifests").and_then(|v| v.as_array()) {
|
||||||
|
for m in manifests {
|
||||||
|
if let Some(digest) = m.get("digest").and_then(|v| v.as_str()) {
|
||||||
|
referenced.insert(digest.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
referenced
|
||||||
|
}
|
||||||
28
nora-registry/src/lib.rs
Normal file
28
nora-registry/src/lib.rs
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
//! NORA Registry — library interface for fuzzing and testing
|
||||||
|
|
||||||
|
pub mod validation;
|
||||||
|
|
||||||
|
/// Re-export Docker manifest parsing for fuzz targets
|
||||||
|
pub mod docker_fuzz {
|
||||||
|
pub fn detect_manifest_media_type(data: &[u8]) -> String {
|
||||||
|
let Ok(value) = serde_json::from_slice::<serde_json::Value>(data) else {
|
||||||
|
return "application/octet-stream".to_string();
|
||||||
|
};
|
||||||
|
if let Some(mt) = value.get("mediaType").and_then(|v| v.as_str()) {
|
||||||
|
return mt.to_string();
|
||||||
|
}
|
||||||
|
if value.get("manifests").is_some() {
|
||||||
|
return "application/vnd.oci.image.index.v1+json".to_string();
|
||||||
|
}
|
||||||
|
if value.get("schemaVersion").and_then(|v| v.as_i64()) == Some(2) {
|
||||||
|
if value.get("layers").is_some() {
|
||||||
|
return "application/vnd.oci.image.manifest.v1+json".to_string();
|
||||||
|
}
|
||||||
|
return "application/vnd.docker.distribution.manifest.v2+json".to_string();
|
||||||
|
}
|
||||||
|
if value.get("schemaVersion").and_then(|v| v.as_i64()) == Some(1) {
|
||||||
|
return "application/vnd.docker.distribution.manifest.v1+json".to_string();
|
||||||
|
}
|
||||||
|
"application/vnd.docker.distribution.manifest.v2+json".to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,14 +2,17 @@
|
|||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
mod activity_log;
|
mod activity_log;
|
||||||
|
mod audit;
|
||||||
mod auth;
|
mod auth;
|
||||||
mod backup;
|
mod backup;
|
||||||
mod config;
|
mod config;
|
||||||
mod dashboard_metrics;
|
mod dashboard_metrics;
|
||||||
mod error;
|
mod error;
|
||||||
|
mod gc;
|
||||||
mod health;
|
mod health;
|
||||||
mod metrics;
|
mod metrics;
|
||||||
mod migrate;
|
mod migrate;
|
||||||
|
mod mirror;
|
||||||
mod openapi;
|
mod openapi;
|
||||||
mod rate_limit;
|
mod rate_limit;
|
||||||
mod registry;
|
mod registry;
|
||||||
@@ -31,6 +34,7 @@ use tracing::{error, info, warn};
|
|||||||
use tracing_subscriber::{fmt, prelude::*, EnvFilter};
|
use tracing_subscriber::{fmt, prelude::*, EnvFilter};
|
||||||
|
|
||||||
use activity_log::ActivityLog;
|
use activity_log::ActivityLog;
|
||||||
|
use audit::AuditLog;
|
||||||
use auth::HtpasswdAuth;
|
use auth::HtpasswdAuth;
|
||||||
use config::{Config, StorageMode};
|
use config::{Config, StorageMode};
|
||||||
use dashboard_metrics::DashboardMetrics;
|
use dashboard_metrics::DashboardMetrics;
|
||||||
@@ -61,6 +65,12 @@ enum Commands {
|
|||||||
#[arg(short, long)]
|
#[arg(short, long)]
|
||||||
input: PathBuf,
|
input: PathBuf,
|
||||||
},
|
},
|
||||||
|
/// Garbage collect orphaned blobs
|
||||||
|
Gc {
|
||||||
|
/// Dry run - show what would be deleted without deleting
|
||||||
|
#[arg(long, default_value = "false")]
|
||||||
|
dry_run: bool,
|
||||||
|
},
|
||||||
/// Migrate artifacts between storage backends
|
/// Migrate artifacts between storage backends
|
||||||
Migrate {
|
Migrate {
|
||||||
/// Source storage: local or s3
|
/// Source storage: local or s3
|
||||||
@@ -73,6 +83,17 @@ enum Commands {
|
|||||||
#[arg(long, default_value = "false")]
|
#[arg(long, default_value = "false")]
|
||||||
dry_run: bool,
|
dry_run: bool,
|
||||||
},
|
},
|
||||||
|
/// Pre-fetch dependencies through NORA proxy cache
|
||||||
|
Mirror {
|
||||||
|
#[command(subcommand)]
|
||||||
|
format: mirror::MirrorFormat,
|
||||||
|
/// NORA registry URL
|
||||||
|
#[arg(long, default_value = "http://localhost:4000", global = true)]
|
||||||
|
registry: String,
|
||||||
|
/// Max concurrent downloads
|
||||||
|
#[arg(long, default_value = "8", global = true)]
|
||||||
|
concurrency: usize,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct AppState {
|
pub struct AppState {
|
||||||
@@ -83,6 +104,7 @@ pub struct AppState {
|
|||||||
pub tokens: Option<TokenStore>,
|
pub tokens: Option<TokenStore>,
|
||||||
pub metrics: DashboardMetrics,
|
pub metrics: DashboardMetrics,
|
||||||
pub activity: ActivityLog,
|
pub activity: ActivityLog,
|
||||||
|
pub audit: AuditLog,
|
||||||
pub docker_auth: registry::DockerAuth,
|
pub docker_auth: registry::DockerAuth,
|
||||||
pub repo_index: RepoIndex,
|
pub repo_index: RepoIndex,
|
||||||
pub http_client: reqwest::Client,
|
pub http_client: reqwest::Client,
|
||||||
@@ -143,6 +165,27 @@ async fn main() {
|
|||||||
std::process::exit(1);
|
std::process::exit(1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Some(Commands::Gc { dry_run }) => {
|
||||||
|
let result = gc::run_gc(&storage, dry_run).await;
|
||||||
|
println!("GC Summary:");
|
||||||
|
println!(" Total blobs: {}", result.total_blobs);
|
||||||
|
println!(" Referenced: {}", result.referenced_blobs);
|
||||||
|
println!(" Orphaned: {}", result.orphaned_blobs);
|
||||||
|
println!(" Deleted: {}", result.deleted_blobs);
|
||||||
|
if dry_run && !result.orphan_keys.is_empty() {
|
||||||
|
println!("\nRun without --dry-run to delete orphaned blobs.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Some(Commands::Mirror {
|
||||||
|
format,
|
||||||
|
registry,
|
||||||
|
concurrency,
|
||||||
|
}) => {
|
||||||
|
if let Err(e) = mirror::run_mirror(format, ®istry, concurrency).await {
|
||||||
|
error!("Mirror failed: {}", e);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
Some(Commands::Migrate { from, to, dry_run }) => {
|
Some(Commands::Migrate { from, to, dry_run }) => {
|
||||||
let source = match from.as_str() {
|
let source = match from.as_str() {
|
||||||
"local" => Storage::new_local(&config.storage.path),
|
"local" => Storage::new_local(&config.storage.path),
|
||||||
@@ -210,6 +253,7 @@ async fn run_server(config: Config, storage: Storage) {
|
|||||||
|
|
||||||
// Log rate limiting configuration
|
// Log rate limiting configuration
|
||||||
info!(
|
info!(
|
||||||
|
enabled = config.rate_limit.enabled,
|
||||||
auth_rps = config.rate_limit.auth_rps,
|
auth_rps = config.rate_limit.auth_rps,
|
||||||
auth_burst = config.rate_limit.auth_burst,
|
auth_burst = config.rate_limit.auth_burst,
|
||||||
upload_rps = config.rate_limit.upload_rps,
|
upload_rps = config.rate_limit.upload_rps,
|
||||||
@@ -264,41 +308,25 @@ async fn run_server(config: Config, storage: Storage) {
|
|||||||
None
|
None
|
||||||
};
|
};
|
||||||
|
|
||||||
// Create rate limiters before moving config to state
|
let storage_path = config.storage.path.clone();
|
||||||
let auth_limiter = rate_limit::auth_rate_limiter(&config.rate_limit);
|
let rate_limit_enabled = config.rate_limit.enabled;
|
||||||
let upload_limiter = rate_limit::upload_rate_limiter(&config.rate_limit);
|
|
||||||
let general_limiter = rate_limit::general_rate_limiter(&config.rate_limit);
|
// Warn about plaintext credentials in config.toml
|
||||||
|
config.warn_plaintext_credentials();
|
||||||
|
|
||||||
// Initialize Docker auth with proxy timeout
|
// Initialize Docker auth with proxy timeout
|
||||||
let docker_auth = registry::DockerAuth::new(config.docker.proxy_timeout);
|
let docker_auth = registry::DockerAuth::new(config.docker.proxy_timeout);
|
||||||
|
|
||||||
let http_client = reqwest::Client::new();
|
let http_client = reqwest::Client::new();
|
||||||
|
|
||||||
let state = Arc::new(AppState {
|
// Registry routes (shared between rate-limited and non-limited paths)
|
||||||
storage,
|
|
||||||
config,
|
|
||||||
start_time,
|
|
||||||
auth,
|
|
||||||
tokens,
|
|
||||||
metrics: DashboardMetrics::new(),
|
|
||||||
activity: ActivityLog::new(50),
|
|
||||||
docker_auth,
|
|
||||||
repo_index: RepoIndex::new(),
|
|
||||||
http_client,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Token routes with strict rate limiting (brute-force protection)
|
|
||||||
let auth_routes = auth::token_routes().layer(auth_limiter);
|
|
||||||
|
|
||||||
// Registry routes with upload rate limiting
|
|
||||||
let registry_routes = Router::new()
|
let registry_routes = Router::new()
|
||||||
.merge(registry::docker_routes())
|
.merge(registry::docker_routes())
|
||||||
.merge(registry::maven_routes())
|
.merge(registry::maven_routes())
|
||||||
.merge(registry::npm_routes())
|
.merge(registry::npm_routes())
|
||||||
.merge(registry::cargo_routes())
|
.merge(registry::cargo_routes())
|
||||||
.merge(registry::pypi_routes())
|
.merge(registry::pypi_routes())
|
||||||
.merge(registry::raw_routes())
|
.merge(registry::raw_routes());
|
||||||
.layer(upload_limiter);
|
|
||||||
|
|
||||||
// Routes WITHOUT rate limiting (health, metrics, UI)
|
// Routes WITHOUT rate limiting (health, metrics, UI)
|
||||||
let public_routes = Router::new()
|
let public_routes = Router::new()
|
||||||
@@ -307,16 +335,46 @@ async fn run_server(config: Config, storage: Storage) {
|
|||||||
.merge(ui::routes())
|
.merge(ui::routes())
|
||||||
.merge(openapi::routes());
|
.merge(openapi::routes());
|
||||||
|
|
||||||
// Routes WITH rate limiting
|
let app_routes = if rate_limit_enabled {
|
||||||
let rate_limited_routes = Router::new()
|
// Create rate limiters before moving config to state
|
||||||
.merge(auth_routes)
|
let auth_limiter = rate_limit::auth_rate_limiter(&config.rate_limit);
|
||||||
.merge(registry_routes)
|
let upload_limiter = rate_limit::upload_rate_limiter(&config.rate_limit);
|
||||||
.layer(general_limiter);
|
let general_limiter = rate_limit::general_rate_limiter(&config.rate_limit);
|
||||||
|
|
||||||
|
let auth_routes = auth::token_routes().layer(auth_limiter);
|
||||||
|
let limited_registry = registry_routes.layer(upload_limiter);
|
||||||
|
|
||||||
|
Router::new()
|
||||||
|
.merge(auth_routes)
|
||||||
|
.merge(limited_registry)
|
||||||
|
.layer(general_limiter)
|
||||||
|
} else {
|
||||||
|
info!("Rate limiting DISABLED");
|
||||||
|
Router::new()
|
||||||
|
.merge(auth::token_routes())
|
||||||
|
.merge(registry_routes)
|
||||||
|
};
|
||||||
|
|
||||||
|
let state = Arc::new(AppState {
|
||||||
|
storage,
|
||||||
|
config,
|
||||||
|
start_time,
|
||||||
|
auth,
|
||||||
|
tokens,
|
||||||
|
metrics: DashboardMetrics::with_persistence(&storage_path),
|
||||||
|
activity: ActivityLog::new(50),
|
||||||
|
audit: AuditLog::new(&storage_path),
|
||||||
|
docker_auth,
|
||||||
|
repo_index: RepoIndex::new(),
|
||||||
|
http_client,
|
||||||
|
});
|
||||||
|
|
||||||
let app = Router::new()
|
let app = Router::new()
|
||||||
.merge(public_routes)
|
.merge(public_routes)
|
||||||
.merge(rate_limited_routes)
|
.merge(app_routes)
|
||||||
.layer(DefaultBodyLimit::max(100 * 1024 * 1024)) // 100MB default body limit
|
.layer(DefaultBodyLimit::max(
|
||||||
|
state.config.server.body_limit_mb * 1024 * 1024,
|
||||||
|
))
|
||||||
.layer(middleware::from_fn(request_id::request_id_middleware))
|
.layer(middleware::from_fn(request_id::request_id_middleware))
|
||||||
.layer(middleware::from_fn(metrics::metrics_middleware))
|
.layer(middleware::from_fn(metrics::metrics_middleware))
|
||||||
.layer(middleware::from_fn_with_state(
|
.layer(middleware::from_fn_with_state(
|
||||||
@@ -335,6 +393,7 @@ async fn run_server(config: Config, storage: Storage) {
|
|||||||
version = env!("CARGO_PKG_VERSION"),
|
version = env!("CARGO_PKG_VERSION"),
|
||||||
storage = state.storage.backend_name(),
|
storage = state.storage.backend_name(),
|
||||||
auth_enabled = state.auth.is_some(),
|
auth_enabled = state.auth.is_some(),
|
||||||
|
body_limit_mb = state.config.server.body_limit_mb,
|
||||||
"Nora started"
|
"Nora started"
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -353,6 +412,16 @@ async fn run_server(config: Config, storage: Storage) {
|
|||||||
"Available endpoints"
|
"Available endpoints"
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Background task: persist metrics every 30 seconds
|
||||||
|
let metrics_state = state.clone();
|
||||||
|
tokio::spawn(async move {
|
||||||
|
let mut interval = tokio::time::interval(std::time::Duration::from_secs(30));
|
||||||
|
loop {
|
||||||
|
interval.tick().await;
|
||||||
|
metrics_state.metrics.save();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
// Graceful shutdown on SIGTERM/SIGINT
|
// Graceful shutdown on SIGTERM/SIGINT
|
||||||
axum::serve(
|
axum::serve(
|
||||||
listener,
|
listener,
|
||||||
@@ -362,6 +431,9 @@ async fn run_server(config: Config, storage: Storage) {
|
|||||||
.await
|
.await
|
||||||
.expect("Server error");
|
.expect("Server error");
|
||||||
|
|
||||||
|
// Save metrics on shutdown
|
||||||
|
state.metrics.save();
|
||||||
|
|
||||||
info!(
|
info!(
|
||||||
uptime_seconds = state.start_time.elapsed().as_secs(),
|
uptime_seconds = state.start_time.elapsed().as_secs(),
|
||||||
"Nora shutdown complete"
|
"Nora shutdown complete"
|
||||||
|
|||||||
325
nora-registry/src/mirror/mod.rs
Normal file
325
nora-registry/src/mirror/mod.rs
Normal file
@@ -0,0 +1,325 @@
|
|||||||
|
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
||||||
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
//! `nora mirror` — pre-fetch dependencies through NORA proxy cache.
|
||||||
|
|
||||||
|
mod npm;
|
||||||
|
|
||||||
|
use clap::Subcommand;
|
||||||
|
use indicatif::{ProgressBar, ProgressStyle};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::time::Instant;
|
||||||
|
|
||||||
|
#[derive(Subcommand)]
|
||||||
|
pub enum MirrorFormat {
|
||||||
|
/// Mirror npm packages
|
||||||
|
Npm {
|
||||||
|
/// Path to package-lock.json (v1/v2/v3)
|
||||||
|
#[arg(long, conflicts_with = "packages")]
|
||||||
|
lockfile: Option<PathBuf>,
|
||||||
|
/// Comma-separated package names
|
||||||
|
#[arg(long, conflicts_with = "lockfile", value_delimiter = ',')]
|
||||||
|
packages: Option<Vec<String>>,
|
||||||
|
/// Fetch all versions (only with --packages)
|
||||||
|
#[arg(long)]
|
||||||
|
all_versions: bool,
|
||||||
|
},
|
||||||
|
/// Mirror Python packages
|
||||||
|
Pip {
|
||||||
|
/// Path to requirements.txt
|
||||||
|
#[arg(long)]
|
||||||
|
lockfile: PathBuf,
|
||||||
|
},
|
||||||
|
/// Mirror Cargo crates
|
||||||
|
Cargo {
|
||||||
|
/// Path to Cargo.lock
|
||||||
|
#[arg(long)]
|
||||||
|
lockfile: PathBuf,
|
||||||
|
},
|
||||||
|
/// Mirror Maven artifacts
|
||||||
|
Maven {
|
||||||
|
/// Path to dependency list (mvn dependency:list output)
|
||||||
|
#[arg(long)]
|
||||||
|
lockfile: PathBuf,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
|
||||||
|
pub struct MirrorTarget {
|
||||||
|
pub name: String,
|
||||||
|
pub version: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct MirrorResult {
|
||||||
|
pub total: usize,
|
||||||
|
pub fetched: usize,
|
||||||
|
pub failed: usize,
|
||||||
|
pub bytes: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create_progress_bar(total: u64) -> ProgressBar {
|
||||||
|
let pb = ProgressBar::new(total);
|
||||||
|
pb.set_style(
|
||||||
|
ProgressStyle::default_bar()
|
||||||
|
.template(
|
||||||
|
"{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({eta}) {msg}",
|
||||||
|
)
|
||||||
|
.unwrap()
|
||||||
|
.progress_chars("=>-"),
|
||||||
|
);
|
||||||
|
pb
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn run_mirror(
|
||||||
|
format: MirrorFormat,
|
||||||
|
registry: &str,
|
||||||
|
concurrency: usize,
|
||||||
|
) -> Result<(), String> {
|
||||||
|
let client = reqwest::Client::builder()
|
||||||
|
.timeout(std::time::Duration::from_secs(120))
|
||||||
|
.build()
|
||||||
|
.map_err(|e| format!("Failed to create HTTP client: {}", e))?;
|
||||||
|
|
||||||
|
// Health check
|
||||||
|
let health_url = format!("{}/health", registry.trim_end_matches('/'));
|
||||||
|
match client.get(&health_url).send().await {
|
||||||
|
Ok(r) if r.status().is_success() => {}
|
||||||
|
_ => {
|
||||||
|
return Err(format!(
|
||||||
|
"Cannot connect to NORA at {}. Is `nora serve` running?",
|
||||||
|
registry
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let start = Instant::now();
|
||||||
|
|
||||||
|
let result = match format {
|
||||||
|
MirrorFormat::Npm {
|
||||||
|
lockfile,
|
||||||
|
packages,
|
||||||
|
all_versions,
|
||||||
|
} => {
|
||||||
|
npm::run_npm_mirror(
|
||||||
|
&client,
|
||||||
|
registry,
|
||||||
|
lockfile,
|
||||||
|
packages,
|
||||||
|
all_versions,
|
||||||
|
concurrency,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
}
|
||||||
|
MirrorFormat::Pip { lockfile } => {
|
||||||
|
mirror_lockfile(&client, registry, "pip", &lockfile).await?
|
||||||
|
}
|
||||||
|
MirrorFormat::Cargo { lockfile } => {
|
||||||
|
mirror_lockfile(&client, registry, "cargo", &lockfile).await?
|
||||||
|
}
|
||||||
|
MirrorFormat::Maven { lockfile } => {
|
||||||
|
mirror_lockfile(&client, registry, "maven", &lockfile).await?
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let elapsed = start.elapsed();
|
||||||
|
println!("\nMirror complete:");
|
||||||
|
println!(" Total: {}", result.total);
|
||||||
|
println!(" Fetched: {}", result.fetched);
|
||||||
|
println!(" Failed: {}", result.failed);
|
||||||
|
println!(" Size: {:.1} MB", result.bytes as f64 / 1_048_576.0);
|
||||||
|
println!(" Time: {:.1}s", elapsed.as_secs_f64());
|
||||||
|
|
||||||
|
if result.failed > 0 {
|
||||||
|
Err(format!("{} packages failed to mirror", result.failed))
|
||||||
|
} else {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn mirror_lockfile(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
registry: &str,
|
||||||
|
format: &str,
|
||||||
|
lockfile: &PathBuf,
|
||||||
|
) -> Result<MirrorResult, String> {
|
||||||
|
let content = std::fs::read_to_string(lockfile)
|
||||||
|
.map_err(|e| format!("Cannot read {}: {}", lockfile.display(), e))?;
|
||||||
|
|
||||||
|
let targets = match format {
|
||||||
|
"pip" => parse_requirements_txt(&content),
|
||||||
|
"cargo" => parse_cargo_lock(&content)?,
|
||||||
|
"maven" => parse_maven_deps(&content),
|
||||||
|
_ => vec![],
|
||||||
|
};
|
||||||
|
|
||||||
|
if targets.is_empty() {
|
||||||
|
println!("No packages found in {}", lockfile.display());
|
||||||
|
return Ok(MirrorResult {
|
||||||
|
total: 0,
|
||||||
|
fetched: 0,
|
||||||
|
failed: 0,
|
||||||
|
bytes: 0,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let pb = create_progress_bar(targets.len() as u64);
|
||||||
|
let base = registry.trim_end_matches('/');
|
||||||
|
let mut fetched = 0;
|
||||||
|
let mut failed = 0;
|
||||||
|
let mut bytes = 0u64;
|
||||||
|
|
||||||
|
for target in &targets {
|
||||||
|
let url = match format {
|
||||||
|
"pip" => format!("{}/simple/{}/", base, target.name),
|
||||||
|
"cargo" => format!(
|
||||||
|
"{}/cargo/api/v1/crates/{}/{}/download",
|
||||||
|
base, target.name, target.version
|
||||||
|
),
|
||||||
|
"maven" => {
|
||||||
|
let parts: Vec<&str> = target.name.split(':').collect();
|
||||||
|
if parts.len() == 2 {
|
||||||
|
let group_path = parts[0].replace('.', "/");
|
||||||
|
format!(
|
||||||
|
"{}/maven2/{}/{}/{}/{}-{}.jar",
|
||||||
|
base, group_path, parts[1], target.version, parts[1], target.version
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
pb.inc(1);
|
||||||
|
failed += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => continue,
|
||||||
|
};
|
||||||
|
|
||||||
|
match client.get(&url).send().await {
|
||||||
|
Ok(r) if r.status().is_success() => {
|
||||||
|
if let Ok(body) = r.bytes().await {
|
||||||
|
bytes += body.len() as u64;
|
||||||
|
}
|
||||||
|
fetched += 1;
|
||||||
|
}
|
||||||
|
_ => failed += 1,
|
||||||
|
}
|
||||||
|
|
||||||
|
pb.set_message(format!("{}@{}", target.name, target.version));
|
||||||
|
pb.inc(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
pb.finish_with_message("done");
|
||||||
|
Ok(MirrorResult {
|
||||||
|
total: targets.len(),
|
||||||
|
fetched,
|
||||||
|
failed,
|
||||||
|
bytes,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_requirements_txt(content: &str) -> Vec<MirrorTarget> {
|
||||||
|
content
|
||||||
|
.lines()
|
||||||
|
.filter(|l| !l.trim().is_empty() && !l.starts_with('#') && !l.starts_with('-'))
|
||||||
|
.filter_map(|line| {
|
||||||
|
let line = line.split('#').next().unwrap().trim();
|
||||||
|
if let Some((name, version)) = line.split_once("==") {
|
||||||
|
Some(MirrorTarget {
|
||||||
|
name: name.trim().to_string(),
|
||||||
|
version: version.trim().to_string(),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
let name = line.split(['>', '<', '=', '!', '~', ';']).next()?.trim();
|
||||||
|
if name.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(MirrorTarget {
|
||||||
|
name: name.to_string(),
|
||||||
|
version: "latest".to_string(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_cargo_lock(content: &str) -> Result<Vec<MirrorTarget>, String> {
|
||||||
|
let lock: toml::Value =
|
||||||
|
toml::from_str(content).map_err(|e| format!("Invalid Cargo.lock: {}", e))?;
|
||||||
|
let packages = lock
|
||||||
|
.get("package")
|
||||||
|
.and_then(|p| p.as_array())
|
||||||
|
.cloned()
|
||||||
|
.unwrap_or_default();
|
||||||
|
Ok(packages
|
||||||
|
.iter()
|
||||||
|
.filter(|p| {
|
||||||
|
p.get("source")
|
||||||
|
.and_then(|s| s.as_str())
|
||||||
|
.map(|s| s.starts_with("registry+"))
|
||||||
|
.unwrap_or(false)
|
||||||
|
})
|
||||||
|
.filter_map(|p| {
|
||||||
|
let name = p.get("name")?.as_str()?.to_string();
|
||||||
|
let version = p.get("version")?.as_str()?.to_string();
|
||||||
|
Some(MirrorTarget { name, version })
|
||||||
|
})
|
||||||
|
.collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_maven_deps(content: &str) -> Vec<MirrorTarget> {
|
||||||
|
content
|
||||||
|
.lines()
|
||||||
|
.filter_map(|line| {
|
||||||
|
let line = line.trim().trim_start_matches("[INFO]").trim();
|
||||||
|
let parts: Vec<&str> = line.split(':').collect();
|
||||||
|
if parts.len() >= 4 {
|
||||||
|
let name = format!("{}:{}", parts[0], parts[1]);
|
||||||
|
let version = parts[3].to_string();
|
||||||
|
Some(MirrorTarget { name, version })
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_requirements_txt() {
|
||||||
|
let content = "flask==2.3.0\nrequests>=2.28.0\n# comment\nnumpy==1.24.3\n";
|
||||||
|
let targets = parse_requirements_txt(content);
|
||||||
|
assert_eq!(targets.len(), 3);
|
||||||
|
assert_eq!(targets[0].name, "flask");
|
||||||
|
assert_eq!(targets[0].version, "2.3.0");
|
||||||
|
assert_eq!(targets[1].name, "requests");
|
||||||
|
assert_eq!(targets[1].version, "latest");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_cargo_lock() {
|
||||||
|
let content = "\
|
||||||
|
[[package]]
|
||||||
|
name = \"serde\"
|
||||||
|
version = \"1.0.197\"
|
||||||
|
source = \"registry+https://github.com/rust-lang/crates.io-index\"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = \"my-local-crate\"
|
||||||
|
version = \"0.1.0\"
|
||||||
|
";
|
||||||
|
let targets = parse_cargo_lock(content).unwrap();
|
||||||
|
assert_eq!(targets.len(), 1);
|
||||||
|
assert_eq!(targets[0].name, "serde");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_maven_deps() {
|
||||||
|
let content = "[INFO] org.apache.commons:commons-lang3:jar:3.12.0:compile\n";
|
||||||
|
let targets = parse_maven_deps(content);
|
||||||
|
assert_eq!(targets.len(), 1);
|
||||||
|
assert_eq!(targets[0].name, "org.apache.commons:commons-lang3");
|
||||||
|
assert_eq!(targets[0].version, "3.12.0");
|
||||||
|
}
|
||||||
|
}
|
||||||
323
nora-registry/src/mirror/npm.rs
Normal file
323
nora-registry/src/mirror/npm.rs
Normal file
@@ -0,0 +1,323 @@
|
|||||||
|
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
||||||
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
//! npm lockfile parser + mirror logic.
|
||||||
|
|
||||||
|
use super::{create_progress_bar, MirrorResult, MirrorTarget};
|
||||||
|
use std::collections::HashSet;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use tokio::sync::Semaphore;
|
||||||
|
|
||||||
|
/// Entry point for npm mirroring
|
||||||
|
pub async fn run_npm_mirror(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
registry: &str,
|
||||||
|
lockfile: Option<PathBuf>,
|
||||||
|
packages: Option<Vec<String>>,
|
||||||
|
all_versions: bool,
|
||||||
|
concurrency: usize,
|
||||||
|
) -> Result<MirrorResult, String> {
|
||||||
|
let targets = if let Some(path) = lockfile {
|
||||||
|
let content = std::fs::read_to_string(&path)
|
||||||
|
.map_err(|e| format!("Cannot read {}: {}", path.display(), e))?;
|
||||||
|
parse_npm_lockfile(&content)?
|
||||||
|
} else if let Some(names) = packages {
|
||||||
|
resolve_npm_packages(client, registry, &names, all_versions).await?
|
||||||
|
} else {
|
||||||
|
return Err("Specify --lockfile or --packages".to_string());
|
||||||
|
};
|
||||||
|
|
||||||
|
if targets.is_empty() {
|
||||||
|
println!("No npm packages to mirror");
|
||||||
|
return Ok(MirrorResult {
|
||||||
|
total: 0,
|
||||||
|
fetched: 0,
|
||||||
|
failed: 0,
|
||||||
|
bytes: 0,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
println!(
|
||||||
|
"Mirroring {} npm packages via {}...",
|
||||||
|
targets.len(),
|
||||||
|
registry
|
||||||
|
);
|
||||||
|
mirror_npm_packages(client, registry, &targets, concurrency).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse package-lock.json (v1, v2, v3)
|
||||||
|
fn parse_npm_lockfile(content: &str) -> Result<Vec<MirrorTarget>, String> {
|
||||||
|
let json: serde_json::Value =
|
||||||
|
serde_json::from_str(content).map_err(|e| format!("Invalid JSON: {}", e))?;
|
||||||
|
|
||||||
|
let version = json
|
||||||
|
.get("lockfileVersion")
|
||||||
|
.and_then(|v| v.as_u64())
|
||||||
|
.unwrap_or(1);
|
||||||
|
|
||||||
|
let mut seen = HashSet::new();
|
||||||
|
let mut targets = Vec::new();
|
||||||
|
|
||||||
|
if version >= 2 {
|
||||||
|
// v2/v3: use "packages" object
|
||||||
|
if let Some(packages) = json.get("packages").and_then(|p| p.as_object()) {
|
||||||
|
for (key, pkg) in packages {
|
||||||
|
if key.is_empty() {
|
||||||
|
continue; // root package
|
||||||
|
}
|
||||||
|
if let Some(name) = extract_package_name(key) {
|
||||||
|
if let Some(ver) = pkg.get("version").and_then(|v| v.as_str()) {
|
||||||
|
let pair = (name.to_string(), ver.to_string());
|
||||||
|
if seen.insert(pair.clone()) {
|
||||||
|
targets.push(MirrorTarget {
|
||||||
|
name: pair.0,
|
||||||
|
version: pair.1,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if version == 1 || targets.is_empty() {
|
||||||
|
// v1 fallback: recursive "dependencies"
|
||||||
|
if let Some(deps) = json.get("dependencies").and_then(|d| d.as_object()) {
|
||||||
|
parse_v1_deps(deps, &mut targets, &mut seen);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(targets)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract the package name from a lockfile key like "node_modules/@babel/core".
///
/// Takes everything after the LAST "node_modules/" so nested installs such as
/// "node_modules/foo/node_modules/@scope/bar" yield "@scope/bar". Returns
/// `None` when the key contains no marker or nothing follows it.
fn extract_package_name(key: &str) -> Option<&str> {
    const MARKER: &str = "node_modules/";
    let idx = key.rfind(MARKER)?;
    // Drop a possible trailing slash so "node_modules/foo/" → "foo".
    let tail = key[idx + MARKER.len()..].trim_end_matches('/');
    (!tail.is_empty()).then_some(tail)
}
|
||||||
|
|
||||||
|
/// Recursively parse v1 lockfile "dependencies"
|
||||||
|
fn parse_v1_deps(
|
||||||
|
deps: &serde_json::Map<String, serde_json::Value>,
|
||||||
|
targets: &mut Vec<MirrorTarget>,
|
||||||
|
seen: &mut HashSet<(String, String)>,
|
||||||
|
) {
|
||||||
|
for (name, pkg) in deps {
|
||||||
|
if let Some(ver) = pkg.get("version").and_then(|v| v.as_str()) {
|
||||||
|
let pair = (name.clone(), ver.to_string());
|
||||||
|
if seen.insert(pair.clone()) {
|
||||||
|
targets.push(MirrorTarget {
|
||||||
|
name: pair.0,
|
||||||
|
version: pair.1,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Recurse into nested dependencies
|
||||||
|
if let Some(nested) = pkg.get("dependencies").and_then(|d| d.as_object()) {
|
||||||
|
parse_v1_deps(nested, targets, seen);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Resolve --packages list by fetching metadata from NORA
|
||||||
|
async fn resolve_npm_packages(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
registry: &str,
|
||||||
|
names: &[String],
|
||||||
|
all_versions: bool,
|
||||||
|
) -> Result<Vec<MirrorTarget>, String> {
|
||||||
|
let base = registry.trim_end_matches('/');
|
||||||
|
let mut targets = Vec::new();
|
||||||
|
|
||||||
|
for name in names {
|
||||||
|
let url = format!("{}/npm/{}", base, name);
|
||||||
|
let resp = client.get(&url).send().await.map_err(|e| e.to_string())?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
eprintln!("Warning: {} not found (HTTP {})", name, resp.status());
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let json: serde_json::Value = resp.json().await.map_err(|e| e.to_string())?;
|
||||||
|
|
||||||
|
if all_versions {
|
||||||
|
if let Some(versions) = json.get("versions").and_then(|v| v.as_object()) {
|
||||||
|
for ver in versions.keys() {
|
||||||
|
targets.push(MirrorTarget {
|
||||||
|
name: name.clone(),
|
||||||
|
version: ver.clone(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Just latest
|
||||||
|
let latest = json
|
||||||
|
.get("dist-tags")
|
||||||
|
.and_then(|d| d.get("latest"))
|
||||||
|
.and_then(|v| v.as_str())
|
||||||
|
.unwrap_or("latest");
|
||||||
|
targets.push(MirrorTarget {
|
||||||
|
name: name.clone(),
|
||||||
|
version: latest.to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(targets)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Fetch packages through NORA (triggers proxy cache).
///
/// Two phases: first one sequential GET per unique package name to warm the
/// metadata cache, then concurrent tarball downloads bounded by
/// `concurrency` via a semaphore. Individual tarball failures are tallied
/// in `failed` rather than aborting the run; the returned `MirrorResult`
/// aggregates counts and total bytes transferred.
async fn mirror_npm_packages(
    client: &reqwest::Client,
    registry: &str,
    targets: &[MirrorTarget],
    concurrency: usize,
) -> Result<MirrorResult, String> {
    let base = registry.trim_end_matches('/');
    let pb = create_progress_bar(targets.len() as u64);
    let sem = std::sync::Arc::new(Semaphore::new(concurrency));

    // Deduplicate metadata fetches (one per package name)
    let unique_names: HashSet<&str> = targets.iter().map(|t| t.name.as_str()).collect();
    pb.set_message("fetching metadata...");
    for name in &unique_names {
        let url = format!("{}/npm/{}", base, name);
        let _ = client.get(&url).send().await; // trigger metadata cache
    }

    // Fetch tarballs concurrently
    let fetched = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
    let failed = std::sync::Arc::new(std::sync::atomic::AtomicUsize::new(0));
    let bytes = std::sync::Arc::new(std::sync::atomic::AtomicU64::new(0));

    let mut handles = Vec::new();

    for target in targets {
        // Acquire before spawning so at most `concurrency` tasks are
        // in flight; the semaphore is never closed, so unwrap is safe.
        let permit = sem.clone().acquire_owned().await.unwrap();
        let client = client.clone();
        let pb = pb.clone();
        let fetched = fetched.clone();
        let failed = failed.clone();
        let bytes = bytes.clone();

        // Scoped packages ("@scope/name") use only the final path segment
        // in the tarball filename: /npm/@scope/name/-/name-<ver>.tgz
        let short_name = target.name.split('/').next_back().unwrap_or(&target.name);
        let tarball_url = format!(
            "{}/npm/{}/-/{}-{}.tgz",
            base, target.name, short_name, target.version
        );
        let label = format!("{}@{}", target.name, target.version);

        handles.push(tokio::spawn(async move {
            let _permit = permit; // held (and released) with the task
            match client.get(&tarball_url).send().await {
                Ok(r) if r.status().is_success() => {
                    // Body is read to completion so the proxy caches it;
                    // a body-read error still counts as fetched.
                    if let Ok(body) = r.bytes().await {
                        bytes.fetch_add(body.len() as u64, std::sync::atomic::Ordering::Relaxed);
                    }
                    fetched.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
                }
                _ => {
                    failed.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
                }
            }
            pb.set_message(label);
            pb.inc(1);
        }));
    }

    for h in handles {
        let _ = h.await;
    }

    pb.finish_with_message("done");

    Ok(MirrorResult {
        total: targets.len(),
        fetched: fetched.load(std::sync::atomic::Ordering::Relaxed),
        failed: failed.load(std::sync::atomic::Ordering::Relaxed),
        bytes: bytes.load(std::sync::atomic::Ordering::Relaxed),
    })
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Plain, scoped, nested, and degenerate lockfile keys.
    #[test]
    fn test_extract_package_name() {
        assert_eq!(extract_package_name("node_modules/lodash"), Some("lodash"));
        assert_eq!(
            extract_package_name("node_modules/@babel/core"),
            Some("@babel/core")
        );
        assert_eq!(
            extract_package_name("node_modules/foo/node_modules/bar"),
            Some("bar")
        );
        assert_eq!(
            extract_package_name("node_modules/foo/node_modules/@types/node"),
            Some("@types/node")
        );
        assert_eq!(extract_package_name(""), None);
    }

    // v3: flat "packages" map; the root "" entry must be skipped.
    #[test]
    fn test_parse_lockfile_v3() {
        let content = r#"{
            "lockfileVersion": 3,
            "packages": {
                "": { "name": "test" },
                "node_modules/lodash": { "version": "4.17.21" },
                "node_modules/@babel/core": { "version": "7.26.0" },
                "node_modules/@babel/core/node_modules/semver": { "version": "6.3.1" }
            }
        }"#;
        let targets = parse_npm_lockfile(content).unwrap();
        assert_eq!(targets.len(), 3);
        let names: HashSet<&str> = targets.iter().map(|t| t.name.as_str()).collect();
        assert!(names.contains("lodash"));
        assert!(names.contains("@babel/core"));
        assert!(names.contains("semver"));
    }

    // v1: recursive "dependencies" tree; transitive deps are included
    // and parents precede children.
    #[test]
    fn test_parse_lockfile_v1() {
        let content = r#"{
            "lockfileVersion": 1,
            "dependencies": {
                "express": {
                    "version": "4.18.2",
                    "dependencies": {
                        "accepts": { "version": "1.3.8" }
                    }
                }
            }
        }"#;
        let targets = parse_npm_lockfile(content).unwrap();
        assert_eq!(targets.len(), 2);
        assert_eq!(targets[0].name, "express");
        assert_eq!(targets[1].name, "accepts");
    }

    // The same (name, version) pair at two install paths is kept once.
    #[test]
    fn test_deduplication() {
        let content = r#"{
            "lockfileVersion": 3,
            "packages": {
                "": {},
                "node_modules/debug": { "version": "4.3.4" },
                "node_modules/express/node_modules/debug": { "version": "4.3.4" }
            }
        }"#;
        let targets = parse_npm_lockfile(content).unwrap();
        assert_eq!(targets.len(), 1); // deduplicated
        assert_eq!(targets[0].name, "debug");
    }
}
|
||||||
@@ -5,7 +5,7 @@
|
|||||||
//!
|
//!
|
||||||
//! Functions in this module are stubs used only for generating OpenAPI documentation.
|
//! Functions in this module are stubs used only for generating OpenAPI documentation.
|
||||||
|
|
||||||
#![allow(dead_code)]
|
#![allow(dead_code)] // utoipa doc stubs — not called at runtime, used by derive macros
|
||||||
|
|
||||||
use axum::Router;
|
use axum::Router;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|||||||
@@ -10,6 +10,7 @@
|
|||||||
|
|
||||||
use crate::config::RateLimitConfig;
|
use crate::config::RateLimitConfig;
|
||||||
use tower_governor::governor::GovernorConfigBuilder;
|
use tower_governor::governor::GovernorConfigBuilder;
|
||||||
|
use tower_governor::key_extractor::SmartIpKeyExtractor;
|
||||||
|
|
||||||
/// Create rate limiter layer for auth endpoints (strict protection against brute-force)
|
/// Create rate limiter layer for auth endpoints (strict protection against brute-force)
|
||||||
pub fn auth_rate_limiter(
|
pub fn auth_rate_limiter(
|
||||||
@@ -35,11 +36,12 @@ pub fn auth_rate_limiter(
|
|||||||
pub fn upload_rate_limiter(
|
pub fn upload_rate_limiter(
|
||||||
config: &RateLimitConfig,
|
config: &RateLimitConfig,
|
||||||
) -> tower_governor::GovernorLayer<
|
) -> tower_governor::GovernorLayer<
|
||||||
tower_governor::key_extractor::PeerIpKeyExtractor,
|
SmartIpKeyExtractor,
|
||||||
governor::middleware::StateInformationMiddleware,
|
governor::middleware::StateInformationMiddleware,
|
||||||
axum::body::Body,
|
axum::body::Body,
|
||||||
> {
|
> {
|
||||||
let gov_config = GovernorConfigBuilder::default()
|
let gov_config = GovernorConfigBuilder::default()
|
||||||
|
.key_extractor(SmartIpKeyExtractor)
|
||||||
.per_second(config.upload_rps)
|
.per_second(config.upload_rps)
|
||||||
.burst_size(config.upload_burst)
|
.burst_size(config.upload_burst)
|
||||||
.use_headers()
|
.use_headers()
|
||||||
@@ -53,11 +55,12 @@ pub fn upload_rate_limiter(
|
|||||||
pub fn general_rate_limiter(
|
pub fn general_rate_limiter(
|
||||||
config: &RateLimitConfig,
|
config: &RateLimitConfig,
|
||||||
) -> tower_governor::GovernorLayer<
|
) -> tower_governor::GovernorLayer<
|
||||||
tower_governor::key_extractor::PeerIpKeyExtractor,
|
SmartIpKeyExtractor,
|
||||||
governor::middleware::StateInformationMiddleware,
|
governor::middleware::StateInformationMiddleware,
|
||||||
axum::body::Body,
|
axum::body::Body,
|
||||||
> {
|
> {
|
||||||
let gov_config = GovernorConfigBuilder::default()
|
let gov_config = GovernorConfigBuilder::default()
|
||||||
|
.key_extractor(SmartIpKeyExtractor)
|
||||||
.per_second(config.general_rps)
|
.per_second(config.general_rps)
|
||||||
.burst_size(config.general_burst)
|
.burst_size(config.general_burst)
|
||||||
.use_headers()
|
.use_headers()
|
||||||
@@ -102,6 +105,7 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_custom_config() {
|
fn test_custom_config() {
|
||||||
let config = RateLimitConfig {
|
let config = RateLimitConfig {
|
||||||
|
enabled: true,
|
||||||
auth_rps: 10,
|
auth_rps: 10,
|
||||||
auth_burst: 20,
|
auth_burst: 20,
|
||||||
upload_rps: 500,
|
upload_rps: 500,
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
use crate::activity_log::{ActionType, ActivityEntry};
|
use crate::activity_log::{ActionType, ActivityEntry};
|
||||||
|
use crate::audit::AuditEntry;
|
||||||
use crate::AppState;
|
use crate::AppState;
|
||||||
use axum::{
|
use axum::{
|
||||||
extract::{Path, State},
|
extract::{Path, State},
|
||||||
@@ -50,6 +51,9 @@ async fn download(
|
|||||||
"cargo",
|
"cargo",
|
||||||
"LOCAL",
|
"LOCAL",
|
||||||
));
|
));
|
||||||
|
state
|
||||||
|
.audit
|
||||||
|
.log(AuditEntry::new("pull", "api", "", "cargo", ""));
|
||||||
(StatusCode::OK, data).into_response()
|
(StatusCode::OK, data).into_response()
|
||||||
}
|
}
|
||||||
Err(_) => StatusCode::NOT_FOUND.into_response(),
|
Err(_) => StatusCode::NOT_FOUND.into_response(),
|
||||||
|
|||||||
@@ -2,6 +2,8 @@
|
|||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
use crate::activity_log::{ActionType, ActivityEntry};
|
use crate::activity_log::{ActionType, ActivityEntry};
|
||||||
|
use crate::audit::AuditEntry;
|
||||||
|
use crate::config::basic_auth_header;
|
||||||
use crate::registry::docker_auth::DockerAuth;
|
use crate::registry::docker_auth::DockerAuth;
|
||||||
use crate::storage::Storage;
|
use crate::storage::Storage;
|
||||||
use crate::validation::{validate_digest, validate_docker_name, validate_docker_reference};
|
use crate::validation::{validate_digest, validate_docker_name, validate_docker_reference};
|
||||||
@@ -11,7 +13,7 @@ use axum::{
|
|||||||
extract::{Path, State},
|
extract::{Path, State},
|
||||||
http::{header, HeaderName, StatusCode},
|
http::{header, HeaderName, StatusCode},
|
||||||
response::{IntoResponse, Response},
|
response::{IntoResponse, Response},
|
||||||
routing::{get, head, patch, put},
|
routing::{delete, get, head, patch, put},
|
||||||
Json, Router,
|
Json, Router,
|
||||||
};
|
};
|
||||||
use parking_lot::RwLock;
|
use parking_lot::RwLock;
|
||||||
@@ -64,6 +66,8 @@ pub fn routes() -> Router<Arc<AppState>> {
|
|||||||
)
|
)
|
||||||
.route("/v2/{name}/manifests/{reference}", get(get_manifest))
|
.route("/v2/{name}/manifests/{reference}", get(get_manifest))
|
||||||
.route("/v2/{name}/manifests/{reference}", put(put_manifest))
|
.route("/v2/{name}/manifests/{reference}", put(put_manifest))
|
||||||
|
.route("/v2/{name}/manifests/{reference}", delete(delete_manifest))
|
||||||
|
.route("/v2/{name}/blobs/{digest}", delete(delete_blob))
|
||||||
.route("/v2/{name}/tags/list", get(list_tags))
|
.route("/v2/{name}/tags/list", get(list_tags))
|
||||||
// Two-segment name routes (e.g., /v2/library/alpine/...)
|
// Two-segment name routes (e.g., /v2/library/alpine/...)
|
||||||
.route("/v2/{ns}/{name}/blobs/{digest}", head(check_blob_ns))
|
.route("/v2/{ns}/{name}/blobs/{digest}", head(check_blob_ns))
|
||||||
@@ -84,6 +88,11 @@ pub fn routes() -> Router<Arc<AppState>> {
|
|||||||
"/v2/{ns}/{name}/manifests/{reference}",
|
"/v2/{ns}/{name}/manifests/{reference}",
|
||||||
put(put_manifest_ns),
|
put(put_manifest_ns),
|
||||||
)
|
)
|
||||||
|
.route(
|
||||||
|
"/v2/{ns}/{name}/manifests/{reference}",
|
||||||
|
delete(delete_manifest_ns),
|
||||||
|
)
|
||||||
|
.route("/v2/{ns}/{name}/blobs/{digest}", delete(delete_blob_ns))
|
||||||
.route("/v2/{ns}/{name}/tags/list", get(list_tags_ns))
|
.route("/v2/{ns}/{name}/tags/list", get(list_tags_ns))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -173,6 +182,7 @@ async fn download_blob(
|
|||||||
&digest,
|
&digest,
|
||||||
&state.docker_auth,
|
&state.docker_auth,
|
||||||
state.config.docker.proxy_timeout,
|
state.config.docker.proxy_timeout,
|
||||||
|
upstream.auth.as_deref(),
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
@@ -204,6 +214,38 @@ async fn download_blob(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Auto-prepend library/ for single-segment names (Docker Hub official images)
|
||||||
|
if !name.contains('/') {
|
||||||
|
let library_name = format!("library/{}", name);
|
||||||
|
for upstream in &state.config.docker.upstreams {
|
||||||
|
if let Ok(data) = fetch_blob_from_upstream(
|
||||||
|
&state.http_client,
|
||||||
|
&upstream.url,
|
||||||
|
&library_name,
|
||||||
|
&digest,
|
||||||
|
&state.docker_auth,
|
||||||
|
state.config.docker.proxy_timeout,
|
||||||
|
upstream.auth.as_deref(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
let storage = state.storage.clone();
|
||||||
|
let key_clone = key.clone();
|
||||||
|
let data_clone = data.clone();
|
||||||
|
tokio::spawn(async move {
|
||||||
|
let _ = storage.put(&key_clone, &data_clone).await;
|
||||||
|
});
|
||||||
|
|
||||||
|
return (
|
||||||
|
StatusCode::OK,
|
||||||
|
[(header::CONTENT_TYPE, "application/octet-stream")],
|
||||||
|
Bytes::from(data),
|
||||||
|
)
|
||||||
|
.into_response();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
StatusCode::NOT_FOUND.into_response()
|
StatusCode::NOT_FOUND.into_response()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -307,7 +349,17 @@ async fn upload_blob(
|
|||||||
));
|
));
|
||||||
state.repo_index.invalidate("docker");
|
state.repo_index.invalidate("docker");
|
||||||
let location = format!("/v2/{}/blobs/{}", name, digest);
|
let location = format!("/v2/{}/blobs/{}", name, digest);
|
||||||
(StatusCode::CREATED, [(header::LOCATION, location)]).into_response()
|
(
|
||||||
|
StatusCode::CREATED,
|
||||||
|
[
|
||||||
|
(header::LOCATION, location),
|
||||||
|
(
|
||||||
|
HeaderName::from_static("docker-content-digest"),
|
||||||
|
digest.to_string(),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
.into_response()
|
||||||
}
|
}
|
||||||
Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
|
Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
|
||||||
}
|
}
|
||||||
@@ -374,6 +426,7 @@ async fn get_manifest(
|
|||||||
&reference,
|
&reference,
|
||||||
&state.docker_auth,
|
&state.docker_auth,
|
||||||
state.config.docker.proxy_timeout,
|
state.config.docker.proxy_timeout,
|
||||||
|
upstream.auth.as_deref(),
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
@@ -432,6 +485,57 @@ async fn get_manifest(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Auto-prepend library/ for single-segment names (Docker Hub official images)
|
||||||
|
// e.g., "nginx" -> "library/nginx", "alpine" -> "library/alpine"
|
||||||
|
if !name.contains('/') {
|
||||||
|
let library_name = format!("library/{}", name);
|
||||||
|
for upstream in &state.config.docker.upstreams {
|
||||||
|
if let Ok((data, content_type)) = fetch_manifest_from_upstream(
|
||||||
|
&state.http_client,
|
||||||
|
&upstream.url,
|
||||||
|
&library_name,
|
||||||
|
&reference,
|
||||||
|
&state.docker_auth,
|
||||||
|
state.config.docker.proxy_timeout,
|
||||||
|
upstream.auth.as_deref(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
state.metrics.record_download("docker");
|
||||||
|
state.metrics.record_cache_miss();
|
||||||
|
state.activity.push(ActivityEntry::new(
|
||||||
|
ActionType::ProxyFetch,
|
||||||
|
format!("{}:{}", name, reference),
|
||||||
|
"docker",
|
||||||
|
"PROXY",
|
||||||
|
));
|
||||||
|
|
||||||
|
use sha2::Digest;
|
||||||
|
let digest = format!("sha256:{:x}", sha2::Sha256::digest(&data));
|
||||||
|
|
||||||
|
// Cache under original name for future local hits
|
||||||
|
let storage = state.storage.clone();
|
||||||
|
let key_clone = key.clone();
|
||||||
|
let data_clone = data.clone();
|
||||||
|
tokio::spawn(async move {
|
||||||
|
let _ = storage.put(&key_clone, &data_clone).await;
|
||||||
|
});
|
||||||
|
|
||||||
|
state.repo_index.invalidate("docker");
|
||||||
|
|
||||||
|
return (
|
||||||
|
StatusCode::OK,
|
||||||
|
[
|
||||||
|
(header::CONTENT_TYPE, content_type),
|
||||||
|
(HeaderName::from_static("docker-content-digest"), digest),
|
||||||
|
],
|
||||||
|
Bytes::from(data),
|
||||||
|
)
|
||||||
|
.into_response();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
StatusCode::NOT_FOUND.into_response()
|
StatusCode::NOT_FOUND.into_response()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -481,6 +585,13 @@ async fn put_manifest(
|
|||||||
"docker",
|
"docker",
|
||||||
"LOCAL",
|
"LOCAL",
|
||||||
));
|
));
|
||||||
|
state.audit.log(AuditEntry::new(
|
||||||
|
"push",
|
||||||
|
"api",
|
||||||
|
&format!("{}:{}", name, reference),
|
||||||
|
"docker",
|
||||||
|
"manifest",
|
||||||
|
));
|
||||||
state.repo_index.invalidate("docker");
|
state.repo_index.invalidate("docker");
|
||||||
|
|
||||||
let location = format!("/v2/{}/manifests/{}", name, reference);
|
let location = format!("/v2/{}/manifests/{}", name, reference);
|
||||||
@@ -512,6 +623,109 @@ async fn list_tags(State(state): State<Arc<AppState>>, Path(name): Path<String>)
|
|||||||
(StatusCode::OK, Json(json!({"name": name, "tags": tags}))).into_response()
|
(StatusCode::OK, Json(json!({"name": name, "tags": tags}))).into_response()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Delete handlers (Docker Registry V2 spec)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
/// DELETE /v2/{name}/manifests/{reference} — remove a stored manifest.
///
/// `reference` may be a tag or a "sha256:..." digest. When a tag is
/// deleted, the digest-addressed copy of the same manifest (plus its
/// metadata sidecar) is removed too, so the content cannot still be
/// pulled by digest. Responds 202 on success, a registry-style
/// MANIFEST_UNKNOWN error on 404, and 500 on other storage failures.
async fn delete_manifest(
    State(state): State<Arc<AppState>>,
    Path((name, reference)): Path<(String, String)>,
) -> Response {
    if let Err(e) = validate_docker_name(&name) {
        return (StatusCode::BAD_REQUEST, e.to_string()).into_response();
    }
    if let Err(e) = validate_docker_reference(&reference) {
        return (StatusCode::BAD_REQUEST, e.to_string()).into_response();
    }

    let key = format!("docker/{}/manifests/{}.json", name, reference);

    // If reference is a tag, also delete digest-keyed copy
    let is_tag = !reference.starts_with("sha256:");
    if is_tag {
        // Read the manifest first: its content hash locates the
        // digest-keyed duplicate. Best-effort — errors are ignored.
        if let Ok(data) = state.storage.get(&key).await {
            use sha2::Digest;
            let digest = format!("sha256:{:x}", sha2::Sha256::digest(&data));
            let digest_key = format!("docker/{}/manifests/{}.json", name, digest);
            let _ = state.storage.delete(&digest_key).await;
            let digest_meta = format!("docker/{}/manifests/{}.meta.json", name, digest);
            let _ = state.storage.delete(&digest_meta).await;
        }
    }

    // Delete manifest
    match state.storage.delete(&key).await {
        Ok(()) => {
            // Delete associated metadata
            let meta_key = format!("docker/{}/manifests/{}.meta.json", name, reference);
            let _ = state.storage.delete(&meta_key).await;

            state.audit.log(AuditEntry::new(
                "delete",
                "api",
                &format!("{}:{}", name, reference),
                "docker",
                "manifest",
            ));
            state.repo_index.invalidate("docker");
            tracing::info!(name = %name, reference = %reference, "Docker manifest deleted");
            StatusCode::ACCEPTED.into_response()
        }
        Err(crate::storage::StorageError::NotFound) => (
            StatusCode::NOT_FOUND,
            Json(json!({
                "errors": [{
                    "code": "MANIFEST_UNKNOWN",
                    "message": "manifest unknown",
                    "detail": { "name": name, "reference": reference }
                }]
            })),
        )
            .into_response(),
        Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
    }
}
|
||||||
|
|
||||||
|
/// DELETE /v2/{name}/blobs/{digest} — remove a stored blob.
///
/// Validates name and digest, then deletes the blob object. Responds 202
/// on success, a registry-style BLOB_UNKNOWN error on 404, and 500 on
/// other storage failures.
async fn delete_blob(
    State(state): State<Arc<AppState>>,
    Path((name, digest)): Path<(String, String)>,
) -> Response {
    if let Err(e) = validate_docker_name(&name) {
        return (StatusCode::BAD_REQUEST, e.to_string()).into_response();
    }
    if let Err(e) = validate_digest(&digest) {
        return (StatusCode::BAD_REQUEST, e.to_string()).into_response();
    }

    let key = format!("docker/{}/blobs/{}", name, digest);
    match state.storage.delete(&key).await {
        Ok(()) => {
            // Audit label uses a shortened digest: the first 19 chars
            // ("sha256:" + 12 hex). Safe to slice — the digest was
            // validated above and is ASCII.
            state.audit.log(AuditEntry::new(
                "delete",
                "api",
                &format!("{}@{}", name, &digest[..19.min(digest.len())]),
                "docker",
                "blob",
            ));
            state.repo_index.invalidate("docker");
            tracing::info!(name = %name, digest = %digest, "Docker blob deleted");
            StatusCode::ACCEPTED.into_response()
        }
        Err(crate::storage::StorageError::NotFound) => (
            StatusCode::NOT_FOUND,
            Json(json!({
                "errors": [{
                    "code": "BLOB_UNKNOWN",
                    "message": "blob unknown to registry",
                    "detail": { "digest": digest }
                }]
            })),
        )
            .into_response(),
        Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
    }
}
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// Namespace handlers (for two-segment names like library/alpine)
|
// Namespace handlers (for two-segment names like library/alpine)
|
||||||
// These combine ns/name into a single name and delegate to the main handlers
|
// These combine ns/name into a single name and delegate to the main handlers
|
||||||
@@ -581,6 +795,22 @@ async fn list_tags_ns(
|
|||||||
list_tags(state, Path(full_name)).await
|
list_tags(state, Path(full_name)).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// DELETE for two-segment names (e.g. /v2/library/alpine/manifests/{ref}):
/// joins ns + name and delegates to `delete_manifest`.
async fn delete_manifest_ns(
    state: State<Arc<AppState>>,
    Path((ns, name, reference)): Path<(String, String, String)>,
) -> Response {
    let full_name = format!("{}/{}", ns, name);
    delete_manifest(state, Path((full_name, reference))).await
}
|
||||||
|
|
||||||
|
/// DELETE for two-segment names (e.g. /v2/library/alpine/blobs/{digest}):
/// joins ns + name and delegates to `delete_blob`.
async fn delete_blob_ns(
    state: State<Arc<AppState>>,
    Path((ns, name, digest)): Path<(String, String, String)>,
) -> Response {
    let full_name = format!("{}/{}", ns, name);
    delete_blob(state, Path((full_name, digest))).await
}
|
||||||
|
|
||||||
/// Fetch a blob from an upstream Docker registry
|
/// Fetch a blob from an upstream Docker registry
|
||||||
async fn fetch_blob_from_upstream(
|
async fn fetch_blob_from_upstream(
|
||||||
client: &reqwest::Client,
|
client: &reqwest::Client,
|
||||||
@@ -589,6 +819,7 @@ async fn fetch_blob_from_upstream(
|
|||||||
digest: &str,
|
digest: &str,
|
||||||
docker_auth: &DockerAuth,
|
docker_auth: &DockerAuth,
|
||||||
timeout: u64,
|
timeout: u64,
|
||||||
|
basic_auth: Option<&str>,
|
||||||
) -> Result<Vec<u8>, ()> {
|
) -> Result<Vec<u8>, ()> {
|
||||||
let url = format!(
|
let url = format!(
|
||||||
"{}/v2/{}/blobs/{}",
|
"{}/v2/{}/blobs/{}",
|
||||||
@@ -597,13 +828,12 @@ async fn fetch_blob_from_upstream(
|
|||||||
digest
|
digest
|
||||||
);
|
);
|
||||||
|
|
||||||
// First try without auth
|
// First try — with basic auth if configured
|
||||||
let response = client
|
let mut request = client.get(&url).timeout(Duration::from_secs(timeout));
|
||||||
.get(&url)
|
if let Some(credentials) = basic_auth {
|
||||||
.timeout(Duration::from_secs(timeout))
|
request = request.header("Authorization", basic_auth_header(credentials));
|
||||||
.send()
|
}
|
||||||
.await
|
let response = request.send().await.map_err(|_| ())?;
|
||||||
.map_err(|_| ())?;
|
|
||||||
|
|
||||||
let response = if response.status() == reqwest::StatusCode::UNAUTHORIZED {
|
let response = if response.status() == reqwest::StatusCode::UNAUTHORIZED {
|
||||||
// Get Www-Authenticate header and fetch token
|
// Get Www-Authenticate header and fetch token
|
||||||
@@ -614,7 +844,7 @@ async fn fetch_blob_from_upstream(
|
|||||||
.map(String::from);
|
.map(String::from);
|
||||||
|
|
||||||
if let Some(token) = docker_auth
|
if let Some(token) = docker_auth
|
||||||
.get_token(upstream_url, name, www_auth.as_deref())
|
.get_token(upstream_url, name, www_auth.as_deref(), basic_auth)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
client
|
client
|
||||||
@@ -646,6 +876,7 @@ async fn fetch_manifest_from_upstream(
|
|||||||
reference: &str,
|
reference: &str,
|
||||||
docker_auth: &DockerAuth,
|
docker_auth: &DockerAuth,
|
||||||
timeout: u64,
|
timeout: u64,
|
||||||
|
basic_auth: Option<&str>,
|
||||||
) -> Result<(Vec<u8>, String), ()> {
|
) -> Result<(Vec<u8>, String), ()> {
|
||||||
let url = format!(
|
let url = format!(
|
||||||
"{}/v2/{}/manifests/{}",
|
"{}/v2/{}/manifests/{}",
|
||||||
@@ -662,16 +893,17 @@ async fn fetch_manifest_from_upstream(
|
|||||||
application/vnd.oci.image.manifest.v1+json, \
|
application/vnd.oci.image.manifest.v1+json, \
|
||||||
application/vnd.oci.image.index.v1+json";
|
application/vnd.oci.image.index.v1+json";
|
||||||
|
|
||||||
// First try without auth
|
// First try — with basic auth if configured
|
||||||
let response = client
|
let mut request = client
|
||||||
.get(&url)
|
.get(&url)
|
||||||
.timeout(Duration::from_secs(timeout))
|
.timeout(Duration::from_secs(timeout))
|
||||||
.header("Accept", accept_header)
|
.header("Accept", accept_header);
|
||||||
.send()
|
if let Some(credentials) = basic_auth {
|
||||||
.await
|
request = request.header("Authorization", basic_auth_header(credentials));
|
||||||
.map_err(|e| {
|
}
|
||||||
tracing::error!(error = %e, url = %url, "Failed to send request to upstream");
|
let response = request.send().await.map_err(|e| {
|
||||||
})?;
|
tracing::error!(error = %e, url = %url, "Failed to send request to upstream");
|
||||||
|
})?;
|
||||||
|
|
||||||
tracing::debug!(status = %response.status(), "Initial upstream response");
|
tracing::debug!(status = %response.status(), "Initial upstream response");
|
||||||
|
|
||||||
@@ -686,7 +918,7 @@ async fn fetch_manifest_from_upstream(
|
|||||||
tracing::debug!(www_auth = ?www_auth, "Got 401, fetching token");
|
tracing::debug!(www_auth = ?www_auth, "Got 401, fetching token");
|
||||||
|
|
||||||
if let Some(token) = docker_auth
|
if let Some(token) = docker_auth
|
||||||
.get_token(upstream_url, name, www_auth.as_deref())
|
.get_token(upstream_url, name, www_auth.as_deref(), basic_auth)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
tracing::debug!("Token acquired, retrying with auth");
|
tracing::debug!("Token acquired, retrying with auth");
|
||||||
@@ -739,8 +971,16 @@ fn detect_manifest_media_type(data: &[u8]) -> String {
|
|||||||
if schema_version == 1 {
|
if schema_version == 1 {
|
||||||
return "application/vnd.docker.distribution.manifest.v1+json".to_string();
|
return "application/vnd.docker.distribution.manifest.v1+json".to_string();
|
||||||
}
|
}
|
||||||
// schemaVersion 2 without mediaType is likely docker manifest v2
|
// schemaVersion 2 without mediaType - check config.mediaType to distinguish OCI vs Docker
|
||||||
if json.get("config").is_some() {
|
if let Some(config) = json.get("config") {
|
||||||
|
if let Some(config_mt) = config.get("mediaType").and_then(|v| v.as_str()) {
|
||||||
|
if config_mt.starts_with("application/vnd.docker.") {
|
||||||
|
return "application/vnd.docker.distribution.manifest.v2+json".to_string();
|
||||||
|
}
|
||||||
|
// OCI or Helm or any non-docker config mediaType
|
||||||
|
return "application/vnd.oci.image.manifest.v1+json".to_string();
|
||||||
|
}
|
||||||
|
// No config.mediaType - assume docker v2
|
||||||
return "application/vnd.docker.distribution.manifest.v2+json".to_string();
|
return "application/vnd.docker.distribution.manifest.v2+json".to_string();
|
||||||
}
|
}
|
||||||
// If it has "manifests" array, it's an index/list
|
// If it has "manifests" array, it's an index/list
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
use crate::config::basic_auth_header;
|
||||||
use parking_lot::RwLock;
|
use parking_lot::RwLock;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::time::{Duration, Instant};
|
use std::time::{Duration, Instant};
|
||||||
@@ -36,6 +37,7 @@ impl DockerAuth {
|
|||||||
registry_url: &str,
|
registry_url: &str,
|
||||||
name: &str,
|
name: &str,
|
||||||
www_authenticate: Option<&str>,
|
www_authenticate: Option<&str>,
|
||||||
|
basic_auth: Option<&str>,
|
||||||
) -> Option<String> {
|
) -> Option<String> {
|
||||||
let cache_key = format!("{}:{}", registry_url, name);
|
let cache_key = format!("{}:{}", registry_url, name);
|
||||||
|
|
||||||
@@ -51,7 +53,7 @@ impl DockerAuth {
|
|||||||
|
|
||||||
// Need to fetch a new token
|
// Need to fetch a new token
|
||||||
let www_auth = www_authenticate?;
|
let www_auth = www_authenticate?;
|
||||||
let token = self.fetch_token(www_auth, name).await?;
|
let token = self.fetch_token(www_auth, name, basic_auth).await?;
|
||||||
|
|
||||||
// Cache the token (default 5 minute expiry)
|
// Cache the token (default 5 minute expiry)
|
||||||
{
|
{
|
||||||
@@ -70,7 +72,12 @@ impl DockerAuth {
|
|||||||
|
|
||||||
/// Parse Www-Authenticate header and fetch token from auth server
|
/// Parse Www-Authenticate header and fetch token from auth server
|
||||||
/// Format: Bearer realm="https://auth.docker.io/token",service="registry.docker.io",scope="repository:library/alpine:pull"
|
/// Format: Bearer realm="https://auth.docker.io/token",service="registry.docker.io",scope="repository:library/alpine:pull"
|
||||||
async fn fetch_token(&self, www_authenticate: &str, name: &str) -> Option<String> {
|
async fn fetch_token(
|
||||||
|
&self,
|
||||||
|
www_authenticate: &str,
|
||||||
|
name: &str,
|
||||||
|
basic_auth: Option<&str>,
|
||||||
|
) -> Option<String> {
|
||||||
let params = parse_www_authenticate(www_authenticate)?;
|
let params = parse_www_authenticate(www_authenticate)?;
|
||||||
|
|
||||||
let realm = params.get("realm")?;
|
let realm = params.get("realm")?;
|
||||||
@@ -82,7 +89,13 @@ impl DockerAuth {
|
|||||||
|
|
||||||
tracing::debug!(url = %url, "Fetching auth token");
|
tracing::debug!(url = %url, "Fetching auth token");
|
||||||
|
|
||||||
let response = self.client.get(&url).send().await.ok()?;
|
let mut request = self.client.get(&url);
|
||||||
|
if let Some(credentials) = basic_auth {
|
||||||
|
request = request.header("Authorization", basic_auth_header(credentials));
|
||||||
|
tracing::debug!("Using basic auth for token request");
|
||||||
|
}
|
||||||
|
|
||||||
|
let response = request.send().await.ok()?;
|
||||||
|
|
||||||
if !response.status().is_success() {
|
if !response.status().is_success() {
|
||||||
tracing::warn!(status = %response.status(), "Token request failed");
|
tracing::warn!(status = %response.status(), "Token request failed");
|
||||||
@@ -97,44 +110,6 @@ impl DockerAuth {
|
|||||||
.and_then(|v| v.as_str())
|
.and_then(|v| v.as_str())
|
||||||
.map(String::from)
|
.map(String::from)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Make an authenticated request to an upstream registry
|
|
||||||
pub async fn fetch_with_auth(
|
|
||||||
&self,
|
|
||||||
url: &str,
|
|
||||||
registry_url: &str,
|
|
||||||
name: &str,
|
|
||||||
) -> Result<reqwest::Response, ()> {
|
|
||||||
// First try without auth
|
|
||||||
let response = self.client.get(url).send().await.map_err(|_| ())?;
|
|
||||||
|
|
||||||
if response.status() == reqwest::StatusCode::UNAUTHORIZED {
|
|
||||||
// Extract Www-Authenticate header
|
|
||||||
let www_auth = response
|
|
||||||
.headers()
|
|
||||||
.get("www-authenticate")
|
|
||||||
.and_then(|v| v.to_str().ok())
|
|
||||||
.map(String::from);
|
|
||||||
|
|
||||||
// Get token and retry
|
|
||||||
if let Some(token) = self
|
|
||||||
.get_token(registry_url, name, www_auth.as_deref())
|
|
||||||
.await
|
|
||||||
{
|
|
||||||
return self
|
|
||||||
.client
|
|
||||||
.get(url)
|
|
||||||
.header("Authorization", format!("Bearer {}", token))
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.map_err(|_| ());
|
|
||||||
}
|
|
||||||
|
|
||||||
return Err(());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(response)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for DockerAuth {
|
impl Default for DockerAuth {
|
||||||
|
|||||||
@@ -2,6 +2,8 @@
|
|||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
use crate::activity_log::{ActionType, ActivityEntry};
|
use crate::activity_log::{ActionType, ActivityEntry};
|
||||||
|
use crate::audit::AuditEntry;
|
||||||
|
use crate::config::basic_auth_header;
|
||||||
use crate::AppState;
|
use crate::AppState;
|
||||||
use axum::{
|
use axum::{
|
||||||
body::Bytes,
|
body::Bytes,
|
||||||
@@ -42,13 +44,23 @@ async fn download(State(state): State<Arc<AppState>>, Path(path): Path<String>)
|
|||||||
"maven",
|
"maven",
|
||||||
"CACHE",
|
"CACHE",
|
||||||
));
|
));
|
||||||
|
state
|
||||||
|
.audit
|
||||||
|
.log(AuditEntry::new("cache_hit", "api", "", "maven", ""));
|
||||||
return with_content_type(&path, data).into_response();
|
return with_content_type(&path, data).into_response();
|
||||||
}
|
}
|
||||||
|
|
||||||
for proxy_url in &state.config.maven.proxies {
|
for proxy in &state.config.maven.proxies {
|
||||||
let url = format!("{}/{}", proxy_url.trim_end_matches('/'), path);
|
let url = format!("{}/{}", proxy.url().trim_end_matches('/'), path);
|
||||||
|
|
||||||
match fetch_from_proxy(&state.http_client, &url, state.config.maven.proxy_timeout).await {
|
match fetch_from_proxy(
|
||||||
|
&state.http_client,
|
||||||
|
&url,
|
||||||
|
state.config.maven.proxy_timeout,
|
||||||
|
proxy.auth(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
{
|
||||||
Ok(data) => {
|
Ok(data) => {
|
||||||
state.metrics.record_download("maven");
|
state.metrics.record_download("maven");
|
||||||
state.metrics.record_cache_miss();
|
state.metrics.record_cache_miss();
|
||||||
@@ -58,6 +70,9 @@ async fn download(State(state): State<Arc<AppState>>, Path(path): Path<String>)
|
|||||||
"maven",
|
"maven",
|
||||||
"PROXY",
|
"PROXY",
|
||||||
));
|
));
|
||||||
|
state
|
||||||
|
.audit
|
||||||
|
.log(AuditEntry::new("proxy_fetch", "api", "", "maven", ""));
|
||||||
|
|
||||||
let storage = state.storage.clone();
|
let storage = state.storage.clone();
|
||||||
let key_clone = key.clone();
|
let key_clone = key.clone();
|
||||||
@@ -103,6 +118,9 @@ async fn upload(
|
|||||||
"maven",
|
"maven",
|
||||||
"LOCAL",
|
"LOCAL",
|
||||||
));
|
));
|
||||||
|
state
|
||||||
|
.audit
|
||||||
|
.log(AuditEntry::new("push", "api", "", "maven", ""));
|
||||||
state.repo_index.invalidate("maven");
|
state.repo_index.invalidate("maven");
|
||||||
StatusCode::CREATED
|
StatusCode::CREATED
|
||||||
}
|
}
|
||||||
@@ -114,13 +132,13 @@ async fn fetch_from_proxy(
|
|||||||
client: &reqwest::Client,
|
client: &reqwest::Client,
|
||||||
url: &str,
|
url: &str,
|
||||||
timeout_secs: u64,
|
timeout_secs: u64,
|
||||||
|
auth: Option<&str>,
|
||||||
) -> Result<Vec<u8>, ()> {
|
) -> Result<Vec<u8>, ()> {
|
||||||
let response = client
|
let mut request = client.get(url).timeout(Duration::from_secs(timeout_secs));
|
||||||
.get(url)
|
if let Some(credentials) = auth {
|
||||||
.timeout(Duration::from_secs(timeout_secs))
|
request = request.header("Authorization", basic_auth_header(credentials));
|
||||||
.send()
|
}
|
||||||
.await
|
let response = request.send().await.map_err(|_| ())?;
|
||||||
.map_err(|_| ())?;
|
|
||||||
|
|
||||||
if !response.status().is_success() {
|
if !response.status().is_success() {
|
||||||
return Err(());
|
return Err(());
|
||||||
|
|||||||
@@ -2,27 +2,72 @@
|
|||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
use crate::activity_log::{ActionType, ActivityEntry};
|
use crate::activity_log::{ActionType, ActivityEntry};
|
||||||
|
use crate::audit::AuditEntry;
|
||||||
|
use crate::config::basic_auth_header;
|
||||||
use crate::AppState;
|
use crate::AppState;
|
||||||
use axum::{
|
use axum::{
|
||||||
body::Bytes,
|
body::Bytes,
|
||||||
extract::{Path, State},
|
extract::{Path, State},
|
||||||
http::{header, StatusCode},
|
http::{header, StatusCode},
|
||||||
response::{IntoResponse, Response},
|
response::{IntoResponse, Response},
|
||||||
routing::get,
|
routing::{get, put},
|
||||||
Router,
|
Router,
|
||||||
};
|
};
|
||||||
|
use base64::Engine;
|
||||||
|
use sha2::Digest;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::time::Duration;
|
use std::time::Duration;
|
||||||
|
|
||||||
pub fn routes() -> Router<Arc<AppState>> {
|
pub fn routes() -> Router<Arc<AppState>> {
|
||||||
Router::new().route("/npm/{*path}", get(handle_request))
|
Router::new()
|
||||||
|
.route("/npm/{*path}", get(handle_request))
|
||||||
|
.route("/npm/{*path}", put(handle_publish))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build NORA base URL from config (for URL rewriting)
|
||||||
|
fn nora_base_url(state: &AppState) -> String {
|
||||||
|
state.config.server.public_url.clone().unwrap_or_else(|| {
|
||||||
|
format!(
|
||||||
|
"http://{}:{}",
|
||||||
|
state.config.server.host, state.config.server.port
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Rewrite tarball URLs in npm metadata to point to NORA.
|
||||||
|
///
|
||||||
|
/// Replaces upstream registry URLs (e.g. `https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz`)
|
||||||
|
/// with NORA URLs (e.g. `http://nora:5000/npm/lodash/-/lodash-4.17.21.tgz`).
|
||||||
|
fn rewrite_tarball_urls(data: &[u8], nora_base: &str, upstream_url: &str) -> Result<Vec<u8>, ()> {
|
||||||
|
let mut json: serde_json::Value = serde_json::from_slice(data).map_err(|_| ())?;
|
||||||
|
|
||||||
|
let upstream_trimmed = upstream_url.trim_end_matches('/');
|
||||||
|
let nora_npm_base = format!("{}/npm", nora_base.trim_end_matches('/'));
|
||||||
|
|
||||||
|
if let Some(versions) = json.get_mut("versions").and_then(|v| v.as_object_mut()) {
|
||||||
|
for (_ver, version_data) in versions.iter_mut() {
|
||||||
|
if let Some(tarball_url) = version_data
|
||||||
|
.get("dist")
|
||||||
|
.and_then(|d| d.get("tarball"))
|
||||||
|
.and_then(|t| t.as_str())
|
||||||
|
.map(|s| s.to_string())
|
||||||
|
{
|
||||||
|
let rewritten = tarball_url.replace(upstream_trimmed, &nora_npm_base);
|
||||||
|
if let Some(dist) = version_data.get_mut("dist") {
|
||||||
|
dist["tarball"] = serde_json::Value::String(rewritten);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
serde_json::to_vec(&json).map_err(|_| ())
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn handle_request(State(state): State<Arc<AppState>>, Path(path): Path<String>) -> Response {
|
async fn handle_request(State(state): State<Arc<AppState>>, Path(path): Path<String>) -> Response {
|
||||||
let is_tarball = path.contains("/-/");
|
let is_tarball = path.contains("/-/");
|
||||||
|
|
||||||
let key = if is_tarball {
|
let key = if is_tarball {
|
||||||
let parts: Vec<&str> = path.split("/-/").collect();
|
let parts: Vec<&str> = path.splitn(2, "/-/").collect();
|
||||||
if parts.len() == 2 {
|
if parts.len() == 2 {
|
||||||
format!("npm/{}/tarballs/{}", parts[0], parts[1])
|
format!("npm/{}/tarballs/{}", parts[0], parts[1])
|
||||||
} else {
|
} else {
|
||||||
@@ -38,27 +83,83 @@ async fn handle_request(State(state): State<Arc<AppState>>, Path(path): Path<Str
|
|||||||
path.clone()
|
path.clone()
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// --- Cache hit path ---
|
||||||
if let Ok(data) = state.storage.get(&key).await {
|
if let Ok(data) = state.storage.get(&key).await {
|
||||||
if is_tarball {
|
// Metadata TTL: if stale, try to refetch from upstream
|
||||||
state.metrics.record_download("npm");
|
if !is_tarball {
|
||||||
state.metrics.record_cache_hit();
|
let ttl = state.config.npm.metadata_ttl;
|
||||||
state.activity.push(ActivityEntry::new(
|
if ttl > 0 {
|
||||||
ActionType::CacheHit,
|
if let Some(meta) = state.storage.stat(&key).await {
|
||||||
package_name,
|
let now = std::time::SystemTime::now()
|
||||||
"npm",
|
.duration_since(std::time::UNIX_EPOCH)
|
||||||
"CACHE",
|
.map(|d| d.as_secs())
|
||||||
));
|
.unwrap_or(0);
|
||||||
|
if now.saturating_sub(meta.modified) > ttl {
|
||||||
|
if let Some(fresh) = refetch_metadata(&state, &path, &key).await {
|
||||||
|
return with_content_type(false, fresh.into()).into_response();
|
||||||
|
}
|
||||||
|
// Upstream failed — serve stale cache
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return with_content_type(false, data).into_response();
|
||||||
}
|
}
|
||||||
return with_content_type(is_tarball, data).into_response();
|
|
||||||
|
// Tarball: integrity check if hash exists
|
||||||
|
let hash_key = format!("{}.sha256", key);
|
||||||
|
if let Ok(stored_hash) = state.storage.get(&hash_key).await {
|
||||||
|
let computed = format!("{:x}", sha2::Sha256::digest(&data));
|
||||||
|
let expected = String::from_utf8_lossy(&stored_hash);
|
||||||
|
if computed != expected.as_ref() {
|
||||||
|
tracing::error!(
|
||||||
|
key = %key,
|
||||||
|
expected = %expected,
|
||||||
|
computed = %computed,
|
||||||
|
"SECURITY: npm tarball integrity check FAILED — possible tampering"
|
||||||
|
);
|
||||||
|
return (StatusCode::INTERNAL_SERVER_ERROR, "Integrity check failed")
|
||||||
|
.into_response();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
state.metrics.record_download("npm");
|
||||||
|
state.metrics.record_cache_hit();
|
||||||
|
state.activity.push(ActivityEntry::new(
|
||||||
|
ActionType::CacheHit,
|
||||||
|
package_name,
|
||||||
|
"npm",
|
||||||
|
"CACHE",
|
||||||
|
));
|
||||||
|
state
|
||||||
|
.audit
|
||||||
|
.log(AuditEntry::new("cache_hit", "api", "", "npm", ""));
|
||||||
|
return with_content_type(true, data).into_response();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// --- Proxy fetch path ---
|
||||||
if let Some(proxy_url) = &state.config.npm.proxy {
|
if let Some(proxy_url) = &state.config.npm.proxy {
|
||||||
let url = format!("{}/{}", proxy_url.trim_end_matches('/'), path);
|
let url = format!("{}/{}", proxy_url.trim_end_matches('/'), path);
|
||||||
|
|
||||||
if let Ok(data) =
|
if let Ok(data) = fetch_from_proxy(
|
||||||
fetch_from_proxy(&state.http_client, &url, state.config.npm.proxy_timeout).await
|
&state.http_client,
|
||||||
|
&url,
|
||||||
|
state.config.npm.proxy_timeout,
|
||||||
|
state.config.npm.proxy_auth.as_deref(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
{
|
{
|
||||||
|
let data_to_cache;
|
||||||
|
let data_to_serve;
|
||||||
|
|
||||||
if is_tarball {
|
if is_tarball {
|
||||||
|
// Compute and store sha256
|
||||||
|
let hash = format!("{:x}", sha2::Sha256::digest(&data));
|
||||||
|
let hash_key = format!("{}.sha256", key);
|
||||||
|
let storage = state.storage.clone();
|
||||||
|
tokio::spawn(async move {
|
||||||
|
let _ = storage.put(&hash_key, hash.as_bytes()).await;
|
||||||
|
});
|
||||||
|
|
||||||
state.metrics.record_download("npm");
|
state.metrics.record_download("npm");
|
||||||
state.metrics.record_cache_miss();
|
state.metrics.record_cache_miss();
|
||||||
state.activity.push(ActivityEntry::new(
|
state.activity.push(ActivityEntry::new(
|
||||||
@@ -67,37 +168,268 @@ async fn handle_request(State(state): State<Arc<AppState>>, Path(path): Path<Str
|
|||||||
"npm",
|
"npm",
|
||||||
"PROXY",
|
"PROXY",
|
||||||
));
|
));
|
||||||
|
state
|
||||||
|
.audit
|
||||||
|
.log(AuditEntry::new("proxy_fetch", "api", "", "npm", ""));
|
||||||
|
|
||||||
|
data_to_cache = data.clone();
|
||||||
|
data_to_serve = data;
|
||||||
|
} else {
|
||||||
|
// Metadata: rewrite tarball URLs to point to NORA
|
||||||
|
let nora_base = nora_base_url(&state);
|
||||||
|
let rewritten = rewrite_tarball_urls(&data, &nora_base, proxy_url)
|
||||||
|
.unwrap_or_else(|_| data.clone());
|
||||||
|
|
||||||
|
data_to_cache = rewritten.clone();
|
||||||
|
data_to_serve = rewritten;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Cache in background
|
||||||
let storage = state.storage.clone();
|
let storage = state.storage.clone();
|
||||||
let key_clone = key.clone();
|
let key_clone = key.clone();
|
||||||
let data_clone = data.clone();
|
|
||||||
tokio::spawn(async move {
|
tokio::spawn(async move {
|
||||||
let _ = storage.put(&key_clone, &data_clone).await;
|
let _ = storage.put(&key_clone, &data_to_cache).await;
|
||||||
});
|
});
|
||||||
|
|
||||||
if is_tarball {
|
if is_tarball {
|
||||||
state.repo_index.invalidate("npm");
|
state.repo_index.invalidate("npm");
|
||||||
}
|
}
|
||||||
|
|
||||||
return with_content_type(is_tarball, data.into()).into_response();
|
return with_content_type(is_tarball, data_to_serve.into()).into_response();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
StatusCode::NOT_FOUND.into_response()
|
StatusCode::NOT_FOUND.into_response()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Refetch metadata from upstream, rewrite URLs, update cache.
|
||||||
|
/// Returns None if upstream is unavailable (caller serves stale cache).
|
||||||
|
async fn refetch_metadata(state: &Arc<AppState>, path: &str, key: &str) -> Option<Vec<u8>> {
|
||||||
|
let proxy_url = state.config.npm.proxy.as_ref()?;
|
||||||
|
let url = format!("{}/{}", proxy_url.trim_end_matches('/'), path);
|
||||||
|
|
||||||
|
let data = fetch_from_proxy(
|
||||||
|
&state.http_client,
|
||||||
|
&url,
|
||||||
|
state.config.npm.proxy_timeout,
|
||||||
|
state.config.npm.proxy_auth.as_deref(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.ok()?;
|
||||||
|
|
||||||
|
let nora_base = nora_base_url(state);
|
||||||
|
let rewritten =
|
||||||
|
rewrite_tarball_urls(&data, &nora_base, proxy_url).unwrap_or_else(|_| data.clone());
|
||||||
|
|
||||||
|
let storage = state.storage.clone();
|
||||||
|
let key_clone = key.to_string();
|
||||||
|
let cache_data = rewritten.clone();
|
||||||
|
tokio::spawn(async move {
|
||||||
|
let _ = storage.put(&key_clone, &cache_data).await;
|
||||||
|
});
|
||||||
|
|
||||||
|
Some(rewritten)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// npm publish
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
/// Validate attachment filename: only safe characters, no path traversal.
|
||||||
|
fn is_valid_attachment_name(name: &str) -> bool {
|
||||||
|
!name.is_empty()
|
||||||
|
&& !name.contains("..")
|
||||||
|
&& !name.contains('/')
|
||||||
|
&& !name.contains('\\')
|
||||||
|
&& !name.contains('\0')
|
||||||
|
&& name
|
||||||
|
.chars()
|
||||||
|
.all(|c| c.is_ascii_alphanumeric() || matches!(c, '.' | '-' | '_' | '@'))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn handle_publish(
|
||||||
|
State(state): State<Arc<AppState>>,
|
||||||
|
Path(path): Path<String>,
|
||||||
|
body: Bytes,
|
||||||
|
) -> Response {
|
||||||
|
let package_name = path;
|
||||||
|
|
||||||
|
let payload: serde_json::Value = match serde_json::from_slice(&body) {
|
||||||
|
Ok(v) => v,
|
||||||
|
Err(e) => return (StatusCode::BAD_REQUEST, format!("Invalid JSON: {}", e)).into_response(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Security: verify payload name matches URL path
|
||||||
|
if let Some(payload_name) = payload.get("name").and_then(|n| n.as_str()) {
|
||||||
|
if payload_name != package_name {
|
||||||
|
tracing::warn!(
|
||||||
|
url_name = %package_name,
|
||||||
|
payload_name = %payload_name,
|
||||||
|
"SECURITY: npm publish name mismatch — possible spoofing attempt"
|
||||||
|
);
|
||||||
|
return (
|
||||||
|
StatusCode::BAD_REQUEST,
|
||||||
|
"Package name in URL does not match payload",
|
||||||
|
)
|
||||||
|
.into_response();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let attachments = match payload.get("_attachments").and_then(|a| a.as_object()) {
|
||||||
|
Some(a) => a,
|
||||||
|
None => return (StatusCode::BAD_REQUEST, "Missing _attachments").into_response(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let new_versions = match payload.get("versions").and_then(|v| v.as_object()) {
|
||||||
|
Some(v) => v,
|
||||||
|
None => return (StatusCode::BAD_REQUEST, "Missing versions").into_response(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Load or create metadata
|
||||||
|
let metadata_key = format!("npm/{}/metadata.json", package_name);
|
||||||
|
let mut metadata = if let Ok(existing) = state.storage.get(&metadata_key).await {
|
||||||
|
serde_json::from_slice::<serde_json::Value>(&existing)
|
||||||
|
.unwrap_or_else(|_| serde_json::json!({}))
|
||||||
|
} else {
|
||||||
|
serde_json::json!({})
|
||||||
|
};
|
||||||
|
|
||||||
|
// Version immutability
|
||||||
|
if let Some(existing_versions) = metadata.get("versions").and_then(|v| v.as_object()) {
|
||||||
|
for ver in new_versions.keys() {
|
||||||
|
if existing_versions.contains_key(ver) {
|
||||||
|
return (
|
||||||
|
StatusCode::CONFLICT,
|
||||||
|
format!("Version {} already exists", ver),
|
||||||
|
)
|
||||||
|
.into_response();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store tarballs
|
||||||
|
for (filename, attachment_data) in attachments {
|
||||||
|
if !is_valid_attachment_name(filename) {
|
||||||
|
tracing::warn!(
|
||||||
|
filename = %filename,
|
||||||
|
package = %package_name,
|
||||||
|
"SECURITY: npm publish rejected — invalid attachment filename"
|
||||||
|
);
|
||||||
|
return (StatusCode::BAD_REQUEST, "Invalid attachment filename").into_response();
|
||||||
|
}
|
||||||
|
|
||||||
|
let base64_data = match attachment_data.get("data").and_then(|d| d.as_str()) {
|
||||||
|
Some(d) => d,
|
||||||
|
None => continue,
|
||||||
|
};
|
||||||
|
|
||||||
|
let tarball_bytes = match base64::engine::general_purpose::STANDARD.decode(base64_data) {
|
||||||
|
Ok(b) => b,
|
||||||
|
Err(_) => {
|
||||||
|
return (StatusCode::BAD_REQUEST, "Invalid base64 in attachment").into_response()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let tarball_key = format!("npm/{}/tarballs/{}", package_name, filename);
|
||||||
|
if state
|
||||||
|
.storage
|
||||||
|
.put(&tarball_key, &tarball_bytes)
|
||||||
|
.await
|
||||||
|
.is_err()
|
||||||
|
{
|
||||||
|
return StatusCode::INTERNAL_SERVER_ERROR.into_response();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store sha256
|
||||||
|
let hash = format!("{:x}", sha2::Sha256::digest(&tarball_bytes));
|
||||||
|
let hash_key = format!("{}.sha256", tarball_key);
|
||||||
|
let _ = state.storage.put(&hash_key, hash.as_bytes()).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge versions
|
||||||
|
let meta_obj = metadata.as_object_mut().unwrap();
|
||||||
|
let stored_versions = meta_obj.entry("versions").or_insert(serde_json::json!({}));
|
||||||
|
if let Some(sv) = stored_versions.as_object_mut() {
|
||||||
|
for (ver, ver_data) in new_versions {
|
||||||
|
sv.insert(ver.clone(), ver_data.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy standard fields
|
||||||
|
for field in &["name", "_id", "description", "readme", "license"] {
|
||||||
|
if let Some(val) = payload.get(*field) {
|
||||||
|
meta_obj.insert(field.to_string(), val.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge dist-tags
|
||||||
|
if let Some(new_dist_tags) = payload.get("dist-tags").and_then(|d| d.as_object()) {
|
||||||
|
let stored_dist_tags = meta_obj.entry("dist-tags").or_insert(serde_json::json!({}));
|
||||||
|
if let Some(sdt) = stored_dist_tags.as_object_mut() {
|
||||||
|
for (tag, ver) in new_dist_tags {
|
||||||
|
sdt.insert(tag.clone(), ver.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rewrite tarball URLs for published packages
|
||||||
|
let nora_base = nora_base_url(&state);
|
||||||
|
if let Some(versions) = metadata.get_mut("versions").and_then(|v| v.as_object_mut()) {
|
||||||
|
for (ver, ver_data) in versions.iter_mut() {
|
||||||
|
if let Some(dist) = ver_data.get_mut("dist") {
|
||||||
|
let short_name = package_name.split('/').next_back().unwrap_or(&package_name);
|
||||||
|
let tarball_url = format!(
|
||||||
|
"{}/npm/{}/-/{}-{}.tgz",
|
||||||
|
nora_base.trim_end_matches('/'),
|
||||||
|
package_name,
|
||||||
|
short_name,
|
||||||
|
ver
|
||||||
|
);
|
||||||
|
dist["tarball"] = serde_json::Value::String(tarball_url);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store metadata
|
||||||
|
match serde_json::to_vec(&metadata) {
|
||||||
|
Ok(bytes) => {
|
||||||
|
if state.storage.put(&metadata_key, &bytes).await.is_err() {
|
||||||
|
return StatusCode::INTERNAL_SERVER_ERROR.into_response();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(_) => return StatusCode::INTERNAL_SERVER_ERROR.into_response(),
|
||||||
|
}
|
||||||
|
|
||||||
|
state.metrics.record_upload("npm");
|
||||||
|
state.activity.push(ActivityEntry::new(
|
||||||
|
ActionType::Push,
|
||||||
|
package_name,
|
||||||
|
"npm",
|
||||||
|
"LOCAL",
|
||||||
|
));
|
||||||
|
state
|
||||||
|
.audit
|
||||||
|
.log(AuditEntry::new("push", "api", "", "npm", ""));
|
||||||
|
state.repo_index.invalidate("npm");
|
||||||
|
|
||||||
|
StatusCode::CREATED.into_response()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Helpers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
async fn fetch_from_proxy(
|
async fn fetch_from_proxy(
|
||||||
client: &reqwest::Client,
|
client: &reqwest::Client,
|
||||||
url: &str,
|
url: &str,
|
||||||
timeout_secs: u64,
|
timeout_secs: u64,
|
||||||
|
auth: Option<&str>,
|
||||||
) -> Result<Vec<u8>, ()> {
|
) -> Result<Vec<u8>, ()> {
|
||||||
let response = client
|
let mut request = client.get(url).timeout(Duration::from_secs(timeout_secs));
|
||||||
.get(url)
|
if let Some(credentials) = auth {
|
||||||
.timeout(Duration::from_secs(timeout_secs))
|
request = request.header("Authorization", basic_auth_header(credentials));
|
||||||
.send()
|
}
|
||||||
.await
|
let response = request.send().await.map_err(|_| ())?;
|
||||||
.map_err(|_| ())?;
|
|
||||||
|
|
||||||
if !response.status().is_success() {
|
if !response.status().is_success() {
|
||||||
return Err(());
|
return Err(());
|
||||||
@@ -118,3 +450,129 @@ fn with_content_type(
|
|||||||
|
|
||||||
(StatusCode::OK, [(header::CONTENT_TYPE, content_type)], data)
|
(StatusCode::OK, [(header::CONTENT_TYPE, content_type)], data)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_rewrite_tarball_urls_regular_package() {
|
||||||
|
let metadata = serde_json::json!({
|
||||||
|
"name": "lodash",
|
||||||
|
"versions": {
|
||||||
|
"4.17.21": {
|
||||||
|
"dist": {
|
||||||
|
"tarball": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
|
||||||
|
"shasum": "abc123"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
let data = serde_json::to_vec(&metadata).unwrap();
|
||||||
|
let result =
|
||||||
|
rewrite_tarball_urls(&data, "http://nora:5000", "https://registry.npmjs.org").unwrap();
|
||||||
|
let json: serde_json::Value = serde_json::from_slice(&result).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
json["versions"]["4.17.21"]["dist"]["tarball"],
|
||||||
|
"http://nora:5000/npm/lodash/-/lodash-4.17.21.tgz"
|
||||||
|
);
|
||||||
|
assert_eq!(json["versions"]["4.17.21"]["dist"]["shasum"], "abc123");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_rewrite_tarball_urls_scoped_package() {
|
||||||
|
let metadata = serde_json::json!({
|
||||||
|
"name": "@babel/core",
|
||||||
|
"versions": {
|
||||||
|
"7.26.0": {
|
||||||
|
"dist": {
|
||||||
|
"tarball": "https://registry.npmjs.org/@babel/core/-/core-7.26.0.tgz",
|
||||||
|
"integrity": "sha512-test"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
let data = serde_json::to_vec(&metadata).unwrap();
|
||||||
|
let result =
|
||||||
|
rewrite_tarball_urls(&data, "http://nora:5000", "https://registry.npmjs.org").unwrap();
|
||||||
|
let json: serde_json::Value = serde_json::from_slice(&result).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
json["versions"]["7.26.0"]["dist"]["tarball"],
|
||||||
|
"http://nora:5000/npm/@babel/core/-/core-7.26.0.tgz"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_rewrite_tarball_urls_multiple_versions() {
|
||||||
|
let metadata = serde_json::json!({
|
||||||
|
"name": "express",
|
||||||
|
"versions": {
|
||||||
|
"4.18.2": { "dist": { "tarball": "https://registry.npmjs.org/express/-/express-4.18.2.tgz" } },
|
||||||
|
"4.19.0": { "dist": { "tarball": "https://registry.npmjs.org/express/-/express-4.19.0.tgz" } }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
let data = serde_json::to_vec(&metadata).unwrap();
|
||||||
|
let result = rewrite_tarball_urls(
|
||||||
|
&data,
|
||||||
|
"https://demo.getnora.io",
|
||||||
|
"https://registry.npmjs.org",
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
let json: serde_json::Value = serde_json::from_slice(&result).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
json["versions"]["4.18.2"]["dist"]["tarball"],
|
||||||
|
"https://demo.getnora.io/npm/express/-/express-4.18.2.tgz"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
json["versions"]["4.19.0"]["dist"]["tarball"],
|
||||||
|
"https://demo.getnora.io/npm/express/-/express-4.19.0.tgz"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_rewrite_tarball_urls_no_versions() {
|
||||||
|
let metadata = serde_json::json!({ "name": "empty-pkg" });
|
||||||
|
let data = serde_json::to_vec(&metadata).unwrap();
|
||||||
|
let result =
|
||||||
|
rewrite_tarball_urls(&data, "http://nora:5000", "https://registry.npmjs.org").unwrap();
|
||||||
|
let json: serde_json::Value = serde_json::from_slice(&result).unwrap();
|
||||||
|
assert_eq!(json["name"], "empty-pkg");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_rewrite_invalid_json() {
|
||||||
|
assert!(rewrite_tarball_urls(
|
||||||
|
b"not json",
|
||||||
|
"http://nora:5000",
|
||||||
|
"https://registry.npmjs.org"
|
||||||
|
)
|
||||||
|
.is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_valid_attachment_names() {
|
||||||
|
assert!(is_valid_attachment_name("lodash-4.17.21.tgz"));
|
||||||
|
assert!(is_valid_attachment_name("core-7.26.0.tgz"));
|
||||||
|
assert!(is_valid_attachment_name("my_package-1.0.0.tgz"));
|
||||||
|
assert!(is_valid_attachment_name("@scope-pkg-1.0.0.tgz"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_path_traversal_attachment_names() {
|
||||||
|
assert!(!is_valid_attachment_name("../../etc/passwd"));
|
||||||
|
assert!(!is_valid_attachment_name(
|
||||||
|
"../docker/nginx/manifests/latest.json"
|
||||||
|
));
|
||||||
|
assert!(!is_valid_attachment_name("foo/bar.tgz"));
|
||||||
|
assert!(!is_valid_attachment_name("foo\\bar.tgz"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_empty_and_null_attachment_names() {
|
||||||
|
assert!(!is_valid_attachment_name(""));
|
||||||
|
assert!(!is_valid_attachment_name("foo\0bar.tgz"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -2,6 +2,8 @@
|
|||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
use crate::activity_log::{ActionType, ActivityEntry};
|
use crate::activity_log::{ActionType, ActivityEntry};
|
||||||
|
use crate::audit::AuditEntry;
|
||||||
|
use crate::config::basic_auth_header;
|
||||||
use crate::AppState;
|
use crate::AppState;
|
||||||
use axum::{
|
use axum::{
|
||||||
extract::{Path, State},
|
extract::{Path, State},
|
||||||
@@ -85,8 +87,13 @@ async fn package_versions(
|
|||||||
if let Some(proxy_url) = &state.config.pypi.proxy {
|
if let Some(proxy_url) = &state.config.pypi.proxy {
|
||||||
let url = format!("{}/{}/", proxy_url.trim_end_matches('/'), normalized);
|
let url = format!("{}/{}/", proxy_url.trim_end_matches('/'), normalized);
|
||||||
|
|
||||||
if let Ok(html) =
|
if let Ok(html) = fetch_package_page(
|
||||||
fetch_package_page(&state.http_client, &url, state.config.pypi.proxy_timeout).await
|
&state.http_client,
|
||||||
|
&url,
|
||||||
|
state.config.pypi.proxy_timeout,
|
||||||
|
state.config.pypi.proxy_auth.as_deref(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
{
|
{
|
||||||
// Rewrite URLs in the HTML to point to our registry
|
// Rewrite URLs in the HTML to point to our registry
|
||||||
let rewritten = rewrite_pypi_links(&html, &normalized);
|
let rewritten = rewrite_pypi_links(&html, &normalized);
|
||||||
@@ -115,6 +122,9 @@ async fn download_file(
|
|||||||
"pypi",
|
"pypi",
|
||||||
"CACHE",
|
"CACHE",
|
||||||
));
|
));
|
||||||
|
state
|
||||||
|
.audit
|
||||||
|
.log(AuditEntry::new("cache_hit", "api", "", "pypi", ""));
|
||||||
|
|
||||||
let content_type = if filename.ends_with(".whl") {
|
let content_type = if filename.ends_with(".whl") {
|
||||||
"application/zip"
|
"application/zip"
|
||||||
@@ -136,6 +146,7 @@ async fn download_file(
|
|||||||
&state.http_client,
|
&state.http_client,
|
||||||
&page_url,
|
&page_url,
|
||||||
state.config.pypi.proxy_timeout,
|
state.config.pypi.proxy_timeout,
|
||||||
|
state.config.pypi.proxy_auth.as_deref(),
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
@@ -145,6 +156,7 @@ async fn download_file(
|
|||||||
&state.http_client,
|
&state.http_client,
|
||||||
&file_url,
|
&file_url,
|
||||||
state.config.pypi.proxy_timeout,
|
state.config.pypi.proxy_timeout,
|
||||||
|
state.config.pypi.proxy_auth.as_deref(),
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
@@ -156,6 +168,9 @@ async fn download_file(
|
|||||||
"pypi",
|
"pypi",
|
||||||
"PROXY",
|
"PROXY",
|
||||||
));
|
));
|
||||||
|
state
|
||||||
|
.audit
|
||||||
|
.log(AuditEntry::new("proxy_fetch", "api", "", "pypi", ""));
|
||||||
|
|
||||||
// Cache in local storage
|
// Cache in local storage
|
||||||
let storage = state.storage.clone();
|
let storage = state.storage.clone();
|
||||||
@@ -195,14 +210,16 @@ async fn fetch_package_page(
|
|||||||
client: &reqwest::Client,
|
client: &reqwest::Client,
|
||||||
url: &str,
|
url: &str,
|
||||||
timeout_secs: u64,
|
timeout_secs: u64,
|
||||||
|
auth: Option<&str>,
|
||||||
) -> Result<String, ()> {
|
) -> Result<String, ()> {
|
||||||
let response = client
|
let mut request = client
|
||||||
.get(url)
|
.get(url)
|
||||||
.timeout(Duration::from_secs(timeout_secs))
|
.timeout(Duration::from_secs(timeout_secs))
|
||||||
.header("Accept", "text/html")
|
.header("Accept", "text/html");
|
||||||
.send()
|
if let Some(credentials) = auth {
|
||||||
.await
|
request = request.header("Authorization", basic_auth_header(credentials));
|
||||||
.map_err(|_| ())?;
|
}
|
||||||
|
let response = request.send().await.map_err(|_| ())?;
|
||||||
|
|
||||||
if !response.status().is_success() {
|
if !response.status().is_success() {
|
||||||
return Err(());
|
return Err(());
|
||||||
@@ -212,13 +229,17 @@ async fn fetch_package_page(
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Fetch file from upstream
|
/// Fetch file from upstream
|
||||||
async fn fetch_file(client: &reqwest::Client, url: &str, timeout_secs: u64) -> Result<Vec<u8>, ()> {
|
async fn fetch_file(
|
||||||
let response = client
|
client: &reqwest::Client,
|
||||||
.get(url)
|
url: &str,
|
||||||
.timeout(Duration::from_secs(timeout_secs))
|
timeout_secs: u64,
|
||||||
.send()
|
auth: Option<&str>,
|
||||||
.await
|
) -> Result<Vec<u8>, ()> {
|
||||||
.map_err(|_| ())?;
|
let mut request = client.get(url).timeout(Duration::from_secs(timeout_secs));
|
||||||
|
if let Some(credentials) = auth {
|
||||||
|
request = request.header("Authorization", basic_auth_header(credentials));
|
||||||
|
}
|
||||||
|
let response = request.send().await.map_err(|_| ())?;
|
||||||
|
|
||||||
if !response.status().is_success() {
|
if !response.status().is_success() {
|
||||||
return Err(());
|
return Err(());
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
use crate::activity_log::{ActionType, ActivityEntry};
|
use crate::activity_log::{ActionType, ActivityEntry};
|
||||||
|
use crate::audit::AuditEntry;
|
||||||
use crate::AppState;
|
use crate::AppState;
|
||||||
use axum::{
|
use axum::{
|
||||||
body::Bytes,
|
body::Bytes,
|
||||||
@@ -35,6 +36,9 @@ async fn download(State(state): State<Arc<AppState>>, Path(path): Path<String>)
|
|||||||
state
|
state
|
||||||
.activity
|
.activity
|
||||||
.push(ActivityEntry::new(ActionType::Pull, path, "raw", "LOCAL"));
|
.push(ActivityEntry::new(ActionType::Pull, path, "raw", "LOCAL"));
|
||||||
|
state
|
||||||
|
.audit
|
||||||
|
.log(AuditEntry::new("pull", "api", "", "raw", ""));
|
||||||
|
|
||||||
// Guess content type from extension
|
// Guess content type from extension
|
||||||
let content_type = guess_content_type(&key);
|
let content_type = guess_content_type(&key);
|
||||||
@@ -72,6 +76,9 @@ async fn upload(
|
|||||||
state
|
state
|
||||||
.activity
|
.activity
|
||||||
.push(ActivityEntry::new(ActionType::Push, path, "raw", "LOCAL"));
|
.push(ActivityEntry::new(ActionType::Push, path, "raw", "LOCAL"));
|
||||||
|
state
|
||||||
|
.audit
|
||||||
|
.log(AuditEntry::new("push", "api", "", "raw", ""));
|
||||||
StatusCode::CREATED.into_response()
|
StatusCode::CREATED.into_response()
|
||||||
}
|
}
|
||||||
Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
|
Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
|
||||||
|
|||||||
@@ -173,35 +173,41 @@ async fn build_docker_index(storage: &Storage) -> Vec<RepoInfo> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if let Some(rest) = key.strip_prefix("docker/") {
|
if let Some(rest) = key.strip_prefix("docker/") {
|
||||||
|
// Support both single-segment and namespaced images:
|
||||||
|
// docker/alpine/manifests/latest.json → name="alpine"
|
||||||
|
// docker/library/alpine/manifests/latest.json → name="library/alpine"
|
||||||
let parts: Vec<_> = rest.split('/').collect();
|
let parts: Vec<_> = rest.split('/').collect();
|
||||||
if parts.len() >= 3 && parts[1] == "manifests" && key.ends_with(".json") {
|
let manifest_pos = parts.iter().position(|&p| p == "manifests");
|
||||||
let name = parts[0].to_string();
|
if let Some(pos) = manifest_pos {
|
||||||
let entry = repos.entry(name).or_insert((0, 0, 0));
|
if pos >= 1 && key.ends_with(".json") {
|
||||||
entry.0 += 1;
|
let name = parts[..pos].join("/");
|
||||||
|
let entry = repos.entry(name).or_insert((0, 0, 0));
|
||||||
|
entry.0 += 1;
|
||||||
|
|
||||||
if let Ok(data) = storage.get(key).await {
|
if let Ok(data) = storage.get(key).await {
|
||||||
if let Ok(m) = serde_json::from_slice::<serde_json::Value>(&data) {
|
if let Ok(m) = serde_json::from_slice::<serde_json::Value>(&data) {
|
||||||
let cfg = m
|
let cfg = m
|
||||||
.get("config")
|
.get("config")
|
||||||
.and_then(|c| c.get("size"))
|
.and_then(|c| c.get("size"))
|
||||||
.and_then(|s| s.as_u64())
|
.and_then(|s| s.as_u64())
|
||||||
.unwrap_or(0);
|
.unwrap_or(0);
|
||||||
let layers: u64 = m
|
let layers: u64 = m
|
||||||
.get("layers")
|
.get("layers")
|
||||||
.and_then(|l| l.as_array())
|
.and_then(|l| l.as_array())
|
||||||
.map(|arr| {
|
.map(|arr| {
|
||||||
arr.iter()
|
arr.iter()
|
||||||
.filter_map(|l| l.get("size").and_then(|s| s.as_u64()))
|
.filter_map(|l| l.get("size").and_then(|s| s.as_u64()))
|
||||||
.sum()
|
.sum()
|
||||||
})
|
})
|
||||||
.unwrap_or(0);
|
.unwrap_or(0);
|
||||||
entry.1 += cfg + layers;
|
entry.1 += cfg + layers;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(meta) = storage.stat(key).await {
|
if let Some(meta) = storage.stat(key).await {
|
||||||
if meta.modified > entry.2 {
|
if meta.modified > entry.2 {
|
||||||
entry.2 = meta.modified;
|
entry.2 = meta.modified;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -244,10 +250,16 @@ async fn build_npm_index(storage: &Storage) -> Vec<RepoInfo> {
|
|||||||
for key in &keys {
|
for key in &keys {
|
||||||
if let Some(rest) = key.strip_prefix("npm/") {
|
if let Some(rest) = key.strip_prefix("npm/") {
|
||||||
// Pattern: npm/{package}/tarballs/{file}.tgz
|
// Pattern: npm/{package}/tarballs/{file}.tgz
|
||||||
|
// Scoped: npm/@scope/package/tarballs/{file}.tgz
|
||||||
if rest.contains("/tarballs/") && key.ends_with(".tgz") {
|
if rest.contains("/tarballs/") && key.ends_with(".tgz") {
|
||||||
let parts: Vec<_> = rest.split('/').collect();
|
let parts: Vec<_> = rest.split('/').collect();
|
||||||
if !parts.is_empty() {
|
if !parts.is_empty() {
|
||||||
let name = parts[0].to_string();
|
// Scoped packages: @scope/package → parts[0]="@scope", parts[1]="package"
|
||||||
|
let name = if parts[0].starts_with('@') && parts.len() >= 4 {
|
||||||
|
format!("{}/{}", parts[0], parts[1])
|
||||||
|
} else {
|
||||||
|
parts[0].to_string()
|
||||||
|
};
|
||||||
let entry = packages.entry(name).or_insert((0, 0, 0));
|
let entry = packages.entry(name).or_insert((0, 0, 0));
|
||||||
entry.0 += 1;
|
entry.0 += 1;
|
||||||
|
|
||||||
|
|||||||
@@ -1,8 +1,6 @@
|
|||||||
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
#![allow(dead_code)] // Foundational code for future S3/Vault integration
|
|
||||||
|
|
||||||
//! Secrets management for NORA
|
//! Secrets management for NORA
|
||||||
//!
|
//!
|
||||||
//! Provides a trait-based architecture for secrets providers:
|
//! Provides a trait-based architecture for secrets providers:
|
||||||
@@ -34,6 +32,7 @@ use async_trait::async_trait;
|
|||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
|
#[allow(dead_code)] // Variants used by provider impls; external error handling planned for v0.4
|
||||||
/// Secrets provider error
|
/// Secrets provider error
|
||||||
#[derive(Debug, Error)]
|
#[derive(Debug, Error)]
|
||||||
pub enum SecretsError {
|
pub enum SecretsError {
|
||||||
@@ -56,9 +55,11 @@ pub enum SecretsError {
|
|||||||
#[async_trait]
|
#[async_trait]
|
||||||
pub trait SecretsProvider: Send + Sync {
|
pub trait SecretsProvider: Send + Sync {
|
||||||
/// Get a secret by key (required)
|
/// Get a secret by key (required)
|
||||||
|
#[allow(dead_code)]
|
||||||
async fn get_secret(&self, key: &str) -> Result<ProtectedString, SecretsError>;
|
async fn get_secret(&self, key: &str) -> Result<ProtectedString, SecretsError>;
|
||||||
|
|
||||||
/// Get a secret by key (optional, returns None if not found)
|
/// Get a secret by key (optional, returns None if not found)
|
||||||
|
#[allow(dead_code)]
|
||||||
async fn get_secret_optional(&self, key: &str) -> Option<ProtectedString> {
|
async fn get_secret_optional(&self, key: &str) -> Option<ProtectedString> {
|
||||||
self.get_secret(key).await.ok()
|
self.get_secret(key).await.ok()
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,12 +13,14 @@ use zeroize::{Zeroize, Zeroizing};
|
|||||||
/// - Implements Zeroize: memory is overwritten with zeros when dropped
|
/// - Implements Zeroize: memory is overwritten with zeros when dropped
|
||||||
/// - Debug shows `***REDACTED***` instead of actual value
|
/// - Debug shows `***REDACTED***` instead of actual value
|
||||||
/// - Clone creates a new protected copy
|
/// - Clone creates a new protected copy
|
||||||
|
#[allow(dead_code)] // Used internally by SecretsProvider impls; external callers planned for v0.4
|
||||||
#[derive(Clone, Zeroize)]
|
#[derive(Clone, Zeroize)]
|
||||||
#[zeroize(drop)]
|
#[zeroize(drop)]
|
||||||
pub struct ProtectedString {
|
pub struct ProtectedString {
|
||||||
inner: String,
|
inner: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
impl ProtectedString {
|
impl ProtectedString {
|
||||||
/// Create a new protected string
|
/// Create a new protected string
|
||||||
pub fn new(value: String) -> Self {
|
pub fn new(value: String) -> Self {
|
||||||
@@ -68,6 +70,7 @@ impl From<&str> for ProtectedString {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// S3 credentials with protected secrets
|
/// S3 credentials with protected secrets
|
||||||
|
#[allow(dead_code)] // S3 storage backend planned for v0.4
|
||||||
#[derive(Clone, Zeroize)]
|
#[derive(Clone, Zeroize)]
|
||||||
#[zeroize(drop)]
|
#[zeroize(drop)]
|
||||||
pub struct S3Credentials {
|
pub struct S3Credentials {
|
||||||
@@ -77,6 +80,7 @@ pub struct S3Credentials {
|
|||||||
pub region: Option<String>,
|
pub region: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
impl S3Credentials {
|
impl S3Credentials {
|
||||||
pub fn new(access_key_id: String, secret_access_key: String) -> Self {
|
pub fn new(access_key_id: String, secret_access_key: String) -> Self {
|
||||||
Self {
|
Self {
|
||||||
|
|||||||
@@ -11,6 +11,35 @@ use uuid::Uuid;
|
|||||||
|
|
||||||
const TOKEN_PREFIX: &str = "nra_";
|
const TOKEN_PREFIX: &str = "nra_";
|
||||||
|
|
||||||
|
/// Access role for API tokens
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||||
|
#[serde(rename_all = "lowercase")]
|
||||||
|
pub enum Role {
|
||||||
|
Read,
|
||||||
|
Write,
|
||||||
|
Admin,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for Role {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
match self {
|
||||||
|
Role::Read => write!(f, "read"),
|
||||||
|
Role::Write => write!(f, "write"),
|
||||||
|
Role::Admin => write!(f, "admin"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Role {
|
||||||
|
pub fn can_write(&self) -> bool {
|
||||||
|
matches!(self, Role::Write | Role::Admin)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn can_admin(&self) -> bool {
|
||||||
|
matches!(self, Role::Admin)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// API Token metadata stored on disk
|
/// API Token metadata stored on disk
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct TokenInfo {
|
pub struct TokenInfo {
|
||||||
@@ -20,6 +49,12 @@ pub struct TokenInfo {
|
|||||||
pub expires_at: u64,
|
pub expires_at: u64,
|
||||||
pub last_used: Option<u64>,
|
pub last_used: Option<u64>,
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
|
#[serde(default = "default_role")]
|
||||||
|
pub role: Role,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_role() -> Role {
|
||||||
|
Role::Read
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Token store for managing API tokens
|
/// Token store for managing API tokens
|
||||||
@@ -44,6 +79,7 @@ impl TokenStore {
|
|||||||
user: &str,
|
user: &str,
|
||||||
ttl_days: u64,
|
ttl_days: u64,
|
||||||
description: Option<String>,
|
description: Option<String>,
|
||||||
|
role: Role,
|
||||||
) -> Result<String, TokenError> {
|
) -> Result<String, TokenError> {
|
||||||
// Generate random token
|
// Generate random token
|
||||||
let raw_token = format!(
|
let raw_token = format!(
|
||||||
@@ -67,6 +103,7 @@ impl TokenStore {
|
|||||||
expires_at,
|
expires_at,
|
||||||
last_used: None,
|
last_used: None,
|
||||||
description,
|
description,
|
||||||
|
role,
|
||||||
};
|
};
|
||||||
|
|
||||||
// Save to file
|
// Save to file
|
||||||
@@ -81,7 +118,7 @@ impl TokenStore {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Verify a token and return user info if valid
|
/// Verify a token and return user info if valid
|
||||||
pub fn verify_token(&self, token: &str) -> Result<String, TokenError> {
|
pub fn verify_token(&self, token: &str) -> Result<(String, Role), TokenError> {
|
||||||
if !token.starts_with(TOKEN_PREFIX) {
|
if !token.starts_with(TOKEN_PREFIX) {
|
||||||
return Err(TokenError::InvalidFormat);
|
return Err(TokenError::InvalidFormat);
|
||||||
}
|
}
|
||||||
@@ -121,7 +158,7 @@ impl TokenStore {
|
|||||||
let _ = fs::write(&file_path, json);
|
let _ = fs::write(&file_path, json);
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(info.user)
|
Ok((info.user, info.role))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// List all tokens for a user
|
/// List all tokens for a user
|
||||||
@@ -210,7 +247,7 @@ mod tests {
|
|||||||
let store = TokenStore::new(temp_dir.path());
|
let store = TokenStore::new(temp_dir.path());
|
||||||
|
|
||||||
let token = store
|
let token = store
|
||||||
.create_token("testuser", 30, Some("Test token".to_string()))
|
.create_token("testuser", 30, Some("Test token".to_string()), Role::Write)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
assert!(token.starts_with("nra_"));
|
assert!(token.starts_with("nra_"));
|
||||||
@@ -222,10 +259,13 @@ mod tests {
|
|||||||
let temp_dir = TempDir::new().unwrap();
|
let temp_dir = TempDir::new().unwrap();
|
||||||
let store = TokenStore::new(temp_dir.path());
|
let store = TokenStore::new(temp_dir.path());
|
||||||
|
|
||||||
let token = store.create_token("testuser", 30, None).unwrap();
|
let token = store
|
||||||
let user = store.verify_token(&token).unwrap();
|
.create_token("testuser", 30, None, Role::Write)
|
||||||
|
.unwrap();
|
||||||
|
let (user, role) = store.verify_token(&token).unwrap();
|
||||||
|
|
||||||
assert_eq!(user, "testuser");
|
assert_eq!(user, "testuser");
|
||||||
|
assert_eq!(role, Role::Write);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -252,7 +292,9 @@ mod tests {
|
|||||||
let store = TokenStore::new(temp_dir.path());
|
let store = TokenStore::new(temp_dir.path());
|
||||||
|
|
||||||
// Create token and manually set it as expired
|
// Create token and manually set it as expired
|
||||||
let token = store.create_token("testuser", 1, None).unwrap();
|
let token = store
|
||||||
|
.create_token("testuser", 1, None, Role::Write)
|
||||||
|
.unwrap();
|
||||||
let token_hash = hash_token(&token);
|
let token_hash = hash_token(&token);
|
||||||
let file_path = temp_dir.path().join(format!("{}.json", &token_hash[..16]));
|
let file_path = temp_dir.path().join(format!("{}.json", &token_hash[..16]));
|
||||||
|
|
||||||
@@ -272,9 +314,9 @@ mod tests {
|
|||||||
let temp_dir = TempDir::new().unwrap();
|
let temp_dir = TempDir::new().unwrap();
|
||||||
let store = TokenStore::new(temp_dir.path());
|
let store = TokenStore::new(temp_dir.path());
|
||||||
|
|
||||||
store.create_token("user1", 30, None).unwrap();
|
store.create_token("user1", 30, None, Role::Write).unwrap();
|
||||||
store.create_token("user1", 30, None).unwrap();
|
store.create_token("user1", 30, None, Role::Write).unwrap();
|
||||||
store.create_token("user2", 30, None).unwrap();
|
store.create_token("user2", 30, None, Role::Read).unwrap();
|
||||||
|
|
||||||
let user1_tokens = store.list_tokens("user1");
|
let user1_tokens = store.list_tokens("user1");
|
||||||
assert_eq!(user1_tokens.len(), 2);
|
assert_eq!(user1_tokens.len(), 2);
|
||||||
@@ -291,7 +333,9 @@ mod tests {
|
|||||||
let temp_dir = TempDir::new().unwrap();
|
let temp_dir = TempDir::new().unwrap();
|
||||||
let store = TokenStore::new(temp_dir.path());
|
let store = TokenStore::new(temp_dir.path());
|
||||||
|
|
||||||
let token = store.create_token("testuser", 30, None).unwrap();
|
let token = store
|
||||||
|
.create_token("testuser", 30, None, Role::Write)
|
||||||
|
.unwrap();
|
||||||
let token_hash = hash_token(&token);
|
let token_hash = hash_token(&token);
|
||||||
let hash_prefix = &token_hash[..16];
|
let hash_prefix = &token_hash[..16];
|
||||||
|
|
||||||
@@ -320,9 +364,9 @@ mod tests {
|
|||||||
let temp_dir = TempDir::new().unwrap();
|
let temp_dir = TempDir::new().unwrap();
|
||||||
let store = TokenStore::new(temp_dir.path());
|
let store = TokenStore::new(temp_dir.path());
|
||||||
|
|
||||||
store.create_token("user1", 30, None).unwrap();
|
store.create_token("user1", 30, None, Role::Write).unwrap();
|
||||||
store.create_token("user1", 30, None).unwrap();
|
store.create_token("user1", 30, None, Role::Write).unwrap();
|
||||||
store.create_token("user2", 30, None).unwrap();
|
store.create_token("user2", 30, None, Role::Read).unwrap();
|
||||||
|
|
||||||
let revoked = store.revoke_all_for_user("user1");
|
let revoked = store.revoke_all_for_user("user1");
|
||||||
assert_eq!(revoked, 2);
|
assert_eq!(revoked, 2);
|
||||||
@@ -336,7 +380,9 @@ mod tests {
|
|||||||
let temp_dir = TempDir::new().unwrap();
|
let temp_dir = TempDir::new().unwrap();
|
||||||
let store = TokenStore::new(temp_dir.path());
|
let store = TokenStore::new(temp_dir.path());
|
||||||
|
|
||||||
let token = store.create_token("testuser", 30, None).unwrap();
|
let token = store
|
||||||
|
.create_token("testuser", 30, None, Role::Write)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
// First verification
|
// First verification
|
||||||
store.verify_token(&token).unwrap();
|
store.verify_token(&token).unwrap();
|
||||||
@@ -352,7 +398,12 @@ mod tests {
|
|||||||
let store = TokenStore::new(temp_dir.path());
|
let store = TokenStore::new(temp_dir.path());
|
||||||
|
|
||||||
store
|
store
|
||||||
.create_token("testuser", 30, Some("CI/CD Pipeline".to_string()))
|
.create_token(
|
||||||
|
"testuser",
|
||||||
|
30,
|
||||||
|
Some("CI/CD Pipeline".to_string()),
|
||||||
|
Role::Admin,
|
||||||
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let tokens = store.list_tokens("testuser");
|
let tokens = store.list_tokens("testuser");
|
||||||
|
|||||||
@@ -141,11 +141,14 @@ pub async fn api_dashboard(State(state): State<Arc<AppState>>) -> Json<Dashboard
|
|||||||
let pypi_size: u64 = pypi_repos.iter().map(|r| r.size).sum();
|
let pypi_size: u64 = pypi_repos.iter().map(|r| r.size).sum();
|
||||||
let total_storage = docker_size + maven_size + npm_size + cargo_size + pypi_size;
|
let total_storage = docker_size + maven_size + npm_size + cargo_size + pypi_size;
|
||||||
|
|
||||||
let total_artifacts = docker_repos.len()
|
// Count total versions/tags, not just repositories
|
||||||
+ maven_repos.len()
|
let docker_versions: usize = docker_repos.iter().map(|r| r.versions).sum();
|
||||||
+ npm_repos.len()
|
let maven_versions: usize = maven_repos.iter().map(|r| r.versions).sum();
|
||||||
+ cargo_repos.len()
|
let npm_versions: usize = npm_repos.iter().map(|r| r.versions).sum();
|
||||||
+ pypi_repos.len();
|
let cargo_versions: usize = cargo_repos.iter().map(|r| r.versions).sum();
|
||||||
|
let pypi_versions: usize = pypi_repos.iter().map(|r| r.versions).sum();
|
||||||
|
let total_artifacts =
|
||||||
|
docker_versions + maven_versions + npm_versions + cargo_versions + pypi_versions;
|
||||||
|
|
||||||
let global_stats = GlobalStats {
|
let global_stats = GlobalStats {
|
||||||
downloads: state.metrics.downloads.load(Ordering::Relaxed),
|
downloads: state.metrics.downloads.load(Ordering::Relaxed),
|
||||||
@@ -158,35 +161,35 @@ pub async fn api_dashboard(State(state): State<Arc<AppState>>) -> Json<Dashboard
|
|||||||
let registry_card_stats = vec![
|
let registry_card_stats = vec![
|
||||||
RegistryCardStats {
|
RegistryCardStats {
|
||||||
name: "docker".to_string(),
|
name: "docker".to_string(),
|
||||||
artifact_count: docker_repos.len(),
|
artifact_count: docker_versions,
|
||||||
downloads: state.metrics.get_registry_downloads("docker"),
|
downloads: state.metrics.get_registry_downloads("docker"),
|
||||||
uploads: state.metrics.get_registry_uploads("docker"),
|
uploads: state.metrics.get_registry_uploads("docker"),
|
||||||
size_bytes: docker_size,
|
size_bytes: docker_size,
|
||||||
},
|
},
|
||||||
RegistryCardStats {
|
RegistryCardStats {
|
||||||
name: "maven".to_string(),
|
name: "maven".to_string(),
|
||||||
artifact_count: maven_repos.len(),
|
artifact_count: maven_versions,
|
||||||
downloads: state.metrics.get_registry_downloads("maven"),
|
downloads: state.metrics.get_registry_downloads("maven"),
|
||||||
uploads: state.metrics.get_registry_uploads("maven"),
|
uploads: state.metrics.get_registry_uploads("maven"),
|
||||||
size_bytes: maven_size,
|
size_bytes: maven_size,
|
||||||
},
|
},
|
||||||
RegistryCardStats {
|
RegistryCardStats {
|
||||||
name: "npm".to_string(),
|
name: "npm".to_string(),
|
||||||
artifact_count: npm_repos.len(),
|
artifact_count: npm_versions,
|
||||||
downloads: state.metrics.get_registry_downloads("npm"),
|
downloads: state.metrics.get_registry_downloads("npm"),
|
||||||
uploads: 0,
|
uploads: 0,
|
||||||
size_bytes: npm_size,
|
size_bytes: npm_size,
|
||||||
},
|
},
|
||||||
RegistryCardStats {
|
RegistryCardStats {
|
||||||
name: "cargo".to_string(),
|
name: "cargo".to_string(),
|
||||||
artifact_count: cargo_repos.len(),
|
artifact_count: cargo_versions,
|
||||||
downloads: state.metrics.get_registry_downloads("cargo"),
|
downloads: state.metrics.get_registry_downloads("cargo"),
|
||||||
uploads: 0,
|
uploads: 0,
|
||||||
size_bytes: cargo_size,
|
size_bytes: cargo_size,
|
||||||
},
|
},
|
||||||
RegistryCardStats {
|
RegistryCardStats {
|
||||||
name: "pypi".to_string(),
|
name: "pypi".to_string(),
|
||||||
artifact_count: pypi_repos.len(),
|
artifact_count: pypi_versions,
|
||||||
downloads: state.metrics.get_registry_downloads("pypi"),
|
downloads: state.metrics.get_registry_downloads("pypi"),
|
||||||
uploads: 0,
|
uploads: 0,
|
||||||
size_bytes: pypi_size,
|
size_bytes: pypi_size,
|
||||||
@@ -197,12 +200,17 @@ pub async fn api_dashboard(State(state): State<Arc<AppState>>) -> Json<Dashboard
|
|||||||
MountPoint {
|
MountPoint {
|
||||||
registry: "Docker".to_string(),
|
registry: "Docker".to_string(),
|
||||||
mount_path: "/v2/".to_string(),
|
mount_path: "/v2/".to_string(),
|
||||||
proxy_upstream: None,
|
proxy_upstream: state.config.docker.upstreams.first().map(|u| u.url.clone()),
|
||||||
},
|
},
|
||||||
MountPoint {
|
MountPoint {
|
||||||
registry: "Maven".to_string(),
|
registry: "Maven".to_string(),
|
||||||
mount_path: "/maven2/".to_string(),
|
mount_path: "/maven2/".to_string(),
|
||||||
proxy_upstream: state.config.maven.proxies.first().cloned(),
|
proxy_upstream: state
|
||||||
|
.config
|
||||||
|
.maven
|
||||||
|
.proxies
|
||||||
|
.first()
|
||||||
|
.map(|p| p.url().to_string()),
|
||||||
},
|
},
|
||||||
MountPoint {
|
MountPoint {
|
||||||
registry: "npm".to_string(),
|
registry: "npm".to_string(),
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
// Copyright (c) 2026 Volkov Pavel | DevITWay
|
||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
#![allow(dead_code)]
|
|
||||||
//! Input validation for artifact registry paths and identifiers
|
//! Input validation for artifact registry paths and identifiers
|
||||||
//!
|
//!
|
||||||
//! Provides security validation to prevent path traversal attacks and
|
//! Provides security validation to prevent path traversal attacks and
|
||||||
@@ -309,63 +308,6 @@ pub fn validate_docker_reference(reference: &str) -> Result<(), ValidationError>
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Validate Maven artifact path.
|
|
||||||
///
|
|
||||||
/// Maven paths follow the pattern: groupId/artifactId/version/filename
|
|
||||||
/// Example: `org/apache/commons/commons-lang3/3.12.0/commons-lang3-3.12.0.jar`
|
|
||||||
pub fn validate_maven_path(path: &str) -> Result<(), ValidationError> {
|
|
||||||
validate_storage_key(path)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Validate npm package name.
|
|
||||||
pub fn validate_npm_name(name: &str) -> Result<(), ValidationError> {
|
|
||||||
if name.is_empty() {
|
|
||||||
return Err(ValidationError::EmptyInput);
|
|
||||||
}
|
|
||||||
|
|
||||||
if name.len() > 214 {
|
|
||||||
return Err(ValidationError::TooLong {
|
|
||||||
max: 214,
|
|
||||||
actual: name.len(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for path traversal
|
|
||||||
if name.contains("..") {
|
|
||||||
return Err(ValidationError::PathTraversal);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Validate Cargo crate name.
|
|
||||||
pub fn validate_crate_name(name: &str) -> Result<(), ValidationError> {
|
|
||||||
if name.is_empty() {
|
|
||||||
return Err(ValidationError::EmptyInput);
|
|
||||||
}
|
|
||||||
|
|
||||||
if name.len() > 64 {
|
|
||||||
return Err(ValidationError::TooLong {
|
|
||||||
max: 64,
|
|
||||||
actual: name.len(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for path traversal
|
|
||||||
if name.contains("..") || name.contains('/') {
|
|
||||||
return Err(ValidationError::PathTraversal);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Crate names: alphanumeric, underscores, hyphens
|
|
||||||
for c in name.chars() {
|
|
||||||
if !matches!(c, 'a'..='z' | 'A'..='Z' | '0'..='9' | '_' | '-') {
|
|
||||||
return Err(ValidationError::ForbiddenCharacter(c));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|||||||
@@ -19,10 +19,10 @@ serde.workspace = true
|
|||||||
serde_json.workspace = true
|
serde_json.workspace = true
|
||||||
tracing.workspace = true
|
tracing.workspace = true
|
||||||
tracing-subscriber.workspace = true
|
tracing-subscriber.workspace = true
|
||||||
toml = "0.8"
|
toml = "1.0"
|
||||||
uuid = { version = "1", features = ["v4"] }
|
uuid = { version = "1", features = ["v4"] }
|
||||||
sha2 = "0.10"
|
sha2 = "0.10"
|
||||||
base64 = "0.22"
|
base64 = "0.22"
|
||||||
httpdate = "1"
|
httpdate = "1"
|
||||||
chrono = { version = "0.4", features = ["serde"] }
|
chrono = { version = "0.4", features = ["serde"] }
|
||||||
quick-xml = { version = "0.31", features = ["serialize"] }
|
quick-xml = { version = "0.39", features = ["serialize"] }
|
||||||
|
|||||||
4050
nora-storage/nora-storage.cdx.json
Normal file
4050
nora-storage/nora-storage.cdx.json
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user