security: migrate token hashing from SHA256 to Argon2id (#55)

* docs: add DCO, governance model, roles, vulnerability credit policy

* security: migrate token hashing from SHA256 to Argon2id

- Replace unsalted SHA256 with Argon2id (salted) for API token hashing
- Fix TOCTOU race: replace exists()+read() with read()+match on error
- Set chmod 600 on token files and 700 on token storage directory
- Auto-migrate legacy SHA256 tokens to Argon2id on first verification
- Add regression tests: argon2 format, legacy migration, file permissions
This commit is contained in:
2026-03-25 01:56:43 +03:00
committed by GitHub
parent 975264c353
commit 432e8d35af
5 changed files with 254 additions and 52 deletions

View File

@@ -2,6 +2,34 @@
Thank you for your interest in contributing to NORA!
## Developer Certificate of Origin (DCO)
By submitting a pull request, you agree to the [Developer Certificate of Origin](https://developercertificate.org/).
Your contribution will be licensed under the [MIT License](LICENSE).
You confirm that you have the right to submit the code and that it does not violate any third-party rights.
## Project Governance
NORA uses a **Benevolent Dictator** governance model:
- **Maintainer:** [@devitway](https://github.com/devitway) — final decisions on features, releases, and architecture
- **Contributors:** anyone who submits issues, PRs, or docs improvements
- **Decision process:** proposals via GitHub Issues → discussion → maintainer decision
- **Release authority:** maintainer only
### Roles and Responsibilities
| Role | Person | Responsibilities |
|------|--------|-----------------|
| Maintainer | @devitway | Code review, releases, roadmap, security response |
| Contributor | anyone | Issues, PRs, documentation, testing |
| Dependabot | automated | Dependency updates |
### Continuity
The GitHub organization [getnora-io](https://github.com/getnora-io) has multiple admin accounts to ensure project continuity. Source code is MIT-licensed, enabling anyone to fork and continue the project.
## Getting Started
1. Fork the repository

66
Cargo.lock generated
View File

@@ -68,7 +68,7 @@ version = "1.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc"
dependencies = [ dependencies = [
"windows-sys 0.60.2", "windows-sys 0.61.2",
] ]
[[package]] [[package]]
@@ -79,7 +79,7 @@ checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d"
dependencies = [ dependencies = [
"anstyle", "anstyle",
"once_cell_polyfill", "once_cell_polyfill",
"windows-sys 0.60.2", "windows-sys 0.61.2",
] ]
[[package]] [[package]]
@@ -97,6 +97,18 @@ dependencies = [
"derive_arbitrary", "derive_arbitrary",
] ]
[[package]]
name = "argon2"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072"
dependencies = [
"base64ct",
"blake2",
"cpufeatures",
"password-hash",
]
[[package]] [[package]]
name = "assert-json-diff" name = "assert-json-diff"
version = "2.0.2" version = "2.0.2"
@@ -188,6 +200,12 @@ version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
[[package]]
name = "base64ct"
version = "1.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06"
[[package]] [[package]]
name = "bcrypt" name = "bcrypt"
version = "0.19.0" version = "0.19.0"
@@ -207,6 +225,15 @@ version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
[[package]]
name = "blake2"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
dependencies = [
"digest",
]
[[package]] [[package]]
name = "block-buffer" name = "block-buffer"
version = "0.10.4" version = "0.10.4"
@@ -475,7 +502,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
dependencies = [ dependencies = [
"libc", "libc",
"windows-sys 0.60.2", "windows-sys 0.61.2",
] ]
[[package]] [[package]]
@@ -1279,6 +1306,7 @@ dependencies = [
name = "nora-registry" name = "nora-registry"
version = "0.2.35" version = "0.2.35"
dependencies = [ dependencies = [
"argon2",
"async-trait", "async-trait",
"axum", "axum",
"base64", "base64",
@@ -1320,7 +1348,7 @@ version = "0.50.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
dependencies = [ dependencies = [
"windows-sys 0.60.2", "windows-sys 0.61.2",
] ]
[[package]] [[package]]
@@ -1377,6 +1405,17 @@ dependencies = [
"windows-link", "windows-link",
] ]
[[package]]
name = "password-hash"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166"
dependencies = [
"base64ct",
"rand_core 0.6.4",
"subtle",
]
[[package]] [[package]]
name = "percent-encoding" name = "percent-encoding"
version = "2.3.2" version = "2.3.2"
@@ -1585,7 +1624,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
dependencies = [ dependencies = [
"rand_chacha", "rand_chacha",
"rand_core", "rand_core 0.9.5",
] ]
[[package]] [[package]]
@@ -1595,7 +1634,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
dependencies = [ dependencies = [
"ppv-lite86", "ppv-lite86",
"rand_core", "rand_core 0.9.5",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom 0.2.17",
] ]
[[package]] [[package]]
@@ -1767,7 +1815,7 @@ dependencies = [
"errno", "errno",
"libc", "libc",
"linux-raw-sys", "linux-raw-sys",
"windows-sys 0.60.2", "windows-sys 0.61.2",
] ]
[[package]] [[package]]
@@ -2056,7 +2104,7 @@ dependencies = [
"getrandom 0.4.1", "getrandom 0.4.1",
"once_cell", "once_cell",
"rustix", "rustix",
"windows-sys 0.60.2", "windows-sys 0.61.2",
] ]
[[package]] [[package]]
@@ -2729,7 +2777,7 @@ version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
dependencies = [ dependencies = [
"windows-sys 0.60.2", "windows-sys 0.61.2",
] ]
[[package]] [[package]]

View File

@@ -50,4 +50,6 @@ When deploying NORA:
## Acknowledgments
We appreciate responsible disclosure and will acknowledge security researchers who report valid vulnerabilities. We appreciate responsible disclosure and will acknowledge security researchers who report valid vulnerabilities in our release notes and CHANGELOG, unless the reporter requests anonymity.
If you have previously reported a vulnerability and would like to be credited, please let us know.

View File

@@ -49,6 +49,7 @@ tower_governor = "0.8"
governor = "0.10" governor = "0.10"
parking_lot = "0.12" parking_lot = "0.12"
zeroize = { version = "1.8", features = ["derive"] } zeroize = { version = "1.8", features = ["derive"] }
argon2 = { version = "0.5", features = ["std", "rand"] }
tower-http = { version = "0.6", features = ["set-header"] } tower-http = { version = "0.6", features = ["set-header"] }
[dev-dependencies] [dev-dependencies]

View File

@@ -1,9 +1,14 @@
// Copyright (c) 2026 Volkov Pavel | DevITWay // Copyright (c) 2026 Volkov Pavel | DevITWay
// SPDX-License-Identifier: MIT // SPDX-License-Identifier: MIT
use argon2::{
password_hash::{rand_core::OsRng, PasswordHash, PasswordHasher, PasswordVerifier, SaltString},
Argon2,
};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use std::fs; use std::fs;
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::time::{SystemTime, UNIX_EPOCH}; use std::time::{SystemTime, UNIX_EPOCH};
use thiserror::Error; use thiserror::Error;
@@ -66,8 +71,12 @@ pub struct TokenStore {
impl TokenStore { impl TokenStore {
/// Create a new token store /// Create a new token store
pub fn new(storage_path: &Path) -> Self { pub fn new(storage_path: &Path) -> Self {
// Ensure directory exists // Ensure directory exists with restricted permissions
let _ = fs::create_dir_all(storage_path); let _ = fs::create_dir_all(storage_path);
#[cfg(unix)]
{
let _ = fs::set_permissions(storage_path, fs::Permissions::from_mode(0o700));
}
Self { Self {
storage_path: storage_path.to_path_buf(), storage_path: storage_path.to_path_buf(),
} }
@@ -87,7 +96,9 @@ impl TokenStore {
TOKEN_PREFIX, TOKEN_PREFIX,
Uuid::new_v4().to_string().replace("-", "") Uuid::new_v4().to_string().replace("-", "")
); );
let token_hash = hash_token(&raw_token); let token_hash = hash_token_argon2(&raw_token)?;
// Use SHA256 of token as filename (deterministic, for lookup)
let file_id = sha256_hex(&raw_token);
let now = SystemTime::now() let now = SystemTime::now()
.duration_since(UNIX_EPOCH) .duration_since(UNIX_EPOCH)
@@ -97,7 +108,7 @@ impl TokenStore {
let expires_at = now + (ttl_days * 24 * 60 * 60); let expires_at = now + (ttl_days * 24 * 60 * 60);
let info = TokenInfo { let info = TokenInfo {
token_hash: token_hash.clone(), token_hash,
user: user.to_string(), user: user.to_string(),
created_at: now, created_at: now,
expires_at, expires_at,
@@ -106,13 +117,12 @@ impl TokenStore {
role, role,
}; };
// Save to file // Save to file with restricted permissions
let file_path = self let file_path = self.storage_path.join(format!("{}.json", &file_id[..16]));
.storage_path
.join(format!("{}.json", &token_hash[..16]));
let json = let json =
serde_json::to_string_pretty(&info).map_err(|e| TokenError::Storage(e.to_string()))?; serde_json::to_string_pretty(&info).map_err(|e| TokenError::Storage(e.to_string()))?;
fs::write(&file_path, json).map_err(|e| TokenError::Storage(e.to_string()))?; fs::write(&file_path, &json).map_err(|e| TokenError::Storage(e.to_string()))?;
set_file_permissions_600(&file_path);
Ok(raw_token) Ok(raw_token)
} }
@@ -123,22 +133,43 @@ impl TokenStore {
return Err(TokenError::InvalidFormat); return Err(TokenError::InvalidFormat);
} }
let token_hash = hash_token(token); let file_id = sha256_hex(token);
let file_path = self let file_path = self.storage_path.join(format!("{}.json", &file_id[..16]));
.storage_path
.join(format!("{}.json", &token_hash[..16]));
if !file_path.exists() { // TOCTOU fix: read directly, handle NotFound from IO error
return Err(TokenError::NotFound); let content = match fs::read_to_string(&file_path) {
} Ok(c) => c,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
return Err(TokenError::NotFound);
}
Err(e) => return Err(TokenError::Storage(e.to_string())),
};
let content =
fs::read_to_string(&file_path).map_err(|e| TokenError::Storage(e.to_string()))?;
let mut info: TokenInfo = let mut info: TokenInfo =
serde_json::from_str(&content).map_err(|e| TokenError::Storage(e.to_string()))?; serde_json::from_str(&content).map_err(|e| TokenError::Storage(e.to_string()))?;
// Verify hash matches // Verify hash: try Argon2id first, fall back to legacy SHA256
if info.token_hash != token_hash { let hash_valid = if info.token_hash.starts_with("$argon2") {
verify_token_argon2(token, &info.token_hash)
} else {
// Legacy SHA256 hash (no salt) — verify and migrate
let legacy_hash = sha256_hex(token);
if info.token_hash == legacy_hash {
// Migrate to Argon2id
if let Ok(new_hash) = hash_token_argon2(token) {
info.token_hash = new_hash;
if let Ok(json) = serde_json::to_string_pretty(&info) {
let _ = fs::write(&file_path, &json);
set_file_permissions_600(&file_path);
}
}
true
} else {
false
}
};
if !hash_valid {
return Err(TokenError::NotFound); return Err(TokenError::NotFound);
} }
@@ -155,7 +186,8 @@ impl TokenStore {
// Update last_used // Update last_used
info.last_used = Some(now); info.last_used = Some(now);
if let Ok(json) = serde_json::to_string_pretty(&info) { if let Ok(json) = serde_json::to_string_pretty(&info) {
let _ = fs::write(&file_path, json); let _ = fs::write(&file_path, &json);
set_file_permissions_600(&file_path);
} }
Ok((info.user, info.role)) Ok((info.user, info.role))
@@ -185,13 +217,12 @@ impl TokenStore {
pub fn revoke_token(&self, hash_prefix: &str) -> Result<(), TokenError> { pub fn revoke_token(&self, hash_prefix: &str) -> Result<(), TokenError> {
let file_path = self.storage_path.join(format!("{}.json", hash_prefix)); let file_path = self.storage_path.join(format!("{}.json", hash_prefix));
if !file_path.exists() { // TOCTOU fix: try remove directly
return Err(TokenError::NotFound); match fs::remove_file(&file_path) {
Ok(()) => Ok(()),
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Err(TokenError::NotFound),
Err(e) => Err(TokenError::Storage(e.to_string())),
} }
fs::remove_file(&file_path).map_err(|e| TokenError::Storage(e.to_string()))?;
Ok(())
} }
/// Revoke all tokens for a user /// Revoke all tokens for a user
@@ -214,13 +245,41 @@ impl TokenStore {
} }
} }
/// Hash a token using SHA256 /// Hash a token using Argon2id with random salt
fn hash_token(token: &str) -> String { fn hash_token_argon2(token: &str) -> Result<String, TokenError> {
let salt = SaltString::generate(&mut OsRng);
let argon2 = Argon2::default();
argon2
.hash_password(token.as_bytes(), &salt)
.map(|h| h.to_string())
.map_err(|e| TokenError::Storage(format!("hash error: {e}")))
}
/// Verify a token against an Argon2id hash
fn verify_token_argon2(token: &str, hash: &str) -> bool {
match PasswordHash::new(hash) {
Ok(parsed) => Argon2::default()
.verify_password(token.as_bytes(), &parsed)
.is_ok(),
Err(_) => false,
}
}
/// SHA256 hex digest (used for file naming and legacy hash verification)
fn sha256_hex(input: &str) -> String {
let mut hasher = Sha256::new(); let mut hasher = Sha256::new();
hasher.update(token.as_bytes()); hasher.update(input.as_bytes());
format!("{:x}", hasher.finalize()) format!("{:x}", hasher.finalize())
} }
/// Set file permissions to 600 (owner read/write only)
fn set_file_permissions_600(path: &Path) {
#[cfg(unix)]
{
let _ = fs::set_permissions(path, fs::Permissions::from_mode(0o600));
}
}
#[derive(Debug, Error)] #[derive(Debug, Error)]
pub enum TokenError { pub enum TokenError {
#[error("Invalid token format")] #[error("Invalid token format")]
@@ -254,6 +313,19 @@ mod tests {
assert_eq!(token.len(), 4 + 32); // prefix + uuid without dashes assert_eq!(token.len(), 4 + 32); // prefix + uuid without dashes
} }
#[test]
fn test_token_hash_is_argon2() {
let temp_dir = TempDir::new().unwrap();
let store = TokenStore::new(temp_dir.path());
let token = store
.create_token("testuser", 30, None, Role::Write)
.unwrap();
let tokens = store.list_tokens("testuser");
assert!(tokens[0].token_hash.starts_with("$argon2"));
}
#[test] #[test]
fn test_verify_valid_token() { fn test_verify_valid_token() {
let temp_dir = TempDir::new().unwrap(); let temp_dir = TempDir::new().unwrap();
@@ -291,24 +363,80 @@ mod tests {
let temp_dir = TempDir::new().unwrap(); let temp_dir = TempDir::new().unwrap();
let store = TokenStore::new(temp_dir.path()); let store = TokenStore::new(temp_dir.path());
// Create token and manually set it as expired
let token = store let token = store
.create_token("testuser", 1, None, Role::Write) .create_token("testuser", 1, None, Role::Write)
.unwrap(); .unwrap();
let token_hash = hash_token(&token); let file_id = sha256_hex(&token);
let file_path = temp_dir.path().join(format!("{}.json", &token_hash[..16])); let file_path = temp_dir.path().join(format!("{}.json", &file_id[..16]));
// Read and modify the token to be expired
let content = std::fs::read_to_string(&file_path).unwrap(); let content = std::fs::read_to_string(&file_path).unwrap();
let mut info: TokenInfo = serde_json::from_str(&content).unwrap(); let mut info: TokenInfo = serde_json::from_str(&content).unwrap();
info.expires_at = 0; // Set to epoch (definitely expired) info.expires_at = 0;
std::fs::write(&file_path, serde_json::to_string(&info).unwrap()).unwrap(); std::fs::write(&file_path, serde_json::to_string(&info).unwrap()).unwrap();
// Token should now be expired
let result = store.verify_token(&token); let result = store.verify_token(&token);
assert!(matches!(result, Err(TokenError::Expired))); assert!(matches!(result, Err(TokenError::Expired)));
} }
#[test]
fn test_legacy_sha256_migration() {
let temp_dir = TempDir::new().unwrap();
let store = TokenStore::new(temp_dir.path());
// Simulate a legacy token with SHA256 hash
let raw_token = "nra_00112233445566778899aabbccddeeff";
let legacy_hash = sha256_hex(raw_token);
let file_id = sha256_hex(raw_token);
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap_or_default()
.as_secs();
let info = TokenInfo {
token_hash: legacy_hash.clone(),
user: "legacyuser".to_string(),
created_at: now,
expires_at: now + 86400,
last_used: None,
description: None,
role: Role::Read,
};
let file_path = temp_dir.path().join(format!("{}.json", &file_id[..16]));
fs::write(&file_path, serde_json::to_string_pretty(&info).unwrap()).unwrap();
// Verify should work with legacy hash
let (user, role) = store.verify_token(raw_token).unwrap();
assert_eq!(user, "legacyuser");
assert_eq!(role, Role::Read);
// After verification, hash should be migrated to Argon2id
let content = fs::read_to_string(&file_path).unwrap();
let updated: TokenInfo = serde_json::from_str(&content).unwrap();
assert!(updated.token_hash.starts_with("$argon2"));
}
#[test]
fn test_file_permissions() {
let temp_dir = TempDir::new().unwrap();
let store = TokenStore::new(temp_dir.path());
let token = store
.create_token("testuser", 30, None, Role::Write)
.unwrap();
let file_id = sha256_hex(&token);
let file_path = temp_dir.path().join(format!("{}.json", &file_id[..16]));
#[cfg(unix)]
{
let metadata = fs::metadata(&file_path).unwrap();
let mode = metadata.permissions().mode() & 0o777;
assert_eq!(mode, 0o600);
}
}
#[test] #[test]
fn test_list_tokens() { fn test_list_tokens() {
let temp_dir = TempDir::new().unwrap(); let temp_dir = TempDir::new().unwrap();
@@ -336,16 +464,13 @@ mod tests {
let token = store let token = store
.create_token("testuser", 30, None, Role::Write) .create_token("testuser", 30, None, Role::Write)
.unwrap(); .unwrap();
let token_hash = hash_token(&token); let file_id = sha256_hex(&token);
let hash_prefix = &token_hash[..16]; let hash_prefix = &file_id[..16];
// Verify token works
assert!(store.verify_token(&token).is_ok()); assert!(store.verify_token(&token).is_ok());
// Revoke
store.revoke_token(hash_prefix).unwrap(); store.revoke_token(hash_prefix).unwrap();
// Verify token no longer works
let result = store.verify_token(&token); let result = store.verify_token(&token);
assert!(matches!(result, Err(TokenError::NotFound))); assert!(matches!(result, Err(TokenError::NotFound)));
} }
@@ -384,10 +509,8 @@ mod tests {
.create_token("testuser", 30, None, Role::Write) .create_token("testuser", 30, None, Role::Write)
.unwrap(); .unwrap();
// First verification
store.verify_token(&token).unwrap(); store.verify_token(&token).unwrap();
// Check last_used is set
let tokens = store.list_tokens("testuser"); let tokens = store.list_tokens("testuser");
assert!(tokens[0].last_used.is_some()); assert!(tokens[0].last_used.is_some());
} }