add auth persistence

This commit is contained in:
code002lover 2026-01-12 15:59:04 +01:00
parent 76281892a2
commit b756204514
5 changed files with 836 additions and 262 deletions

Cargo.lock (generated): 811 lines changed
File diff suppressed because it is too large — Load Diff

View File

@ -6,17 +6,19 @@ default-run = "backend"
[dependencies]
prost = "0.14.1"
prost-types = "0.14.1"
prost = "0.14"
prost-types = "0.14"
rocket = { git = "https://github.com/rwf2/Rocket", rev = "504efef179622df82ba1dbd37f2e0d9ed2b7c9e4" }
bytes = "1"
rocket_prost_responder_derive = { path = "rocket_prost_responder_derive" }
uuid = { version = "1.19.0", features = ["v4"] }
uuid = { version = "1.19", features = ["v4"] }
bcrypt = "0.17.1"
bincode = "2.0.1"
bincode = "2.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
reqwest = { version = "0.12.24", features = ["json"] }
reqwest = { version = "0.13", features = ["json"] }
aes-gcm = "0.10"
base64 = "0.22"
[build-dependencies]
prost-build = "0.14.1"
prost-build = "0.14"

View File

@ -1,3 +1,4 @@
use crate::auth_persistence::AuthStorage;
use crate::items;
use crate::proto_utils::Proto;
use rocket::State;
@ -8,12 +9,16 @@ use uuid::Uuid;
pub struct AuthState {
// Map token -> username
tokens: Mutex<HashMap<String, String>>,
storage: AuthStorage,
}
impl AuthState {
pub fn new() -> Self {
let storage = AuthStorage::new();
let tokens = storage.load_tokens();
Self {
tokens: Mutex::new(HashMap::new()),
tokens: Mutex::new(tokens),
storage,
}
}
}
@ -138,7 +143,8 @@ pub async fn login(
{
let token = Uuid::new_v4().to_string();
let mut tokens = state.tokens.lock().await;
tokens.insert(token.clone(), req.username);
tokens.insert(token.clone(), req.username.clone());
state.storage.save_tokens(&tokens);
return items::LoginResponse {
token,
@ -163,6 +169,7 @@ pub async fn logout(
let mut tokens = state.tokens.lock().await;
if tokens.remove(&req.token).is_some() {
state.storage.save_tokens(&tokens);
items::LogoutResponse {
success: true,
message: "Logged out successfully".to_string(),

View File

@ -0,0 +1,261 @@
use aes_gcm::{
Aes256Gcm, Nonce,
aead::{Aead, KeyInit},
};
use base64::{Engine, engine::general_purpose::STANDARD};
use bincode::{config, decode_from_slice, encode_to_vec};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, BufReader, BufWriter, Write};
use std::os::unix::fs::PermissionsExt;
/// A single persisted session entry: one issued token and the user it maps to.
///
/// NOTE: field order is significant — `bincode` encodes struct fields in
/// declaration order, so reordering these fields would silently break
/// compatibility with existing `tokens.bin` files.
#[derive(Debug, Serialize, Deserialize, bincode::Encode, bincode::Decode)]
pub struct TokenEntry {
    // Opaque session token (a UUID string at the visible call sites).
    pub token: String,
    // Username this token authenticates as.
    pub username: String,
    // Unix timestamp in seconds; stamped at save time (see `save_tokens`).
    pub created_at: u64,
}
// Path of the base64-encoded AES key file (created with mode 0o600).
const AUTH_KEY_FILE: &str = ".auth_key";
// Path of the encrypted token store: base64(nonce || AES-GCM ciphertext).
const TOKENS_FILE: &str = "tokens.bin";
// Standard 96-bit AES-GCM nonce length, in bytes.
const NONCE_SIZE: usize = 12;
// AES-256 key length, in bytes.
const KEY_SIZE: usize = 32;
/// Encrypts and decrypts the on-disk session-token store with AES-256-GCM.
pub struct AuthStorage {
    // Cipher built from the key persisted in `AUTH_KEY_FILE`.
    cipher: Aes256Gcm,
}
impl Default for AuthStorage {
fn default() -> Self {
Self::new()
}
}
impl AuthStorage {
    /// Creates a storage handle, loading (or generating and persisting) the
    /// AES-256-GCM key used to encrypt the on-disk token store.
    pub fn new() -> Self {
        let key = Self::load_or_create_key();
        let cipher = Aes256Gcm::new_from_slice(&key).expect("Invalid key length");
        Self { cipher }
    }

    /// Returns the persisted key if present; otherwise generates a new key and
    /// best-effort persists it. A failed save is logged rather than fatal, but
    /// it means tokens saved now cannot be decrypted after a restart.
    fn load_or_create_key() -> Vec<u8> {
        if let Ok(existing_key) = Self::load_key_from_file() {
            return existing_key;
        }
        let key = Self::generate_key();
        if let Err(e) = Self::save_key_to_file(&key) {
            eprintln!("Warning: Failed to save auth key to file: {}", e);
        }
        key
    }

    /// Fills `buf` from the OS CSPRNG (`/dev/urandom`). This module is already
    /// unix-only (it uses `PermissionsExt`), so the device path is available on
    /// all supported targets.
    fn fill_os_random(buf: &mut [u8]) -> io::Result<()> {
        use std::io::Read;
        File::open("/dev/urandom")?.read_exact(buf)
    }

    /// Generates a fresh 256-bit key.
    ///
    /// Prefers the OS CSPRNG. Only if `/dev/urandom` cannot be read does it
    /// fall back to the original time-seeded LCG — which is NOT
    /// cryptographically secure — and it logs a warning when doing so.
    fn generate_key() -> Vec<u8> {
        let mut key = [0u8; KEY_SIZE];
        if Self::fill_os_random(&mut key).is_ok() {
            return key.to_vec();
        }
        eprintln!("Warning: /dev/urandom unavailable; using insecure time-seeded key generator");
        use std::time::{SystemTime, UNIX_EPOCH};
        let timestamp = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_nanos();
        let mut seed = timestamp as u64;
        for byte in key.iter_mut() {
            seed = seed.wrapping_mul(1103515245).wrapping_add(12345);
            *byte = (seed >> (seed % 8)) as u8;
        }
        key.to_vec()
    }

    /// Reads and base64-decodes the key file, validating the decoded length.
    fn load_key_from_file() -> io::Result<Vec<u8>> {
        let file = File::open(AUTH_KEY_FILE)?;
        let reader = BufReader::new(file);
        let encoded = std::io::read_to_string(reader)?;
        // Trim so a trailing newline (e.g. from a manual edit) doesn't break
        // base64 decoding of an otherwise valid key.
        let key = STANDARD.decode(encoded.trim()).map_err(|e| {
            io::Error::new(
                io::ErrorKind::InvalidData,
                format!("Base64 decode error: {}", e),
            )
        })?;
        if key.len() != KEY_SIZE {
            return Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "Invalid key length",
            ));
        }
        Ok(key)
    }

    /// Base64-encodes and writes the key, restricting the file to owner
    /// read/write (0o600) before any key bytes are written.
    fn save_key_to_file(key: &[u8]) -> io::Result<()> {
        let encoded = STANDARD.encode(key);
        let mut file = File::create(AUTH_KEY_FILE)?;
        // Tighten permissions first so the key is never world-readable.
        let mut permissions = file.metadata()?.permissions();
        permissions.set_mode(0o600);
        file.set_permissions(permissions)?;
        file.write_all(encoded.as_bytes())?;
        file.sync_all()?;
        Ok(())
    }

    /// Loads the token map (token -> username) from `TOKENS_FILE`.
    ///
    /// Any failure — missing file, unreadable data, bad base64, failed
    /// decryption, or failed deserialization — is treated as "no saved
    /// sessions": a warning is logged and an empty map is returned, so a
    /// corrupt store never prevents startup.
    pub fn load_tokens(&self) -> HashMap<String, String> {
        let file = match File::open(TOKENS_FILE) {
            Ok(f) => f,
            Err(_) => {
                eprintln!("Warning: No existing tokens file, starting with empty token list");
                return HashMap::new();
            }
        };
        let reader = BufReader::new(file);
        let encrypted_data = match std::io::read_to_string(reader) {
            Ok(data) => data,
            Err(_) => {
                eprintln!("Warning: Failed to read tokens file, starting with empty token list");
                return HashMap::new();
            }
        };
        let decoded = match STANDARD.decode(&encrypted_data) {
            Ok(data) => data,
            Err(_) => {
                eprintln!("Warning: Failed to decode tokens file, starting with empty token list");
                return HashMap::new();
            }
        };
        // File layout: nonce (NONCE_SIZE bytes) || ciphertext (incl. GCM tag).
        if decoded.len() <= NONCE_SIZE {
            eprintln!("Warning: Invalid tokens file format, starting with empty token list");
            return HashMap::new();
        }
        let (nonce_bytes, ciphertext) = decoded.split_at(NONCE_SIZE);
        let nonce = Nonce::from_slice(nonce_bytes);
        let plaintext = match self.cipher.decrypt(nonce, ciphertext.as_ref()) {
            Ok(p) => p,
            Err(_) => {
                eprintln!("Warning: Failed to decrypt tokens file, starting with empty token list");
                return HashMap::new();
            }
        };
        let config = config::standard();
        let (entries, _): (Vec<TokenEntry>, usize) = match decode_from_slice(&plaintext, config) {
            Ok(e) => e,
            Err(_) => {
                eprintln!("Warning: Failed to deserialize tokens, starting with empty token list");
                return HashMap::new();
            }
        };
        entries
            .into_iter()
            .map(|entry| (entry.token, entry.username))
            .collect()
    }

    /// Encrypts and persists the token map to `TOKENS_FILE`.
    ///
    /// Errors are logged rather than returned (best-effort persistence); the
    /// in-memory session map remains authoritative for the running process.
    pub fn save_tokens(&self, tokens: &HashMap<String, String>) {
        use std::time::{SystemTime, UNIX_EPOCH};
        // NOTE: `created_at` is stamped at save time, not login time, so it is
        // refreshed for every entry on every save. Hoisted out of the closure —
        // one clock read instead of one per token.
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
        let entries: Vec<TokenEntry> = tokens
            .iter()
            .map(|(token, username)| TokenEntry {
                token: token.clone(),
                username: username.clone(),
                created_at: now,
            })
            .collect();
        let config = config::standard();
        let plaintext: Vec<u8> = match encode_to_vec(&entries, config) {
            Ok(data) => data,
            Err(e) => {
                eprintln!("Warning: Failed to serialize tokens: {}", e);
                return;
            }
        };
        // A fresh, unpredictable nonce per write is critical: nonce reuse under
        // AES-GCM destroys both confidentiality and authenticity. Prefer the OS
        // CSPRNG; fall back to the weak LCG in `mod rand` only if it fails.
        let mut nonce_bytes = [0u8; NONCE_SIZE];
        if Self::fill_os_random(&mut nonce_bytes).is_err() {
            eprintln!("Warning: /dev/urandom unavailable; using insecure fallback RNG for nonce");
            nonce_bytes = rand::thread_rng().generate_bytes();
        }
        let nonce = Nonce::from_slice(&nonce_bytes);
        let ciphertext = match self.cipher.encrypt(nonce, plaintext.as_ref()) {
            Ok(encrypted) => encrypted,
            Err(e) => {
                eprintln!("Warning: Failed to encrypt tokens: {}", e);
                return;
            }
        };
        // On-disk layout: nonce || ciphertext, base64-encoded.
        let mut encrypted_data = nonce_bytes.to_vec();
        encrypted_data.extend_from_slice(&ciphertext);
        let encoded = STANDARD.encode(&encrypted_data);
        match File::create(TOKENS_FILE) {
            Ok(file) => {
                // Restrict to owner read/write before writing the payload.
                if let Ok(metadata) = file.metadata() {
                    let mut permissions = metadata.permissions();
                    permissions.set_mode(0o600);
                    if let Err(e) = file.set_permissions(permissions) {
                        eprintln!("Warning: Failed to set permissions on tokens file: {}", e);
                    }
                }
                let mut writer = BufWriter::new(file);
                if let Err(e) = writer.write_all(encoded.as_bytes()) {
                    eprintln!("Warning: Failed to write tokens file: {}", e);
                }
                if let Err(e) = writer.flush() {
                    eprintln!("Warning: Failed to flush tokens file: {}", e);
                }
            }
            Err(e) => {
                eprintln!("Warning: Failed to create tokens file: {}", e);
            }
        }
    }
}
mod rand {
    //! Minimal stand-in for the `rand` crate: a thread-local linear
    //! congruential generator seeded once from the wall clock.
    //!
    //! NOTE: this is NOT cryptographically secure — it avoids an extra
    //! dependency, nothing more.
    use std::cell::Cell;

    thread_local! {
        // Per-thread LCG state, seeded from the current time in nanoseconds.
        static STATE: Cell<u64> = Cell::new(
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .unwrap_or_default()
                .as_nanos() as u64
        );
    }

    /// Handle to the calling thread's generator (mirrors `rand::thread_rng`).
    pub struct ThreadRng;

    /// Returns a handle to this thread's generator.
    pub fn thread_rng() -> ThreadRng {
        ThreadRng
    }

    impl ThreadRng {
        /// Produces `N` pseudo-random bytes, advancing the thread-local state
        /// once per byte.
        pub fn generate_bytes<const N: usize>(&mut self) -> [u8; N] {
            let mut out = [0u8; N];
            STATE.with(|state| {
                let mut current = state.get();
                for slot in out.iter_mut() {
                    current = current.wrapping_mul(1103515245).wrapping_add(12345);
                    *slot = (current >> (current % 8)) as u8;
                }
                state.set(current);
            });
            out
        }
    }
}

View File

@ -6,6 +6,7 @@ pub mod items {
}
pub mod auth;
pub mod auth_persistence;
pub mod proto_utils;
pub mod store;