Compare commits

...

6 Commits

Author SHA1 Message Date
c239819376 validate file version on upload 2025-02-04 19:40:34 +01:00
50d0f6dfa5 implement user creation, getting and deletion, add locking to file uploads, add some more .bru 2025-02-02 17:34:58 +01:00
163d2a61ca implement simple locking to be used for locking simultaneous updates of files 2025-02-01 22:53:24 +01:00
Nico Fricke
c53bdb0ad3 improve sqlite integration
- add BEFORE_DISCONNECT and AFTER_CONNECT to run some pragmas on connection creation and destruction
- add STRICT keyword to create table statements and fix not existing DATETIME in sqlite
- use env::temp_dir to get a temp directory to run the database tests against which should make this runnable on windows as well
- create/update version of FileModel in database when updating it
2025-01-31 09:55:02 +01:00
851c74cd2d implement repo and add some tests 2025-01-30 21:16:34 +01:00
Nico Fricke
0384364579 wip: add repository trait, implement simple database with sqlite 2025-01-30 15:50:58 +01:00
26 changed files with 900 additions and 62 deletions

2
.gitignore vendored
View File

@ -1,2 +1,4 @@
/target
.idea
casket.sqlite*
data

191
Cargo.lock generated
View File

@ -17,6 +17,18 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
[[package]]
name = "ahash"
version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011"
dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
name = "atomic"
version = "0.6.0"
@ -190,6 +202,12 @@ version = "1.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3"
[[package]]
name = "byteorder"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "bytes"
version = "1.9.0"
@ -207,12 +225,17 @@ dependencies = [
"futures",
"futures-util",
"problem_details",
"r2d2",
"r2d2_sqlite",
"rusqlite",
"rusqlite_migration",
"serde",
"tokio",
"tokio-util",
"tracing",
"tracing-log",
"tracing-subscriber",
"uuid",
]
[[package]]
@ -311,6 +334,18 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "fallible-iterator"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649"
[[package]]
name = "fallible-streaming-iterator"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
[[package]]
name = "fastrand"
version = "2.3.0"
@ -498,12 +533,30 @@ dependencies = [
"tracing",
]
[[package]]
name = "hashbrown"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
dependencies = [
"ahash",
]
[[package]]
name = "hashbrown"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
[[package]]
name = "hashlink"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af"
dependencies = [
"hashbrown 0.14.5",
]
[[package]]
name = "headers"
version = "0.4.0"
@ -824,7 +877,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f"
dependencies = [
"equivalent",
"hashbrown",
"hashbrown 0.15.2",
]
[[package]]
@ -880,6 +933,17 @@ version = "0.2.169"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
[[package]]
name = "libsqlite3-sys"
version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149"
dependencies = [
"cc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "linux-raw-sys"
version = "0.4.15"
@ -1125,6 +1189,15 @@ version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"
[[package]]
name = "ppv-lite86"
version = "0.2.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04"
dependencies = [
"zerocopy",
]
[[package]]
name = "problem_details"
version = "0.7.0"
@ -1169,6 +1242,58 @@ dependencies = [
"proc-macro2",
]
[[package]]
name = "r2d2"
version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93"
dependencies = [
"log",
"parking_lot",
"scheduled-thread-pool",
]
[[package]]
name = "r2d2_sqlite"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb14dba8247a6a15b7fdbc7d389e2e6f03ee9f184f87117706d509c092dfe846"
dependencies = [
"r2d2",
"rusqlite",
"uuid",
]
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
]
[[package]]
name = "redox_syscall"
version = "0.5.8"
@ -1233,6 +1358,30 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "rusqlite"
version = "0.32.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7753b721174eb8ff87a9a0e799e2d7bc3749323e773db92e0984debb00019d6e"
dependencies = [
"bitflags 2.6.0",
"fallible-iterator",
"fallible-streaming-iterator",
"hashlink",
"libsqlite3-sys",
"smallvec",
]
[[package]]
name = "rusqlite_migration"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "923b42e802f7dc20a0a6b5e097ba7c83fe4289da07e49156fecf6af08aa9cd1c"
dependencies = [
"log",
"rusqlite",
]
[[package]]
name = "rustc-demangle"
version = "0.1.24"
@ -1282,6 +1431,15 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "scheduled-thread-pool"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19"
dependencies = [
"parking_lot",
]
[[package]]
name = "scopeguard"
version = "1.2.0"
@ -1783,6 +1941,16 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
[[package]]
name = "uuid"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3758f5e68192bb96cc8f9b7e2c2cfdabb435499a28499a42f8f984092adad4b"
dependencies = [
"getrandom",
"rand",
]
[[package]]
name = "valuable"
version = "0.1.0"
@ -2128,6 +2296,27 @@ dependencies = [
"synstructure",
]
[[package]]
name = "zerocopy"
version = "0.7.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
dependencies = [
"byteorder",
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.7.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "zerofrom"
version = "0.1.5"

View File

@ -18,4 +18,11 @@ serde = { version = "1.0.217", features = ["derive"] }
tracing = "0.1.41"
tracing-log = "0.2.0"
tracing-subscriber = "0.3.19"
problem_details = { version = "0.7.0", features = ["axum"] }
problem_details = { version = "0.7.0", features = ["axum"] }
# database
rusqlite = { version = "0.32.1", features = ["bundled"] }
rusqlite_migration = "1.3.1"
r2d2 = "0.8.10"
r2d2_sqlite = "0.25.0"
uuid = { version = "1.12.1", features = ["v4"] }

View File

@ -9,7 +9,8 @@ pub const CONFIG_LOCATIONS: [&str; 2] = ["casket-backend/casket.toml", "/config/
/// Top-level application configuration, deserialized via figment from one of
/// the CONFIG_LOCATIONS files.
pub struct Config {
    pub server: Server,
    pub files: Files,
    pub oidc: Oidc,
    pub database: Database,
}
#[derive(Deserialize, Clone, Debug)]
@ -27,6 +28,12 @@ pub struct Oidc {
pub oidc_endpoint: String
}
#[derive(Deserialize, Clone, Debug)]
/// Database-related settings.
pub struct Database {
    /// Filesystem path of the SQLite database file.
    pub path: PathBuf,
}
pub fn get_config() -> figment::Result<Config> {
CONFIG_LOCATIONS
.iter()

View File

@ -0,0 +1,2 @@
pub mod repository;
pub mod sqlite;

View File

@ -0,0 +1,51 @@
use crate::db::repository::insert::File;
use crate::db::repository::models::{FileModel, UserModel};
use problem_details::ProblemDetails;
/// Storage abstraction for users and file metadata.
///
/// Methods return explicit `impl Future` types (rather than `async fn`) so
/// the futures can carry a `Send` bound for use across await points.
pub trait Repository {
    /// Applies all pending schema migrations.
    fn migrate(&self) -> impl std::future::Future<Output = Result<(), ProblemDetails>> + Send;
    /// Looks up a user by id; `Ok(None)` when the user does not exist.
    fn get_user(
        &self,
        id: &str,
    ) -> impl std::future::Future<Output = Result<Option<UserModel>, ProblemDetails>> + Send;
    /// Creates a user with the given id and returns the stored model.
    fn create_user(
        &self,
        id: &str,
    ) -> impl std::future::Future<Output = Result<UserModel, ProblemDetails>> + Send;
    /// Deletes the user; the sqlite implementation also removes the user's
    /// file records first.
    fn delete_user(&self, id: &str) -> impl std::future::Future<Output = ()> + Send;
    /// Inserts a new file record (version starts at 0 in the sqlite
    /// implementation) and returns the stored model.
    fn create_file(
        &self,
        file: File,
    ) -> impl std::future::Future<Output = Result<FileModel, ProblemDetails>> + Send;
    /// Increments the file's version; fails with CONFLICT if the stored
    /// version no longer matches `file_model.version` (optimistic locking).
    fn bump_version(
        &self,
        file_model: FileModel,
    ) -> impl std::future::Future<Output = Result<(), ProblemDetails>> + Send;
    /// Fetches a file by owner and user-relative path; `Ok(None)` when absent.
    fn get_file(
        &self,
        user_id: &str,
        path: &str,
    ) -> impl std::future::Future<Output = Result<Option<FileModel>, ProblemDetails>> + Send;
}
pub mod models {
    /// A persisted user row.
    pub struct UserModel {
        /// External user identifier; presumably the auth subject taken from
        /// the URL path — TODO confirm against the route handlers.
        pub uuid: String,
    }

    /// A persisted file-metadata row.
    pub struct FileModel {
        /// Database-assigned primary key.
        pub id: u64,
        /// Owning user's uuid.
        pub user_id: String,
        /// User-relative file path.
        pub path: String,
        /// Version counter, starting at 0 on creation.
        pub version: u64,
    }
}
pub mod insert {
    /// Payload for creating a new file record; the version is assigned by
    /// the repository (0 for new files).
    pub struct File {
        pub user_id: String,
        pub path: String,
    }
}

View File

@ -0,0 +1,207 @@
use crate::db::repository::insert::File;
use crate::db::repository::models::{FileModel, UserModel};
use crate::db::repository::Repository;
use crate::errors::to_internal_error;
use axum::http::StatusCode;
use problem_details::ProblemDetails;
use r2d2::{CustomizeConnection, PooledConnection};
use r2d2_sqlite::SqliteConnectionManager;
use rusqlite::{Connection, OptionalExtension};
use rusqlite_migration::{Migrations, M};
use std::path::PathBuf;
// Pragmas run when a pooled connection is released: bounded analysis work
// followed by `PRAGMA optimize` (recommended by SQLite on connection close).
const BEFORE_DISCONNECT: &str = "PRAGMA analysis_limit=400;
PRAGMA optimize;";
// Pragmas run on every new connection: enforce foreign keys and use WAL
// journaling with NORMAL synchronous mode.
const AFTER_CONNECT: &str = "PRAGMA foreign_keys=1;
PRAGMA journal_mode=WAL;
PRAGMA synchronous=NORMAL;";
#[derive(Clone)]
pub struct Sqlite {
    // Connection pool over the database file; new connections run
    // AFTER_CONNECT, released connections run BEFORE_DISCONNECT.
    pool: r2d2::Pool<SqliteConnectionManager>,
    // Kept separately so `migrate` can open its own non-pooled connection.
    path: PathBuf,
}

impl Sqlite {
    /// Builds a pooled SQLite handle for the database file at `path`.
    ///
    /// Panics if the pool cannot be created.
    pub fn from_path(path: &PathBuf) -> Sqlite {
        let pool = r2d2::Pool::builder()
            .connection_customizer(Box::new(ConnectionCustomizer))
            .build(
                SqliteConnectionManager::file(path)
                    .with_init(|connection| connection.execute_batch(AFTER_CONNECT)),
            )
            .unwrap();
        Sqlite {
            pool,
            path: path.clone(),
        }
    }

    /// Checks a connection out of the pool; panics on pool errors (e.g.
    /// checkout timeout).
    fn get_connection(&self) -> PooledConnection<SqliteConnectionManager> {
        let pool = self.pool.clone();
        pool.get().unwrap()
    }
}
#[derive(Debug)]
struct ConnectionCustomizer;

// Hooks pool lifecycle events: runs the BEFORE_DISCONNECT pragmas when a
// connection is dropped from the pool.
impl<E> CustomizeConnection<Connection, E> for ConnectionCustomizer {
    fn on_release(&self, connection: Connection) {
        connection.execute_batch(BEFORE_DISCONNECT).unwrap();
    }
}
impl Repository for Sqlite {
    /// Creates the schema (users, files, lookup index), applying only the
    /// migrations not yet recorded. Uses a dedicated non-pooled connection;
    /// panics if the database file cannot be opened.
    async fn migrate(&self) -> Result<(), ProblemDetails> {
        let migrations = Migrations::new(vec![
            M::up("CREATE TABLE users(uuid TEXT PRIMARY KEY, created_at TEXT DEFAULT CURRENT_TIMESTAMP) STRICT;"),
            M::up("CREATE TABLE files(id INTEGER PRIMARY KEY, user_id TEXT, file_path TEXT, version INTEGER, FOREIGN KEY(user_id) REFERENCES users(uuid)) STRICT;"),
            M::up("CREATE INDEX files_user_path_idx ON files(user_id, file_path);"),
        ]);
        let mut connection = Connection::open(&self.path).unwrap();
        connection.execute_batch(AFTER_CONNECT).unwrap();
        migrations
            .to_latest(&mut connection)
            .map_err(to_internal_error)
    }

    /// Fetches a user by uuid; `optional()` turns "no rows" into `Ok(None)`.
    async fn get_user(&self, id: &str) -> Result<Option<UserModel>, ProblemDetails> {
        let conn = self.get_connection();
        let mut statement = conn
            .prepare("SELECT uuid FROM users WHERE uuid = ?1")
            .unwrap();
        statement
            .query_row([id], |row| {
                Ok(UserModel {
                    uuid: row.get(0).unwrap(),
                })
            })
            .optional()
            .map_err(to_internal_error)
    }

    /// Inserts the user, then reads it back so callers get the stored model.
    /// Panics (unwrap) if the insert fails — e.g. on a duplicate uuid.
    async fn create_user(&self, id: &str) -> Result<UserModel, ProblemDetails> {
        self.get_connection()
            .execute("INSERT INTO users (uuid) VALUES (?);", (id,))
            .unwrap();
        self.get_user(id).await.map(|option| option.unwrap())
    }

    /// Deletes the user's file rows first (they hold a foreign key to
    /// users.uuid), then the user row itself.
    async fn delete_user(&self, id: &str) {
        self.get_connection()
            .execute("DELETE FROM files WHERE user_id = ?;", (id,))
            .unwrap();
        self.get_connection()
            .execute("DELETE FROM users WHERE uuid = ?;", (id,))
            .unwrap();
    }

    /// Inserts a file record at version 0, then reads it back by
    /// (user_id, path) to return the stored model.
    async fn create_file(&self, file: File) -> Result<FileModel, ProblemDetails> {
        self.get_connection()
            .execute(
                "INSERT INTO files (user_id, file_path, version) VALUES (?, ?, ?);",
                (&file.user_id, &file.path, 0),
            )
            .unwrap();
        self.get_file(&file.user_id, &file.path)
            .await
            .map(|option| option.unwrap())
    }

    /// Optimistic concurrency control: the UPDATE only matches while the
    /// stored version still equals `file_model.version`. Zero rows updated
    /// means another writer bumped it first, reported as 409 CONFLICT.
    async fn bump_version(&self, file_model: FileModel) -> Result<(), ProblemDetails> {
        let conn = self.get_connection();
        let updated = conn
            .execute(
                "UPDATE files SET version = ?1 WHERE id = ?2 AND version = ?3",
                (file_model.version + 1, file_model.id, file_model.version),
            )
            .unwrap();
        if updated == 1 {
            Ok(())
        } else {
            Err(ProblemDetails::from_status_code(StatusCode::CONFLICT)
                .with_detail("File version was updated by another application!"))
        }
    }

    /// Fetches a file by owner and user-relative path (served by the
    /// files_user_path_idx index); `Ok(None)` when absent.
    async fn get_file(
        &self,
        user_id: &str,
        path: &str,
    ) -> Result<Option<FileModel>, ProblemDetails> {
        let conn = self.get_connection();
        let mut statement = conn
            .prepare(
                "SELECT id, user_id, file_path, version FROM files WHERE user_id = ?1 AND file_path = ?2",
            )
            .unwrap();
        statement
            .query_row([user_id, path], |row| {
                Ok(FileModel {
                    id: row.get(0).unwrap(),
                    user_id: row.get(1).unwrap(),
                    path: row.get(2).unwrap(),
                    version: row.get(3).unwrap(),
                })
            })
            .optional()
            .map_err(to_internal_error)
    }
}
#[cfg(test)]
mod tests {
    use crate::db::repository::{insert, Repository};
    use crate::db::sqlite::Sqlite;
    use std::env;
    use tracing_log::log::debug;
    use uuid::Uuid;

    const FILE_PATH: &str = "test";

    // Each test gets a freshly migrated database in a uniquely-named temp
    // file, so tests can run in parallel without interfering.
    async fn create_repository() -> Sqlite {
        let path = env::temp_dir().join(Uuid::new_v4().to_string());
        debug!("Using this file for database tests: {:?}", path);
        let sqlite = Sqlite::from_path(&path);
        sqlite.migrate().await.unwrap();
        sqlite
    }

    #[tokio::test]
    async fn test_file_creation() {
        // Global subscriber setup; `init` would panic if called twice, but
        // currently only this test installs it.
        tracing_subscriber::fmt()
            .with_max_level(tracing::Level::DEBUG)
            .init();
        let repo = create_repository().await;
        let user_id = Uuid::new_v4().to_string();
        repo.create_user(&user_id).await.unwrap();
        let created = repo
            .create_file(insert::File {
                user_id: user_id.clone(),
                path: FILE_PATH.to_string(),
            })
            .await
            .unwrap();
        // New files must come back with the caller's ids and version 0.
        assert_eq!(user_id, created.user_id);
        assert_eq!(FILE_PATH, created.path);
        assert_eq!(0, created.version);
    }

    #[tokio::test]
    async fn test_file_version_bump() {
        let repo = create_repository().await;
        let user_id = Uuid::new_v4().to_string();
        repo.create_user(&user_id).await.unwrap();
        let created = repo
            .create_file(insert::File {
                user_id: user_id.clone(),
                path: FILE_PATH.to_string(),
            })
            .await
            .unwrap();
        // The bump succeeds because the stored version still matches.
        assert!(repo.bump_version(created).await.is_ok());
    }
}

View File

@ -1,6 +1,6 @@
use crate::errors::to_internal_error;
use crate::extractor_helper::WithProblemDetails;
use crate::files::build_system_path;
use crate::files::PathInfo;
use crate::AppState;
use axum::body::Body;
use axum::extract::{Query, State};
@ -20,7 +20,8 @@ pub async fn handler(
axum::extract::Path(user_id): axum::extract::Path<String>,
WithProblemDetails(Query(query)): WithProblemDetails<Query<SingleFileQuery>>,
) -> Result<impl IntoResponse, ProblemDetails> {
let filesystem_path = build_system_path(&state.config.files.directory, &user_id, &query.path)?;
let filesystem_path =
PathInfo::build(&state.config.files.directory, &user_id, &query.path)?.file_system_location;
let file_name = get_file_name(&filesystem_path)?;
let file = open_file(&filesystem_path).await?;

View File

@ -1,6 +1,6 @@
use crate::errors::to_internal_error;
use crate::extractor_helper::WithProblemDetails;
use crate::files::build_system_path;
use crate::files::PathInfo;
use crate::{errors, AppState};
use axum::extract::{Query, State};
use axum::http::StatusCode;
@ -19,7 +19,8 @@ pub async fn query_file_tree(
axum::extract::Path(user_id): axum::extract::Path<String>,
WithProblemDetails(Query(query)): WithProblemDetails<Query<FileQuery>>,
) -> Result<Json<PathElements>, ProblemDetails> {
let path = build_system_path(&state.config.files.directory, &user_id, &query.path)?;
let path =
PathInfo::build(&state.config.files.directory, &user_id, &query.path)?.file_system_location;
debug!("Loading path: {:?}", path);
PathElements::load(path, query.nesting)
.await

View File

@ -7,20 +7,33 @@ pub mod download;
pub mod list;
pub mod upload;
pub fn build_system_path(
base_directory: &Path,
user_id: &str,
user_path: &PathBuf,
) -> Result<PathBuf, ProblemDetails> {
if user_path.is_absolute() {
Err(
ProblemDetails::from_status_code(StatusCode::BAD_REQUEST).with_detail(
errors::ERROR_DETAILS_ABSOLUTE_PATH_NOT_ALLOWED.to_owned()
+ format!(" Provided Path: {user_path:?}",).as_str(),
),
)
} else {
sanitize_path(&base_directory.join(user_id).join(user_path))
#[derive(Debug, PartialEq)]
pub struct PathInfo {
    // Location on the server's filesystem: base_directory/user_id/user_path.
    file_system_location: PathBuf,
    // Sanitized user-supplied path, relative to the user's storage root;
    // also used as the database key for file records.
    relative_user_location: PathBuf,
}

impl PathInfo {
    /// Resolves a user-supplied relative path inside the user's storage
    /// directory.
    ///
    /// Returns 400 BAD_REQUEST when `user_path` is absolute. Both the user
    /// path and the base/user directory pass through `sanitize_path`;
    /// NOTE(review): assumes `sanitize_path` rejects traversal components
    /// like `..` — defined elsewhere in this module, confirm.
    pub fn build(
        base_directory: &Path,
        user_id: &str,
        user_path: &PathBuf,
    ) -> Result<Self, ProblemDetails> {
        if user_path.is_absolute() {
            Err(
                ProblemDetails::from_status_code(StatusCode::BAD_REQUEST).with_detail(
                    errors::ERROR_DETAILS_ABSOLUTE_PATH_NOT_ALLOWED.to_owned()
                        + format!(" Provided Path: {user_path:?}",).as_str(),
                ),
            )
        } else {
            let user_path = sanitize_path(user_path)?;
            let system_path = sanitize_path(&base_directory.join(&user_id))?.join(&user_path);
            Ok(Self {
                file_system_location: system_path,
                relative_user_location: user_path,
            })
        }
    }
}
@ -96,21 +109,23 @@ mod tests {
}
#[test]
fn test_build_system_path_success() {
fn test_build_path_info() {
let input_path = PathBuf::from("tmp/blub");
let user_id = "bla";
let path_info = PathInfo::build(&PathBuf::from("/tmp/bla"), user_id, &input_path).unwrap();
assert_eq!(
build_system_path(&PathBuf::from("/tmp/bla"), user_id, &input_path).unwrap(),
path_info.file_system_location,
PathBuf::from("/tmp/bla/bla/tmp/blub")
);
assert_eq!(path_info.relative_user_location, PathBuf::from(input_path));
}
#[test]
fn test_build_system_path_error_with_absolute_user_path_returns_error() {
let input_path = PathBuf::from("tmp/blub");
fn test_build_path_info_error_with_absolute_user_path_returns_error() {
let input_path = PathBuf::from("/tmp/blub");
let user_id = "bla";
assert_eq!(
build_system_path(&PathBuf::from("/tmp/bla"), user_id, &input_path),
PathInfo::build(&PathBuf::from("/tmp/bla"), user_id, &input_path),
Err(
ProblemDetails::from_status_code(StatusCode::BAD_REQUEST).with_detail(
errors::ERROR_DETAILS_ABSOLUTE_PATH_NOT_ALLOWED.to_owned()

View File

@ -1,5 +1,8 @@
use crate::db::repository::models::FileModel;
use crate::db::repository::{insert, Repository};
use crate::errors::to_internal_error;
use crate::files::build_system_path;
use crate::files::PathInfo;
use crate::locking::Lock;
use crate::{errors, AppState};
use axum::body::Bytes;
use axum::debug_handler;
@ -12,9 +15,10 @@ use futures_util::{StreamExt, TryStreamExt};
use problem_details::ProblemDetails;
use std::io;
use std::io::Error;
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use tokio::fs::{create_dir_all, File};
use tokio::io::AsyncWriteExt;
use tracing::debug;
#[debug_handler]
pub async fn handle_file_uploads(
@ -44,17 +48,130 @@ async fn handle_single_file_upload<'field_lifetime>(
None | Some("") => Err(ProblemDetails::from_status_code(StatusCode::BAD_REQUEST)
.with_detail(errors::ERROR_DETAILS_NO_NAME_PROVIDED_MULTIPART_FIELD)),
Some(field_name) => {
let path = PathBuf::from(field_name);
let filesystem_path = build_system_path(&state.config.files.directory, user_id, &path)?;
save_file(filesystem_path, map_error_to_io_error(field))
.await
.map_err(to_internal_error)
let parsed = UploadParameter::from_string(field_name)?;
let path_info = PathInfo::build(&state.config.files.directory, user_id, &parsed.path)?;
let lock_id = &build_lock_id(user_id, &path_info.relative_user_location);
if state.get_lock().lock(lock_id).await {
validate_file_version(
state.get_repository(),
user_id,
&path_info.relative_user_location,
&parsed.version,
)
.await?;
let result = update_file(state, user_id, field, &path_info).await;
state.get_lock().unlock(lock_id).await;
result
} else {
Err(ProblemDetails::from_status_code(StatusCode::CONFLICT)
.with_detail(format!("Cannot upload file because another application is currently modifying that: {:?}", path_info.relative_user_location)))
}
}
}
}
async fn save_file(
/// Ensures the client-supplied `version` is valid for the file at `path`:
/// new files must be uploaded with version 0, existing files with exactly
/// `stored_version + 1`.
async fn validate_file_version(
    repo: &impl Repository,
    user_id: &str,
    path: &Path,
    version: &u64,
) -> Result<(), ProblemDetails> {
    match repo.get_file(user_id, &path.to_string_lossy()).await? {
        None => validate_initial_version(version)?,
        Some(file_model) => validate_version_increment(&file_model, version)?,
    }
    Ok(())
}
/// Checks that `version` is exactly one greater than the version stored in
/// `model`; anything else is a 409 CONFLICT.
fn validate_version_increment(model: &FileModel, version: &u64) -> Result<(), ProblemDetails> {
    let expected = model.version + 1;
    if *version == expected {
        Ok(())
    } else {
        Err(
            ProblemDetails::from_status_code(StatusCode::CONFLICT).with_detail(format!(
                "Version does not match. Provided {} is not one higher than existing {}",
                version, model.version
            )),
        )
    }
}
/// Checks that a brand-new file is uploaded with version 0; any other value
/// is a 400 BAD_REQUEST.
fn validate_initial_version(version: &u64) -> Result<(), ProblemDetails> {
    match *version {
        0 => Ok(()),
        provided => Err(
            ProblemDetails::from_status_code(StatusCode::BAD_REQUEST).with_detail(format!(
                "Version for new files must be 0. Provided version {}",
                provided
            )),
        ),
    }
}
// Parsed multipart field name of the form "{path};{version}".
struct UploadParameter {
    path: PathBuf,
    version: u64,
}

impl UploadParameter {
    /// Parses a multipart field name like `folder/file.pdf;15`.
    ///
    /// Returns 400 when the `;` separator is missing or the version part is
    /// not a number. NOTE(review): any segments after a second `;` are
    /// silently ignored.
    fn from_string(name: &str) -> Result<Self, ProblemDetails> {
        let mut split = name.split(';');
        let path = split.next().ok_or_else(build_wrong_field_name_error)?;
        let version: u64 = split
            .next()
            .ok_or_else(build_wrong_field_name_error)?
            .parse()
            .map_err(|err| {
                // Log the parse detail but return a generic client error.
                debug!("Error while parsing version: {}", err);
                ProblemDetails::from_status_code(StatusCode::BAD_REQUEST)
                    .with_detail("Version cannot be parsed as a number")
            })?;
        let path = PathBuf::from(path);
        Ok(Self { path, version })
    }
}
/// 400 error explaining the required multipart field-name format.
fn build_wrong_field_name_error() -> ProblemDetails {
    ProblemDetails::from_status_code(StatusCode::BAD_REQUEST)
        .with_detail("Multipart field name must have the following structure: {path_to_file};{version_number + 1}. Example: folder/nested/file.pdf;15")
}
/// Streams the uploaded bytes to disk, then records the new version in the
/// database. The caller is expected to hold the upload lock for this file
/// while this runs.
async fn update_file<'field_lifetime>(
    state: &AppState,
    user_id: &str,
    field: Field<'field_lifetime>,
    path_info: &PathInfo,
) -> Result<(), ProblemDetails> {
    save_file(
        &path_info.file_system_location,
        map_error_to_io_error(field),
    )
    .await
    .map_err(to_internal_error)?;
    update_file_version(state, user_id, &path_info.relative_user_location).await
}
/// Records the new file version in the repository after a successful write.
///
/// `relative_path` is the user-relative file path — NOT the filesystem
/// location (the caller passes `path_info.relative_user_location`); it is
/// the key file records are stored under. The previous parameter name
/// `filesystem_path` was misleading.
async fn update_file_version(
    state: &AppState,
    user_id: &str,
    relative_path: &Path,
) -> Result<(), ProblemDetails> {
    let repository = state.get_repository();
    let path_string = relative_path.to_string_lossy();
    match repository.get_file(user_id, path_string.as_ref()).await {
        Err(error) => Err(to_internal_error(error)),
        // First upload of this path: create the record (version starts at 0).
        Ok(None) => repository
            .create_file(insert::File {
                user_id: user_id.to_string(),
                path: path_string.to_string(),
            })
            .await
            .map(|_| ()),
        // Existing file: optimistic version bump; 409 if another writer won.
        Ok(Some(file)) => repository.bump_version(file).await,
    }
}
async fn save_file(
path: &PathBuf,
mut content: impl Stream<Item = Result<Bytes, Error>> + Unpin,
) -> Result<(), Error> {
create_dir_all(path.parent().unwrap()).await?;
@ -68,3 +185,8 @@ async fn save_file(
fn map_error_to_io_error(field: Field) -> MapErr<Field, fn(MultipartError) -> Error> {
field.map_err(|err| Error::new(io::ErrorKind::Other, err))
}
/// Builds the lock identifier used to serialize concurrent uploads of the
/// same user file.
fn build_lock_id(user_id: &str, path: &Path) -> String {
    format!("UPLOAD_{}_{}", user_id, path.to_string_lossy())
}

View File

@ -0,0 +1,55 @@
use std::collections::HashSet;
use std::future::Future;
use std::sync::Arc;
use tokio::sync::Mutex;
/// In-memory, process-local lock registry keyed by string id. Clones share
/// the same underlying set via `Arc`.
#[derive(Clone)]
pub struct SimpleLock {
    // Ids currently held; guarded by an async mutex.
    locks: Arc<Mutex<HashSet<String>>>,
}

impl SimpleLock {
    /// Creates an empty lock registry.
    pub fn new() -> Self {
        Self {
            locks: Arc::new(Mutex::new(HashSet::new())),
        }
    }
}
impl Lock for SimpleLock {
    /// Attempts to take the lock; returns `false` when it is already held.
    /// Non-blocking try-lock semantics: callers must handle the `false` case.
    async fn lock(&self, id: &str) -> bool {
        let mut locks = self.locks.lock().await;
        if locks.contains(id) {
            false
        } else {
            locks.insert(id.into());
            true
        }
    }

    /// Releases the lock; a no-op when `id` is not currently held.
    async fn unlock(&self, id: &str) {
        let mut locks = self.locks.lock().await;
        locks.remove(id);
    }
}
/// Try-lock semantics over string identifiers; see `SimpleLock` for the
/// in-memory implementation.
pub trait Lock {
    /// Tries to acquire the lock: `true` on success, `false` if already held.
    fn lock(&self, id: &str) -> impl Future<Output = bool> + Send;
    /// Releases a previously acquired lock.
    fn unlock(&self, id: &str) -> impl Future<Output = ()> + Send;
}
#[cfg(test)]
mod tests {
    use crate::locking::{Lock, SimpleLock};

    const LOCK_ID: &str = "1";

    // Verifies try-lock semantics: the second acquisition fails while the
    // lock is held and succeeds again after unlock.
    #[tokio::test]
    async fn test_locking() {
        let lock = SimpleLock::new();
        assert!(lock.lock(LOCK_ID).await);
        assert!(!lock.lock(LOCK_ID).await);
        lock.unlock(LOCK_ID).await;
        assert!(lock.lock(LOCK_ID).await);
    }
}

View File

@ -2,12 +2,17 @@
mod auth;
mod config;
mod db;
mod errors;
mod extractor_helper;
mod files;
mod locking;
mod routes;
use crate::config::Config;
use crate::db::repository::Repository;
use crate::db::sqlite::Sqlite;
use crate::locking::{Lock, SimpleLock};
use axum::{middleware, Router};
use axum_jwks::Jwks;
use std::env;
@ -17,8 +22,22 @@ use tracing::{debug, error, info};
#[derive(Clone)]
pub struct AppState {
config: config::Config,
config: Config,
pub jwks: Jwks,
sqlite: Sqlite,
pub locks: SimpleLock,
}
impl AppState {
    /// The application's repository abstraction; currently backed by SQLite.
    #[must_use]
    pub fn get_repository(&self) -> &impl Repository {
        &self.sqlite
    }

    /// The lock provider used to serialize concurrent file uploads.
    #[must_use]
    pub fn get_lock(&self) -> &impl Lock {
        &self.locks
    }
}
#[tokio::main]
@ -31,8 +50,14 @@ async fn main() {
Ok(config) => {
debug!("Config loaded {:?}", &config,);
let jwks = load_jwks(&config).await;
let db = connect_database(&config).await;
let bind = format!("{}:{}", &config.server.bind_address, &config.server.port);
let state = AppState { config, jwks };
let state: AppState = AppState {
config,
jwks,
sqlite: db,
locks: SimpleLock::new(),
};
let app = Router::new()
.merge(routes::routes())
.route_layer(middleware::from_fn_with_state(
@ -56,6 +81,12 @@ async fn main() {
}
}
async fn connect_database(config: &Config) -> Sqlite {
let sqlite = db::sqlite::Sqlite::from_path(&config.database.path);
sqlite.migrate().await.unwrap();
sqlite
}
async fn load_jwks(config: &Config) -> Jwks {
Jwks::from_oidc_url(&config.oidc.oidc_endpoint, None)
.await

View File

@ -1,9 +1,13 @@
use crate::db::repository::Repository;
use crate::files;
use crate::files::download;
use crate::AppState;
use axum::extract::State;
use axum::http::StatusCode;
use axum::routing::post;
use axum::{response::Html, routing::get, Router};
use files::list::query_file_tree;
use problem_details::ProblemDetails;
pub fn routes() -> Router<AppState> {
Router::new()
@ -13,8 +17,48 @@ pub fn routes() -> Router<AppState> {
post(files::upload::handle_file_uploads).get(query_file_tree),
)
.route("/api/v1/user/{:user_id}/download", get(download::handler))
.route(
"/api/v1/user/{:user_id}",
post(create_user).get(get_user).delete(delete_user),
)
}
/// Placeholder root handler returning a static greeting page.
async fn handler() -> Html<&'static str> {
    Html("<h1>Hello, World!</h1>")
}
/// POST /api/v1/user/{user_id} — creates the user; 409 CONFLICT when a user
/// with this id already exists.
async fn create_user(
    State(state): State<AppState>,
    axum::extract::Path(user_id): axum::extract::Path<String>,
) -> Result<(), ProblemDetails> {
    // Go through the repository abstraction instead of the concrete
    // `state.sqlite` field, consistent with delete_user.
    let repository = state.get_repository();
    if repository.get_user(&user_id).await?.is_some() {
        Err(ProblemDetails::from_status_code(StatusCode::CONFLICT)
            .with_detail("User already exists".to_string()))
    } else {
        repository.create_user(&user_id).await?;
        Ok(())
    }
}
/// GET /api/v1/user/{user_id} — 200 with empty body when the user exists,
/// 404 NOT_FOUND otherwise.
async fn get_user(
    State(state): State<AppState>,
    axum::extract::Path(user_id): axum::extract::Path<String>,
) -> Result<(), ProblemDetails> {
    // Go through the repository abstraction instead of the concrete
    // `state.sqlite` field, consistent with delete_user.
    if state.get_repository().get_user(&user_id).await?.is_none() {
        Err(ProblemDetails::from_status_code(StatusCode::NOT_FOUND))
    } else {
        Ok(())
    }
}
/// DELETE /api/v1/user/{user_id} — removes the user and (via the repository)
/// their file records; idempotent: returns 200 even when the user is absent.
async fn delete_user(
    State(state): State<AppState>,
    axum::extract::Path(user_id): axum::extract::Path<String>,
) -> Result<(), ProblemDetails> {
    // Use the repository abstraction for both the existence check and the
    // delete (previously the check went through `state.sqlite` directly).
    // NOTE(review): check-then-delete is not atomic, but the delete is
    // idempotent so a race is harmless.
    let repository = state.get_repository();
    if repository.get_user(&user_id).await?.is_some() {
        repository.delete_user(&user_id).await;
    }
    Ok(())
}

View File

@ -1,7 +1,7 @@
meta {
name: Auth
type: http
seq: 5
seq: 1
}
get {

View File

@ -0,0 +1,15 @@
meta {
name: No Auth Header
type: http
seq: 3
}
get {
url: {{casket_host}}/api/v1/user/test123/files
body: none
auth: none
}
assert {
res.status: eq 401
}

View File

@ -0,0 +1,19 @@
meta {
name: Not Matching User Id
type: http
seq: 2
}
get {
url: {{casket_host}}/api/v1/user/test123/files
body: none
auth: bearer
}
auth:bearer {
token: {{access_token}}
}
assert {
res.status: eq 403
}

View File

@ -1,7 +1,7 @@
meta {
name: Download File
type: http
seq: 4
seq: 3
}
get {

View File

@ -0,0 +1,46 @@
meta {
name: Get Files
type: http
seq: 2
}
get {
url: {{casket_host}}/api/v1/user/{{user_id}}/files?path=&nesting=5
body: none
auth: bearer
}
params:query {
path:
nesting: 5
}
auth:bearer {
token: {{access_token}}
}
assert {
res.status: eq 200
}
tests {
test("body should be correct", function() {
const data = res.getBody();
expect(data).to.eql({
"files": [
{
"name": "test",
"is_dir": true,
"children": [
{
"name": "blub.txt",
"is_dir": false,
"children": []
}
]
}
]
})
})
}

View File

@ -1,7 +1,7 @@
meta {
name: Upload File
type: http
seq: 2
seq: 1
}
post {

View File

@ -1,20 +0,0 @@
meta {
name: Get Files
type: http
seq: 3
}
get {
url: {{casket_host}}/api/v1/user/{{user_id}}/files?path=&nesting=5
body: none
auth: bearer
}
params:query {
path:
nesting: 5
}
auth:bearer {
token: {{access_token}}
}

View File

@ -0,0 +1,19 @@
meta {
name: Create User
type: http
seq: 2
}
post {
url: {{casket_host}}/api/v1/user/{{user_id}}
body: none
auth: bearer
}
auth:bearer {
token: {{access_token}}
}
assert {
res.status: eq 200
}

View File

@ -0,0 +1,11 @@
meta {
name: Delete User
type: http
seq: 3
}
delete {
url: {{casket_host}}/api/v1/user/{{user_id}}
body: none
auth: inherit
}

View File

@ -1,11 +1,11 @@
meta {
name: Invalid
name: Get User
type: http
seq: 6
seq: 1
}
get {
url: {{casket_host}}/api/v1/user//files
url: {{casket_host}}/api/v1/user/{{user_id}}
body: none
auth: bearer
}

View File

@ -0,0 +1,7 @@
auth {
mode: bearer
}
auth:bearer {
token: {{access_token}}
}

View File

@ -0,0 +1,7 @@
vars {
casket_host: http://localhost:3000
}
vars:secret [
keycloak_host,
keycloak_realm
]