Replaced http server library, added tracing and metrics
parent 89b6513905
commit c10964fdc7
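The change below swaps warp for a tiny_http server driven by a rayon thread pool, and threads a rustracing_jaeger Span through every handler and database call, opening a child span per operation and exporting the data to Jaeger and Prometheus. A minimal, self-contained sketch of that child-span pattern (the TRACER.span(..).child_of(..).start() call mirrors backend/src/metrics.rs below; the child_span helper, the span names and the empty body are illustrative only, and no reporter thread is set up here, so finished spans are simply discarded):

use rustracing::sampler::AllSampler;
use rustracing_jaeger::{Span, Tracer};

// Open a child span under `parent`, named after the operation being traced.
fn child_span(tracer: &Tracer, name: &'static str, parent: &Span) -> Span {
    tracer.span(name).child_of(parent.context().unwrap()).start()
}

fn main() {
    // In the real code the receiver is drained by a Jaeger reporter thread.
    let (tracer, _span_rx) = Tracer::new(AllSampler);
    let root = tracer.span("handle_api_request").start();
    {
        // Each DB helper receives the parent span and opens its own child span;
        // the child span is finished when `_db_span` is dropped.
        let _db_span = child_span(&tracer, "find_user_in_db", &root);
        // ... database work would happen here ...
    }
}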
@@ -1,3 +0,0 @@
-[build]
-rustflags = ["--cfg", "tokio_unstable"]
-incremental = true
backend/.idea/runConfigurations/Run.xml (new file, 22 lines)
@@ -0,0 +1,22 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="false" name="Run" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
+    <option name="command" value="run --package backend_rust --bin backend_rust" />
+    <option name="workingDirectory" value="file://$PROJECT_DIR$/../run" />
+    <option name="channel" value="STABLE" />
+    <option name="requiredFeatures" value="true" />
+    <option name="allFeatures" value="false" />
+    <option name="emulateTerminal" value="false" />
+    <option name="withSudo" value="false" />
+    <option name="buildTarget" value="REMOTE" />
+    <option name="backtrace" value="SHORT" />
+    <envs>
+      <env name="RUSTFLAGS" value="--cfg tokio_unstable" />
+      <env name="TOKIO_CONSOLE_BIND" value="127.0.0.1:9999" />
+    </envs>
+    <option name="isRedirectInput" value="false" />
+    <option name="redirectInputPath" value="" />
+    <method v="2">
+      <option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
+    </method>
+  </configuration>
+</component>
backend/.idea/runConfigurations/clippy.xml (new file, 19 lines)
@@ -0,0 +1,19 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="false" name="clippy" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
+    <option name="command" value="clippy" />
+    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
+    <option name="channel" value="DEFAULT" />
+    <option name="requiredFeatures" value="true" />
+    <option name="allFeatures" value="false" />
+    <option name="emulateTerminal" value="false" />
+    <option name="withSudo" value="false" />
+    <option name="buildTarget" value="REMOTE" />
+    <option name="backtrace" value="SHORT" />
+    <envs />
+    <option name="isRedirectInput" value="false" />
+    <option name="redirectInputPath" value="" />
+    <method v="2">
+      <option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
+    </method>
+  </configuration>
+</component>
backend/.idea/runConfigurations/fmt.xml (new file, 19 lines)
@@ -0,0 +1,19 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="false" name="fmt" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
+    <option name="command" value="fmt --all" />
+    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
+    <option name="channel" value="NIGHTLY" />
+    <option name="requiredFeatures" value="true" />
+    <option name="allFeatures" value="false" />
+    <option name="emulateTerminal" value="false" />
+    <option name="withSudo" value="false" />
+    <option name="buildTarget" value="REMOTE" />
+    <option name="backtrace" value="SHORT" />
+    <envs />
+    <option name="isRedirectInput" value="false" />
+    <option name="redirectInputPath" value="" />
+    <method v="2">
+      <option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
+    </method>
+  </configuration>
+</component>
backend/Cargo.toml
@@ -11,22 +11,17 @@ diesel = { version = "2.0.1", features = ["sqlite", "r2d2", "returning_clauses_f
 diesel_migrations = "2.0.0"
 r2d2_sqlite = "0.21.0"
 
-warp = { version = "0.3.3", features = ["compression", "compression-brotli", "compression-gzip"] }
-headers = "0.3"
-tokio = { version = "1.21.2", features = ["full", "tracing"] }
-tokio-util = { version = "0.7.4", features = ["codec"] }
-console-subscriber = "0.1.8"
-futures = "0.3.24"
-bytes = "1.2.1"
-tracing = { version = "0.1.37", features = ["log-always"] }
-log = "0.4.17"
+tiny_http = "0.12.0"
+rayon-core = "1.9.3"
+parking_lot = "0.12.1"
+ctrlc = { version = "3.2.3", features = ["termination"] }
 
 serde = { version = "1.0.145", features = ["derive"] }
+serde_json = "1.0.86"
 serde_repr = "0.1.9"
+serde_urlencoded = "0.7.1"
 
-pretty_env_logger = "0.4"
-lazy_static = "1.4.0"
-json = "0.12.4"
+once_cell = "1.15.0"
 
 jsonwebtoken = "8.1.1"
 thiserror = "1.0.37"
@@ -37,8 +32,12 @@ ureq = { version = "2.5.0", features = ["json"] }
 totp-rs = { version = "3.0.1", features = ["qr"] }
 ring = { version = "0.16.20", default-features = false }
 mime_guess = "2.0.4"
-zip = "0.6.2"
+zip = { version = "0.6.2", default-features = false }
 base64 = "0.13.0"
 image = "0.24.4"
-cached = "0.39.0"
 stretto = "0.7.1"
+
+rustracing = "0.6.0"
+rustracing_jaeger = "0.8.1"
+prometheus = { version = "0.13.2", features = ["process"] }
+prometheus-static-metric = "0.5.1"
backend/rustfmt.toml (new file, 76 lines)
@@ -0,0 +1,76 @@
+max_width = 120
+hard_tabs = false
+tab_spaces = 4
+newline_style = "Unix"
+indent_style = "Block"
+use_small_heuristics = "Default"
+fn_call_width = 60
+attr_fn_like_width = 70
+struct_lit_width = 18
+struct_variant_width = 35
+array_width = 60
+chain_width = 100
+single_line_if_else_max_width = 50
+wrap_comments = false
+format_code_in_doc_comments = false
+doc_comment_code_block_width = 100
+comment_width = 80
+normalize_comments = false
+normalize_doc_attributes = false
+format_strings = false
+format_macro_matchers = false
+format_macro_bodies = true
+hex_literal_case = "Preserve"
+empty_item_single_line = true
+struct_lit_single_line = true
+fn_single_line = true
+where_single_line = true
+imports_indent = "Block"
+imports_layout = "HorizontalVertical"
+imports_granularity = "Crate"
+group_imports = "StdExternalCrate"
+reorder_imports = true
+reorder_modules = true
+reorder_impl_items = true
+type_punctuation_density = "Wide"
+space_before_colon = false
+space_after_colon = true
+spaces_around_ranges = false
+binop_separator = "Front"
+remove_nested_parens = true
+combine_control_expr = true
+short_array_element_width_threshold = 10
+overflow_delimited_expr = true
+struct_field_align_threshold = 0
+enum_discrim_align_threshold = 0
+match_arm_blocks = false
+match_arm_leading_pipes = "Never"
+force_multiline_blocks = false
+fn_args_layout = "Tall"
+brace_style = "PreferSameLine"
+control_brace_style = "AlwaysSameLine"
+trailing_semicolon = true
+trailing_comma = "Never"
+match_block_trailing_comma = false
+blank_lines_upper_bound = 1
+blank_lines_lower_bound = 0
+edition = "2021"
+version = "Two"
+inline_attribute_width = 0
+format_generated_files = true
+merge_derives = true
+use_try_shorthand = true
+use_field_init_shorthand = true
+force_explicit_abi = true
+condense_wildcard_suffixes = false
+color = "Auto"
+required_version = "1.5.1"
+unstable_features = false
+disable_all_formatting = false
+skip_children = false
+hide_parse_errors = false
+error_on_line_overflow = false
+error_on_unformatted = false
+ignore = []
+emit_mode = "Files"
+make_backup = false
backend/src/config.rs
@@ -1,9 +1,8 @@
-use lazy_static::lazy_static;
+use once_cell::sync::Lazy;
 
-lazy_static! {
-    pub static ref CONFIG: Config = Config::read();
-}
+pub static CONFIG: Lazy<Config> = Lazy::new(Config::read);
 
+#[derive(serde::Deserialize)]
 pub struct Config {
     pub gitlab_id: String,
     pub gitlab_secret: String,
@@ -19,17 +18,6 @@ pub struct Config {
 impl Config {
     fn read() -> Self {
         let config = std::fs::read_to_string("config.json").expect("Failed to read config.json");
-        let config = json::parse(config.as_str()).expect("Failed to parse config.json");
-        Self {
-            gitlab_id: config["gitlab_id"].as_str().expect("Config is missing 'gitlab_id'").to_string(),
-            gitlab_secret: config["gitlab_secret"].as_str().expect("Config is missing 'gitlab_secret'").to_string(),
-            gitlab_url: config["gitlab_url"].as_str().expect("Config is missing 'gitlab_url'").to_string(),
-            gitlab_api_url: config["gitlab_api_url"].as_str().expect("Config is missing 'gitlab_api_url'").to_string(),
-            gitlab_redirect_url: config["gitlab_redirect_url"].as_str().expect("Config is missing 'gitlab_redirect_url'").to_string(),
-            smtp_server: config["smtp_server"].as_str().expect("Config is missing 'smtp_server'").to_string(),
-            smtp_port: config["smtp_port"].as_u16().expect("Config is missing 'smtp_port'"),
-            smtp_user: config["smtp_user"].as_str().expect("Config is missing 'smtp_user'").to_string(),
-            smtp_password: config["smtp_password"].as_str().expect("Config is missing 'smtp_password'").to_string()
-        }
+        serde_json::from_str(config.as_str()).expect("Failed to parse config.json")
     }
 }
backend/src/db/connection.rs
@@ -1,25 +1,20 @@
 use diesel::prelude::*;
-use crate::db::manager::DB_MANAGER;
+use rustracing_jaeger::Span;
+
+use crate::{db::manager::DB_MANAGER, metrics};
 
 pub struct DBConnection {
     db: super::RawDBConnection
 }
 
 impl From<super::RawDBConnection> for DBConnection {
-    fn from(conn: super::RawDBConnection) -> Self {
-        Self {
-            db: conn
-        }
-    }
+    fn from(conn: super::RawDBConnection) -> Self { Self { db: conn } }
 }
 
 impl DBConnection {
     // Users
-    pub fn create_user_password(
-        &mut self,
-        name: String,
-        password: String
-    ) -> super::User {
+    pub fn create_user_password(&mut self, span: &Span, name: String, password: String) -> super::User {
+        let _span = metrics::span("create_user_password_in_db", span);
         let mut new_user: super::User = diesel::insert_into(crate::schema::user::table)
             .values(super::user::NewUser {
                 name,
@@ -35,19 +30,22 @@ impl DBConnection {
             .get_result(&mut self.db)
             .expect("Failed to insert new user");
 
-        let root_node = crate::routes::fs::create_node("".to_owned(), &new_user, false, None, true, self).expect("Couldn't create root node");
+        let root_node = crate::routes::fs::create_node(span, "".to_owned(), &new_user, false, None, true, self)
+            .expect("Couldn't create root node");
         new_user.root_id = root_node.id;
-        self.save_user(&new_user);
+        self.save_user(span, &new_user);
         new_user
     }
 
     pub fn create_user_gitlab(
         &mut self,
+        span: &Span,
         name: String,
         role: super::UserRole,
         gitlab_at: String,
         gitlab_rt: String
     ) -> super::User {
+        let _span = metrics::span("create_user_gitlab_in_db", span);
         let mut new_user: super::User = diesel::insert_into(crate::schema::user::table)
             .values(super::user::NewUser {
                 name,
@@ -63,19 +61,22 @@ impl DBConnection {
             .get_result(&mut self.db)
             .expect("Failed to insert new user");
 
-        let root_node = crate::routes::fs::create_node("".to_owned(), &new_user, false, None, true, self).expect("Couldn't create root node");
+        let root_node = crate::routes::fs::create_node(span, "".to_owned(), &new_user, false, None, true, self)
+            .expect("Couldn't create root node");
         new_user.root_id = root_node.id;
-        self.save_user(&new_user);
+        self.save_user(span, &new_user);
         new_user
     }
 
-    pub fn get_user(&mut self, _id: i32) -> Option<super::User> {
+    pub fn get_user(&mut self, span: &Span, _id: i32) -> Option<super::User> {
         use crate::schema::user::dsl::*;
+        let _span = metrics::span("get_user_from_db", span);
         user.find(_id).first(&mut self.db).ok()
     }
 
-    pub fn find_user(&mut self, _name: &str, _gitlab: bool) -> Option<super::User> {
+    pub fn find_user(&mut self, span: &Span, _name: &str, _gitlab: bool) -> Option<super::User> {
         use crate::schema::user::dsl::*;
+        let _span = metrics::span("find_user_in_db", span);
         user.filter(name.eq(name)).filter(gitlab.eq(_gitlab)).first(&mut self.db).ok()
     }
 
@@ -83,11 +84,9 @@ impl DBConnection {
         crate::schema::user::table.load(&mut self.db).expect("Could not load users")
     }
 
-    pub fn save_user(&mut self, user: &super::User) {
-        diesel::update(user)
-            .set(user.clone())
-            .execute(&mut self.db)
-            .expect("Failed to save user");
+    pub fn save_user(&mut self, span: &Span, user: &super::User) {
+        let _span = metrics::span("save_user_to_db", span);
+        diesel::update(user).set(user.clone()).execute(&mut self.db).expect("Failed to save user");
     }
 
     pub fn delete_user(&mut self, user: &super::User) {
@@ -95,7 +94,8 @@ impl DBConnection {
     }
 
     // Tokens
-    pub fn create_token(&mut self, _owner: i32, _exp: i64) -> super::Token {
+    pub fn create_token(&mut self, span: &Span, _owner: i32, _exp: i64) -> super::Token {
+        let _span = metrics::span("create_token_in_db", span);
         diesel::insert_into(crate::schema::tokens::table)
             .values(&super::token::NewToken {
                 owner_id: _owner,
@@ -105,53 +105,64 @@ impl DBConnection {
             .expect("Failed to save new token to database")
     }
 
-    pub fn get_token(&mut self, _id: i32) -> Option<super::Token> {
+    pub fn get_token(&mut self, span: &Span, _id: i32) -> Option<super::Token> {
         use crate::schema::tokens::dsl::*;
+        let _span = metrics::span("get_token_from_db", span);
         tokens.find(_id).first(&mut self.db).ok()
     }
 
-    pub fn delete_token(&mut self, _id: i32) {
+    pub fn delete_token(&mut self, span: &Span, _id: i32) {
         use crate::schema::tokens::dsl::*;
-        diesel::delete(tokens.find(_id))
-            .execute(&mut self.db)
-            .expect("Failed to delete token");
+        let _span = metrics::span("delete_token_from_db", span);
+        diesel::delete(tokens.find(_id)).execute(&mut self.db).expect("Failed to delete token");
     }
 
-    pub fn delete_all_tokens(&mut self, _owner: i32) {
+    pub fn delete_all_tokens(&mut self, span: &Span, _owner: i32) {
         use crate::schema::tokens::dsl::*;
+        let _span = metrics::span("delete_user_tokens_from_db", span);
         diesel::delete(tokens.filter(owner_id.eq(_owner)))
            .execute(&mut self.db)
            .expect("Failed to delete token");
     }
 
-    pub fn cleanup_tokens(&mut self) {
+    pub fn cleanup_tokens(&mut self, span: &Span) {
         use crate::schema::tokens::dsl::*;
+        let _span = metrics::span("cleanup_tokens", span);
         let current_time = chrono::Utc::now().timestamp();
-        diesel::delete(tokens.filter(exp.le(current_time))).execute(&mut self.db).expect("Failed to cleanup tokens");
+        diesel::delete(tokens.filter(exp.le(current_time)))
+            .execute(&mut self.db)
+            .expect("Failed to cleanup tokens");
    }
 
     // Nodes
-    pub async fn get_lock(user: i32) -> std::sync::Arc<tokio::sync::RwLock<()>> {
-        DB_MANAGER.get_lock(user).await
+    pub fn get_lock(user: i32) -> super::manager::TracingLock { DB_MANAGER.get_lock(user) }
+
+    pub fn create_node(
+        &mut self,
+        span: &Span,
+        file: bool,
+        name: String,
+        parent: Option<i32>,
+        owner: i32
+    ) -> super::Inode {
+        DB_MANAGER.create_node(span, &mut self.db, file, name, parent, owner)
     }
 
-    pub fn create_node(&mut self, file: bool, name: String, parent: Option<i32>, owner: i32) -> super::Inode {
-        DB_MANAGER.create_node(&mut self.db, file, name, parent, owner)
+    pub fn get_all_nodes(&mut self) -> Vec<super::Inode> {
+        crate::schema::inode::table.load(&mut self.db).expect("Could not load nodes")
     }
 
-    pub fn get_node(&mut self, id: i32) -> Option<super::Inode> {
-        DB_MANAGER.get_node(&mut self.db, id)
+    pub fn get_node(&mut self, span: &Span, id: i32) -> Option<super::Inode> {
+        DB_MANAGER.get_node(span, &mut self.db, id)
     }
 
-    pub fn get_children(&mut self, id: i32) -> Vec<super::Inode> {
-        DB_MANAGER.get_children(&mut self.db, id)
+    pub fn get_children(&mut self, span: &Span, id: i32) -> Vec<super::Inode> {
+        DB_MANAGER.get_children(span, &mut self.db, id)
    }
 
-    pub fn save_node(&mut self, node: &super::Inode) {
-        DB_MANAGER.save_node(&mut self.db, node);
-    }
+    pub fn save_node(&mut self, span: &Span, node: &super::Inode) { DB_MANAGER.save_node(span, &mut self.db, node); }
 
-    pub fn delete_node(&mut self, node: &super::Inode) {
-        DB_MANAGER.delete_node(&mut self.db, node);
+    pub fn delete_node(&mut self, span: &Span, node: &super::Inode) {
+        DB_MANAGER.delete_node(span, &mut self.db, node);
     }
 }
backend/src/db/manager.rs
@@ -1,16 +1,30 @@
-use std::collections::HashMap;
-use std::sync::Arc;
-use lazy_static::lazy_static;
-use stretto::Cache;
-use tokio::sync::{Mutex, RwLock};
-use diesel::prelude::*;
-use crate::db::Inode;
+use std::{collections::HashMap, sync::Arc};
 
-lazy_static! {
-    pub(super) static ref DB_MANAGER: DBManager = DBManager::new();
+use diesel::prelude::*;
+use once_cell::sync::Lazy;
+use parking_lot::{Mutex, RwLock, RwLockReadGuard, RwLockWriteGuard};
+use rustracing_jaeger::Span;
+use stretto::Cache;
+
+use crate::{db::Inode, metrics};
+
+#[derive(Clone)]
+pub struct TracingLock(Arc<RwLock<()>>);
+impl TracingLock {
+    pub fn read(&self, span: &Span) -> RwLockReadGuard<'_, ()> {
+        let _span = metrics::span("get_read_lock", span);
+        self.0.read()
+    }
+
+    pub fn write(&self, span: &Span) -> RwLockWriteGuard<'_, ()> {
+        let _span = metrics::span("get_write_lock", span);
+        self.0.write()
+    }
 }
 
-pub(super) struct DBManager {
+pub static DB_MANAGER: Lazy<DBManager> = Lazy::new(DBManager::new);
+
+pub struct DBManager {
     locks: Mutex<HashMap<i32, Arc<RwLock<()>>>>,
     node_cache: Cache<i32, Inode>,
     children_cache: Cache<i32, Vec<Inode>>
@@ -25,8 +39,17 @@ impl DBManager {
         }
     }
 
-    pub fn create_node(&self, db: &mut super::RawDBConnection, file: bool, _name: String, parent: Option<i32>, owner: i32) -> Inode {
+    pub fn create_node(
+        &self,
+        span: &Span,
+        db: &mut super::RawDBConnection,
+        file: bool,
+        _name: String,
+        parent: Option<i32>,
+        owner: i32
+    ) -> Inode {
         use crate::schema::inode::dsl::*;
+        let _span = metrics::span("insert_node_into_db", span);
         let node: Inode = diesel::insert_into(inode)
             .values(crate::db::inode::NewInode {
                 is_file: file,
@@ -44,15 +67,28 @@ impl DBManager {
             self.children_cache.remove(&parent);
         }
 
+        let owner = node.owner_id.to_string();
+        if file {
+            metrics::NODES.with_label_values(&["file", owner.as_str()]).inc();
+        } else {
+            metrics::NODES.with_label_values(&["folder", owner.as_str()]).inc();
+        }
+
         node
     }
 
-    pub fn get_node(&self, db: &mut super::RawDBConnection, node_id: i32) -> Option<Inode> {
+    pub fn get_node(&self, span: &Span, db: &mut super::RawDBConnection, node_id: i32) -> Option<Inode> {
         use crate::schema::inode::dsl::*;
+        let inner_span = metrics::span("get_node", span);
         let node = self.node_cache.get(&node_id);
         match node {
-            Some(v) => Some(v.value().clone()),
+            Some(v) => {
+                metrics::CACHE.node.hit.inc();
+                Some(v.value().clone())
+            }
            None => {
+                let _span = metrics::span("get_node_from_db", &inner_span);
+                metrics::CACHE.node.miss.inc();
                let v: Inode = inode.find(node_id).first(db).ok()?;
                self.node_cache.insert(node_id, v.clone(), 1);
                Some(v)
@@ -60,12 +96,18 @@ impl DBManager {
            }
        }
    }
 
-    pub fn get_children(&self, db: &mut super::RawDBConnection, node_id: i32) -> Vec<Inode> {
+    pub fn get_children(&self, span: &Span, db: &mut super::RawDBConnection, node_id: i32) -> Vec<Inode> {
         use crate::schema::inode::dsl::*;
+        let inner_span = metrics::span("get_children", span);
         let children = self.children_cache.get(&node_id);
         match children {
-            Some(v) => v.value().clone(),
+            Some(v) => {
+                metrics::CACHE.children.hit.inc();
+                v.value().clone()
+            }
            None => {
+                let _span = metrics::span("get_children_from_db", &inner_span);
+                metrics::CACHE.children.miss.inc();
                let v = inode.filter(parent_id.eq(node_id)).load(db).expect("Failed to get children of node");
                self.children_cache.insert(node_id, v.clone(), 1);
                v
@@ -73,16 +115,20 @@ impl DBManager {
            }
        }
    }
 
-    pub fn save_node(&self, db: &mut super::RawDBConnection, node: &Inode) {
+    pub fn save_node(&self, span: &Span, db: &mut super::RawDBConnection, node: &Inode) {
+        let _span = metrics::span("save_node_to_db", span);
         self.node_cache.insert(node.id, node.clone(), 1);
-        diesel::update(node)
-            .set(node.clone())
-            .execute(db)
-            .expect("Failed to save node");
+        if let Some(p) = node.parent_id {
+            self.children_cache.remove(&p);
+        }
+        diesel::update(node).set(node.clone()).execute(db).expect("Failed to save node");
     }
 
-    pub fn delete_node(&self, db: &mut super::RawDBConnection, node: &Inode) {
+    pub fn delete_node(&self, span: &Span, db: &mut super::RawDBConnection, node: &Inode) {
+        let inner_span = metrics::span("delete_node", span);
+        let owner = node.owner_id.to_string();
         if node.is_file {
+            let _span = metrics::span("delete_node_files", &inner_span);
             let file_name = format!("./files/{}", node.id);
             let file = std::path::Path::new(&file_name);
             let preview_name = format!("./files/{}_preview.jpg", node.id);
@@ -93,17 +139,22 @@ impl DBManager {
             if preview.exists() {
                 std::fs::remove_file(preview).expect("Failed to delete preview");
             }
+            metrics::NODES.with_label_values(&["file", owner.as_str()]).dec();
+            metrics::DISK_USAGE.with_label_values(&[owner.as_str()]).sub(node.size.unwrap_or(0));
+        } else {
+            metrics::NODES.with_label_values(&["folder", owner.as_str()]).dec();
         }
 
+        let _span = metrics::span("delete_node_from_db", &inner_span);
         diesel::delete(node).execute(db).expect("Failed to delete node");
         self.node_cache.remove(&node.id);
         self.children_cache.remove(&node.id);
-        if let Some(p) = node.parent_id { self.children_cache.remove(&p); }
+        if let Some(p) = node.parent_id {
+            self.children_cache.remove(&p);
+        }
     }
 
-    pub async fn get_lock(&self, user: i32) -> Arc<RwLock<()>> {
-        self.locks.lock().await
-            .entry(user)
-            .or_insert_with(|| Arc::new(RwLock::new(())))
-            .clone()
+    pub fn get_lock(&self, user: i32) -> TracingLock {
+        TracingLock(self.locks.lock().entry(user).or_insert_with(|| Arc::new(RwLock::new(()))).clone())
     }
 }
backend/src/db/mod.rs
@@ -1,19 +1,18 @@
+mod connection;
 mod inode;
+pub mod manager;
 mod token;
 mod user;
-pub mod manager;
-mod connection;
 
-use diesel::connection::SimpleConnection;
-use diesel::sqlite::SqliteConnection;
-use diesel::r2d2::{ConnectionManager, Pool, PooledConnection};
+use diesel::{
+    connection::SimpleConnection,
+    r2d2::{ConnectionManager, Pool, PooledConnection},
+    sqlite::SqliteConnection
+};
 use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
-use warp::Filter;
 
 pub use inode::Inode;
 pub use token::Token;
-pub use user::{User, TfaTypes, UserRole};
-use crate::routes::AppError;
+pub use user::{TfaTypes, User, UserRole};
 
 type RawDBConnection = PooledConnection<ConnectionManager<SqliteConnection>>;
 pub type DBPool = Pool<ConnectionManager<SqliteConnection>>;
@@ -41,14 +40,3 @@ pub fn build_pool() -> Pool<ConnectionManager<SqliteConnection>> {
 pub fn run_migrations(db: &mut RawDBConnection) {
     db.run_pending_migrations(MIGRATIONS).expect("Failed to run migrations");
 }
-
-pub fn with_db(pool: DBPool) -> impl Filter<Extract=(DBConnection, ), Error=warp::reject::Rejection> + Clone {
-    warp::any()
-        .map(move || pool.clone())
-        .and_then(|pool: DBPool| async move {
-            match pool.get() {
-                Ok(v) => Ok(DBConnection::from(v)),
-                Err(_) => AppError::InternalError("Failed to get a database connection").err()
-            }
-        })
-}
backend/src/db/user.rs
@@ -1,9 +1,11 @@
-use diesel::backend::RawValue;
-use diesel::deserialize::{FromSql, FromSqlRow};
-use diesel::prelude::*;
-use diesel::serialize::{IsNull, Output, ToSql};
-use diesel::sql_types::SmallInt;
-use diesel::sqlite::Sqlite;
+use diesel::{
+    backend::RawValue,
+    deserialize::{FromSql, FromSqlRow},
+    prelude::*,
+    serialize::{IsNull, Output, ToSql},
+    sql_types::SmallInt,
+    sqlite::Sqlite
+};
 use serde_repr::{Deserialize_repr, Serialize_repr};
 
 #[repr(i16)]
backend/src/main.rs
@@ -1,34 +1,225 @@
-mod db;
-mod schema;
-mod dto;
-mod routes;
 mod config;
+mod db;
+mod dto;
+mod metrics;
+mod routes;
+mod schema;
 
-#[tokio::main]
-async fn main() {
-    console_subscriber::init();
-
-    pretty_env_logger::formatted_builder().filter_level(log::LevelFilter::Info).init();
+use std::fs::File;
+
+use rayon_core::ThreadPoolBuilder;
+use rustracing::tag::Tag;
+use rustracing_jaeger::Span;
+use tiny_http::{Method, Request, Response, ResponseBox, Server, StatusCode};
+
+use crate::{
+    db::DBConnection,
+    metrics::TRACER,
+    routes::{get_reply, header, AppError}
+};
+
+static THREAD_COUNT: usize = 10;
+
+fn index_resp() -> Response<File> {
+    Response::from_file(File::open(std::path::Path::new("./static/index.html")).unwrap())
+        .with_header(header("content-type", "text/html; charset=utf-8"))
+}
+
+fn parse_body<S>(span: &mut Span, req: &mut Request) -> Result<S, AppError>
+where S: for<'a> serde::Deserialize<'a> {
+    let _span = TRACER.span("parse_body").child_of(span.context().unwrap()).start();
+    serde_json::from_reader(req.as_reader()).map_err(|_| AppError::BadRequest("Invalid query data"))
+}
+
+fn handle_request(mut req: Request, db: db::DBPool) {
+    let path = req.url().to_string();
+    let resp = if !path.starts_with("/api") {
+        match req.method() {
+            &Method::Get =>
+                if !(path.contains('\\') || path.contains("..") || path.contains(':')) {
+                    let path_str = "./static".to_owned() + &path;
+                    let path = std::path::Path::new(&path_str);
+                    if path.exists() {
+                        let resp = Response::from_file(File::open(path).unwrap());
+                        match path.extension().map(|s| s.to_str()).unwrap_or(None) {
+                            Some("html") => resp.with_header(header("content-type", "text/html; charset=utf-8")),
+                            Some("css") => resp.with_header(header("content-type", "text/css; charset=utf-8")),
+                            Some("js") => resp.with_header(header(
+                                "content-type",
+                                "application/x-javascript; charset=utf-8"
+                            )),
+                            Some("svg") => resp.with_header(header("content-type", "image/svg+xml")),
+                            _ => resp
+                        }
+                        .boxed()
+                    } else {
+                        index_resp().boxed()
+                    }
+                } else {
+                    index_resp().boxed()
+                },
+            _ => Response::empty(StatusCode::from(405)).boxed()
+        }
+    } else {
+        let meth = req.method().clone();
+        let mut span = TRACER
+            .span("handle_api_request")
+            .tag(Tag::new("http.target", path))
+            .tag(Tag::new("http.method", meth.to_string()))
+            .start();
+        let resp = match handle_api_request(&mut span, &mut req, db) {
+            Ok(v) => v,
+            Err(v) => {
+                let code = match v {
+                    AppError::BadRequest(_) => 400,
+                    AppError::Unauthorized(_) => 401,
+                    AppError::Forbidden(_) => 403,
+                    AppError::NotFound => 404,
+                    AppError::InternalError(_) => 500
+                };
+                Response::from_data(
+                    serde_json::to_vec(&dto::responses::Error {
+                        statusCode: code,
+                        message: match v {
+                            AppError::BadRequest(v) => v.to_string(),
+                            AppError::Unauthorized(v) => v.to_string(),
+                            AppError::Forbidden(v) => v.to_string(),
+                            AppError::NotFound => "Not found".to_owned(),
+                            AppError::InternalError(v) => v.to_string()
+                        }
+                    })
+                    .unwrap()
+                )
+                .with_header(header("content-type", "application/json; charset=utf-8"))
+                .with_status_code(code)
+                .boxed()
+            }
+        };
+        span.set_tag(|| Tag::new("http.status_code", resp.status_code().0 as i64));
+        resp
+    };
+    req.respond(resp).expect("Failed to send response");
+}
+
+#[rustfmt::skip]
+fn handle_api_request(span: &mut Span, req: &mut Request, pool: db::DBPool) -> Result<ResponseBox, AppError> {
+    metrics::REQUEST.inc();
+    let db = &mut db::DBConnection::from(pool.get().unwrap());
+    let (path, query) = {
+        let url = req.url().to_string();
+        let mut splits = url.splitn(2, '?');
+        (
+            splits.next().unwrap().to_string(),
+            splits.next().unwrap_or("").to_string()
+        )
+    };
+    match (path.as_str(), req.method()) {
+        ("/api/metrics", Method::Get) => metrics::get_metrics(),
+        ("/api/auth/login", Method::Post) => parse_body(span, req).and_then(|v| routes::auth::basic::login(span, req, db, v)),
+        ("/api/auth/signup", Method::Post) => parse_body(span, req).and_then(|v| routes::auth::basic::signup(span, req, db, v)),
+        ("/api/auth/gitlab", Method::Get) => routes::auth::gitlab::gitlab(span, req, db),
+        ("/api/auth/gitlab_callback", Method::Get) => routes::auth::gitlab::gitlab_callback(span, req, db, &query),
+        ("/api/fs/download", Method::Post) => routes::fs::routes::download(span, req, db),
+        ("/api/fs/download_multi", Method::Post) => routes::fs::routes::download_multi(span, req, db),
+        _ => {
+            let span_auth = TRACER.span("parse_auth_and_path").child_of(span.context().unwrap()).start();
+            let header = req.headers().iter().find(|h| h.field.as_str().as_str().eq_ignore_ascii_case("Authorization"))
+                .ok_or(AppError::Unauthorized("Unauthorized"))?;
+            let auth = header.value.as_str();
+            let token = auth.starts_with("Bearer ").then(|| auth.trim_start_matches("Bearer "))
+                .ok_or(AppError::Unauthorized("Invalid auth header"))?;
+            let info = routes::filters::authorize_jwt(span, token, db)?;
+            let (path, last_id) = path.to_string().rsplit_once('/')
+                .map(|(short_path, last)|
+                    last.parse::<i32>()
+                        .map_or((path.clone(), None), |i| (short_path.to_string(), Some(i)))
+                )
+                .unwrap_or((path.to_string(), None));
+            drop(span_auth);
+            let span = &mut TRACER.span("handle_auth_request").child_of(span.context().unwrap()).start();
+            match (path.as_str(), req.method(), last_id) {
+                ("/api/admin/users", Method::Get, None) => routes::admin::users(span, req, db, info),
+                ("/api/admin/set_role", Method::Post, None) => parse_body(span, req).and_then(|v| routes::admin::set_role(span, req, db, info, v)),
+                ("/api/admin/logout", Method::Post, None) => parse_body(span, req).and_then(|v| routes::admin::logout(span, req, db, info, v)),
+                ("/api/admin/delete", Method::Post, None) => parse_body(span, req).and_then(|v| routes::admin::delete_user(span, req, db, info, v)),
+                ("/api/admin/disable_2fa", Method::Post, None) => parse_body(span, req).and_then(|v| routes::admin::disable_2fa(span, req, db, info, v)),
+                ("/api/admin/is_admin", Method::Post, None) => get_reply(&dto::responses::Success { statusCode: 200 }),
+                ("/api/admin/get_token", Method::Get, Some(v)) => routes::admin::get_token(span, req, db, info, v),
+                ("/api/auth/refresh", Method::Post, None) => routes::auth::basic::refresh(span, req, db, info),
+                ("/api/auth/logout_all", Method::Post, None) => routes::auth::basic::logout_all(span, req, db, info),
+                ("/api/auth/change_password", Method::Post, None) => parse_body(span, req).and_then(|v| routes::auth::basic::change_password(span, req, db, info, v)),
+                ("/api/auth/2fa/setup", Method::Post, None) => parse_body(span, req).and_then(|v| routes::auth::tfa::tfa_setup(span, req, db, info, v)),
+                ("/api/auth/2fa/complete", Method::Post, None) => parse_body(span, req).and_then(|v| routes::auth::tfa::tfa_complete(span, req, db, info, v)),
+                ("/api/auth/2fa/disable", Method::Post, None) => routes::auth::tfa::tfa_disable(span, req, db, info),
+                ("/api/user/info", Method::Get, None) => routes::user::info(span, req, db, info),
+                ("/api/user/delete", Method::Post, None) => routes::user::delete_user(span, req, db, info),
+                ("/api/fs/root", Method::Get, None) => routes::fs::routes::root(span, req, db, info),
+                ("/api/fs/node", Method::Get, Some(v)) => routes::fs::routes::node(span, req, db, info, v),
+                ("/api/fs/path", Method::Get, Some(v)) => routes::fs::routes::path(span, req, db, info, v),
+                ("/api/fs/create_folder", Method::Post, None) => parse_body(span, req).and_then(|v| routes::fs::routes::create_node(span, req, db, info, v, false)),
+                ("/api/fs/create_file", Method::Post, None) => parse_body(span, req).and_then(|v| routes::fs::routes::create_node(span, req, db, info, v, true)),
+                ("/api/fs/delete", Method::Post, Some(v)) => routes::fs::routes::delete_node(span, req, db, info, v, &pool),
+                ("/api/fs/upload", Method::Post, Some(v)) => routes::fs::routes::upload(span, req, db, info, v),
+                ("/api/fs/create_zip", Method::Post, None) => parse_body(span, req).and_then(|v| routes::fs::routes::create_zip(span, req, db, info, v, &pool)),
+                ("/api/fs/download_preview", Method::Get, Some(v)) => routes::fs::routes::download_preview(span, req, db, info, v),
+                ("/api/fs/get_type", Method::Get, Some(v)) => routes::fs::routes::get_type(span, req, db, info, v),
+                _ => AppError::NotFound.err()
+            }
+        }
+    }
+}
+
+fn main() {
     let _ = config::CONFIG;
 
-    let pool: db::DBPool = db::build_pool();
+    let db_pool: db::DBPool = db::build_pool();
 
-    db::run_migrations(&mut pool.get().unwrap());
+    db::run_migrations(&mut db_pool.get().unwrap());
 
     if !std::path::Path::new("files").exists() {
         std::fs::create_dir("files").expect("Failed to create files directory");
     }
 
-    let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel();
-
-    let (_addr, server) = warp::serve(routes::build_routes(pool.clone())).bind_with_graceful_shutdown(([0, 0, 0, 0], 2345), async {
-        shutdown_rx.await.ok();
-    });
+    if !std::path::Path::new("temp").is_dir() {
+        std::fs::create_dir("temp").expect("Failed to create temp dir");
+    }
+    std::fs::read_dir("temp").expect("Failed to iter temp dir").for_each(|dir| {
+        std::fs::remove_file(dir.expect("Failed to retrieve temp dir entry").path())
+            .expect("Failed to delete file in temp dir");
+    });
 
-    tokio::task::spawn(server);
+    metrics::init(DBConnection::from(db_pool.get().unwrap()));
+
+    let shutdown = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false));
+    let ctrlc_shutdown = shutdown.clone();
+
+    let server = std::sync::Arc::new(Server::http("0.0.0.0:2345").unwrap());
+    let ctrlc_server = server.clone();
+
+    ctrlc::set_handler(move || {
+        ctrlc_shutdown.store(true, std::sync::atomic::Ordering::Relaxed);
+        ctrlc_server.unblock();
+    })
+    .expect("Could not set ctrl-c handler");
+
+    let pool = ThreadPoolBuilder::new()
+        .num_threads(THREAD_COUNT)
+        .thread_name(|i| format!("Http listener {}", i))
+        .build()
+        .unwrap();
+
+    'server: loop {
+        match server.recv() {
+            Ok(req) => {
+                let inner_pool = db_pool.clone();
+                pool.spawn(move || handle_request(req, inner_pool))
+            }
+            Err(_) =>
+                if shutdown.load(std::sync::atomic::Ordering::Relaxed) {
+                    break 'server;
+                },
+        }
+    }
 
-    tokio::signal::ctrl_c().await.expect("Failed to wait for ctrl-c");
     println!("Quitting");
-    shutdown_tx.send(()).expect("Failed to shutdown server");
 }
backend/src/metrics.rs (new file, 83 lines)
@@ -0,0 +1,83 @@
+use std::{borrow::Cow, clone::Clone, net::ToSocketAddrs};
+
+use once_cell::sync::{Lazy, OnceCell};
+use prometheus::{register_int_counter, register_int_counter_vec, register_int_gauge_vec, IntCounter, IntGaugeVec};
+use prometheus_static_metric::make_static_metric;
+use rustracing_jaeger::{span::SpanReceiver, Span, Tracer};
+use tiny_http::{Response, ResponseBox};
+
+use crate::AppError;
+
+#[cfg(debug_assertions)]
+static SERVICE_NAME: &str = "fileserver-testing";
+
+#[cfg(not(debug_assertions))]
+static SERVICE_NAME: &str = "fileserver";
+
+pub static TRACER_QUIT: std::sync::atomic::AtomicBool = std::sync::atomic::AtomicBool::new(false);
+static TRACER_INNER: Lazy<(Tracer, SpanReceiver)> = Lazy::new(|| Tracer::new(rustracing::sampler::AllSampler));
+pub static TRACER: Lazy<Tracer> = Lazy::new(|| TRACER_INNER.0.clone());
+pub static TRACER_THREAD: OnceCell<std::thread::JoinHandle<()>> = OnceCell::new();
+
+make_static_metric! {
+    struct CacheMetrics: IntCounter {
+        "cache" => {
+            node,
+            children
+        },
+        "type" => {
+            hit,
+            miss
+        }
+    }
+}
+
+pub static NODES: Lazy<IntGaugeVec> =
+    Lazy::new(|| register_int_gauge_vec!("nodes", "All nodes by user, type", &["type", "user"]).unwrap());
+pub static DISK_USAGE: Lazy<IntGaugeVec> =
+    Lazy::new(|| register_int_gauge_vec!("disk_usage", "Disk usage by user", &["user"]).unwrap());
+pub static REQUEST: Lazy<IntCounter> = Lazy::new(|| register_int_counter!("request", "Count of requests").unwrap());
+pub static CACHE: Lazy<CacheMetrics> = Lazy::new(|| {
+    CacheMetrics::from(&register_int_counter_vec!("cache", "Node cache hits/misses", &["cache", "type"]).unwrap())
+});
+
+pub fn init(mut db: crate::db::DBConnection) {
+    TRACER_THREAD
+        .set(std::thread::spawn(|| {
+            let recv = TRACER_INNER.1.clone();
+            let mut reporter = rustracing_jaeger::reporter::JaegerCompactReporter::new(SERVICE_NAME).unwrap();
+            reporter.set_agent_addr("grafana.lan:6831".to_socket_addrs().unwrap().next().unwrap());
+            reporter.set_reporter_addr("0.0.0.0:0".parse().unwrap()).unwrap();
+            while let Ok(span) = recv.recv() {
+                if TRACER_QUIT.load(std::sync::atomic::Ordering::Relaxed) {
+                    break;
+                }
+                reporter.report(&[span]).unwrap();
+            }
+        }))
+        .unwrap();
+
+    let nodes = Lazy::force(&NODES);
+    let disk_usage = Lazy::force(&DISK_USAGE);
+    Lazy::force(&REQUEST);
+    Lazy::force(&CACHE);
+    db.get_all_nodes().iter().for_each(|n| {
+        let owner = n.owner_id.to_string();
+        if n.is_file {
+            nodes.with_label_values(&["file", owner.as_str()]).inc();
+            disk_usage.with_label_values(&[owner.as_str()]).add(n.size.unwrap_or(0));
+        } else {
+            nodes.with_label_values(&["folder", owner.as_str()]).inc();
+        }
+    });
+}
+
+pub fn get_metrics() -> Result<ResponseBox, AppError> {
+    let metric = prometheus::gather();
+    Ok(Response::from_string(prometheus::TextEncoder::new().encode_to_string(&metric).unwrap()).boxed())
+}
+
+pub fn span<N>(name: N, span: &Span) -> Span
+where N: Into<Cow<'static, str>> {
+    TRACER.span(name).child_of(span.context().unwrap()).start()
+}
backend/src/routes/admin.rs
@@ -1,55 +1,17 @@
-use warp::{Filter, Reply};
-use crate::db::{DBConnection, DBPool, with_db};
-use crate::dto;
-use crate::routes::{AppError, get_reply};
-use crate::routes::filters::{admin, UserInfo};
+use rustracing_jaeger::Span;
+use tiny_http::{Request, ResponseBox};
 
-pub fn build_routes(db: DBPool) -> impl Filter<Extract = impl Reply, Error = warp::Rejection> + Clone {
-    let users = warp::path!("admin" / "users")
-        .and(warp::get())
-        .and(admin(db.clone()))
-        .and(with_db(db.clone()))
-        .and_then(users);
-    let set_role = warp::path!("admin" / "set_role")
-        .and(warp::post())
-        .and(warp::body::json())
-        .and(admin(db.clone()))
-        .and(with_db(db.clone()))
-        .and_then(set_role);
-    let logout = warp::path!("admin" / "logout")
-        .and(warp::post())
-        .and(warp::body::json())
-        .and(admin(db.clone()))
-        .and(with_db(db.clone()))
-        .and_then(logout);
-    let delete_user = warp::path!("admin" / "delete")
-        .and(warp::post())
-        .and(warp::body::json())
-        .and(admin(db.clone()))
-        .and(with_db(db.clone()))
-        .and_then(delete_user);
-    let disable_2fa = warp::path!("admin" / "disable_2fa")
-        .and(warp::post())
-        .and(warp::body::json())
-        .and(admin(db.clone()))
-        .and(with_db(db.clone()))
-        .and_then(disable_2fa);
-    let is_admin = warp::path!("admin" / "is_admin")
-        .and(warp::get())
-        .and(admin(db.clone()))
-        .and_then(|_| async { get_reply(&dto::responses::Success {
-            statusCode: 200
-        }) });
-    let get_token = warp::path!("admin" / "get_token" / i32)
-        .and(warp::get())
-        .and(admin(db.clone()))
-        .and(with_db(db))
-        .and_then(get_token);
-
-    users.or(set_role).or(logout).or(delete_user).or(disable_2fa).or(is_admin).or(get_token)
-}
+use crate::{
+    db::{DBConnection, UserRole},
+    dto,
+    routes::{filters::UserInfo, get_reply, AppError}
+};
 
-async fn users(_: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
+pub fn users(_: &Span, _: &mut Request, db: &mut DBConnection, info: UserInfo) -> Result<ResponseBox, AppError> {
+    if info.0.role != UserRole::Admin {
+        return AppError::Forbidden("Forbidden").err();
+    }
+
     let users = db.get_users();
 
     let mut res = dto::responses::AdminUsers {
@@ -70,60 +32,99 @@ async fn users(_: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Re
     get_reply(&res)
 }
 
-async fn set_role(data: dto::requests::AdminSetRole, _: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
-    let mut user = db.get_user(data.user)
-        .ok_or(AppError::Forbidden("Invalid user"))?;
+pub fn set_role(
+    span: &Span,
+    _: &mut Request,
+    db: &mut DBConnection,
+    info: UserInfo,
+    data: dto::requests::AdminSetRole
+) -> Result<ResponseBox, AppError> {
+    if info.0.role != UserRole::Admin {
+        return AppError::Forbidden("Forbidden").err();
+    }
+
+    let mut user = db.get_user(span, data.user).ok_or(AppError::Forbidden("Invalid user"))?;
     user.role = data.role;
-    db.save_user(&user);
+    db.save_user(span, &user);
 
-    get_reply(&dto::responses::Success {
-        statusCode: 200
-    })
+    get_reply(&dto::responses::Success { statusCode: 200 })
 }
 
-async fn logout(data: dto::requests::Admin, _: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
-    db.delete_all_tokens(data.user);
+pub fn logout(
+    span: &Span,
+    _: &mut Request,
+    db: &mut DBConnection,
+    info: UserInfo,
+    data: dto::requests::Admin
+) -> Result<ResponseBox, AppError> {
+    if info.0.role != UserRole::Admin {
+        return AppError::Forbidden("Forbidden").err();
+    }
 
-    get_reply(&dto::responses::Success {
-        statusCode: 200
-    })
+    db.delete_all_tokens(span, data.user);
+    get_reply(&dto::responses::Success { statusCode: 200 })
 }
 
-async fn delete_user(data: dto::requests::Admin, _: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
-    let user = db.get_user(data.user)
-        .ok_or(AppError::Forbidden("Invalid user"))?;
+pub fn delete_user(
+    span: &Span,
+    _: &mut Request,
+    db: &mut DBConnection,
+    info: UserInfo,
+    data: dto::requests::Admin
+) -> Result<ResponseBox, AppError> {
+    if info.0.role != UserRole::Admin {
+        return AppError::Forbidden("Forbidden").err();
+    }
 
-    db.delete_all_tokens(data.user);
+    let user = db.get_user(span, data.user).ok_or(AppError::Forbidden("Invalid user"))?;
 
-    let root_node = super::fs::get_node_and_validate(&user, user.root_id, &mut db).expect("Failed to get root node for deleting");
+    db.delete_all_tokens(span, data.user);
 
-    super::fs::delete_node_root(&root_node, &mut db);
+    let root_node =
+        super::fs::get_node_and_validate(span, &user, user.root_id, db).expect("Failed to get root node for deleting");
+
+    super::fs::delete_node_root(span, &root_node, db);
 
     db.delete_user(&user);
 
-    get_reply(&dto::responses::Success {
-        statusCode: 200
-    })
+    get_reply(&dto::responses::Success { statusCode: 200 })
 }
 
-async fn disable_2fa(data: dto::requests::Admin, _: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
-    let mut user = db.get_user(data.user)
-        .ok_or(AppError::Forbidden("Invalid user"))?;
+pub fn disable_2fa(
+    span: &Span,
+    _: &mut Request,
+    db: &mut DBConnection,
+    info: UserInfo,
+    data: dto::requests::Admin
+) -> Result<ResponseBox, AppError> {
+    if info.0.role != UserRole::Admin {
+        return AppError::Forbidden("Forbidden").err();
+    }
+
+    let mut user = db.get_user(span, data.user).ok_or(AppError::Forbidden("Invalid user"))?;
 
     user.tfa_type = crate::db::TfaTypes::None;
-    db.save_user(&user);
+    db.save_user(span, &user);
 
-    get_reply(&dto::responses::Success {
-        statusCode: 200
-    })
+    get_reply(&dto::responses::Success { statusCode: 200 })
 }
 
-async fn get_token(user: i32, _: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
-    let user = db.get_user(user)
-        .ok_or(AppError::Forbidden("Invalid user"))?;
+pub fn get_token(
+    span: &Span,
+    _: &mut Request,
+    db: &mut DBConnection,
+    info: UserInfo,
+    user: i32
+) -> Result<ResponseBox, AppError> {
+    if info.0.role != UserRole::Admin {
+        return AppError::Forbidden("Forbidden").err();
+    }
+
+    let user = db.get_user(span, user).ok_or(AppError::Forbidden("Invalid user"))?;
 
     get_reply(&dto::responses::Login {
         statusCode: 200,
-        jwt: super::auth::get_token(&user, &mut db)
+        jwt: super::auth::get_token(span, &user, db)
     })
 }
@@ -1,45 +1,20 @@
use rustracing_jaeger::Span;
use tiny_http::{Request, ResponseBox};

use crate::{
    db::{DBConnection, TfaTypes, UserRole},
    dto,
    routes::{filters::UserInfo, get_reply, AppError}
};

pub fn login(
    span: &Span,
    _: &mut Request,
    db: &mut DBConnection,
    data: dto::requests::Login
) -> Result<ResponseBox, AppError> {
    let user = db
        .find_user(span, &data.username, false)
        .ok_or(AppError::Unauthorized("Invalid username or password"))?;

    if !argon2::verify_encoded(user.password.as_str(), data.password.as_bytes()).unwrap_or(false) {
@@ -56,60 +31,69 @@ async fn login(data: dto::requests::Login, mut db: DBConnection)
                return AppError::Unauthorized("Incorrect 2fa").err();
            }
        } else {
            if user.tfa_type == TfaTypes::Email {
                super::tfa::send_2fa_mail(span, &user);
            }

            return get_reply(&dto::responses::Success { statusCode: 200 });
        }
    }

    get_reply(&dto::responses::Login {
        statusCode: 200,
        jwt: super::get_token(span, &user, db)
    })
}

pub fn signup(
    span: &Span,
    _: &mut Request,
    db: &mut DBConnection,
    data: dto::requests::SignUp
) -> Result<ResponseBox, AppError> {
    if db.find_user(span, &data.username, false).is_some() {
        return AppError::BadRequest("Username is already taken").err();
    }

    db.create_user_password(span, data.username, super::hash_password(&data.password));

    get_reply(&dto::responses::Success { statusCode: 200 })
}

pub fn refresh(span: &Span, _: &mut Request, db: &mut DBConnection, info: UserInfo) -> Result<ResponseBox, AppError> {
    db.delete_token(span, info.1.id);

    get_reply(&dto::responses::Login {
        statusCode: 200,
        jwt: super::get_token(span, &info.0, db)
    })
}

pub fn logout_all(
    span: &Span,
    _: &mut Request,
    db: &mut DBConnection,
    info: UserInfo
) -> Result<ResponseBox, AppError> {
    db.delete_all_tokens(span, info.0.id);

    get_reply(&dto::responses::Success { statusCode: 200 })
}

pub fn change_password(
    span: &Span,
    _: &mut Request,
    db: &mut DBConnection,
    mut info: UserInfo,
    data: dto::requests::ChangePassword
) -> Result<ResponseBox, AppError> {
    if !argon2::verify_encoded(info.0.password.as_str(), data.oldPassword.as_bytes()).unwrap_or(false) {
        return AppError::Unauthorized("Old password is wrong").err();
    }

    info.0.password = super::hash_password(&data.newPassword);
    db.save_user(span, &info.0);
    db.delete_all_tokens(span, info.0.id);

    get_reply(&dto::responses::Success { statusCode: 200 })
}
@@ -1,9 +1,15 @@
use std::time::Duration;

use once_cell::sync::Lazy;
use rustracing_jaeger::Span;
use tiny_http::{Request, Response, ResponseBox};

use crate::{
    config::CONFIG,
    db::DBConnection,
    metrics,
    routes::{header, AppError}
};

#[derive(serde::Deserialize, Clone, Debug)]
pub struct GitlabTokens {
@@ -17,92 +23,112 @@ pub struct GitlabUser {
    pub is_admin: bool
}

pub static REDIRECT_URL: Lazy<String> = Lazy::new(|| CONFIG.gitlab_redirect_url.clone() + "/api/auth/gitlab_callback");
pub static TOKEN_URL: Lazy<String> = Lazy::new(|| format!("{}/oauth/token", CONFIG.gitlab_api_url.clone()));
pub static USER_URL: Lazy<String> = Lazy::new(|| format!("{}/api/v4/user", CONFIG.gitlab_api_url.clone()));
pub static AUTHORIZE_URL: Lazy<String> = Lazy::new(|| format!("{}/oauth/authorize", CONFIG.gitlab_url.clone()));

pub fn get_gitlab_token(span: &Span, code_or_token: String, token: bool) -> Option<GitlabTokens> {
    let _span = metrics::span("get_gitlab_token", span);
    let mut req = ureq::post(&TOKEN_URL)
        .query("redirect_uri", &REDIRECT_URL)
        .query("client_id", &CONFIG.gitlab_id)
        .query("client_secret", &CONFIG.gitlab_secret);
    if token {
        req = req.query("refresh_token", &code_or_token).query("grant_type", "refresh_token");
    } else {
        req = req.query("code", &code_or_token).query("grant_type", "authorization_code");
    }
    req.call().ok()?.into_json().ok()
}

pub fn get_gitlab_user(span: &Span, token: String) -> Option<GitlabUser> {
    static CACHE: Lazy<stretto::Cache<String, GitlabUser>> =
        Lazy::new(|| stretto::CacheBuilder::new(1000, 100).finalize().expect("Failed to create gitlab cache"));
    match CACHE.get(&token) {
        None => {
            let _span = metrics::span("get_gitlab_user", span);
            ureq::get(&USER_URL)
                .set("Authorization", &format!("Bearer {}", token))
                .call()
                .ok()?
                .into_json::<GitlabUser>()
                .ok()
                .map(|v| {
                    CACHE.insert_with_ttl(token, v.clone(), 1, Duration::from_secs(500));
                    v
                })
        }
        Some(v) => Some(v.value().clone())
    }
}

pub fn gitlab(_: &Span, _: &mut Request, _: &mut DBConnection) -> Result<ResponseBox, AppError> {
    let uri = format!(
        "{}?redirect_uri={}&client_id={}&scope=read_user&response_type=code",
        AUTHORIZE_URL.as_str(),
        REDIRECT_URL.as_str(),
        CONFIG.gitlab_id
    );
    Ok(Response::empty(302).with_header(header("location", &uri)).boxed())
}

pub fn gitlab_callback(
    span: &Span,
    _: &mut Request,
    db: &mut DBConnection,
    query_string: &str
) -> Result<ResponseBox, AppError> {
    use crate::db::UserRole;

    let code: &str = query_string
        // a=b&code=c&d
        .split('&')
        // ['a=b', 'code=c', 'd']
        .find_map(|prop|
            prop.split_once('=')
                // [Some(('a', 'b')), Some(('code', 'c')), None]
                .and_then(|v|
                    v.0.eq("code")
                        .then_some(v.1)
                )
        )
        .ok_or(AppError::BadRequest("Query code missing"))?;

    let tokens = get_gitlab_token(span, code.to_string(), false).ok_or(AppError::Unauthorized("Invalid code"))?;
    let gitlab_user =
        get_gitlab_user(span, tokens.access_token.clone()).ok_or(AppError::Unauthorized("Invalid code"))?;

    let user = db.find_user(span, &gitlab_user.username, true);

    let user = match user {
        Some(mut v) => {
            v.gitlab_at = Some(tokens.access_token);
            v.gitlab_rt = Some(tokens.refresh_token);
            db.save_user(span, &v);
            v
        }
        None => db.create_user_gitlab(
            span,
            gitlab_user.username,
            if gitlab_user.is_admin {
                UserRole::Admin
            } else {
                UserRole::Disabled
            },
            tokens.access_token,
            tokens.refresh_token
        )
    };

    if user.role == UserRole::Disabled {
        Ok(
            Response::from_data("<!DOCTYPE html><html><h2>Your account is disabled, please contact an admin.<br/><a href=\"/login\">Go to login page</a></h2></html>")
                .with_header(header("content-type", "text/html; charset=utf-8"))
                .boxed()
        )
    } else {
        let uri = format!("/set_token?token={}", super::get_token(span, &user, db));
        Ok(Response::empty(302).with_header(header("location", &uri)).boxed())
    }
}
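// Editor's note (illustrative sketch, not part of the commit): the user lookup above caches
// GitLab users in a stretto cache keyed by access token. stretto admits entries asynchronously,
// so a standalone demo of the same get/insert_with_ttl pattern needs a wait() before reading.
// All names below are hypothetical; only the stretto calls mirror the code above.
//
// use std::time::Duration;
//
// fn main() {
//     let cache: stretto::Cache<String, String> = stretto::CacheBuilder::new(1000, 100)
//         .finalize()
//         .expect("failed to build cache");
//     // cost = 1, ttl = 500s, mirroring the gitlab user cache above
//     cache.insert_with_ttl("token".to_owned(), "user".to_owned(), 1, Duration::from_secs(500));
//     cache.wait().unwrap(); // make the pending insert visible
//     if let Some(v) = cache.get(&"token".to_owned()) {
//         println!("{}", v.value());
//     }
// }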
@@ -1,20 +1,12 @@
pub mod basic;
pub mod gitlab;
pub mod tfa;

use std::ops::Add;

use once_cell::sync::Lazy;
use ring::{rand, rand::SecureRandom};
use rustracing_jaeger::Span;

#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct JWTClaims {
@@ -25,13 +17,12 @@ pub struct JWTClaims {
}

pub static JWT_ALGORITHM: jsonwebtoken::Algorithm = jsonwebtoken::Algorithm::HS512;
pub static SEC_RANDOM: Lazy<rand::SystemRandom> = Lazy::new(rand::SystemRandom::new);
pub static JWT_SECRET: Lazy<Vec<u8>> = Lazy::new(get_jwt_secret);
pub static JWT_DECODE_KEY: Lazy<jsonwebtoken::DecodingKey> =
    Lazy::new(|| jsonwebtoken::DecodingKey::from_secret(JWT_SECRET.as_slice()));
pub static JWT_ENCODE_KEY: Lazy<jsonwebtoken::EncodingKey> =
    Lazy::new(|| jsonwebtoken::EncodingKey::from_secret(JWT_SECRET.as_slice()));

fn get_jwt_secret() -> Vec<u8> {
    let secret = std::fs::read("jwt.secret");
@@ -45,12 +36,12 @@ fn get_jwt_secret() -> Vec<u8> {
    }
}

pub fn get_token(span: &Span, user: &crate::db::User, db: &mut crate::db::DBConnection) -> String {
    let iat = chrono::Utc::now();
    let exp = iat.add(chrono::Duration::hours(24)).timestamp();
    let iat = iat.timestamp();

    let token = db.create_token(span, user.id, exp);

    let claims = JWTClaims {
        exp,
@@ -59,8 +50,12 @@ pub fn get_token(user: &crate::db::User, db: &mut crate::db::DBConnection) -> St
        sub: user.id
    };

    jsonwebtoken::encode(
        &jsonwebtoken::Header::new(JWT_ALGORITHM),
        &claims,
        &JWT_ENCODE_KEY
    )
    .expect("Failed to create JWT token")
}

pub fn hash_password(password: &String) -> String {
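// Editor's note (hedged sketch, not part of the commit): the statics above replace the earlier
// lazy_static! block with once_cell's Lazy, which runs its closure on first dereference and
// caches the result. Equivalent minimal pattern, with hypothetical names:
//
// use once_cell::sync::Lazy;
//
// static GREETING: Lazy<String> = Lazy::new(|| format!("hello {}", "world"));
//
// fn main() {
//     // first access initializes, later accesses reuse the cached value
//     println!("{}", *GREETING);
// }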
@@ -1,44 +1,52 @@
use lettre::Transport;
use once_cell::sync::Lazy;
use ring::rand::SecureRandom;
use rustracing_jaeger::Span;
use tiny_http::{Request, ResponseBox};

use crate::{
    config::CONFIG,
    db,
    db::{DBConnection, TfaTypes},
    dto,
    metrics,
    routes::{filters::UserInfo, get_reply, AppError}
};

fn build_mail_sender() -> lettre::SmtpTransport {
    lettre::SmtpTransport::builder_dangerous(CONFIG.smtp_server.clone())
        .port(CONFIG.smtp_port)
        .tls(lettre::transport::smtp::client::Tls::Required(
            lettre::transport::smtp::client::TlsParameters::new(CONFIG.smtp_server.clone()).unwrap()
        ))
        .credentials(lettre::transport::smtp::authentication::Credentials::new(
            CONFIG.smtp_user.clone(),
            CONFIG.smtp_password.clone()
        ))
        .build()
}

static MAIL_SENDER: Lazy<lettre::SmtpTransport> = Lazy::new(build_mail_sender);

fn get_totp(user: &db::User) -> totp_rs::TOTP {
    totp_rs::TOTP::from_rfc6238(
        totp_rs::Rfc6238::new(
            6,
            user.tfa_secret.clone().unwrap(),
            Some("MFileserver".to_owned()),
            user.name.clone()
        )
        .unwrap()
    )
    .unwrap()
}

pub fn verify2fa(user: &db::User, code: String) -> bool {
    let allowed_skew = if user.tfa_type == TfaTypes::Totp {
        0
    } else {
        10
    };
    let totp = get_totp(user);
    let time = std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap().as_secs();
    let base_step = time / totp.step - allowed_skew;
@@ -51,54 +59,39 @@ pub fn verify2fa(user: &crate::db::User, code: String) -> bool {
    false
}

pub fn send_2fa_mail(span: &Span, user: &db::User) {
    let _span = metrics::span("send_2fa_mail", span);
    let totp = get_totp(user);
    let code = totp.generate_current().unwrap();
    let mail = lettre::Message::builder()
        .from("fileserver@mattv.de".parse().unwrap())
        .to(user.name.parse().unwrap())
        .subject("MFileserver - Email 2fa code")
        .body(format!(
            "Your code is: {}\r\nIt is valid for 5 minutes",
            code
        ))
        .unwrap();

    MAIL_SENDER.send(&mail).expect("Failed to send mail");
}

pub fn tfa_setup(
    span: &Span,
    _: &mut Request,
    db: &mut DBConnection,
    mut info: UserInfo,
    data: dto::requests::TfaSetup
) -> Result<ResponseBox, AppError> {
    let mut secret: [u8; 32] = [0; 32];
    super::SEC_RANDOM.fill(&mut secret).expect("Failed to generate secret");
    let secret = Vec::from(secret);
    info.0.tfa_secret = Some(secret);
    db.save_user(span, &info.0);

    if data.mail {
        send_2fa_mail(span, &info.0);
        get_reply(&dto::responses::Success { statusCode: 200 })
    } else {
        let totp = get_totp(&info.0);
        get_reply(&dto::responses::TfaSetup {
@@ -109,28 +102,37 @@ async fn tfa_setup(data: dto::requests::TfaSetup, mut info: UserInfo, mut db: DB
    }
}

pub fn tfa_complete(
    span: &Span,
    _: &mut Request,
    db: &mut DBConnection,
    mut info: UserInfo,
    data: dto::requests::TfaComplete
) -> Result<ResponseBox, AppError> {
    info.0.tfa_type = if data.mail {
        TfaTypes::Email
    } else {
        TfaTypes::Totp
    };

    if verify2fa(&info.0, data.code) {
        db.save_user(span, &info.0);
        db.delete_all_tokens(span, info.0.id);
        get_reply(&dto::responses::Success { statusCode: 200 })
    } else {
        AppError::BadRequest("Incorrect 2fa code").err()
    }
}

pub fn tfa_disable(
    span: &Span,
    _: &mut Request,
    db: &mut DBConnection,
    mut info: UserInfo
) -> Result<ResponseBox, AppError> {
    info.0.tfa_secret = None;
    info.0.tfa_type = TfaTypes::None;
    db.save_user(span, &info.0);
    db.delete_all_tokens(span, info.0.id);
    get_reply(&dto::responses::Success { statusCode: 200 })
}
@@ -1,50 +1,26 @@
use rustracing_jaeger::Span;

use crate::{
    db::{DBConnection, UserRole},
    metrics,
    routes::{auth, AppError}
};

pub type UserInfo = (crate::db::User, crate::db::Token);

pub fn authorize_jwt(span: &Span, jwt: &str, db: &mut DBConnection) -> Result<UserInfo, AppError> {
    let inner_span = metrics::span("authorize_jwt", span);
    let decoded = jsonwebtoken::decode::<auth::JWTClaims>(
        jwt,
        &auth::JWT_DECODE_KEY,
        &jsonwebtoken::Validation::new(auth::JWT_ALGORITHM)
    )
    .map_err(|_| AppError::Forbidden("Invalid token"))?;

    db.cleanup_tokens(span);

    let mut user = db.get_user(&inner_span, decoded.claims.sub).ok_or(AppError::Forbidden("Invalid token"))?;
    let token = db.get_token(&inner_span, decoded.claims.jti).ok_or(AppError::Forbidden("Invalid token"))?;

    if user.id != token.owner_id {
        return AppError::Forbidden("Invalid token").err();
@@ -53,38 +29,27 @@ pub async fn authorize_jwt(jwt: String, db: &mut DBConnection) -> Result<UserInf
        return AppError::Forbidden("Account disabled").err();
    }
    if user.gitlab {
        let info = auth::gitlab::get_gitlab_user(&inner_span, user.gitlab_at.clone().unwrap());
        let info = match info {
            Some(v) => Some(v),
            None => {
                let tokens = auth::gitlab::get_gitlab_token(&inner_span, user.gitlab_rt.clone().unwrap(), true);
                if let Some(tokens) = tokens {
                    user.gitlab_at = Some(tokens.access_token.clone());
                    user.gitlab_rt = Some(tokens.refresh_token);
                    db.save_user(&inner_span, &user);
                    auth::gitlab::get_gitlab_user(&inner_span, tokens.access_token)
                } else {
                    None
                }
            }
        };
        if info.is_none() || info.unwrap().username != user.name {
            db.delete_all_tokens(&inner_span, token.owner_id);
            db.delete_all_tokens(&inner_span, user.id);
            return AppError::Forbidden("Invalid gitlab user").err();
        }
    }

    Ok((user, token))
}
@@ -1,30 +1,19 @@
pub mod routes;

use std::{
    collections::VecDeque,
    iter::Iterator,
    sync::{
        atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering},
        Arc
    }
};

use once_cell::sync::Lazy;
use rayon_core::ThreadPoolBuilder;
use rustracing_jaeger::Span;

use crate::metrics;

pub static WINDOWS_INVALID_CHARS: &str = "\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C\x0D\x0E\x0F\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1A\x1B\x1C\x1D\x1E\x1F<>:\"/\\|";
@@ -43,19 +32,36 @@ pub enum CreateNodeResult {
    Exists(bool, i32)
}

pub static DELETE_POOL: Lazy<rayon_core::ThreadPool> = Lazy::new(|| {
    ThreadPoolBuilder::new()
        .num_threads(1)
        .thread_name(|i| format!("Delete thread {}", i))
        .build()
        .unwrap()
});
pub static ZIP_POOL: Lazy<rayon_core::ThreadPool> = Lazy::new(|| {
    ThreadPoolBuilder::new()
        .num_threads(3)
        .thread_name(|i| format!("Zip thread {}", i))
        .build()
        .unwrap()
});
pub static ZIP_TO_PROGRESS: Lazy<
    parking_lot::RwLock<std::collections::HashMap<std::collections::BTreeSet<i32>, Arc<ZipProgressEntry>>>
> = Lazy::new(|| parking_lot::RwLock::new(std::collections::HashMap::new()));

static NEXT_TEMP_ID: AtomicU64 = AtomicU64::new(0);

fn cleanup_temp_zips(span: &Span) {
    let _span = metrics::span("zipping - cleanup", span);
    let mut existing = ZIP_TO_PROGRESS.write();
    existing.retain(|_, v| {
        if Arc::strong_count(v) == 1
            && v.done.load(Ordering::Relaxed)
            && v.delete_after.load(Ordering::Relaxed) <= chrono::Utc::now().timestamp()
        {
            std::fs::remove_file(std::path::Path::new(&format!("./temp/{}", v.temp_id)))
                .expect("Failed to delete temp file");
            false
        } else {
            true
@@ -63,37 +69,54 @@ async fn cleanup_temp_zips() {
    });
}

fn get_nodes_recursive(
    span: &Span,
    root: crate::db::Inode,
    db: &mut crate::db::DBConnection
) -> VecDeque<crate::db::Inode> {
    let inner_span = metrics::span("get_nodes_recursive", span);
    let mut nodes = VecDeque::from(vec![root.clone()]);
    if root.is_file {
        return nodes;
    }
    let mut nodes_to_check = VecDeque::from(vec![root]);
    while !nodes_to_check.is_empty() {
        let node = nodes_to_check.pop_front().unwrap();
        db.get_children(&inner_span, node.id).iter().for_each(|node| {
            nodes.push_back(node.clone());
            if !node.is_file {
                nodes_to_check.push_front(node.clone());
            }
        });
    }
    nodes
}

fn get_node_path(span: &Span, node: crate::db::Inode, db: &mut crate::db::DBConnection) -> VecDeque<crate::db::Inode> {
    let inner_span = metrics::span("get_node_path", span);
    let mut path = VecDeque::from(vec![node.clone()]);
    let mut node = node;
    while let Some(parent) = node.parent_id {
        node = db.get_node(&inner_span, parent).expect("Failed to get node parent");
        path.push_front(node.clone());
    }
    path
}

fn get_total_size(span: &Span, node: crate::db::Inode, db: &mut crate::db::DBConnection) -> u64 {
    let inner_span = metrics::span("get_total_size", span);
    get_nodes_recursive(&inner_span, node, db)
        .iter()
        .fold(0_u64, |acc, node| acc + node.size.unwrap_or(0) as u64)
}

pub fn get_node_and_validate(
    span: &Span,
    user: &crate::db::User,
    node: i32,
    db: &mut crate::db::DBConnection
) -> Option<crate::db::Inode> {
    let node = db.get_node(span, node)?;
    if node.owner_id != user.id {
        None
    } else {
@@ -101,21 +124,33 @@ pub fn get_node_and_validate(user: &crate::db::User, node: i32, db: &mut crate::
    }
}

pub fn create_node(
    span: &Span,
    name: String,
    owner: &crate::db::User,
    file: bool,
    parent: Option<i32>,
    force: bool,
    db: &mut crate::db::DBConnection
) -> Result<crate::db::Inode, CreateNodeResult> {
    let inner_span = metrics::span("create_node", span);
    if !force && (name.is_empty() || name.starts_with(' ') || name.contains(|c| {
        WINDOWS_INVALID_CHARS.contains(c)
    } || name.ends_with(' ') || name.ends_with('.') || name == "." || name == "..")) {
        return Err(CreateNodeResult::InvalidName);
    }

    if let Some(parent) = parent {
        let parent = match get_node_and_validate(&inner_span, owner, parent, db) {
            None => {
                return Err(CreateNodeResult::InvalidParent);
            }
            Some(v) => v
        };
        if parent.is_file {
            return Err(CreateNodeResult::InvalidParent);
        }
        let children = db.get_children(&inner_span, parent.id);
        for child in children {
            if child.name == name {
                return Err(CreateNodeResult::Exists(child.is_file, child.id));
@@ -123,29 +158,36 @@ pub fn create_node(name: String, owner: &crate::db::User, file: bool, parent: Op
            }
        }
    }

    Ok(db.create_node(&inner_span, file, name, parent, owner.id))
}

pub fn delete_node_root(span: &Span, node: &crate::db::Inode, db: &mut crate::db::DBConnection) {
    get_nodes_recursive(span, node.clone(), db).iter().rev().for_each(|node| {
        db.delete_node(span, node);
    });
}

pub fn delete_node(
    span: &Span,
    node: &crate::db::Inode,
    sender: &std::sync::mpsc::Sender<String>,
    db: &mut crate::db::DBConnection
) {
    if node.parent_id.is_none() {
        return;
    }

    for node in get_nodes_recursive(span, node.clone(), db).iter().rev() {
        sender.send(format!("Deleting {}...", generate_path(span, node, db))).unwrap();
        db.delete_node(span, node);
        sender.send(" Done \n".to_owned()).unwrap();
    }
}

pub fn generate_path(span: &Span, node: &crate::db::Inode, db: &mut crate::db::DBConnection) -> String {
    let mut path = String::new();

    get_node_path(span, node.clone(), db).iter().for_each(|node| {
        if node.parent_id.is_none() {
            path += "/";
        } else {
@@ -159,12 +201,16 @@ pub fn generate_path(node: &crate::db::Inode, db: &mut crate::db::DBConnection)
    path
}

pub fn generate_path_dto(
    span: &Span,
    node: &crate::db::Inode,
    db: &mut crate::db::DBConnection
) -> crate::dto::responses::GetPath {
    let mut get_path = crate::dto::responses::GetPath {
        segments: Vec::new()
    };

    get_node_path(span, node.clone(), db).iter().for_each(|node| {
        if node.parent_id.is_none() {
            get_path.segments.push(crate::dto::responses::GetPathSegment {
                path: "/".to_owned(),
@@ -186,14 +232,3 @@ pub fn generate_path_dto(node: &crate::db::Inode, db: &mut crate::db::DBConnecti
    get_path
}
|
|||||||
use std::collections::{BTreeSet, HashMap};
|
use std::{
|
||||||
use std::io::{Read, Write};
|
collections::{BTreeSet, HashMap},
|
||||||
use std::sync::atomic::Ordering;
|
fs::File,
|
||||||
use futures::{Stream, StreamExt};
|
io::{Read, Write},
|
||||||
use headers::HeaderMapExt;
|
ops::DerefMut,
|
||||||
use warp::{Filter, Reply};
|
sync::atomic::Ordering
|
||||||
use crate::db::{DBConnection, DBPool, with_db};
|
};
|
||||||
use crate::dto;
|
|
||||||
use crate::routes::{AppError, get_reply};
|
|
||||||
use crate::routes::filters::{authenticated, UserInfo};
|
|
||||||
|
|
||||||
pub fn build_routes(db: DBPool) -> impl Filter<Extract = impl Reply, Error = warp::Rejection> + Clone {
|
use rustracing_jaeger::Span;
|
||||||
let root = warp::path!("fs" / "root")
|
use tiny_http::{Request, Response, ResponseBox, StatusCode};
|
||||||
.and(warp::get())
|
|
||||||
.and(authenticated(db.clone()))
|
|
||||||
.and_then(root);
|
|
||||||
let node = warp::path!("fs" / "node" / i32)
|
|
||||||
.and(warp::get())
|
|
||||||
.and(authenticated(db.clone()))
|
|
||||||
.and(with_db(db.clone()))
|
|
||||||
.and_then(node)
|
|
||||||
.with(warp::compression::brotli());
|
|
||||||
let path = warp::path!("fs" / "path" / i32)
|
|
||||||
.and(warp::get())
|
|
||||||
.and(authenticated(db.clone()))
|
|
||||||
.and(with_db(db.clone()))
|
|
||||||
.and_then(path);
|
|
||||||
let create_folder = warp::path!("fs" / "create_folder")
|
|
||||||
.and(warp::post())
|
|
||||||
.and(warp::body::json())
|
|
||||||
.and(authenticated(db.clone()))
|
|
||||||
.and(with_db(db.clone()))
|
|
||||||
.and_then(|data, info, db| create_node(data, info, db, false));
|
|
||||||
let create_file = warp::path!("fs" / "create_file")
|
|
||||||
.and(warp::post())
|
|
||||||
.and(warp::body::json())
|
|
||||||
.and(authenticated(db.clone()))
|
|
||||||
.and(with_db(db.clone()))
|
|
||||||
.and_then(|data, info, db| create_node(data, info, db, true));
|
|
||||||
let delete_node = warp::path!("fs" / "delete" / i32)
|
|
||||||
.and(warp::post())
|
|
||||||
.and(authenticated(db.clone()))
|
|
||||||
.and(with_db(db.clone()))
|
|
||||||
.and_then(delete_node);
|
|
||||||
let upload = warp::path!("fs" / "upload" / i32)
|
|
||||||
.and(warp::post())
|
|
||||||
.and(warp::body::stream())
|
|
||||||
.and(authenticated(db.clone()))
|
|
||||||
.and(with_db(db.clone()))
|
|
||||||
.and_then(upload);
|
|
||||||
let create_zip = warp::path!("fs" / "create_zip")
|
|
||||||
.and(warp::post())
|
|
||||||
.and(warp::body::json())
|
|
||||||
.and(authenticated(db.clone()))
|
|
||||||
.and(with_db(db.clone()))
|
|
||||||
.and_then(create_zip);
|
|
||||||
let download = warp::path!("fs" / "download")
|
|
||||||
.and(warp::post())
|
|
||||||
.and(warp::body::form())
|
|
||||||
.and(with_db(db.clone()))
|
|
||||||
.and_then(download);
|
|
||||||
let download_multi = warp::path!("fs" / "download_multi")
|
|
||||||
.and(warp::post())
|
|
||||||
.and(warp::body::form())
|
|
||||||
.and(with_db(db.clone()))
|
|
||||||
.and_then(download_multi);
|
|
||||||
let download_preview = warp::path!("fs" / "download_preview" / i32)
|
|
||||||
.and(warp::get())
|
|
||||||
.and(authenticated(db.clone()))
|
|
||||||
.and(with_db(db.clone()))
|
|
||||||
.and_then(download_preview);
|
|
||||||
let get_type = warp::path!("fs" / "get_type" / i32)
|
|
||||||
.and(warp::get())
|
|
||||||
.and(authenticated(db.clone()))
|
|
||||||
.and(with_db(db))
|
|
||||||
.and_then(get_type);
|
|
||||||
|
|
||||||
root.or(node).or(path).or(create_folder).or(create_file).or(delete_node).or(upload).or(create_zip).or(download).or(download_multi).or(download_preview).or(get_type)
|
use crate::{
|
||||||
}
|
db,
|
||||||
|
db::DBConnection,
|
||||||
|
dto,
|
||||||
|
header,
|
||||||
|
metrics,
|
||||||
|
routes::{filters::UserInfo, get_reply, AppError, ChannelReader}
|
||||||
|
};
|
||||||
|
|
||||||
async fn root(info: UserInfo) -> Result<impl Reply, warp::Rejection> {
|
pub fn root(_: &Span, _: &mut Request, _: &mut DBConnection, info: UserInfo) -> Result<ResponseBox, AppError> {
|
||||||
get_reply(&dto::responses::Root {
|
get_reply(&dto::responses::Root {
|
||||||
statusCode: 200,
|
statusCode: 200,
|
||||||
rootId: info.0.root_id
|
rootId: info.0.root_id
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn node(node: i32, info: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
|
pub fn node(
|
||||||
let guard_lock = DBConnection::get_lock(info.0.id).await;
|
span: &Span,
|
||||||
let _guard = guard_lock.read().await;
|
_: &mut Request,
|
||||||
let node = super::get_node_and_validate(&info.0, node, &mut db)
|
db: &mut DBConnection,
|
||||||
.ok_or(AppError::BadRequest("Unknown node"))?;
|
info: UserInfo,
|
||||||
|
node: i32
|
||||||
|
) -> Result<ResponseBox, AppError> {
|
||||||
|
let guard_lock = DBConnection::get_lock(info.0.id);
|
||||||
|
let _guard = guard_lock.read(span);
|
||||||
|
let node = super::get_node_and_validate(span, &info.0, node, db).ok_or(AppError::BadRequest("Unknown node"))?;
|
||||||
|
|
||||||
get_reply(&dto::responses::GetNode {
|
get_reply(&dto::responses::GetNode {
|
||||||
statusCode: 200,
|
statusCode: 200,
|
||||||
@ -100,176 +45,227 @@ async fn node(node: i32, info: UserInfo, mut db: DBConnection) -> Result<impl Re
|
|||||||
parent: node.parent_id,
|
parent: node.parent_id,
|
||||||
size: node.size,
|
size: node.size,
|
||||||
children: (!node.is_file).then(|| {
|
children: (!node.is_file).then(|| {
|
||||||
db.get_children(node.id).iter().map(|child| dto::responses::GetNodeEntry {
|
db.get_children(span, node.id)
|
||||||
id: child.id,
|
.iter()
|
||||||
name: child.name.clone(),
|
.map(|child| dto::responses::GetNodeEntry {
|
||||||
isFile: child.is_file,
|
id: child.id,
|
||||||
preview: child.has_preview,
|
name: child.name.clone(),
|
||||||
parent: child.parent_id,
|
isFile: child.is_file,
|
||||||
size: child.size
|
preview: child.has_preview,
|
||||||
}).collect()
|
parent: child.parent_id,
|
||||||
|
size: child.size
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn path(node: i32, info: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
|
pub fn path(
|
||||||
let guard_lock = DBConnection::get_lock(info.0.id).await;
|
span: &Span,
|
||||||
let _guard = guard_lock.read().await;
|
_: &mut Request,
|
||||||
let node = super::get_node_and_validate(&info.0, node, &mut db)
|
db: &mut DBConnection,
|
||||||
.ok_or(AppError::BadRequest("Unknown node"))?;
|
info: UserInfo,
|
||||||
|
node: i32
|
||||||
|
) -> Result<ResponseBox, AppError> {
|
||||||
|
let guard_lock = DBConnection::get_lock(info.0.id);
|
||||||
|
let _guard = guard_lock.read(span);
|
||||||
|
let node = super::get_node_and_validate(span, &info.0, node, db).ok_or(AppError::BadRequest("Unknown node"))?;
|
||||||
|
|
||||||
get_reply(&super::generate_path_dto(&node, &mut db))
|
get_reply(&super::generate_path_dto(span, &node, db))
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn create_node(data: dto::requests::CreateNode, info: UserInfo, mut db: DBConnection, file: bool) -> Result<impl Reply, warp::Rejection> {
|
pub fn create_node(
|
||||||
let guard_lock = DBConnection::get_lock(info.0.id).await;
|
span: &Span,
|
||||||
let _guard = guard_lock.read().await;
|
_: &mut Request,
|
||||||
let node = super::create_node(data.name, &info.0, file, Some(data.parent), false, &mut db);
|
db: &mut DBConnection,
|
||||||
|
info: UserInfo,
|
||||||
|
data: dto::requests::CreateNode,
|
||||||
|
file: bool
|
||||||
|
) -> Result<ResponseBox, AppError> {
|
||||||
|
let guard_lock = DBConnection::get_lock(info.0.id);
|
||||||
|
let _guard = guard_lock.read(span);
|
||||||
|
let node = super::create_node(span, data.name, &info.0, file, Some(data.parent), false, db);
|
||||||
|
|
||||||
match node {
|
match node {
|
||||||
Ok(v) => get_reply(&dto::responses::NewNode {
|
Ok(v) => get_reply(&dto::responses::NewNode {
|
||||||
|
statusCode: 200,
|
||||||
|
id: v.id
|
||||||
|
}),
|
||||||
|
Err(v) => match v {
|
||||||
|
super::CreateNodeResult::InvalidName => AppError::BadRequest("Invalid name").err(),
|
||||||
|
super::CreateNodeResult::InvalidParent => AppError::BadRequest("Invalid parent").err(),
|
||||||
|
super::CreateNodeResult::Exists(file, id) => get_reply(&dto::responses::NodeExists {
|
||||||
statusCode: 200,
|
statusCode: 200,
|
||||||
id: v.id
|
id,
|
||||||
}),
|
exists: true,
|
||||||
Err(v) => {
|
isFile: file
|
||||||
match v {
|
})
|
||||||
super::CreateNodeResult::InvalidName => AppError::BadRequest("Invalid name").err(),
|
|
||||||
super::CreateNodeResult::InvalidParent => AppError::BadRequest("Invalid parent").err(),
|
|
||||||
super::CreateNodeResult::Exists(file, id) => get_reply(&dto::responses::NodeExists {
|
|
||||||
statusCode: 200,
|
|
||||||
id,
|
|
||||||
exists: true,
|
|
||||||
isFile: file
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
-async fn delete_node(node: i32, info: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
-    let guard_lock = DBConnection::get_lock(info.0.id).await;
+pub fn delete_node(
+    span: &Span,
+    _: &mut Request,
+    db: &mut DBConnection,
+    info: UserInfo,
+    node: i32,
+    pool: &db::DBPool
+) -> Result<ResponseBox, AppError> {
+    let guard_lock = DBConnection::get_lock(info.0.id);
     let inner_guard_lock = guard_lock.clone();
-    let _guard = guard_lock.read().await;
-    let node = super::get_node_and_validate(&info.0, node, &mut db)
-        .ok_or(AppError::BadRequest("Unknown node"))?;
+    let _guard = guard_lock.read(span);
+    let node = super::get_node_and_validate(span, &info.0, node, db).ok_or(AppError::BadRequest("Unknown node"))?;

     if node.parent_id.is_none() {
         return AppError::BadRequest("Can't delete root").err();
     }

-    let (mut sender, body) = warp::hyper::Body::channel();
-
-    sender.send_data(warp::hyper::body::Bytes::from("Waiting in queue\n")).await.unwrap();
-    super::DELETE_RT.spawn(async move {
+    let (tx, rx) = std::sync::mpsc::channel::<String>();
+    let inner_pool = pool.clone();
+
+    tx.send("Waiting in queue\n".to_owned()).unwrap();
+    let inner_span = metrics::span("DELETE_POOL - queueing", span);
+    super::DELETE_POOL.spawn(move || {
+        let del_span = metrics::span("DELETE_POOL - deleting", &inner_span);
+        drop(inner_span);
+        let mut db = &mut DBConnection::from(inner_pool.get().unwrap());
         let guard_lock = inner_guard_lock.clone();
-        let _guard = guard_lock.write().await;
-        super::delete_node(&node, &mut sender, &mut db).await;
+        let _guard = guard_lock.write(&del_span);
+        super::delete_node(&del_span, &node, &tx, db.deref_mut());
     });

-    let mut resp = warp::reply::Response::new(body);
-    *resp.status_mut() = warp::http::StatusCode::OK;
-    resp.headers_mut().typed_insert(
-        headers::ContentType::text_utf8()
-    );
-    Ok(resp)
+    Ok(Response::new(
+        StatusCode::from(200),
+        vec![header("content-type", "text/plain; charset=utf-8")],
+        ChannelReader(rx),
+        None,
+        None
+    )
+    .boxed())
 }
-async fn upload<S, B>(node: i32, stream: S, info: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection>
-where
-    S: Stream<Item = Result<B, warp::Error>>,
-    S: StreamExt,
-    B: warp::Buf
-{
-    let guard_lock = DBConnection::get_lock(info.0.id).await;
-    let _guard = guard_lock.read().await;
-    let mut node = super::get_node_and_validate(&info.0, node, &mut db)
-        .ok_or(AppError::BadRequest("Unknown node"))?;
+pub fn upload(
+    span: &Span,
+    req: &mut Request,
+    db: &mut DBConnection,
+    info: UserInfo,
+    node: i32
+) -> Result<ResponseBox, AppError> {
+    let guard_lock = DBConnection::get_lock(info.0.id);
+    let _guard = guard_lock.read(span);
+    let mut node = super::get_node_and_validate(span, &info.0, node, db).ok_or(AppError::BadRequest("Unknown node"))?;

     if !node.is_file {
-        return AppError::BadRequest("Can't upload to a directory").err();
+        return AppError::BadRequest("Can't upload to a directory node").err();
     }

     let mut file_size = 0_i64;
     let file_name = format!("./files/{}", node.id);
     {
-        let mut file = std::fs::File::create(file_name.clone()).unwrap();
-
-        stream.for_each(|f| {
-            let mut buffer = f.unwrap();
-            file_size += buffer.remaining() as i64;
-            while buffer.remaining() != 0 {
-                let chunk = buffer.chunk();
-                buffer.advance(file.write(chunk).expect("Failed to write file"));
-            }
-            futures::future::ready(())
-        }).await;
+        let _span = metrics::span("receive_file", span);
+        let mut buf = vec![0_u8; 8 * 1024 * 1024];
+        let mut file = File::create(file_name.clone()).unwrap();
+
+        let reader = req.as_reader();
+        loop {
+            let r = reader.read(&mut buf).unwrap();
+            if r == 0 {
+                break;
+            }
+            file.write_all(&buf[..r]).unwrap();
+            file_size += r as i64;
+        }
     }

-    let generate_preview = || -> Option<()> {
-        if file_size > 20 * 1024 * 1024 { return None; }
-        let mime = mime_guess::from_path(std::path::Path::new(&node.name)).first()?.to_string();
-        let img = image::load(
-            std::io::BufReader::new(std::fs::File::open(file_name.clone()).unwrap()),
-            image::ImageFormat::from_mime_type(mime)?
-        ).ok()?;
-        let img = img.resize(300, 300, image::imageops::FilterType::Triangle);
-        img.save(std::path::Path::new(&(file_name + "_preview.jpg"))).expect("Failed to save preview image");
-        Some(())
-    };
-
-    node.has_preview = generate_preview().is_some();
+    metrics::DISK_USAGE
+        .with_label_values(&[node.owner_id.to_string().as_str()])
+        .add(file_size - node.size.unwrap_or(0));
+    {
+        let _span = metrics::span("generate_preview", span);
+        node.has_preview = (|| {
+            if file_size > 20 * 1024 * 1024 {
+                return None;
+            }
+            let mime = mime_guess::from_path(std::path::Path::new(&node.name)).first()?.to_string();
+            let img = image::load(
+                std::io::BufReader::new(File::open(file_name.clone()).unwrap()),
+                image::ImageFormat::from_mime_type(mime)?
+            )
+            .ok()?;
+            let img = img.resize(300, 300, image::imageops::FilterType::Triangle);
+            img.save(std::path::Path::new(&(file_name + "_preview.jpg"))).expect("Failed to save preview image");
+            Some(())
+        })()
+        .is_some();
+    }
+
     node.size = Some(file_size);
-    db.save_node(&node);
+    db.save_node(span, &node);

-    get_reply(&dto::responses::Success {
-        statusCode: 200
-    })
+    get_reply(&dto::responses::Success { statusCode: 200 })
 }
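metrics::DISK_USAGE is used by upload() but not defined in this excerpt; the call shape (.with_label_values(&[owner_id]).add(delta)) matches a per-user integer gauge vector from the prometheus crate. A minimal sketch of how such a gauge could be declared and updated, assuming prometheus and lazy_static; the metric name, help text and label name are illustrative, not taken from the commit:

use lazy_static::lazy_static;
use prometheus::{register_int_gauge_vec, IntGaugeVec};

lazy_static! {
    // Assumed declaration; the real one lives in the metrics module, which is not shown here.
    static ref DISK_USAGE: IntGaugeVec =
        register_int_gauge_vec!("disk_usage_bytes", "Disk usage per user", &["user"]).unwrap();
}

fn main() {
    // upload() adds the signed size difference, so the gauge tracks current usage per user.
    DISK_USAGE.with_label_values(&["42"]).add(1024 * 1024);
    DISK_USAGE.with_label_values(&["42"]).add(-512 * 1024);
    println!("user 42 uses {} bytes", DISK_USAGE.with_label_values(&["42"]).get());
}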
-async fn create_zip(data: dto::requests::CreateZip, info: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
-    let guard_lock = DBConnection::get_lock(info.0.id).await;
+pub fn create_zip(
+    span: &Span,
+    _: &mut Request,
+    db: &mut DBConnection,
+    info: UserInfo,
+    data: dto::requests::CreateZip,
+    pool: &db::DBPool
+) -> Result<ResponseBox, AppError> {
+    let guard_lock = DBConnection::get_lock(info.0.id);
     let inner_guard_lock = guard_lock.clone();
-    let _guard = guard_lock.read().await;
-    let mut nodes: Vec<crate::db::Inode> = Vec::new();
+    let _guard = guard_lock.read(span);
+    let mut nodes: Vec<db::Inode> = Vec::new();
     for node in data.nodes.clone() {
-        nodes.push(
-            super::get_node_and_validate(&info.0, node, &mut db)
-                .ok_or(AppError::BadRequest("Unknown node"))?
-        );
+        nodes.push(super::get_node_and_validate(span, &info.0, node, db).ok_or(AppError::BadRequest("Unknown node"))?);
     }
     let zip_nodes = BTreeSet::from_iter(data.nodes.iter().copied());

     {
-        let guard = super::ZIP_TO_PROGRESS.read().await;
+        let guard = super::ZIP_TO_PROGRESS.read();
         if let Some(entry) = guard.get(&zip_nodes) {
             return get_reply(&dto::responses::CreateZipDone {
                 statusCode: 200,
                 done: entry.done.load(Ordering::Relaxed),
                 progress: Some(entry.progress.load(Ordering::Relaxed)),
                 total: Some(entry.total.load(Ordering::Relaxed))
-            })
+            });
         }
     }
     let entry = {
-        let mut guard = super::ZIP_TO_PROGRESS.write().await;
-        guard.insert(zip_nodes.clone(), std::sync::Arc::from(super::ZipProgressEntry {
-            temp_id: super::NEXT_TEMP_ID.fetch_add(1, Ordering::Relaxed),
-            done: std::sync::atomic::AtomicBool::new(false),
-            progress: std::sync::atomic::AtomicU64::new(0),
-            total: std::sync::atomic::AtomicU64::new(1),
-            delete_after: std::sync::atomic::AtomicI64::new(0)
-        }));
+        let mut guard = super::ZIP_TO_PROGRESS.write();
+        guard.insert(
+            zip_nodes.clone(),
+            std::sync::Arc::from(super::ZipProgressEntry {
+                temp_id: super::NEXT_TEMP_ID.fetch_add(1, Ordering::Relaxed),
+                done: std::sync::atomic::AtomicBool::new(false),
+                progress: std::sync::atomic::AtomicU64::new(0),
+                total: std::sync::atomic::AtomicU64::new(1),
+                delete_after: std::sync::atomic::AtomicI64::new(0)
+            })
+        );
         guard.get(&zip_nodes).unwrap().clone()
     };
-    super::ZIP_RT.spawn(async move {
-        type NodeMap = HashMap<i32, crate::db::Inode>;
-
-        super::cleanup_temp_zips().await;
-
-        let _guard = inner_guard_lock.read().await;
-
-        fn get_path(node: &crate::db::Inode, dirs: &NodeMap) -> String {
+
+    let inner_pool = pool.clone();
+
+    let inner_span = metrics::span("ZIP_POOL - queueing", span);
+    super::ZIP_POOL.spawn(move || {
+        let mut zip_span = metrics::span("ZIP_POOL - zipping", &inner_span);
+        drop(inner_span);
+        let db = &mut DBConnection::from(inner_pool.get().unwrap());
+        type NodeMap = HashMap<i32, db::Inode>;
+
+        super::cleanup_temp_zips(&zip_span);
+
+        let _guard = inner_guard_lock.read(&zip_span);
+
+        fn get_path(node: &db::Inode, dirs: &NodeMap) -> String {
             let mut path = node.name.clone();
             let mut _node = dirs.get(&node.parent_id.unwrap_or(-1));
             while let Some(node) = _node {
@@ -278,35 +274,58 @@ async fn create_zip(data: dto::requests::CreateZip, info: UserInfo, mut db: DBCo
             }
             path
         }

-        nodes.iter().for_each(|node| {
-            entry.total.fetch_add(super::get_total_size(node.clone(), &mut db), Ordering::Relaxed);
-        });
-        entry.total.fetch_sub(1, Ordering::Relaxed);
         {
-            let mut buf = vec![0_u8; 1024 * 1024 * 4];
-            let file = std::fs::File::create(format!("./temp/{}", entry.temp_id)).expect("Failed to create temp file");
+            let i_span = metrics::span("ZIP_POOL - calc total", &zip_span);
+            nodes.iter().for_each(|node| {
+                entry.total.fetch_add(
+                    super::get_total_size(&i_span, node.clone(), db),
+                    Ordering::Relaxed
+                );
+            });
+            entry.total.fetch_sub(1, Ordering::Relaxed);
+        }
+        {
+            let comp_span = metrics::span("ZIP_POOL - compressing total", &zip_span);
+            let mut buf = vec![0_u8; 16 * 1024 * 1024];
+            let file = File::create(format!("./temp/{}", entry.temp_id)).expect("Failed to create temp file");
             let mut zip = zip::ZipWriter::new(file);
             let zip_options = zip::write::FileOptions::default().large_file(true);
-            let (files, dirs): (NodeMap, NodeMap) =
-                nodes.iter()
-                    .flat_map(|node| super::get_nodes_recursive(node.clone(), &mut db))
-                    .map(|node| (node.id, node))
-                    .partition(|v| v.1.is_file);
-
-            dirs.values().for_each(|dir| {
-                zip.add_directory(get_path(dir, &dirs), zip_options).expect("Failed to add dir to zip");
-            });
-            files.values().for_each(|node| {
-                zip.start_file(get_path(node, &dirs), zip_options).expect("Failed to start zip file");
-                let mut file = std::fs::File::open(format!("./files/{}", node.id)).expect("Failed to open file for zip");
-                loop {
-                    let count = file.read(&mut buf).expect("Failed to read file for zip");
-                    if count == 0 { break; }
-                    zip.write_all(&buf[..count]).expect("Failed to write zip");
-                    entry.progress.fetch_add(count as u64, Ordering::Relaxed);
-                }
-            });
+            let (files, dirs): (NodeMap, NodeMap) = {
+                let _span = metrics::span("ZIP_POOL - gathering nodes", &comp_span);
+                nodes
+                    .iter()
+                    .flat_map(|node| super::get_nodes_recursive(&comp_span, node.clone(), db))
+                    .map(|node| (node.id, node))
+                    .partition(|v| v.1.is_file)
+            };
+            zip_span.set_tags(|| {
+                vec![
+                    rustracing::tag::Tag::new("files", files.len() as i64),
+                    rustracing::tag::Tag::new("dirs", dirs.len() as i64),
+                ]
+            });
+            {
+                let _span = metrics::span("ZIP_POOL - dirs", &comp_span);
+                dirs.values().for_each(|dir| {
+                    zip.add_directory(get_path(dir, &dirs), zip_options).expect("Failed to add dir to zip");
+                });
+            }
+            {
+                let _span = metrics::span("ZIP_POOL - files", &comp_span);
+                files.values().for_each(|node| {
+                    zip.start_file(get_path(node, &dirs), zip_options).expect("Failed to start zip file");
+                    let mut file = File::open(format!("./files/{}", node.id)).expect("Failed to open file for zip");
+
+                    loop {
+                        let count = file.read(&mut buf).expect("Failed to read file for zip");
+                        if count == 0 {
+                            break;
+                        }
+                        zip.write_all(&buf[..count]).expect("Failed to write zip");
+                        entry.progress.fetch_add(count as u64, Ordering::Relaxed);
+                    }
+                });
+            }

             zip.finish().expect("Failed to finish zip");
         }
@@ -321,121 +340,115 @@ async fn create_zip(data: dto::requests::CreateZip, info: UserInfo, mut db: DBCo
     })
 }
-async fn download(data: dto::requests::Download, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
-    let info = crate::routes::filters::authorize_jwt(data.jwtToken, &mut db).await?;
-    let guard_lock = DBConnection::get_lock(info.0.id).await;
-    let _guard = guard_lock.read().await;
-    let node: crate::db::Inode = super::get_node_and_validate(&info.0, data.id, &mut db)
-        .ok_or(AppError::BadRequest("Unknown node"))?;
+pub fn download(span: &Span, req: &mut Request, db: &mut DBConnection) -> Result<ResponseBox, AppError> {
+    let data: dto::requests::Download =
+        serde_urlencoded::from_reader(req.as_reader()).map_err(|_| AppError::BadRequest("Invalid form data"))?;
+    let info = crate::routes::filters::authorize_jwt(span, &data.jwtToken, db)?;
+    let guard_lock = DBConnection::get_lock(info.0.id);
+    let _guard = guard_lock.read(span);
+    let node: db::Inode =
+        super::get_node_and_validate(span, &info.0, data.id, db).ok_or(AppError::BadRequest("Unknown node"))?;

     if node.is_file {
-        let mut resp = warp::reply::Response::new(super::get_file_stream_body(
-            format!("./files/{}", node.id)
-        ));
-        *resp.status_mut() = warp::http::StatusCode::OK;
-        resp.headers_mut().typed_insert(
-            headers::ContentLength(node.size.unwrap() as u64)
-        );
-        resp.headers_mut().typed_insert(
-            headers::ContentType::from(
-                mime_guess::from_path(std::path::Path::new(&node.name)).first_or_octet_stream()
-            )
-        );
-        resp.headers_mut().insert(
-            "Content-Disposition",
-            ("attachment; filename=".to_owned() + &node.name).parse().unwrap()
-        );
+        let file_name = format!("./files/{}", node.id);
+        let resp = Response::from_file(File::open(std::path::Path::new(&file_name)).unwrap())
+            .with_header(header("content-type", "application/octet-stream"))
+            .with_header(header(
+                "content-disposition",
+                &("attachment; filename=".to_owned() + &node.name)
+            ))
+            .boxed();
         Ok(resp)
     } else {
         let nodes_key = BTreeSet::from([node.id]);
-        let guard = super::ZIP_TO_PROGRESS.read().await;
-        let entry = guard.get(&nodes_key)
-            .ok_or(AppError::BadRequest("Unknown node"))?;
+        let guard = super::ZIP_TO_PROGRESS.read();
+        let entry = guard.get(&nodes_key).ok_or(AppError::BadRequest("Unknown node"))?;
         if !entry.done.load(Ordering::Relaxed) {
             AppError::BadRequest("Unknown node").err()
         } else {
-            let file = format!("./temp/{}", entry.temp_id);
-            let mut resp = warp::reply::Response::new(super::get_file_stream_body(file.clone()));
-            *resp.status_mut() = warp::http::StatusCode::OK;
-            resp.headers_mut().typed_insert(
-                headers::ContentLength(std::fs::metadata(std::path::Path::new(&file)).unwrap().len())
-            );
-            resp.headers_mut().typed_insert(
-                headers::ContentType::from(
-                    mime_guess::from_ext("zip").first().unwrap()
-                )
-            );
-            resp.headers_mut().insert(
-                "Content-Disposition",
-                ("attachment; filename=".to_owned() + &node.name + ".zip").parse().unwrap()
-            );
+            let file_name = format!("./temp/{}", entry.temp_id);
+            let resp = Response::from_file(File::open(std::path::Path::new(&file_name)).unwrap())
+                .with_header(header("content-type", "application/zip"))
+                .with_header(header(
+                    "content-disposition",
+                    &("attachment; filename=".to_owned() + &node.name + ".zip")
+                ))
+                .boxed();
             Ok(resp)
         }
     }
 }
-async fn download_multi(data: dto::requests::DownloadMulti, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
-    let info = crate::routes::filters::authorize_jwt(data.jwtToken, &mut db).await?;
-    let guard_lock = DBConnection::get_lock(info.0.id).await;
-    let _guard = guard_lock.read().await;
-    let mut nodes: Vec<crate::db::Inode> = Vec::new();
-    for node in data.id.split(',').map(|v| v.parse::<i32>()
-        .map_err(|_| AppError::BadRequest("Failed to parse").reject())
-    ) {
-        nodes.push(
-            super::get_node_and_validate(&info.0, node?, &mut db)
-                .ok_or(AppError::BadRequest("Unknown node"))?
-        );
-    }
+pub fn download_multi(span: &Span, req: &mut Request, db: &mut DBConnection) -> Result<ResponseBox, AppError> {
+    let data: dto::requests::DownloadMulti =
+        serde_urlencoded::from_reader(req.as_reader()).map_err(|_| AppError::BadRequest("Invalid form data"))?;
+    let info = crate::routes::filters::authorize_jwt(span, &data.jwtToken, db)?;
+    let guard_lock = DBConnection::get_lock(info.0.id);
+    let _guard = guard_lock.read(span);
+    let nodes: Vec<db::Inode> = {
+        let _span = metrics::span("parsing_nodes", span);
+        data.id
+            .split(',')
+            .map(|v| {
+                v.parse::<i32>().map_err(|_| AppError::BadRequest("Failed to parse")).map(|n| {
+                    super::get_node_and_validate(span, &info.0, n, db).ok_or(AppError::BadRequest("Unknown node"))
+                })
+            })
+            .into_iter()
+            .collect::<Result<Result<Vec<db::Inode>, AppError>, AppError>>()??
+    };

     let nodes_key = BTreeSet::from_iter(nodes.iter().map(|node| node.id));
-    let guard = super::ZIP_TO_PROGRESS.read().await;
-    let entry = guard.get(&nodes_key)
-        .ok_or(AppError::BadRequest("Unknown zip"))?;
+    let guard = super::ZIP_TO_PROGRESS.read();
+    let entry = guard.get(&nodes_key).ok_or(AppError::BadRequest("Unknown zip"))?;
     if !entry.done.load(Ordering::Relaxed) {
         AppError::BadRequest("Unfinished zip").err()
     } else {
-        let file = format!("./temp/{}", entry.temp_id);
-        let mut resp = warp::reply::Response::new(super::get_file_stream_body(file.clone()));
-        *resp.status_mut() = warp::http::StatusCode::OK;
-        resp.headers_mut().typed_insert(
-            headers::ContentLength(std::fs::metadata(std::path::Path::new(&file)).unwrap().len())
-        );
-        resp.headers_mut().typed_insert(
-            headers::ContentType::from(
-                mime_guess::from_ext("zip").first().unwrap()
-            )
-        );
-        resp.headers_mut().insert(
-            "Content-Disposition",
-            "attachment; filename=files.zip".parse().unwrap()
-        );
+        let file_name = format!("./temp/{}", entry.temp_id);
+        let resp = Response::from_file(File::open(std::path::Path::new(&file_name)).unwrap())
+            .with_header(header("content-type", "application/zip"))
+            .with_header(header(
+                "content-disposition",
+                "attachment; filename=files.zip"
+            ))
+            .boxed();
         Ok(resp)
     }
 }
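The node-parsing pipeline in the new download_multi is easy to misread: each closure yields a Result<Result<db::Inode, AppError>, AppError> (outer error = the id failed to parse, inner error = unknown node), a single collect folds the iterator into Result<Result<Vec<_>, _>, _>, and the trailing ?? unwraps both layers. A self-contained illustration of the same shape with stand-in values and error types:

fn main() {
    let items = vec!["1", "2", "x"];
    let parsed = items
        .iter()
        .map(|v| {
            v.parse::<i32>().map_err(|_| "parse error").map(|n| {
                if n > 0 { Ok(n) } else { Err("validation error") }
            })
        })
        .collect::<Result<Result<Vec<i32>, &str>, &str>>();
    // Collect short-circuits on the first outer error ("x" does not parse);
    // in the handler the two `?` operators unwrap the outer and inner layers in turn.
    assert_eq!(parsed, Err("parse error"));
}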
-async fn download_preview(node: i32, info: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
-    let guard_lock = DBConnection::get_lock(info.0.id).await;
-    let _guard = guard_lock.read().await;
-    let node: crate::db::Inode = super::get_node_and_validate(&info.0, node, &mut db)
-        .ok_or(AppError::BadRequest("Unknown node"))?;
+pub fn download_preview(
+    span: &Span,
+    _: &mut Request,
+    db: &mut DBConnection,
+    info: UserInfo,
+    node: i32
+) -> Result<ResponseBox, AppError> {
+    let guard_lock = DBConnection::get_lock(info.0.id);
+    let _guard = guard_lock.read(span);
+    let node: db::Inode =
+        super::get_node_and_validate(span, &info.0, node, db).ok_or(AppError::BadRequest("Unknown node"))?;

     if node.has_preview {
         let file = format!("./files/{}_preview.jpg", node.id);
         get_reply(&dto::responses::DownloadBase64 {
             statusCode: 200,
-            data: "data:image/png;base64,".to_owned() + &base64::encode(std::fs::read(std::path::Path::new(&file)).unwrap())
+            data: "data:image/png;base64,".to_owned()
+                + &base64::encode(std::fs::read(std::path::Path::new(&file)).unwrap())
         })
     } else {
         AppError::BadRequest("No preview").err()
     }
 }
-async fn get_type(node: i32, info: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
-    let node: crate::db::Inode = super::get_node_and_validate(&info.0, node, &mut db)
-        .ok_or(AppError::BadRequest("Unknown node"))?;
+pub fn get_type(
+    span: &Span,
+    _: &mut Request,
+    db: &mut DBConnection,
+    info: UserInfo,
+    node: i32
+) -> Result<ResponseBox, AppError> {
+    let node: db::Inode =
+        super::get_node_and_validate(span, &info.0, node, db).ok_or(AppError::BadRequest("Unknown node"))?;

     get_reply(&dto::responses::Type {
         statusCode: 200,
@@ -1,28 +1,28 @@
-mod filters;
-mod auth;
-mod admin;
-mod user;
+pub mod admin;
+pub mod auth;
+pub mod filters;
 pub mod fs;
+pub mod user;

-use warp::{Filter, Reply};
-use crate::db::DBPool;
-use crate::dto;
-
-pub fn build_routes(db: DBPool) -> impl Filter<Extract = impl Reply, Error = warp::Rejection> + Clone {
-    warp::path::path("api")
-        .and(
-            auth::build_routes(db.clone())
-                .or(admin::build_routes(db.clone()))
-                .or(user::build_routes(db.clone()))
-                .or(fs::build_routes(db))
-                .recover(error_handler)
-        )
-        .or(warp::fs::dir("./static/"))
-        .or(warp::fs::file("./static/index.html"))
+use std::io::Write;
+
+use tiny_http::{Response, ResponseBox};
+
+struct ChannelReader(std::sync::mpsc::Receiver<String>);
+
+impl std::io::Read for ChannelReader {
+    fn read(&mut self, mut buf: &mut [u8]) -> std::io::Result<usize> {
+        self.0.recv().map(|s| buf.write(s.as_bytes())).unwrap_or(Ok(0))
+    }
 }

-pub fn get_reply<T>(data: &T) -> Result<warp::reply::Response, warp::Rejection> where T: serde::Serialize {
-    Ok(warp::reply::with_status(warp::reply::json(data), warp::http::StatusCode::OK).into_response())
+pub fn header(name: &str, data: &str) -> tiny_http::Header { tiny_http::Header::from_bytes(name, data).unwrap() }
+
+pub fn get_reply<T>(data: &T) -> Result<ResponseBox, AppError>
+where T: serde::Serialize {
+    Ok(Response::from_data(serde_json::to_vec(data).unwrap())
+        .with_header(header("content-type", "application/json; charset=utf-8"))
+        .boxed())
 }

 #[derive(thiserror::Error, Debug, Clone)]
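The new ChannelReader is what lets the synchronous tiny_http server still stream a response: delete_node pushes progress lines into the sending half of an mpsc channel from a worker thread while the response body is read from the receiving half, and a recv error (the sender being dropped) is mapped to Ok(0), which ends the body. A minimal sketch of the same pattern outside the server, reusing the struct added above; the worker thread and messages are illustrative:

use std::io::{Read, Write};
use std::sync::mpsc;

struct ChannelReader(mpsc::Receiver<String>);

impl Read for ChannelReader {
    fn read(&mut self, mut buf: &mut [u8]) -> std::io::Result<usize> {
        // Block until the worker sends a line; a closed channel becomes EOF.
        self.0.recv().map(|s| buf.write(s.as_bytes())).unwrap_or(Ok(0))
    }
}

fn main() {
    let (tx, rx) = mpsc::channel::<String>();
    std::thread::spawn(move || {
        for i in 0..3 {
            tx.send(format!("step {}\n", i)).unwrap();
        }
        // dropping tx here terminates the stream
    });

    let mut out = String::new();
    ChannelReader(rx).read_to_string(&mut out).unwrap();
    print!("{}", out);
}

In the handler this reader is handed straight to tiny_http's Response::new, which pulls from the channel as the client consumes the body.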
@@ -33,82 +33,13 @@ pub enum AppError {
     Forbidden(&'static str),
     #[error("bad request")]
     BadRequest(&'static str),
+    #[error("not found")]
+    NotFound,
+    #[allow(dead_code)]
     #[error("internal error")]
     InternalError(&'static str)
 }
-impl warp::reject::Reject for AppError {}

 impl AppError {
-    pub fn reject(&self) -> warp::reject::Rejection {
-        warp::reject::custom(self.clone())
-    }
-
-    pub fn err<T>(&self) -> Result<T, warp::reject::Rejection> {
-        Err(self.reject())
-    }
-}
-
-pub async fn error_handler(err: warp::reject::Rejection) -> Result<impl Reply, std::convert::Infallible> {
-    if err.is_not_found() {
-        return Ok(warp::reply::with_status(
-            warp::reply::json(&dto::responses::Error {
-                statusCode: 404,
-                message: "bruh".to_owned()
-            }),
-            warp::http::StatusCode::NOT_FOUND
-        ));
-    }
-    if let Some(e) = err.find::<AppError>() {
-        return Ok(warp::reply::with_status(
-            warp::reply::json(&dto::responses::Error {
-                statusCode: match e {
-                    AppError::BadRequest(_) => 400,
-                    AppError::Unauthorized(_) => 401,
-                    AppError::Forbidden(_) => 403,
-                    AppError::InternalError(_) => 500
-                },
-                message: match e {
-                    AppError::BadRequest(v) => v.to_string(),
-                    AppError::Unauthorized(v) => v.to_string(),
-                    AppError::Forbidden(v) => v.to_string(),
-                    AppError::InternalError(v) => v.to_string()
-                },
-            }),
-            match e {
-                AppError::BadRequest(_) => warp::http::StatusCode::BAD_REQUEST,
-                AppError::Unauthorized(_) => warp::http::StatusCode::UNAUTHORIZED,
-                AppError::Forbidden(_) => warp::http::StatusCode::FORBIDDEN,
-                AppError::InternalError(_) => warp::http::StatusCode::INTERNAL_SERVER_ERROR
-            }
-        ));
-    }
-    if let Some(e) = err.find::<warp::body::BodyDeserializeError>() {
-        return Ok(warp::reply::with_status(
-            warp::reply::json(&dto::responses::Error {
-                statusCode: 400,
-                message: e.to_string(),
-            }),
-            warp::http::StatusCode::BAD_REQUEST
-        ))
-    }
-    if let Some(e) = err.find::<warp::reject::InvalidQuery>() {
-        return Ok(warp::reply::with_status(
-            warp::reply::json(&dto::responses::Error {
-                statusCode: 400,
-                message: e.to_string(),
-            }),
-            warp::http::StatusCode::BAD_REQUEST
-        ))
-    }
-    if let Some(e) = err.find::<warp::reject::MethodNotAllowed>() {
-        return Ok(warp::reply::with_status(
-            warp::reply::json(&dto::responses::Error {
-                statusCode: 405,
-                message: e.to_string(),
-            }),
-            warp::http::StatusCode::METHOD_NOT_ALLOWED
-        ))
-    }
-
-    Err(err).expect("Can't handle error")
+    pub fn err<T>(&self) -> Result<T, AppError> { Err(self.clone()) }
 }
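With warp's Rejection machinery removed, handlers now return Result<ResponseBox, AppError> directly and err() simply clones the error into an Err. Turning that error into an HTTP reply happens outside this excerpt; the sketch below is a hypothetical converter that reuses the status mapping from the deleted warp error_handler plus 404 for the new NotFound variant. The trimmed AppError re-declaration, status_of and to_response are illustrative, not code from this commit:

use tiny_http::{Response, ResponseBox};

#[allow(dead_code)]
#[derive(Debug)]
enum AppError {
    Unauthorized(&'static str),
    Forbidden(&'static str),
    BadRequest(&'static str),
    NotFound,
    InternalError(&'static str),
}

fn status_of(err: &AppError) -> u16 {
    match err {
        AppError::BadRequest(_) => 400,
        AppError::Unauthorized(_) => 401,
        AppError::Forbidden(_) => 403,
        AppError::NotFound => 404,
        AppError::InternalError(_) => 500,
    }
}

fn to_response(err: &AppError) -> ResponseBox {
    // Plain-text body for brevity; the real code serialises a dto::responses::Error as JSON.
    Response::from_string(format!("{:?}", err))
        .with_status_code(status_of(err))
        .boxed()
}

fn main() {
    // In the server loop this would be written back with Request::respond.
    let _resp = to_response(&AppError::NotFound);
}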
@@ -1,24 +1,14 @@
-use warp::{Filter, Reply};
-use crate::db::{DBConnection, DBPool, with_db};
-use crate::dto;
-use crate::routes::get_reply;
-use crate::routes::filters::{authenticated, UserInfo};
-
-pub fn build_routes(db: DBPool) -> impl Filter<Extract = impl Reply, Error = warp::Rejection> + Clone {
-    let info = warp::path!("user" / "info")
-        .and(warp::get())
-        .and(authenticated(db.clone()))
-        .and_then(info);
-    let delete_user = warp::path!("user" / "delete")
-        .and(warp::post())
-        .and(authenticated(db.clone()))
-        .and(with_db(db))
-        .and_then(delete_user);
-
-    info.or(delete_user)
-}
-
-async fn info(info: UserInfo) -> Result<impl Reply, warp::Rejection> {
+use rustracing_jaeger::Span;
+use tiny_http::{Request, ResponseBox};
+
+use crate::{
+    db::DBConnection,
+    dto,
+    routes::{filters::UserInfo, get_reply},
+    AppError
+};
+
+pub fn info(_: &Span, _: &mut Request, _: &mut DBConnection, info: UserInfo) -> Result<ResponseBox, AppError> {
     get_reply(&dto::responses::UserInfo {
         statusCode: info.0.id,
         name: info.0.name,
@@ -27,16 +17,20 @@ async fn info(info: UserInfo) -> Result<impl Reply, warp::Rejection> {
     })
 }

-async fn delete_user(info: UserInfo, mut db: DBConnection) -> Result<impl Reply, warp::Rejection> {
-    db.delete_all_tokens(info.0.id);
+pub fn delete_user(
+    span: &Span,
+    _: &mut Request,
+    db: &mut DBConnection,
+    info: UserInfo
+) -> Result<ResponseBox, AppError> {
+    db.delete_all_tokens(span, info.0.id);

-    let root_node = super::fs::get_node_and_validate(&info.0, info.0.root_id, &mut db).expect("Failed to get root node for deleting");
+    let root_node = super::fs::get_node_and_validate(span, &info.0, info.0.root_id, db)
+        .expect("Failed to get root node for deleting");

-    super::fs::delete_node_root(&root_node, &mut db);
+    super::fs::delete_node_root(span, &root_node, db);

     db.delete_user(&info.0);

-    get_reply(&dto::responses::Success {
-        statusCode: 200
-    })
+    get_reply(&dto::responses::Success { statusCode: 200 })
 }
@@ -35,8 +35,4 @@ diesel::table! {
     }
 }

-diesel::allow_tables_to_appear_in_same_query!(
-    inode,
-    tokens,
-    user,
-);
+diesel::allow_tables_to_appear_in_same_query!(inode, tokens, user,);
@@ -1 +0,0 @@
-tokio-console http://127.0.0.1:9999/ --colorterm 24bit --retain-for "2s"