Merge pull request #2 from Trivernis/feature/file-status

Feature/file status
Julius Riegel 3 years ago committed by GitHub
commit 3c6599e66f

@@ -79,6 +79,17 @@ version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"

+[[package]]
+name = "async-recursion"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7d78656ba01f1b93024b7c3a0467f1608e4be67d725749fdcd7d2c7678fd7a2"
+dependencies = [
+"proc-macro2 1.0.35",
+"quote 1.0.10",
+"syn 1.0.84",
+]
+
[[package]]
name = "async-stream"
version = "0.3.2"
@@ -117,7 +128,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "616896e05fc0e2649463a93a15183c6a16bf03413a7af88ef1285ddedfa9cda5"
dependencies = [
-"num-traits",
+"num-traits 0.2.14",
]

[[package]]
@@ -168,7 +179,7 @@ version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
dependencies = [
-"serde",
+"serde 1.0.132",
]

[[package]]
@@ -259,7 +270,7 @@ dependencies = [
"byteorder",
"futures 0.3.19",
"lazy_static",
-"serde",
+"serde 1.0.132",
"thiserror",
"tokio",
"tracing",
@@ -328,8 +339,8 @@ checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
dependencies = [
"libc",
"num-integer",
-"num-traits",
-"serde",
+"num-traits 0.2.14",
+"serde 1.0.132",
"time 0.1.44",
"winapi",
]
@@ -372,6 +383,22 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "120133d4db2ec47efe2e26502ee984747630c67f51974fca0b6c1340cf2368d3"

+[[package]]
+name = "config"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b1b9d958c2b1368a663f05538fc1b5975adce1e19f435acceae987aceeeb369"
+dependencies = [
+"lazy_static",
+"nom 5.1.2",
+"rust-ini",
+"serde 1.0.132",
+"serde-hjson",
+"serde_json",
+"toml",
+"yaml-rust",
+]
+
[[package]]
name = "console-api"
version = "0.1.0"
@@ -395,7 +422,7 @@ dependencies = [
"futures 0.3.19",
"hdrhistogram",
"humantime",
-"serde",
+"serde 1.0.132",
"serde_json",
"thread_local",
"tokio",
@@ -847,7 +874,7 @@ dependencies = [
"byteorder",
"flate2",
"nom 7.1.0",
-"num-traits",
+"num-traits 0.2.14",
]

[[package]]
@@ -974,7 +1001,7 @@ dependencies = [
"jpeg-decoder",
"num-iter",
"num-rational",
-"num-traits",
+"num-traits 0.2.14",
"png",
"scoped_threadpool",
"tiff",
@@ -1065,6 +1092,19 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"

+[[package]]
+name = "lexical-core"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe"
+dependencies = [
+"arrayvec 0.5.2",
+"bitflags",
+"cfg-if 1.0.0",
+"ryu",
+"static_assertions",
+]
+
[[package]]
name = "libc"
version = "0.2.112"
@@ -1101,6 +1141,12 @@ dependencies = [
"cc",
]

+[[package]]
+name = "linked-hash-map"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
+
[[package]]
name = "lock_api"
version = "0.4.5"
@@ -1136,12 +1182,12 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
[[package]]
name = "mediarepo-api"
-version = "0.20.0"
-source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=0c897acfd959c776fc10bd8fabdd2eb22b437be3#0c897acfd959c776fc10bd8fabdd2eb22b437be3"
+version = "0.27.0"
+source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=9fd25e4696cdd68886fa98579aef4fa2ca561dc0#9fd25e4696cdd68886fa98579aef4fa2ca561dc0"
dependencies = [
"bromine",
"chrono",
-"serde",
+"serde 1.0.132",
"serde_piecewise_default",
"thiserror",
"tracing",
@@ -1152,6 +1198,8 @@ name = "mediarepo-core"
version = "0.1.0"
dependencies = [
"base64",
+"config",
+"data-encoding",
"futures 0.3.19",
"glob",
"itertools",
@@ -1159,11 +1207,12 @@ dependencies = [
"multibase",
"multihash",
"sea-orm",
-"serde",
+"serde 1.0.132",
"sqlx",
"thiserror",
"thumbnailer",
"tokio",
+"tokio-graceful-shutdown",
"toml",
"tracing",
"typemap_rev",
@@ -1213,7 +1262,7 @@ dependencies = [
"mime",
"mime_guess",
"sea-orm",
-"serde",
+"serde 1.0.132",
"tokio",
"tracing",
"typemap_rev",
@@ -1229,7 +1278,8 @@ dependencies = [
"mediarepo-database",
"mediarepo-model",
"port_check",
-"serde",
+"rayon",
+"serde 1.0.132",
"tokio",
"tracing",
"tracing-futures",
@@ -1385,6 +1435,7 @@ version = "5.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af"
dependencies = [
+"lexical-core",
"memchr",
"version_check",
]
@@ -1417,7 +1468,7 @@ checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3"
dependencies = [
"autocfg",
"num-integer",
-"num-traits",
+"num-traits 0.2.14",
]

[[package]]
@@ -1427,7 +1478,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
dependencies = [
"autocfg",
-"num-traits",
+"num-traits 0.2.14",
]

[[package]]
@@ -1438,7 +1489,7 @@ checksum = "b2021c8337a54d21aca0d59a92577a029af9431cb59b909b03252b9c164fad59"
dependencies = [
"autocfg",
"num-integer",
-"num-traits",
+"num-traits 0.2.14",
]

[[package]]
@@ -1449,7 +1500,16 @@ checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07"
dependencies = [
"autocfg",
"num-integer",
-"num-traits",
+"num-traits 0.2.14",
]

+[[package]]
+name = "num-traits"
+version = "0.1.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31"
+dependencies = [
+"num-traits 0.2.14",
+]
+
[[package]]
@@ -1890,6 +1950,12 @@ dependencies = [
"chrono",
]

+[[package]]
+name = "rust-ini"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e52c148ef37f8c375d49d5a73aa70713125b7f19095948a923f80afdeb22ec2"
+
[[package]]
name = "rust_decimal"
version = "1.19.0"
@@ -1897,8 +1963,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c2d4912d369fb95a351c221475657970678d344d70c1a788223f6e74d1e3732"
dependencies = [
"arrayvec 0.7.2",
-"num-traits",
-"serde",
+"num-traits 0.2.14",
+"serde 1.0.132",
]

[[package]]
@@ -1958,7 +2024,7 @@ dependencies = [
"sea-orm-macros",
"sea-query",
"sea-strum",
-"serde",
+"serde 1.0.132",
"serde_json",
"sqlx",
"tracing",
@@ -2050,6 +2116,12 @@ dependencies = [
"libc",
]

+[[package]]
+name = "serde"
+version = "0.8.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9dad3f759919b92c3068c696c15c3d17238234498bbdcc80f2c469606f948ac8"
+
[[package]]
name = "serde"
version = "1.0.132"
@@ -2059,6 +2131,18 @@ dependencies = [
"serde_derive",
]

+[[package]]
+name = "serde-hjson"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a3a4e0ea8a88553209f6cc6cfe8724ecad22e1acf372793c27d995290fe74f8"
+dependencies = [
+"lazy_static",
+"num-traits 0.1.43",
+"regex",
+"serde 0.8.23",
+]
+
[[package]]
name = "serde_derive"
version = "1.0.132"
@@ -2079,7 +2163,7 @@ dependencies = [
"indexmap",
"itoa 1.0.1",
"ryu",
-"serde",
+"serde 1.0.132",
]

[[package]]
@@ -2088,7 +2172,7 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91a44b0f51aedd237f8f25c831e1f629982a187a5045c08ce4bccccce17b4b0"
dependencies = [
-"serde",
+"serde 1.0.132",
"serde_piecewise_default_derive",
]
@@ -2100,7 +2184,7 @@ checksum = "19446953e7b22342c23c79ede938c04b1c12f4eb7513db30cda94193ce30ff2a"
dependencies = [
"proc-macro2 0.4.30",
"quote 0.6.13",
-"serde",
+"serde 1.0.132",
"syn 0.15.44",
]
@@ -2144,6 +2228,15 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"

+[[package]]
+name = "signal-hook-registry"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0"
+dependencies = [
+"libc",
+]
+
[[package]]
name = "slab"
version = "0.4.5"
@@ -2221,7 +2314,7 @@ dependencies = [
"parking_lot",
"percent-encoding",
"rust_decimal",
-"serde",
+"serde 1.0.132",
"serde_json",
"sha2",
"smallvec",
@@ -2273,6 +2366,12 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"

+[[package]]
+name = "static_assertions"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+
[[package]]
name = "stringprep"
version = "0.1.2"
@@ -2471,7 +2570,9 @@ dependencies = [
"memchr",
"mio",
"num_cpus",
+"once_cell",
"pin-project-lite",
+"signal-hook-registry",
"tokio-macros",
"tracing",
"winapi",
@@ -2487,6 +2588,20 @@ dependencies = [
"futures 0.1.31",
]

+[[package]]
+name = "tokio-graceful-shutdown"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d08ebea7dc6b22273290d8ece2ca448f979f836e38ba629b650595c64204b4f2"
+dependencies = [
+"anyhow",
+"async-recursion",
+"futures 0.3.19",
+"log",
+"tokio",
+"tokio-util",
+]
+
[[package]]
name = "tokio-io-timeout"
version = "1.1.1"
@@ -2549,7 +2664,7 @@ version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa"
dependencies = [
-"serde",
+"serde 1.0.132",
]

[[package]]
@@ -2711,7 +2826,7 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb65ea441fbb84f9f6748fd496cf7f63ec9af5bca94dd86456978d055e8eb28b"
dependencies = [
-"serde",
+"serde 1.0.132",
"tracing-core",
]
@@ -2725,7 +2840,7 @@ dependencies = [
"lazy_static",
"matchers",
"regex",
-"serde",
+"serde 1.0.132",
"serde_json",
"sharded-slab",
"smallvec",
@@ -2833,7 +2948,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
dependencies = [
"getrandom",
-"serde",
+"serde 1.0.132",
]

[[package]]
@@ -2992,3 +3107,12 @@ name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "yaml-rust"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
+dependencies = [
+"linked-hash-map",
+]

@@ -17,7 +17,9 @@ typemap_rev = "^0.1.5"
futures = "^0.3.19"
itertools = "^0.10.3"
glob = "^0.3.0"
-tracing = "0.1.29"
+tracing = "^0.1.29"
+data-encoding = "^2.3.2"
+tokio-graceful-shutdown = "^0.4.3"

[dependencies.thumbnailer]
version = "^0.2.4"
@@ -36,9 +38,13 @@ features = ["migrate"]
version = "^1.15.0"
features = ["fs", "io-util", "io-std"]

+[dependencies.config]
+version = "^0.11.0"
+features = ["toml"]
+
[dependencies.mediarepo-api]
git = "https://github.com/Trivernis/mediarepo-api.git"
-rev = "0c897acfd959c776fc10bd8fabdd2eb22b437be3"
+rev = "9fd25e4696cdd68886fa98579aef4fa2ca561dc0"
features = ["bromine"]

[features]

@@ -0,0 +1,65 @@
use crate::error::RepoResult;
use multihash::{Code, MultihashDigest};
/// Creates a new content descriptor for the given file
pub fn create_content_descriptor(bytes: &[u8]) -> Vec<u8> {
Code::Sha2_256.digest(bytes).to_bytes()
}
/// Encodes a content descriptor while respecting the version
pub fn encode_content_descriptor(descriptor: &[u8]) -> String {
if is_v1_content_descriptor(descriptor) {
encode_content_descriptor_v1(descriptor)
} else {
encode_content_descriptor_v2(descriptor)
}
}
/// Encodes a v1 descriptor that is already stored encoded in the database (only interprets it as string)
pub fn encode_content_descriptor_v1(descriptor: &[u8]) -> String {
String::from_utf8_lossy(descriptor).to_string()
}
/// Encodes the content descriptor as base32 lowercase
pub fn encode_content_descriptor_v2(descriptor: &[u8]) -> String {
data_encoding::BASE32_DNSSEC.encode(descriptor)
}
/// Decodes a content descriptor
pub fn decode_content_descriptor<S: AsRef<str>>(descriptor: S) -> RepoResult<Vec<u8>> {
// check for a v1 descriptor with a fixed length of 56, starting with the prefix of the base and hash
if is_v1_content_descriptor_string(descriptor.as_ref()) {
decode_content_descriptor_v1(descriptor)
} else {
decode_content_descriptor_v2(descriptor)
}
}
/// Decodes the first version of content descriptors (multibase)
pub fn decode_content_descriptor_v1<S: AsRef<str>>(descriptor: S) -> RepoResult<Vec<u8>> {
Ok(descriptor.as_ref().as_bytes().to_vec())
}
/// Decodes the second version of content descriptors (faster fixed base32)
pub fn decode_content_descriptor_v2<S: AsRef<str>>(descriptor: S) -> RepoResult<Vec<u8>> {
let data = data_encoding::BASE32_DNSSEC.decode(descriptor.as_ref().as_bytes())?;
Ok(data)
}
/// Decodes the data stored in the v1 content descriptor into the v2 format
pub fn convert_v1_descriptor_to_v2(descriptor_v1: &[u8]) -> RepoResult<Vec<u8>> {
let (_, data) = multibase::decode(encode_content_descriptor_v1(descriptor_v1))?;
Ok(data)
}
/// Checks if a binary descriptor is v1
pub fn is_v1_content_descriptor(descriptor: &[u8]) -> bool {
descriptor.len() == 56 && descriptor.starts_with(b"bciq")
}
/// Checks if a descriptor string is a v1 descriptor
pub fn is_v1_content_descriptor_string<S: AsRef<str>>(descriptor: S) -> bool {
descriptor.as_ref().len() == 56 && descriptor.as_ref().starts_with("bciq")
}
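For orientation, a minimal round-trip sketch (not part of this commit) using the helpers above; it assumes they are reachable under mediarepo_core::content_descriptor and that `data` is raw file content:

use mediarepo_core::content_descriptor::*;
use mediarepo_core::error::RepoResult;

fn descriptor_roundtrip(data: &[u8]) -> RepoResult<()> {
    // v2 descriptor: the raw SHA2-256 multihash bytes
    let descriptor = create_content_descriptor(data);
    // lowercase base32 string used for file names and over the API
    let encoded = encode_content_descriptor(&descriptor);
    // decoding restores the exact bytes
    assert_eq!(decode_content_descriptor(&encoded)?, descriptor);
    // freshly created descriptors are always v2
    assert!(!is_v1_content_descriptor(&descriptor));
    Ok(())
}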

@@ -1,5 +1,5 @@
use sea_orm::DbErr;
-use std::fmt::{Display, Formatter};
+use std::fmt::{Debug, Formatter};
use thiserror::Error;

pub type RepoResult<T> = Result<T, RepoError>;
@@ -31,8 +31,14 @@ pub enum RepoError {
#[error(transparent)]
Thumbnailer(#[from] thumbnailer::error::ThumbError),

-#[error("No free tcp port available")]
+#[error("no free tcp port available")]
PortUnavailable,
+
+#[error("failed to decode data {0}")]
+Decode(#[from] data_encoding::DecodeError),
+
+#[error("Failed to read repo.toml configuration file {0}")]
+Config(#[from] config::ConfigError),
}

#[derive(Error, Debug)]
@@ -56,9 +62,9 @@ pub enum RepoDatabaseError {
#[derive(Debug)]
pub struct StringError(String);

-impl Display for StringError {
+impl std::fmt::Display for StringError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-self.0.fmt(f)
+std::fmt::Display::fmt(&self.0, f)
}
}

@@ -0,0 +1,35 @@
use std::io::Result;
use std::path::{Path, PathBuf};
use tokio::fs::{File, OpenOptions};
/// A file that only exists while being owned.
/// Will automatically be deleted on Drop
pub struct DropFile {
path: PathBuf,
}
impl DropFile {
pub async fn new<P: AsRef<Path>>(path: P) -> Result<(File, Self)> {
let file = OpenOptions::new()
.write(true)
.read(true)
.create(true)
.open(path.as_ref())
.await?;
Ok((file, Self::from_path(path)))
}
pub fn from_path<P: AsRef<Path>>(path: P) -> Self {
Self {
path: path.as_ref().to_path_buf(),
}
}
}
impl Drop for DropFile {
fn drop(&mut self) {
if let Err(e) = std::fs::remove_file(&self.path) {
tracing::error!("failed to remove drop file {:?}: {}", self.path, e);
}
}
}
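A short usage sketch (not from this commit): the returned guard owns the marker file, which is removed again when the guard is dropped. Path and contents here are illustrative.

use mediarepo_core::fs::drop_file::DropFile;
use tokio::io::AsyncWriteExt;

async fn with_marker() -> std::io::Result<()> {
    // the marker exists for as long as `_guard` is alive
    let (mut file, _guard) = DropFile::new("./repo.lock").await?;
    file.write_all(b"illustrative marker content\n").await?;
    Ok(())
    // `_guard` is dropped here and ./repo.lock is deleted
}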

@@ -1,14 +1,11 @@
+use crate::content_descriptor::{create_content_descriptor, encode_content_descriptor};
use crate::error::RepoResult;
use crate::utils::get_folder_size;
-use multibase::Base;
-use multihash::{Code, MultihashDigest};
use std::path::PathBuf;
use tokio::fs;
use tokio::fs::{File, OpenOptions};
use tokio::io::{AsyncRead, AsyncReadExt, BufReader};

-static STORE_BASE: Base = Base::Base32Lower;
-
#[derive(Clone, Debug)]
pub struct FileHashStore {
path: PathBuf,
@@ -24,35 +21,32 @@ impl FileHashStore {
&self,
mut reader: R,
extension: Option<&str>,
-) -> RepoResult<String> {
+) -> RepoResult<Vec<u8>> {
let mut buf = Vec::new();
reader.read_to_end(&mut buf).await?;
-let hash: Vec<u8> = Code::Sha2_256.digest(&buf).to_bytes();
-let hash: String = multibase::encode(STORE_BASE, &hash);
-let folder_path = self.hash_to_folder_path(&hash);
+let descriptor = create_content_descriptor(&buf);
+let file_path = self.descriptor_to_file_path(&descriptor);
+let folder_path = file_path.parent().unwrap();

if !folder_path.exists() {
fs::create_dir(folder_path).await?;
}
-let mut file_path = self.hash_to_file_path(&hash);
+let mut file_path = self.descriptor_to_file_path(&descriptor);

if let Some(extension) = extension {
file_path.set_extension(extension);
}
fs::write(file_path, buf).await?;

-Ok(hash)
+Ok(descriptor)
}

/// Returns the file extension and a reader for the file by hash
pub async fn get_file(
&self,
-mut hash: String,
+descriptor: &[u8],
) -> RepoResult<(Option<String>, BufReader<File>)> {
-let (base, data) = multibase::decode(&hash)?;
-if base != STORE_BASE {
-hash = multibase::encode(STORE_BASE, data);
-}
-let file_path = self.hash_to_file_path(&hash);
+let file_path = self.descriptor_to_file_path(descriptor);
+tracing::debug!("Opening file {:?}", file_path);
let extension = file_path
.extension()
.and_then(|s| s.to_str())
@@ -63,23 +57,56 @@ impl FileHashStore {
Ok((extension, reader))
}

+/// Renames a file
+pub async fn rename_file(
+&self,
+src_descriptor: &[u8],
+dst_descriptor: &[u8],
+) -> RepoResult<()> {
+let src_path = self.descriptor_to_file_path(src_descriptor);
+if !src_path.exists() {
+tracing::warn!("file {:?} doesn't exist", src_path);
+return Ok(());
+}
+let dst_path = self.descriptor_to_file_path(dst_descriptor);
+let dst_parent = dst_path.parent().unwrap();
+if !dst_parent.exists() {
+fs::create_dir(dst_parent).await?;
+}
+fs::rename(src_path, dst_path).await?;
+Ok(())
+}
+
+pub async fn delete_file(&self, descriptor: &[u8]) -> RepoResult<()> {
+let path = self.descriptor_to_file_path(descriptor);
+if !path.exists() {
+tracing::warn!("file {:?} doesn't exist", path);
+return Ok(());
+}
+fs::remove_file(path).await?;
+Ok(())
+}
+
/// Scans the size of the folder
#[inline]
pub async fn get_size(&self) -> RepoResult<u64> {
get_folder_size(self.path.to_owned()).await
}

-fn hash_to_file_path(&self, hash: &str) -> PathBuf {
-let mut path = self.hash_to_folder_path(hash);
-path.push(hash);
+fn descriptor_to_file_path(&self, descriptor: &[u8]) -> PathBuf {
+let descriptor_string = encode_content_descriptor(descriptor);
+let mut path = self.descriptor_string_to_folder_path(&descriptor_string);
+path.push(descriptor_string);
path
}

-fn hash_to_folder_path(&self, hash: &str) -> PathBuf {
-assert!(hash.len() >= 2);
+fn descriptor_string_to_folder_path(&self, descriptor: &str) -> PathBuf {
+assert!(descriptor.len() >= 2);
let mut path = self.path.clone();
-path.push(&hash[hash.len() - 3..hash.len() - 1]);
+path.push(&descriptor[descriptor.len() - 3..descriptor.len() - 1]);
path
}
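For clarity, a standalone sketch (not from this commit) of the sharding rule that descriptor_string_to_folder_path implements: the two characters just before the last character of the encoded descriptor pick the subdirectory, so content spreads over at most 1024 folders of the base32 alphabet. Names here are illustrative.

use std::path::{Path, PathBuf};

fn sharded_path(store_root: &Path, encoded_descriptor: &str) -> PathBuf {
    assert!(encoded_descriptor.len() >= 3);
    let len = encoded_descriptor.len();
    // e.g. ".../yz/<encoded descriptor>" for a descriptor ending in "...xyzq"
    let shard = &encoded_descriptor[len - 3..len - 1];
    store_root.join(shard).join(encoded_descriptor)
}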

@@ -1,2 +1,3 @@
+pub mod drop_file;
pub mod file_hash_store;
pub mod thumbnail_store;

@@ -1,5 +1,6 @@
use crate::error::RepoResult;
use crate::utils::get_folder_size;
+use std::fmt::Debug;
use std::io::Result;
use std::path::PathBuf;
use tokio::fs;
@@ -77,6 +78,37 @@ impl ThumbnailStore {
Ok(entries)
}

+/// Renames a thumbnail parent
+#[tracing::instrument(level = "debug")]
+pub async fn rename_parent<S1: AsRef<str> + Debug, S2: AsRef<str> + Debug>(
+&self,
+src: S1,
+dst: S2,
+) -> Result<()> {
+let src_dir = self.path.join(src.as_ref());
+if !src_dir.exists() {
+tracing::warn!("directory {:?} doesn't exist", src_dir);
+return Ok(());
+}
+let dst_dir = self.path.join(dst.as_ref());
+fs::rename(src_dir, dst_dir).await
+}
+
+/// Deletes all thumbnails of a parent
+#[tracing::instrument(level = "debug")]
+pub async fn delete_parent<S: AsRef<str> + Debug>(&self, parent: S) -> Result<()> {
+let path = PathBuf::from(parent.as_ref());
+if !path.exists() {
+tracing::warn!("directory {:?} doesn't exist", path);
+return Ok(());
+}
+fs::remove_dir_all(&path).await?;
+Ok(())
+}
+
/// Returns the size of the folder
#[tracing::instrument(level = "debug")]
pub async fn get_size(&self) -> RepoResult<u64> {

@@ -3,7 +3,9 @@ pub use itertools;
pub use mediarepo_api;
pub use mediarepo_api::bromine;
pub use thumbnailer;
+pub use tokio_graceful_shutdown;

+pub mod content_descriptor;
pub mod context;
pub mod error;
pub mod fs;

@@ -1,38 +0,0 @@
use crate::error::RepoResult;
use serde::{Deserialize, Serialize};
use std::net::IpAddr;
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Settings {
pub listen_address: IpAddr,
pub port_range: (u16, u16),
pub database_path: String,
pub default_file_store: String,
pub thumbnail_store: String,
}
impl Default for Settings {
fn default() -> Self {
Self {
listen_address: IpAddr::from([127, 0, 0, 1]),
port_range: (3400, 3500),
database_path: "./db/repo.db".to_string(),
default_file_store: "Main".to_string(),
thumbnail_store: "./thumbnails".to_string(),
}
}
}
impl Settings {
/// Parses settings from a string
pub fn from_toml_string(s: &str) -> RepoResult<Self> {
let settings = toml::from_str(s)?;
Ok(settings)
}
/// Converts the settings into a toml string
pub fn to_toml_string(&self) -> RepoResult<String> {
let string = toml::to_string(&self)?;
Ok(string)
}
}

@@ -0,0 +1,42 @@
use serde::{Deserialize, Serialize};
use tracing::Level;
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct LoggingSettings {
pub level: LogLevel,
pub trace_sql: bool,
pub trace_api_calls: bool,
}
impl Default for LoggingSettings {
fn default() -> Self {
Self {
level: LogLevel::Info,
trace_sql: false,
trace_api_calls: false,
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum LogLevel {
Off,
Error,
Warn,
Info,
Debug,
Trace,
}
impl Into<Option<Level>> for LogLevel {
fn into(self) -> Option<Level> {
match self {
LogLevel::Off => None,
LogLevel::Error => Some(Level::ERROR),
LogLevel::Warn => Some(Level::WARN),
LogLevel::Info => Some(Level::INFO),
LogLevel::Debug => Some(Level::DEBUG),
LogLevel::Trace => Some(Level::TRACE),
}
}
}
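A small sketch (not from this commit) of consuming the conversion above, for example to build a filter directive where None (LogLevel::Off) becomes the string "off":

use tracing::Level;

fn level_directive(configured: LogLevel) -> String {
    let level: Option<Level> = configured.into();
    level
        .map(|l| l.to_string().to_lowercase())
        .unwrap_or_else(|| String::from("off"))
}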

@@ -0,0 +1,76 @@
mod logging;
mod paths;
mod server;
pub mod v1;
use crate::error::RepoResult;
use crate::settings::v1::SettingsV1;
use config::{Config, FileFormat};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::PathBuf;
pub use logging::*;
pub use paths::*;
pub use server::*;
#[derive(Clone, Debug, Deserialize, Serialize, Default)]
pub struct Settings {
pub server: ServerSettings,
pub paths: PathSettings,
pub logging: LoggingSettings,
}
impl Settings {
pub fn read(root: &PathBuf) -> RepoResult<Self> {
let mut settings = Config::default();
settings
.merge(config::File::from_str(
&*Settings::default().to_toml_string()?,
FileFormat::Toml,
))?
.merge(config::File::from(root.join("repo")))?
.merge(config::Environment::with_prefix("MEDIAREPO").separator("."))?;
tracing::debug!("Settings are: {:#?}", settings);
Ok(settings.try_into::<Settings>()?)
}
/// Parses settings from a string
pub fn from_v1(settings_v1: SettingsV1) -> RepoResult<Self> {
let mut settings_main = Settings::default();
settings_main.server.tcp.enabled = true;
settings_main.server.tcp.port = PortSetting::Range(settings_v1.port_range);
settings_main.server.tcp.listen_address = settings_v1.listen_address;
settings_main.paths.thumbnail_directory = settings_v1.thumbnail_store.into();
settings_main.paths.database_directory = PathBuf::from(settings_v1.database_path)
.parent()
.map(|p| p.to_string_lossy().to_string())
.unwrap_or_else(|| String::from("./"));
let mut settings = Config::default();
settings
.merge(config::File::from_str(
&*settings_main.to_toml_string()?,
FileFormat::Toml,
))?
.merge(config::Environment::with_prefix("MEDIAREPO"))?;
tracing::debug!("Settings are: {:#?}", settings);
Ok(settings.try_into::<Settings>()?)
}
/// Converts the settings into a toml string
pub fn to_toml_string(&self) -> RepoResult<String> {
let string = toml::to_string(&self)?;
Ok(string)
}
pub fn save(&self, root: &PathBuf) -> RepoResult<()> {
let string = toml::to_string_pretty(&self)?;
fs::write(root.join("repo.toml"), string.into_bytes())?;
Ok(())
}
}
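As a usage sketch (not from this commit), reading the layered configuration for a repository root and writing the merged result back to repo.toml; the root path is illustrative:

use mediarepo_core::error::RepoResult;
use mediarepo_core::settings::Settings;
use std::path::PathBuf;

fn load_settings() -> RepoResult<Settings> {
    let root = PathBuf::from("./repository");
    // layering: serialized defaults <- repo.toml in the root <- MEDIAREPO-prefixed environment values
    let settings = Settings::read(&root)?;
    settings.save(&root)?;
    Ok(settings)
}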

@@ -0,0 +1,46 @@
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PathSettings {
pub(crate) database_directory: String,
pub(crate) files_directory: String,
pub(crate) thumbnail_directory: String,
}
impl Default for PathSettings {
fn default() -> Self {
Self {
database_directory: String::from("db"),
files_directory: String::from("files"),
thumbnail_directory: String::from("thumbnails"),
}
}
}
impl PathSettings {
#[inline]
pub fn database_dir(&self, root: &PathBuf) -> PathBuf {
root.join(&self.database_directory)
}
#[inline]
pub fn files_dir(&self, root: &PathBuf) -> PathBuf {
root.join(&self.files_directory)
}
#[inline]
pub fn thumbs_dir(&self, root: &PathBuf) -> PathBuf {
root.join(&self.thumbnail_directory)
}
#[inline]
pub fn db_file_path(&self, root: &PathBuf) -> PathBuf {
self.database_dir(root).join("repo.db")
}
#[inline]
pub fn frontend_state_file_path(&self, root: &PathBuf) -> PathBuf {
self.database_dir(root).join("frontend-state.json")
}
}

@@ -0,0 +1,46 @@
use serde::{Deserialize, Serialize};
use std::net::IpAddr;
#[derive(Clone, Debug, Deserialize, Serialize, Default)]
pub struct ServerSettings {
pub tcp: TcpServerSettings,
#[cfg(unix)]
pub unix_socket: UnixSocketServerSettings,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct TcpServerSettings {
pub enabled: bool,
pub listen_address: IpAddr,
pub port: PortSetting,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(untagged)]
pub enum PortSetting {
Fixed(u16),
Range((u16, u16)),
}
impl Default for TcpServerSettings {
fn default() -> Self {
Self {
enabled: cfg!(windows),
listen_address: IpAddr::from([127, 0, 0, 1]),
port: PortSetting::Range((13400, 13500)),
}
}
}
#[cfg(unix)]
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct UnixSocketServerSettings {
pub enabled: bool,
}
#[cfg(unix)]
impl Default for UnixSocketServerSettings {
fn default() -> Self {
Self { enabled: true }
}
}
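Because PortSetting is untagged, repo.toml may give either a single port or a (start, end) range. A self-contained sketch (not from this commit) showing both forms parsed with the toml crate, which is already a workspace dependency; the types are redeclared locally for the example:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum PortSetting {
    Fixed(u16),
    Range((u16, u16)),
}

#[derive(Debug, Deserialize)]
struct Probe {
    port: PortSetting,
}

fn main() {
    let fixed: Probe = toml::from_str("port = 13400").unwrap();
    let range: Probe = toml::from_str("port = [13400, 13500]").unwrap();
    println!("{:?} / {:?}", fixed, range);
}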

@@ -0,0 +1,20 @@
use crate::error::RepoResult;
use serde::{Deserialize, Serialize};
use std::net::IpAddr;
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct SettingsV1 {
pub listen_address: IpAddr,
pub port_range: (u16, u16),
pub database_path: String,
pub default_file_store: String,
pub thumbnail_store: String,
}
impl SettingsV1 {
/// Parses settings from a string
pub fn from_toml_string(s: &str) -> RepoResult<Self> {
let settings = toml::from_str(s)?;
Ok(settings)
}
}

@@ -1,5 +1,8 @@
use crate::settings::Settings;
+use mediarepo_api::types::repo::SizeType;
+use std::collections::HashMap;
use std::path::PathBuf;
+use tokio_graceful_shutdown::SubsystemHandle;
use typemap_rev::TypeMapKey;

pub struct SettingsKey;
@@ -13,3 +16,15 @@ pub struct RepoPathKey;
impl TypeMapKey for RepoPathKey {
type Value = PathBuf;
}
+
+pub struct SizeMetadataKey;
+
+impl TypeMapKey for SizeMetadataKey {
+type Value = HashMap<SizeType, u64>;
+}
+
+pub struct SubsystemKey;
+
+impl TypeMapKey for SubsystemKey {
+type Value = SubsystemHandle;
+}
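A brief sketch (not from this commit) of how the new keys are used with typemap_rev: the key type selects the stored value type, so the subsystem handle and the size cache live side by side in one map. Function and initialisation details are illustrative.

use mediarepo_api::types::repo::SizeType;
use std::collections::HashMap;
use typemap_rev::TypeMap;

fn cache_size(map: &mut TypeMap, kind: SizeType, size: u64) {
    // assumes the map was initialised with map.insert::<SizeMetadataKey>(HashMap::new())
    if let Some(sizes) = map.get_mut::<SizeMetadataKey>() {
        sizes.insert(kind, size);
    }
}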

@@ -0,0 +1,107 @@
-- Add migration script here
PRAGMA foreign_keys= off;
-- create backup files table
ALTER TABLE files
RENAME TO _files_old;
-- create backup hashes table
ALTER TABLE hashes
RENAME TO _hashes_old;
-- create backup hash_tag_mappings table
ALTER TABLE hash_tag_mappings
RENAME TO _hash_tag_mappings_old;
-- create backup hash_source_mappings table
ALTER TABLE hash_source_mappings
RENAME TO _hash_source_mappings_old;
-- create content id table
CREATE TABLE content_descriptors
(
id INTEGER PRIMARY KEY AUTOINCREMENT,
descriptor BLOB NOT NULL
);
CREATE UNIQUE INDEX content_descriptor_values ON content_descriptors (descriptor);
-- create content-id tag mappings table
CREATE TABLE cd_tag_mappings
(
cd_id INTEGER NOT NULL REFERENCES content_descriptors (id),
tag_id INTEGER NOT NULL REFERENCES tags (id),
PRIMARY KEY (cd_id, tag_id)
);
CREATE UNIQUE INDEX content_descriptor_tag_mapping_unique ON cd_tag_mappings (cd_id, tag_id);
CREATE INDEX content_descriptor_tag_mapping_tag ON cd_tag_mappings (tag_id);
-- create content-id source mappings table
CREATE TABLE cd_source_mappings
(
cd_id INTEGER NOT NULL REFERENCES content_descriptors (id),
source_id INTEGER NOT NULL REFERENCES sources (id),
PRIMARY KEY (cd_id, source_id)
);
CREATE UNIQUE INDEX content_descriptor_source_mapping_unique ON cd_source_mappings (cd_id, source_id);
-- create new files table
CREATE TABLE files
(
id INTEGER PRIMARY KEY AUTOINCREMENT,
status INTEGER NOT NULL DEFAULT 10,
storage_id INTEGER NOT NULL REFERENCES storage_locations (id),
cd_id INTEGER NOT NULL REFERENCES content_descriptors (id),
mime_type VARCHAR(128) NOT NULL DEFAULT 'application/octet-stream'
);
CREATE INDEX files_contend_descriptor ON files (cd_id);
-- create metadata table
CREATE TABLE file_metadata
(
file_id INTEGER PRIMARY KEY REFERENCES files (id),
size INTEGER NOT NULL,
name VARCHAR(128),
comment VARCHAR(1024),
import_time DATETIME NOT NULL,
creation_time DATETIME NOT NULL,
change_time DATETIME NOT NULL
);
CREATE UNIQUE INDEX file_metadata_file_id_unique ON file_metadata (file_id);
-- add content identifiers from hashes table
INSERT INTO content_descriptors
SELECT id, value
FROM _hashes_old;
-- add files from files table
INSERT INTO files (id, storage_id, cd_id, mime_type)
SELECT id, storage_id, hash_id AS content_id, mime_type
FROM _files_old;
-- add metadata from files table
INSERT INTO file_metadata
SELECT id AS file_id, size, name, comment, import_time, creation_time, change_time
FROM _files_old;
-- add content tag mappings
INSERT INTO cd_tag_mappings
SELECT hash_id AS content_id, tag_id
FROM _hash_tag_mappings_old;
-- add content id source mappings
INSERT INTO cd_source_mappings
SELECT hash_id AS content_id, source_id
FROM _hash_source_mappings_old;
-- drop all old tables
DROP TABLE _hash_source_mappings_old;
DROP TABLE _hash_tag_mappings_old;
DROP TABLE _files_old;
DROP TABLE _hashes_old;
pragma foreign_keys= on;

@@ -0,0 +1,50 @@
-- Add migration script here
PRAGMA foreign_keys= off;
-- rename old files table
ALTER TABLE files
RENAME TO _files_old;
-- rename metadata value (because of foreign key constraints)
ALTER TABLE file_metadata
RENAME TO _file_metadata_old;
-- create new files table
CREATE TABLE files
(
id INTEGER PRIMARY KEY AUTOINCREMENT,
status INTEGER NOT NULL DEFAULT 10,
cd_id INTEGER NOT NULL REFERENCES content_descriptors (id),
mime_type VARCHAR(128) NOT NULL DEFAULT 'application/octet-stream'
);
-- add data from files table
INSERT INTO files
SELECT id, status, cd_id, mime_type
FROM _files_old;
-- create metadata table
CREATE TABLE file_metadata
(
file_id INTEGER PRIMARY KEY REFERENCES files (id),
size INTEGER NOT NULL,
name VARCHAR(128),
comment VARCHAR(1024),
import_time DATETIME NOT NULL,
creation_time DATETIME NOT NULL,
change_time DATETIME NOT NULL
);
-- add back the old values
INSERT INTO file_metadata
SELECT *
FROM _file_metadata_old;
-- drop old tables
DROP TABLE _file_metadata_old;
DROP TABLE _files_old;
DROP TABLE storage_locations;
-- create indices on new tables
CREATE UNIQUE INDEX file_metadata_file_id_unique ON file_metadata (file_id);
CREATE INDEX files_content_descriptor ON files (cd_id);
PRAGMA foreign_keys= on;

@@ -1,11 +1,11 @@
use sea_orm::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
-#[sea_orm(table_name = "hashes")]
+#[sea_orm(table_name = "content_descriptors")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
-pub value: String,
+pub descriptor: Vec<u8>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -13,27 +13,35 @@ pub enum Relation {}
impl Related<super::file::Entity> for Entity {
fn to() -> RelationDef {
-super::file::Relation::Hash.def().rev()
+super::file::Relation::ContentDescriptorId.def().rev()
}
}

impl Related<super::tag::Entity> for Entity {
fn to() -> RelationDef {
-super::hash_tag::Relation::Tag.def()
+super::content_descriptor_tag::Relation::Tag.def()
}

fn via() -> Option<RelationDef> {
-Some(super::hash_tag::Relation::Hash.def().rev())
+Some(
+super::content_descriptor_tag::Relation::ContentDescriptorId
+.def()
+.rev(),
+)
}
}

impl Related<super::source::Entity> for Entity {
fn to() -> RelationDef {
-super::hash_source::Relation::Source.def()
+super::content_descriptor_source::Relation::Source.def()
}

fn via() -> Option<RelationDef> {
-Some(super::hash_source::Relation::Hash.def().rev())
+Some(
+super::content_descriptor_source::Relation::ContentDescriptorId
+.def()
+.rev(),
+)
}
}

@@ -1,10 +1,10 @@
use sea_orm::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
-#[sea_orm(table_name = "hash_source_mappings")]
+#[sea_orm(table_name = "cd_source_mappings")]
pub struct Model {
#[sea_orm(primary_key)]
-pub hash_id: i64,
+pub cd_id: i64,
#[sea_orm(primary_key)]
pub source_id: i64,
}
@@ -12,11 +12,11 @@ pub struct Model {
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
-belongs_to = "super::hash::Entity",
-from = "Column::HashId",
-to = "super::hash::Column::Id"
+belongs_to = "super::content_descriptor::Entity",
+from = "Column::CdId",
+to = "super::content_descriptor::Column::Id"
)]
-Hash,
+ContentDescriptorId,
#[sea_orm(
belongs_to = "super::source::Entity",
from = "Column::SourceId",
@@ -25,9 +25,9 @@ pub enum Relation {
Source,
}

-impl Related<super::hash::Entity> for Entity {
+impl Related<super::content_descriptor::Entity> for Entity {
fn to() -> RelationDef {
-Relation::Hash.def()
+Relation::ContentDescriptorId.def()
}
}

@@ -1,10 +1,10 @@
use sea_orm::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
-#[sea_orm(table_name = "hash_tag_mappings")]
+#[sea_orm(table_name = "cd_tag_mappings")]
pub struct Model {
#[sea_orm(primary_key)]
-pub hash_id: i64,
+pub cd_id: i64,
#[sea_orm(primary_key)]
pub tag_id: i64,
}
@@ -12,11 +12,11 @@ pub struct Model {
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
-belongs_to = "super::hash::Entity",
-from = "Column::HashId",
-to = "super::hash::Column::Id"
+belongs_to = "super::content_descriptor::Entity",
+from = "Column::CdId",
+to = "super::content_descriptor::Column::Id"
)]
-Hash,
+ContentDescriptorId,
#[sea_orm(
belongs_to = "super::tag::Entity",
from = "Column::TagId",
@@ -25,9 +25,9 @@ pub enum Relation {
Tag,
}

-impl Related<super::hash::Entity> for Entity {
+impl Related<super::content_descriptor::Entity> for Entity {
fn to() -> RelationDef {
-Relation::Hash.def()
+Relation::ContentDescriptorId.def()
}
}

@@ -1,4 +1,3 @@
-use chrono::NaiveDateTime;
use sea_orm::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
@@ -6,44 +5,30 @@ use sea_orm::prelude::*;
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
-pub file_type: u32,
-pub name: Option<String>,
-pub comment: Option<String>,
-pub mime_type: Option<String>,
-pub size: Option<i64>,
-pub storage_id: i64,
-pub hash_id: i64,
-pub import_time: NaiveDateTime,
-pub creation_time: NaiveDateTime,
-pub change_time: NaiveDateTime,
+pub status: i32,
+pub mime_type: String,
+pub cd_id: i64,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
-belongs_to = "super::hash::Entity",
-from = "Column::HashId",
-to = "super::hash::Column::Id"
+belongs_to = "super::content_descriptor::Entity",
+from = "Column::CdId",
+to = "super::content_descriptor::Column::Id"
)]
-Hash,
-#[sea_orm(
-belongs_to = "super::storage::Entity",
-from = "Column::StorageId",
-to = "super::storage::Column::Id"
-)]
-Storage,
+ContentDescriptorId,
}

-impl Related<super::hash::Entity> for Entity {
+impl Related<super::content_descriptor::Entity> for Entity {
fn to() -> RelationDef {
-Relation::Hash.def()
+Relation::ContentDescriptorId.def()
}
}

-impl Related<super::storage::Entity> for Entity {
+impl Related<super::file_metadata::Entity> for Entity {
fn to() -> RelationDef {
-Relation::Storage.def()
+super::file_metadata::Relation::File.def().rev()
}
}

@@ -0,0 +1,32 @@
use chrono::NaiveDateTime;
use sea_orm::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "file_metadata")]
pub struct Model {
#[sea_orm(primary_key)]
pub file_id: i64,
pub name: Option<String>,
pub comment: Option<String>,
pub size: i64,
pub import_time: NaiveDateTime,
pub creation_time: NaiveDateTime,
pub change_time: NaiveDateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::file::Entity",
from = "Column::FileId",
to = "super::file::Column::Id"
)]
File,
}
impl Related<super::file::Entity> for Entity {
fn to() -> RelationDef {
Relation::File.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

@@ -1,8 +1,8 @@
+pub mod content_descriptor;
+pub mod content_descriptor_source;
+pub mod content_descriptor_tag;
pub mod file;
-pub mod hash;
-pub mod hash_source;
-pub mod hash_tag;
+pub mod file_metadata;
pub mod namespace;
pub mod source;
-pub mod storage;
pub mod tag;

@@ -11,13 +11,17 @@ pub struct Model {
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

-impl Related<super::hash::Entity> for Entity {
+impl Related<super::content_descriptor::Entity> for Entity {
fn to() -> RelationDef {
-super::hash_source::Relation::Hash.def()
+super::content_descriptor_source::Relation::ContentDescriptorId.def()
}

fn via() -> Option<RelationDef> {
-Some(super::hash_source::Relation::Source.def().rev())
+Some(
+super::content_descriptor_source::Relation::Source
+.def()
+.rev(),
+)
}
}

@@ -1,24 +0,0 @@
use sea_orm::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "storage_locations")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub name: String,
pub path: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::file::Entity")]
File,
}
impl Related<super::file::Entity> for Entity {
fn to() -> RelationDef {
Relation::File.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

@@ -19,13 +19,13 @@ pub enum Relation {
Namespace,
}

-impl Related<super::hash::Entity> for Entity {
+impl Related<super::content_descriptor::Entity> for Entity {
fn to() -> RelationDef {
-super::hash_tag::Relation::Hash.def()
+super::content_descriptor_tag::Relation::ContentDescriptorId.def()
}

fn via() -> Option<RelationDef> {
-Some(super::hash_tag::Relation::Tag.def().rev())
+Some(super::content_descriptor_tag::Relation::Tag.def().rev())
}
}

@@ -1,6 +1,7 @@
use mediarepo_core::error::RepoDatabaseResult;
-use sea_orm::{Database, DatabaseConnection};
+use sea_orm::{ConnectOptions, Database, DatabaseConnection};
use sqlx::migrate::MigrateDatabase;
+use std::time::Duration;

pub mod entities;
pub mod queries;
@@ -8,7 +9,11 @@ pub mod queries;
/// Connects to the database, runs migrations and returns the RepoDatabase wrapper type
pub async fn get_database<S: AsRef<str>>(uri: S) -> RepoDatabaseResult<DatabaseConnection> {
migrate(uri.as_ref()).await?;
-let conn = Database::connect(uri.as_ref()).await?;
+let mut opt = ConnectOptions::new(uri.as_ref().to_string());
+opt.connect_timeout(Duration::from_secs(10))
+.idle_timeout(Duration::from_secs(10));
+
+let conn = Database::connect(opt).await?;

Ok(conn)
}

@@ -7,7 +7,7 @@ use mediarepo_core::error::{RepoError, RepoResult};
#[derive(Debug, FromQueryResult)]
pub struct Counts {
pub file_count: i64,
-pub hash_count: i64,
+pub cd_count: i64,
pub tag_count: i64,
pub namespace_count: i64,
pub source_count: i64,
@@ -20,11 +20,11 @@ pub async fn get_all_counts(db: &DatabaseConnection) -> RepoResult<Counts> {
r#"
SELECT *
FROM (SELECT COUNT(*) AS file_count FROM files),
-(SELECT COUNT(*) AS hash_count FROM hashes),
+(SELECT COUNT(*) AS cd_count FROM content_descriptors),
(SELECT COUNT(*) AS tag_count FROM tags),
(SELECT COUNT(*) AS namespace_count FROM namespaces),
(SELECT COUNT(*) AS source_count FROM sources),
-(SELECT COUNT(*) AS mapping_count FROM hash_tag_mappings)
+(SELECT COUNT(*) AS mapping_count FROM cd_tag_mappings)
"#,
vec![],
))

@@ -7,27 +7,27 @@ use std::fmt::Display;
use std::iter::FromIterator;

#[derive(Debug, FromQueryResult)]
-struct HashNamespaceTags {
-hash_id: i64,
+struct CIDNamespaceTag {
+cd_id: i64,
namespace: String,
tag: String,
}

#[tracing::instrument(level = "debug", skip_all)]
-pub async fn get_hashes_with_namespaced_tags(
+pub async fn get_cids_with_namespaced_tags(
db: &DatabaseConnection,
hash_ids: Vec<i64>,
) -> RepoResult<HashMap<i64, HashMap<String, Vec<String>>>> {
-let hash_namespace_tags: Vec<HashNamespaceTags> =
-HashNamespaceTags::find_by_statement(Statement::from_sql_and_values(
+let hash_namespace_tags: Vec<CIDNamespaceTag> =
+CIDNamespaceTag::find_by_statement(Statement::from_sql_and_values(
DbBackend::Sqlite,
format!(
-r#"SELECT htm.hash_id, n.name as namespace, t.name as tag
-FROM hash_tag_mappings htm
-INNER JOIN tags t on htm.tag_id = t.id
+r#"SELECT ctm.cd_id, n.name as namespace, t.name as tag
+FROM cd_tag_mappings ctm
+INNER JOIN tags t on ctm.tag_id = t.id
JOIN namespaces n on t.namespace_id = n.id
WHERE t.namespace_id IS NOT NULL
-AND htm.hash_id IN ({}) ORDER BY t.namespace_id;"#,
+AND ctm.cd_id IN ({}) ORDER BY t.namespace_id;"#,
vec_to_query_list(hash_ids)
)
.as_str(),
@@ -35,49 +35,49 @@ pub async fn get_hashes_with_namespaced_tags(
))
.all(db)
.await?;
-let mut hash_namespaces: HashMap<i64, HashMap<String, Vec<String>>> = HashMap::new();
+let mut cd_id_namespaces: HashMap<i64, HashMap<String, Vec<String>>> = HashMap::new();
for hnt in hash_namespace_tags {
-if let Some(entry) = hash_namespaces.get_mut(&hnt.hash_id) {
+if let Some(entry) = cd_id_namespaces.get_mut(&hnt.cd_id) {
if let Some(nsp_entry) = entry.get_mut(&hnt.namespace) {
nsp_entry.push(hnt.tag);
} else {
entry.insert(hnt.namespace, vec![hnt.tag]);
}
} else {
-hash_namespaces.insert(
-hnt.hash_id,
+cd_id_namespaces.insert(
+hnt.cd_id,
HashMap::from_iter(vec![(hnt.namespace, vec![hnt.tag])].into_iter()),
);
}
}

-Ok(hash_namespaces)
+Ok(cd_id_namespaces)
}

#[derive(Debug, FromQueryResult)]
-struct HashTagCount {
-hash_id: i64,
+struct CIDTagCount {
+cd_id: i64,
tag_count: i32,
}

#[tracing::instrument(level = "debug", skip_all)]
-pub async fn get_hashes_with_tag_count(
+pub async fn get_content_descriptors_with_tag_count(
db: &DatabaseConnection,
-hash_ids: Vec<i64>,
+cd_ids: Vec<i64>,
) -> RepoResult<HashMap<i64, u32>> {
-if hash_ids.is_empty() {
+if cd_ids.is_empty() {
return Ok(HashMap::new());
}
-let hash_tag_counts: Vec<HashTagCount> =
-HashTagCount::find_by_statement(Statement::from_sql_and_values(
+let hash_tag_counts: Vec<CIDTagCount> =
+CIDTagCount::find_by_statement(Statement::from_sql_and_values(
DbBackend::Sqlite,
format!(
r#"
-SELECT htm.hash_id, COUNT(htm.tag_id) AS "tag_count" from hash_tag_mappings htm
-WHERE htm.hash_id IN ({})
-GROUP BY hash_id
+SELECT ctm.cd_id, COUNT(ctm.tag_id) AS "tag_count" from cd_tag_mappings ctm
+WHERE ctm.cd_id IN ({})
+GROUP BY cd_id
"#,
-vec_to_query_list(hash_ids)
+vec_to_query_list(cd_ids)
)
.as_str(),
vec![],
@@ -87,7 +87,12 @@ pub async fn get_hashes_with_tag_count(
let mappings = hash_tag_counts
.into_iter()
-.map(|HashTagCount { hash_id, tag_count }| (hash_id, tag_count as u32))
+.map(
+|CIDTagCount {
+cd_id: hash_id,
+tag_count,
+}| (hash_id, tag_count as u32),
+)
.collect::<HashMap<i64, u32>>();

Ok(mappings)

@@ -15,7 +15,6 @@ mime = "^0.3.16"
tracing = "^0.1.29"
async-trait = "^0.1.51"

[dependencies.mediarepo-core]
path = "../mediarepo-core"

@ -1,26 +1,38 @@
use crate::file::File; use crate::file::File;
use mediarepo_core::content_descriptor::convert_v1_descriptor_to_v2;
use mediarepo_core::error::RepoResult; use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::file; use mediarepo_database::entities::file;
use mediarepo_database::entities::hash;
use sea_orm::prelude::*; use sea_orm::prelude::*;
use sea_orm::{DatabaseConnection, Set}; use sea_orm::{DatabaseConnection, Set};
use std::fmt::Debug; use std::fmt::Debug;
pub struct Hash { pub struct ContentDescriptor {
db: DatabaseConnection, db: DatabaseConnection,
model: hash::Model, model: content_descriptor::Model,
} }
impl Hash { impl ContentDescriptor {
#[tracing::instrument(level = "trace")] #[tracing::instrument(level = "trace")]
pub(crate) fn new(db: DatabaseConnection, model: hash::Model) -> Self { pub(crate) fn new(db: DatabaseConnection, model: content_descriptor::Model) -> Self {
Self { db, model } Self { db, model }
} }
pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<Self>> {
let descriptors = content_descriptor::Entity::find()
.all(&db)
.await?
.into_iter()
.map(|model| Self::new(db.clone(), model))
.collect();
Ok(descriptors)
}
/// Searches for the hash by id /// Searches for the hash by id
#[tracing::instrument(level = "debug", skip(db))] #[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> { pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
let hash = hash::Entity::find_by_id(id) let hash = content_descriptor::Entity::find_by_id(id)
.one(&db) .one(&db)
.await? .await?
.map(|model| Self::new(db, model)); .map(|model| Self::new(db, model));
@ -30,24 +42,24 @@ impl Hash {
/// Returns the hash by value /// Returns the hash by value
#[tracing::instrument(level = "debug", skip(db))] #[tracing::instrument(level = "debug", skip(db))]
pub async fn by_value<S: AsRef<str> + Debug>( pub async fn by_value<D: AsRef<[u8]> + Debug>(
db: DatabaseConnection, db: DatabaseConnection,
value: S, descriptor: D,
) -> RepoResult<Option<Self>> { ) -> RepoResult<Option<Self>> {
let hash = hash::Entity::find() let cid = content_descriptor::Entity::find()
.filter(hash::Column::Value.eq(value.as_ref())) .filter(content_descriptor::Column::Descriptor.eq(descriptor.as_ref()))
.one(&db) .one(&db)
.await? .await?
.map(|model| Self::new(db, model)); .map(|model| Self::new(db, model));
Ok(hash) Ok(cid)
} }
/// Adds a new hash to the database /// Adds a new hash to the database
#[tracing::instrument(level = "debug", skip(db))] #[tracing::instrument(level = "debug", skip(db))]
pub async fn add(db: DatabaseConnection, value: String) -> RepoResult<Self> { pub async fn add(db: DatabaseConnection, descriptor: Vec<u8>) -> RepoResult<Self> {
let active_model = hash::ActiveModel { let active_model = content_descriptor::ActiveModel {
value: Set(value), descriptor: Set(descriptor),
..Default::default() ..Default::default()
}; };
let model = active_model.insert(&db).await?; let model = active_model.insert(&db).await?;
@ -59,8 +71,8 @@ impl Hash {
self.model.id self.model.id
} }
pub fn value(&self) -> &String { pub fn descriptor(&self) -> &[u8] {
&self.model.value &self.model.descriptor[..]
} }
/// Returns the file associated with the hash /// Returns the file associated with the hash
@ -75,4 +87,15 @@ impl Hash {
Ok(file) Ok(file)
} }
pub async fn convert_v1_to_v2(&mut self) -> RepoResult<()> {
let descriptor = convert_v1_descriptor_to_v2(&self.model.descriptor)?;
let active_model = content_descriptor::ActiveModel {
id: Set(self.id()),
descriptor: Set(descriptor),
};
self.model = active_model.update(&self.db).await?;
Ok(())
}
} }
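For orientation while reviewing: a minimal usage sketch of the renamed ContentDescriptor API above (not part of this commit). It assumes `db` is an already-open sea-orm DatabaseConnection and `raw_cd` an already-decoded descriptor; both are hypothetical.

// Sketch only: reuse an existing content descriptor row or insert a new one.
async fn find_or_add_cd(
    db: sea_orm::DatabaseConnection,
    raw_cd: Vec<u8>,
) -> mediarepo_core::error::RepoResult<ContentDescriptor> {
    if let Some(existing) = ContentDescriptor::by_value(db.clone(), &raw_cd).await? {
        Ok(existing)
    } else {
        ContentDescriptor::add(db, raw_cd).await
    }
}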

@ -1,395 +0,0 @@
use std::fmt::Debug;
use std::io::Cursor;
use std::str::FromStr;
use chrono::{Local, NaiveDateTime};
use sea_orm::prelude::*;
use sea_orm::sea_query::{Expr, Query};
use sea_orm::{Condition, DatabaseConnection, Set};
use sea_orm::{JoinType, QuerySelect};
use tokio::io::{AsyncReadExt, BufReader};
use mediarepo_core::error::RepoResult;
use mediarepo_core::thumbnailer::{self, Thumbnail as ThumbnailerThumb, ThumbnailSize};
use mediarepo_database::entities::file;
use mediarepo_database::entities::hash;
use mediarepo_database::entities::hash_tag;
use mediarepo_database::entities::namespace;
use mediarepo_database::entities::tag;
use crate::file_type::FileType;
use crate::storage::Storage;
use crate::tag::Tag;
#[derive(Clone)]
pub struct File {
db: DatabaseConnection,
model: file::Model,
hash: hash::Model,
}
impl File {
#[tracing::instrument(level = "trace")]
pub(crate) fn new(db: DatabaseConnection, model: file::Model, hash: hash::Model) -> Self {
Self { db, model, hash }
}
/// Returns a list of all known stored files
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<File>> {
let files: Vec<(file::Model, Option<hash::Model>)> = file::Entity::find()
.find_also_related(hash::Entity)
.all(&db)
.await?;
let files = files
.into_iter()
.filter_map(|(f, h)| {
let h = h?;
Some(Self::new(db.clone(), f, h))
})
.collect();
Ok(files)
}
/// Fetches the file by id
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
if let Some((model, Some(hash))) = file::Entity::find_by_id(id)
.find_also_related(hash::Entity)
.one(&db)
.await?
{
let file = File::new(db, model, hash);
Ok(Some(file))
} else {
Ok(None)
}
}
/// Finds the file by hash
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_hash<S: AsRef<str> + Debug>(
db: DatabaseConnection,
hash: S,
) -> RepoResult<Option<Self>> {
if let Some((hash, Some(model))) = hash::Entity::find()
.filter(hash::Column::Value.eq(hash.as_ref()))
.find_also_related(file::Entity)
.one(&db)
.await?
{
let file = File::new(db, model, hash);
Ok(Some(file))
} else {
Ok(None)
}
}
/// Finds the file by tags
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn find_by_tags(
db: DatabaseConnection,
tag_ids: Vec<Vec<(i64, bool)>>,
) -> RepoResult<Vec<Self>> {
let main_condition = build_find_filter_conditions(tag_ids);
let results: Vec<(hash::Model, Option<file::Model>)> = hash::Entity::find()
.find_also_related(file::Entity)
.filter(main_condition)
.group_by(file::Column::Id)
.all(&db)
.await?;
let files: Vec<Self> = results
.into_iter()
.filter_map(|(hash, tag)| Some(Self::new(db.clone(), tag?, hash)))
.collect();
Ok(files)
}
/// Adds a file with its hash to the database
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn add(
db: DatabaseConnection,
storage_id: i64,
hash_id: i64,
file_type: FileType,
mime_type: Option<String>,
creation_time: NaiveDateTime,
change_time: NaiveDateTime,
) -> RepoResult<Self> {
let file = file::ActiveModel {
hash_id: Set(hash_id),
file_type: Set(file_type as u32),
mime_type: Set(mime_type),
storage_id: Set(storage_id),
import_time: Set(Local::now().naive_local()),
creation_time: Set(creation_time),
change_time: Set(change_time),
..Default::default()
};
let file: file::ActiveModel = file.insert(&db).await?.into();
let file = Self::by_id(db, file.id.unwrap())
.await?
.expect("Inserted file does not exist");
Ok(file)
}
/// Returns the unique identifier of the file
pub fn id(&self) -> i64 {
self.model.id
}
/// Returns the hash of the file (content identifier)
pub fn hash(&self) -> &String {
&self.hash.value
}
/// Returns the hash id of the file
pub fn hash_id(&self) -> i64 {
self.hash.id
}
/// Returns the type of the file
pub fn file_type(&self) -> FileType {
match self.model.file_type {
1 => FileType::Image,
2 => FileType::Video,
3 => FileType::Audio,
_ => FileType::Unknown,
}
}
/// Returns the optional mime type of the file
pub fn mime_type(&self) -> &Option<String> {
&self.model.mime_type
}
/// Returns the optional name of the file
pub fn name(&self) -> &Option<String> {
&self.model.name
}
/// Returns the comment of the file
pub fn comment(&self) -> &Option<String> {
&self.model.comment
}
/// Returns the import time of the file
pub fn import_time(&self) -> &NaiveDateTime {
&self.model.import_time
}
/// Returns the datetime when the file was created
pub fn creation_time(&self) -> &NaiveDateTime {
&self.model.creation_time
}
/// Returns the last time the file was changed
pub fn change_time(&self) -> &NaiveDateTime {
&self.model.change_time
}
/// Returns the storage where the file is stored
pub async fn storage(&self) -> RepoResult<Storage> {
let storage = Storage::by_id(self.db.clone(), self.model.storage_id)
.await?
.expect("The FK storage_id doesn't exist?!");
Ok(storage)
}
/// Returns the list of tags of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tags(&self) -> RepoResult<Vec<Tag>> {
let tags: Vec<(tag::Model, Option<namespace::Model>)> = tag::Entity::find()
.find_also_related(namespace::Entity)
.join(JoinType::LeftJoin, hash_tag::Relation::Tag.def().rev())
.join(JoinType::InnerJoin, hash_tag::Relation::Hash.def())
.filter(hash::Column::Id.eq(self.hash.id))
.all(&self.db)
.await?;
let tags = tags
.into_iter()
.map(|(tag, namespace)| Tag::new(self.db.clone(), tag, namespace))
.collect();
Ok(tags)
}
/// Changes the name of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_name<S: ToString + Debug>(&mut self, name: S) -> RepoResult<()> {
let mut active_file = self.get_active_model();
active_file.name = Set(Some(name.to_string()));
let active_file = active_file.update(&self.db).await?;
self.model.name = active_file.name;
Ok(())
}
/// Changes the comment of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_comment<S: ToString + Debug>(&mut self, comment: S) -> RepoResult<()> {
let mut active_file = self.get_active_model();
active_file.comment = Set(Some(comment.to_string()));
let active_file = active_file.update(&self.db).await?;
self.model.comment = active_file.comment;
Ok(())
}
/// Changes the type of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_file_type(&mut self, file_type: FileType) -> RepoResult<()> {
let mut active_file = self.get_active_model();
active_file.file_type = Set(file_type as u32);
let active_file = active_file.update(&self.db).await?;
self.model.file_type = active_file.file_type;
Ok(())
}
/// Adds a single tag to the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_tag(&mut self, tag_id: i64) -> RepoResult<()> {
let hash_id = self.hash.id;
let active_model = hash_tag::ActiveModel {
hash_id: Set(hash_id),
tag_id: Set(tag_id),
};
active_model.insert(&self.db).await?;
Ok(())
}
/// Adds multiple tags to the file at once
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
if tag_ids.is_empty() {
return Ok(());
}
let hash_id = self.hash.id;
let models: Vec<hash_tag::ActiveModel> = tag_ids
.into_iter()
.map(|tag_id| hash_tag::ActiveModel {
hash_id: Set(hash_id),
tag_id: Set(tag_id),
})
.collect();
hash_tag::Entity::insert_many(models).exec(&self.db).await?;
Ok(())
}
/// Removes multiple tags from the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn remove_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
let hash_id = self.hash.id;
hash_tag::Entity::delete_many()
.filter(hash_tag::Column::HashId.eq(hash_id))
.filter(hash_tag::Column::TagId.is_in(tag_ids))
.exec(&self.db)
.await?;
Ok(())
}
/// Returns the reader for the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_reader(&self) -> RepoResult<BufReader<tokio::fs::File>> {
let storage = self.storage().await?;
storage.get_file_reader(&self.hash.value).await
}
/// Retrieves the size of the file from its content
#[tracing::instrument(level = "trace", skip(self))]
pub async fn get_size(&self) -> RepoResult<u64> {
if let Some(size) = self.model.size {
Ok(size as u64)
} else {
let mut reader = self.get_reader().await?;
let size = {
let mut buf = Vec::new();
reader.read_to_end(&mut buf).await
}?;
let mut model = self.get_active_model();
model.size = Set(Some(size as i64));
model.update(&self.db).await?;
Ok(size as u64)
}
}
/// Creates a thumbnail for the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn create_thumbnail<I: IntoIterator<Item = ThumbnailSize> + Debug>(
&self,
sizes: I,
) -> RepoResult<Vec<ThumbnailerThumb>> {
let mut buf = Vec::new();
self.get_reader().await?.read_to_end(&mut buf).await?;
let mime_type = self
.model
.mime_type
.clone()
.map(|mime_type| mime::Mime::from_str(&mime_type).unwrap())
.unwrap_or(mime::IMAGE_STAR);
let thumbs = thumbnailer::create_thumbnails(Cursor::new(buf), mime_type, sizes)?;
Ok(thumbs)
}
/// Returns the active model of the file with only the id set
fn get_active_model(&self) -> file::ActiveModel {
file::ActiveModel {
id: Set(self.id()),
..Default::default()
}
}
}
fn build_find_filter_conditions(tag_ids: Vec<Vec<(i64, bool)>>) -> Condition {
let mut main_condition = Condition::all();
for mut expression in tag_ids {
if expression.len() == 1 {
let (tag_id, negated) = expression.pop().unwrap();
main_condition = add_single_filter_expression(main_condition, tag_id, negated)
} else if !expression.is_empty() {
let mut sub_condition = Condition::any();
for (tag, negated) in expression {
sub_condition = add_single_filter_expression(sub_condition, tag, negated);
}
main_condition = main_condition.add(sub_condition);
}
}
main_condition
}
fn add_single_filter_expression(condition: Condition, tag_id: i64, negated: bool) -> Condition {
if negated {
condition.add(
hash::Column::Id.not_in_subquery(
Query::select()
.expr(Expr::col(hash_tag::Column::HashId))
.from(hash_tag::Entity)
.cond_where(hash_tag::Column::TagId.eq(tag_id))
.to_owned(),
),
)
} else {
condition.add(
hash::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(hash_tag::Column::HashId))
.from(hash_tag::Entity)
.cond_where(hash_tag::Column::TagId.eq(tag_id))
.to_owned(),
),
)
}
}

@ -0,0 +1,223 @@
use chrono::NaiveDateTime;
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::content_descriptor_tag;
use mediarepo_database::entities::file;
use mediarepo_database::entities::file_metadata;
use sea_orm::sea_query::{Alias, Expr, Query, SimpleExpr};
use sea_orm::ColumnTrait;
use sea_orm::Condition;
macro_rules! apply_ordering_comparator {
($column:expr, $filter:expr) => {
match $filter {
OrderingComparator::Less(value) => $column.lt(value),
OrderingComparator::Equal(value) => $column.eq(value),
OrderingComparator::Greater(value) => $column.gt(value),
OrderingComparator::Between((min_value, max_value)) => {
$column.between(min_value, max_value)
}
}
};
}
#[derive(Clone, Debug)]
pub enum FilterProperty {
TagId(NegatableComparator<i64>),
TagWildcardIds(NegatableComparator<Vec<i64>>),
ContentDescriptor(NegatableComparator<Vec<u8>>),
TagCount(OrderingComparator<i64>),
FileProperty(FilterFileProperty),
}
#[derive(Clone, Debug)]
pub enum FilterFileProperty {
Id(NegatableComparator<i64>),
Status(NegatableComparator<i64>),
FileSize(OrderingComparator<i64>),
ImportedTime(OrderingComparator<NaiveDateTime>),
ChangedTime(OrderingComparator<NaiveDateTime>),
CreatedTime(OrderingComparator<NaiveDateTime>),
}
#[derive(Clone, Debug)]
pub enum OrderingComparator<T> {
Less(T),
Equal(T),
Greater(T),
Between((T, T)),
}
#[derive(Clone, Debug)]
pub enum NegatableComparator<T> {
Is(T),
IsNot(T),
}
#[tracing::instrument(level = "debug")]
pub fn build_find_filter_conditions(filters: Vec<Vec<FilterProperty>>) -> Condition {
filters
.into_iter()
.fold(Condition::all(), |all_cond, mut expression| {
if expression.len() == 1 {
let property = expression.pop().unwrap();
all_cond.add(build_single_filter(property))
} else if !expression.is_empty() {
let sub_condition = expression.into_iter().fold(Condition::any(), |cond, prop| {
cond.add(build_single_filter(prop))
});
all_cond.add(sub_condition)
} else {
all_cond
}
})
}
#[inline]
fn build_single_filter(property: FilterProperty) -> SimpleExpr {
match property {
FilterProperty::TagId(tag_filter) => build_tag_id_filter(tag_filter),
FilterProperty::TagWildcardIds(wildcard_filter) => {
build_tag_wildcard_ids_filter(wildcard_filter)
}
FilterProperty::ContentDescriptor(cd_filter) => build_content_descriptor_filter(cd_filter),
FilterProperty::TagCount(count_filter) => build_tag_count_filter(count_filter),
FilterProperty::FileProperty(property_filter) => {
build_file_property_filter(property_filter)
}
}
}
fn build_tag_id_filter(filter: NegatableComparator<i64>) -> SimpleExpr {
match filter {
NegatableComparator::Is(tag_id) => content_descriptor::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.eq(tag_id))
.to_owned(),
),
NegatableComparator::IsNot(tag_id) => content_descriptor::Column::Id.not_in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.eq(tag_id))
.to_owned(),
),
}
}
fn build_tag_wildcard_ids_filter(filter: NegatableComparator<Vec<i64>>) -> SimpleExpr {
match filter {
NegatableComparator::Is(tag_ids) => content_descriptor::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.is_in(tag_ids))
.to_owned(),
),
NegatableComparator::IsNot(tag_ids) => content_descriptor::Column::Id.not_in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.is_in(tag_ids))
.to_owned(),
),
}
}
fn build_content_descriptor_filter(filter: NegatableComparator<Vec<u8>>) -> SimpleExpr {
match filter {
NegatableComparator::Is(cd) => content_descriptor::Column::Descriptor.eq(cd),
NegatableComparator::IsNot(cd) => content_descriptor::Column::Descriptor.ne(cd),
}
}
fn build_tag_count_filter(filter: OrderingComparator<i64>) -> SimpleExpr {
let count_column = Alias::new("count");
let cd_id_column = Alias::new("cd_id");
let count_subquery = Query::select()
.expr_as(
Expr::col(content_descriptor_tag::Column::CdId),
cd_id_column.clone(),
)
.expr_as(
content_descriptor_tag::Column::TagId.count(),
count_column.clone(),
)
.from(content_descriptor_tag::Entity)
.group_by_col(cd_id_column.clone())
.to_owned();
let count_expression = apply_ordering_comparator!(Expr::col(count_column), filter);
content_descriptor::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(cd_id_column))
.from_subquery(count_subquery, Alias::new("tag_counts"))
.cond_where(count_expression)
.to_owned(),
)
}
#[inline]
fn build_file_property_filter(property: FilterFileProperty) -> SimpleExpr {
match property {
FilterFileProperty::Id(id_filter) => build_file_id_filter(id_filter),
FilterFileProperty::Status(status_filter) => build_file_status_filter(status_filter),
FilterFileProperty::FileSize(size_filter) => {
build_file_metadata_filter(build_file_size_filter(size_filter))
}
FilterFileProperty::ImportedTime(time_filter) => {
build_file_metadata_filter(build_file_import_time_filter(time_filter))
}
FilterFileProperty::ChangedTime(time_filter) => {
build_file_metadata_filter(build_file_changed_time_filter(time_filter))
}
FilterFileProperty::CreatedTime(time_filter) => {
build_file_metadata_filter(build_file_created_time_filter(time_filter))
}
}
}
fn build_file_id_filter(filter: NegatableComparator<i64>) -> SimpleExpr {
match filter {
NegatableComparator::Is(id) => file::Column::Id.eq(id),
NegatableComparator::IsNot(id) => file::Column::Id.ne(id),
}
}
fn build_file_status_filter(filter: NegatableComparator<i64>) -> SimpleExpr {
match filter {
NegatableComparator::Is(status) => file::Column::Status.eq(status),
NegatableComparator::IsNot(status) => file::Column::Status.ne(status),
}
}
fn build_file_metadata_filter(property_condition: SimpleExpr) -> SimpleExpr {
file::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(file_metadata::Column::FileId))
.from(file_metadata::Entity)
.cond_where(property_condition)
.to_owned(),
)
}
fn build_file_size_filter(filter: OrderingComparator<i64>) -> SimpleExpr {
apply_ordering_comparator!(file_metadata::Column::Size, filter)
}
fn build_file_import_time_filter(filter: OrderingComparator<NaiveDateTime>) -> SimpleExpr {
apply_ordering_comparator!(file_metadata::Column::ImportTime, filter)
}
fn build_file_changed_time_filter(filter: OrderingComparator<NaiveDateTime>) -> SimpleExpr {
apply_ordering_comparator!(file_metadata::Column::ChangeTime, filter)
}
fn build_file_created_time_filter(filter: OrderingComparator<NaiveDateTime>) -> SimpleExpr {
apply_ordering_comparator!(file_metadata::Column::CreationTime, filter)
}
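As a quick illustration of how the new filter module composes (the outer Vec is AND-ed, each inner Vec is OR-ed), a query like "tag 1 AND NOT tag 7 AND size above 1 MiB" could be built roughly as below. The tag ids and the size threshold are made up for the example; the functions and enums are the ones defined above.

// Hypothetical example values, sketch only.
fn example_condition() -> sea_orm::Condition {
    build_find_filter_conditions(vec![
        vec![FilterProperty::TagId(NegatableComparator::Is(1))],
        vec![FilterProperty::TagId(NegatableComparator::IsNot(7))],
        vec![FilterProperty::FileProperty(FilterFileProperty::FileSize(
            OrderingComparator::Greater(1024 * 1024),
        ))],
    ])
}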

@ -0,0 +1,334 @@
pub mod filter;
use std::fmt::Debug;
use std::io::Cursor;
use std::str::FromStr;
use mediarepo_core::content_descriptor::encode_content_descriptor;
use sea_orm::prelude::*;
use sea_orm::{ConnectionTrait, DatabaseConnection, Set};
use sea_orm::{JoinType, QuerySelect};
use tokio::io::{AsyncReadExt, BufReader};
use crate::file::filter::FilterProperty;
use crate::file_metadata::FileMetadata;
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_core::mediarepo_api::types::files::FileStatus as ApiFileStatus;
use mediarepo_core::thumbnailer::{self, Thumbnail as ThumbnailerThumb, ThumbnailSize};
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::content_descriptor_tag;
use mediarepo_database::entities::file;
use mediarepo_database::entities::file_metadata;
use mediarepo_database::entities::namespace;
use mediarepo_database::entities::tag;
use crate::tag::Tag;
pub enum FileStatus {
Imported = 10,
Archived = 20,
Deleted = 30,
}
impl From<ApiFileStatus> for FileStatus {
fn from(s: ApiFileStatus) -> Self {
match s {
ApiFileStatus::Imported => Self::Imported,
ApiFileStatus::Archived => Self::Archived,
ApiFileStatus::Deleted => Self::Deleted,
}
}
}
#[derive(Clone)]
pub struct File {
db: DatabaseConnection,
model: file::Model,
content_descriptor: content_descriptor::Model,
}
impl File {
#[tracing::instrument(level = "trace")]
pub(crate) fn new(
db: DatabaseConnection,
model: file::Model,
hash: content_descriptor::Model,
) -> Self {
Self {
db,
model,
content_descriptor: hash,
}
}
/// Returns a list of all known stored files
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<File>> {
let files: Vec<(file::Model, Option<content_descriptor::Model>)> = file::Entity::find()
.find_also_related(content_descriptor::Entity)
.all(&db)
.await?;
let files = files
.into_iter()
.filter_map(|(f, h)| {
let h = h?;
Some(Self::new(db.clone(), f, h))
})
.collect();
Ok(files)
}
/// Fetches the file by id
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
if let Some((model, Some(hash))) = file::Entity::find_by_id(id)
.find_also_related(content_descriptor::Entity)
.one(&db)
.await?
{
let file = File::new(db, model, hash);
Ok(Some(file))
} else {
Ok(None)
}
}
/// Finds the file by its content descriptor
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_cd(db: DatabaseConnection, cd: &[u8]) -> RepoResult<Option<Self>> {
if let Some((hash, Some(model))) = content_descriptor::Entity::find()
.filter(content_descriptor::Column::Descriptor.eq(cd))
.find_also_related(file::Entity)
.one(&db)
.await?
{
let file = File::new(db, model, hash);
Ok(Some(file))
} else {
Ok(None)
}
}
/// Finds files matching the given filter properties
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn find_by_filters(
db: DatabaseConnection,
filters: Vec<Vec<FilterProperty>>,
) -> RepoResult<Vec<Self>> {
let main_condition = filter::build_find_filter_conditions(filters);
let results: Vec<(content_descriptor::Model, Option<file::Model>)> =
content_descriptor::Entity::find()
.find_also_related(file::Entity)
.filter(main_condition)
.group_by(file::Column::Id)
.all(&db)
.await?;
let files: Vec<Self> = results
.into_iter()
.filter_map(|(hash, tag)| Some(Self::new(db.clone(), tag?, hash)))
.collect();
Ok(files)
}
/// Adds a file with its content descriptor id and mime type to the database
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn add(
db: DatabaseConnection,
cd_id: i64,
mime_type: String,
) -> RepoResult<Self> {
let file = file::ActiveModel {
cd_id: Set(cd_id),
mime_type: Set(mime_type),
..Default::default()
};
let file: file::ActiveModel = file.insert(&db).await?.into();
let file = Self::by_id(db, file.id.unwrap())
.await?
.expect("Inserted file does not exist");
Ok(file)
}
/// Returns the unique identifier of the file
pub fn id(&self) -> i64 {
self.model.id
}
/// Returns the hash of the file (content identifier)
pub fn cd(&self) -> &[u8] {
&self.content_descriptor.descriptor
}
/// Returns the encoded content descriptor
pub fn encoded_cd(&self) -> String {
encode_content_descriptor(self.cd())
}
/// Returns the id of the content descriptor (content identifier value) of the file
pub fn cd_id(&self) -> i64 {
self.content_descriptor.id
}
/// Returns the mime type of the file
pub fn mime_type(&self) -> &String {
&self.model.mime_type
}
/// Returns the status of the file
pub fn status(&self) -> FileStatus {
match self.model.status {
10 => FileStatus::Imported,
20 => FileStatus::Archived,
30 => FileStatus::Deleted,
_ => FileStatus::Imported,
}
}
pub async fn set_status(&mut self, status: FileStatus) -> RepoResult<()> {
let active_model = file::ActiveModel {
id: Set(self.model.id),
status: Set(status as i32),
..Default::default()
};
self.model = active_model.update(&self.db).await?;
Ok(())
}
/// Returns the metadata associated with this file
/// A file MUST always have metadata associated
pub async fn metadata(&self) -> RepoResult<FileMetadata> {
FileMetadata::by_id(self.db.clone(), self.model.id)
.await
.and_then(|f| f.ok_or_else(|| RepoError::from("missing file metadata")))
}
/// Returns the list of tags of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tags(&self) -> RepoResult<Vec<Tag>> {
let tags: Vec<(tag::Model, Option<namespace::Model>)> = tag::Entity::find()
.find_also_related(namespace::Entity)
.join(
JoinType::LeftJoin,
content_descriptor_tag::Relation::Tag.def().rev(),
)
.join(
JoinType::InnerJoin,
content_descriptor_tag::Relation::ContentDescriptorId.def(),
)
.filter(content_descriptor::Column::Id.eq(self.content_descriptor.id))
.all(&self.db)
.await?;
let tags = tags
.into_iter()
.map(|(tag, namespace)| Tag::new(self.db.clone(), tag, namespace))
.collect();
Ok(tags)
}
/// Adds a single tag to the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_tag(&mut self, tag_id: i64) -> RepoResult<()> {
let cd_id = self.content_descriptor.id;
let active_model = content_descriptor_tag::ActiveModel {
cd_id: Set(cd_id),
tag_id: Set(tag_id),
};
active_model.insert(&self.db).await?;
Ok(())
}
/// Adds multiple tags to the file at once
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
if tag_ids.is_empty() {
return Ok(());
}
let cd_id = self.content_descriptor.id;
let models: Vec<content_descriptor_tag::ActiveModel> = tag_ids
.into_iter()
.map(|tag_id| content_descriptor_tag::ActiveModel {
cd_id: Set(cd_id),
tag_id: Set(tag_id),
})
.collect();
content_descriptor_tag::Entity::insert_many(models)
.exec(&self.db)
.await?;
Ok(())
}
/// Removes multiple tags from the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn remove_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
let hash_id = self.content_descriptor.id;
content_descriptor_tag::Entity::delete_many()
.filter(content_descriptor_tag::Column::CdId.eq(hash_id))
.filter(content_descriptor_tag::Column::TagId.is_in(tag_ids))
.exec(&self.db)
.await?;
Ok(())
}
/// Returns the reader for the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_reader(
&self,
storage: &FileHashStore,
) -> RepoResult<BufReader<tokio::fs::File>> {
storage
.get_file(&self.content_descriptor.descriptor)
.await
.map(|(_, f)| f)
}
/// Creates a thumbnail for the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn create_thumbnail<I: IntoIterator<Item = ThumbnailSize> + Debug>(
&self,
storage: &FileHashStore,
sizes: I,
) -> RepoResult<Vec<ThumbnailerThumb>> {
let mut buf = Vec::new();
self.get_reader(storage)
.await?
.read_to_end(&mut buf)
.await?;
let mime_type = self.model.mime_type.clone();
let mime_type =
mime::Mime::from_str(&mime_type).unwrap_or_else(|_| mime::APPLICATION_OCTET_STREAM);
let thumbs = thumbnailer::create_thumbnails(Cursor::new(buf), mime_type, sizes)?;
Ok(thumbs)
}
/// Deletes the file as well as the content descriptor, tag mappings and metadata about the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn delete(self) -> RepoResult<()> {
let trx = self.db.begin().await?;
file_metadata::Entity::delete_many()
.filter(file_metadata::Column::FileId.eq(self.model.id))
.exec(&trx)
.await?;
self.model.delete(&trx).await?;
content_descriptor_tag::Entity::delete_many()
.filter(content_descriptor_tag::Column::CdId.eq(self.content_descriptor.id))
.exec(&trx)
.await?;
content_descriptor::Entity::delete_many()
.filter(content_descriptor::Column::Id.eq(self.content_descriptor.id))
.exec(&trx)
.await?;
trx.commit().await?;
Ok(())
}
}
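A hedged sketch of the new file status API above, assuming `db` is an open connection and `cd` a content descriptor that is already known; neither comes from this commit.

// Illustrative only: look up a file by content descriptor and archive it.
async fn archive_by_cd(
    db: sea_orm::DatabaseConnection,
    cd: &[u8],
) -> mediarepo_core::error::RepoResult<()> {
    if let Some(mut file) = File::by_cd(db, cd).await? {
        // Files start out in the Imported status (see status() above).
        file.set_status(FileStatus::Archived).await?;
    }
    Ok(())
}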

@ -0,0 +1,124 @@
use std::fmt::Debug;
use chrono::{Local, NaiveDateTime};
use sea_orm::prelude::*;
use sea_orm::{DatabaseConnection, Set};
use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::file_metadata;
#[derive(Clone)]
pub struct FileMetadata {
db: DatabaseConnection,
model: file_metadata::Model,
}
impl FileMetadata {
#[tracing::instrument(level = "trace")]
pub(crate) fn new(db: DatabaseConnection, model: file_metadata::Model) -> Self {
Self { db, model }
}
/// Fetches the metadata entry for the file with the given id
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
let file_metadata = file_metadata::Entity::find_by_id(id)
.one(&db)
.await?
.map(|m| FileMetadata::new(db, m));
Ok(file_metadata)
}
/// Fetches metadata for all given file ids
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all_by_ids(db: DatabaseConnection, ids: Vec<i64>) -> RepoResult<Vec<Self>> {
let file_metadata = file_metadata::Entity::find()
.filter(file_metadata::Column::FileId.is_in(ids))
.all(&db)
.await?
.into_iter()
.map(|m| FileMetadata::new(db.clone(), m))
.collect();
Ok(file_metadata)
}
/// Adds a metadata entry for the given file to the database
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn add(
db: DatabaseConnection,
file_id: i64,
size: i64,
creation_time: NaiveDateTime,
change_time: NaiveDateTime,
) -> RepoResult<Self> {
let file = file_metadata::ActiveModel {
file_id: Set(file_id),
size: Set(size),
import_time: Set(Local::now().naive_local()),
creation_time: Set(creation_time),
change_time: Set(change_time),
..Default::default()
};
let model = file.insert(&db).await?;
Ok(Self::new(db, model))
}
pub fn file_id(&self) -> i64 {
self.model.file_id
}
pub fn size(&self) -> i64 {
self.model.size
}
pub fn name(&self) -> &Option<String> {
&self.model.name
}
pub fn comment(&self) -> &Option<String> {
&self.model.comment
}
pub fn import_time(&self) -> &NaiveDateTime {
&self.model.import_time
}
pub fn creation_time(&self) -> &NaiveDateTime {
&self.model.creation_time
}
pub fn change_time(&self) -> &NaiveDateTime {
&self.model.change_time
}
/// Changes the name of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_name<S: ToString + Debug>(&mut self, name: S) -> RepoResult<()> {
let mut active_model = self.get_active_model();
active_model.name = Set(Some(name.to_string()));
self.model = active_model.update(&self.db).await?;
Ok(())
}
/// Changes the comment of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_comment<S: ToString + Debug>(&mut self, comment: S) -> RepoResult<()> {
let mut active_file = self.get_active_model();
active_file.comment = Set(Some(comment.to_string()));
self.model = active_file.update(&self.db).await?;
Ok(())
}
/// Returns the active model of the file with only the id set
fn get_active_model(&self) -> file_metadata::ActiveModel {
file_metadata::ActiveModel {
file_id: Set(self.file_id()),
..Default::default()
}
}
}
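A brief sketch of the metadata entity in use, under the assumption that a metadata row exists for the given file id (the helper name is invented for the example):

// Illustrative only: rename a file via its metadata record.
async fn rename_file(
    db: sea_orm::DatabaseConnection,
    file_id: i64,
    new_name: &str,
) -> mediarepo_core::error::RepoResult<()> {
    if let Some(mut metadata) = FileMetadata::by_id(db, file_id).await? {
        metadata.set_name(new_name).await?;
    }
    Ok(())
}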

@ -1,22 +0,0 @@
use mime::Mime;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, Serialize, Deserialize, PartialOrd, PartialEq)]
pub enum FileType {
Other = -1,
Unknown = 0,
Image = 1,
Video = 2,
Audio = 3,
}
impl From<Mime> for FileType {
fn from(mime_type: Mime) -> Self {
match mime_type.type_() {
mime::IMAGE => Self::Image,
mime::VIDEO => Self::Video,
mime::AUDIO => Self::Audio,
_ => Self::Other,
}
}
}

@ -1,16 +0,0 @@
pub mod tag_handle;
use async_trait::async_trait;
use mediarepo_core::error::RepoResult;
use sea_orm::DatabaseConnection;
#[async_trait]
pub trait EntityHandle {
type Model;
/// Returns the ID that is stored in the handle
fn id(&self) -> i64;
/// Returns the model associated with the handle
async fn model(&self, db: DatabaseConnection) -> RepoResult<Self::Model>;
}

@ -1,24 +0,0 @@
use crate::handles::EntityHandle;
use crate::tag::Tag;
use async_trait::async_trait;
use mediarepo_core::error::{RepoDatabaseError, RepoResult};
use sea_orm::DatabaseConnection;
pub struct TagHandle(pub(crate) i64);
#[async_trait]
impl EntityHandle for TagHandle {
type Model = Tag;
fn id(&self) -> i64 {
self.0
}
async fn model(&self, db: DatabaseConnection) -> RepoResult<Self::Model> {
let tag = Tag::by_id(db, self.0)
.await?
.ok_or_else(|| RepoDatabaseError::InvalidHandle(self.id()))?;
Ok(tag)
}
}

@ -1,10 +1,8 @@
pub mod content_descriptor;
pub mod file; pub mod file;
pub mod file_type; pub mod file_metadata;
pub mod handles;
pub mod hash;
pub mod namespace; pub mod namespace;
pub mod repo; pub mod repo;
pub mod storage;
pub mod tag; pub mod tag;
pub mod thumbnail; pub mod thumbnail;
pub mod type_keys; pub mod type_keys;

@ -1,11 +1,14 @@
use crate::content_descriptor::ContentDescriptor;
use crate::file::filter::FilterProperty;
use crate::file::File; use crate::file::File;
use crate::file_type::FileType; use crate::file_metadata::FileMetadata;
use crate::namespace::Namespace; use crate::namespace::Namespace;
use crate::storage::Storage;
use crate::tag::Tag; use crate::tag::Tag;
use crate::thumbnail::Thumbnail; use crate::thumbnail::Thumbnail;
use chrono::{Local, NaiveDateTime}; use chrono::{Local, NaiveDateTime};
use mediarepo_core::content_descriptor::{encode_content_descriptor, is_v1_content_descriptor};
use mediarepo_core::error::{RepoError, RepoResult}; use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_core::fs::thumbnail_store::{Dimensions, ThumbnailStore}; use mediarepo_core::fs::thumbnail_store::{Dimensions, ThumbnailStore};
use mediarepo_core::itertools::Itertools; use mediarepo_core::itertools::Itertools;
use mediarepo_core::thumbnailer::ThumbnailSize; use mediarepo_core::thumbnailer::ThumbnailSize;
@ -20,29 +23,37 @@ use std::iter::FromIterator;
use std::path::PathBuf; use std::path::PathBuf;
use std::str::FromStr; use std::str::FromStr;
use tokio::fs::OpenOptions; use tokio::fs::OpenOptions;
use tokio::io::BufReader; use tokio::io::AsyncReadExt;
#[derive(Clone)] #[derive(Clone)]
pub struct Repo { pub struct Repo {
db: DatabaseConnection, db: DatabaseConnection,
main_storage: Option<Storage>, main_storage: FileHashStore,
thumbnail_storage: Option<ThumbnailStore>, thumbnail_storage: ThumbnailStore,
} }
impl Repo { impl Repo {
pub(crate) fn new(db: DatabaseConnection) -> Self { pub(crate) fn new(
db: DatabaseConnection,
file_store_path: PathBuf,
thumb_store_path: PathBuf,
) -> Self {
Self { Self {
db, db,
main_storage: None, main_storage: FileHashStore::new(file_store_path),
thumbnail_storage: None, thumbnail_storage: ThumbnailStore::new(thumb_store_path),
} }
} }
/// Connects to the database with the given uri /// Connects to the database with the given uri
#[tracing::instrument(level = "debug")] #[tracing::instrument(level = "debug")]
pub async fn connect<S: AsRef<str> + Debug>(uri: S) -> RepoResult<Self> { pub async fn connect<S: AsRef<str> + Debug>(
uri: S,
file_store_path: PathBuf,
thumb_store_path: PathBuf,
) -> RepoResult<Self> {
let db = get_database(uri).await?; let db = get_database(uri).await?;
Ok(Self::new(db)) Ok(Self::new(db, file_store_path, thumb_store_path))
} }
/// Returns the database of the repo for raw sql queries /// Returns the database of the repo for raw sql queries
@ -50,49 +61,10 @@ impl Repo {
&self.db &self.db
} }
/// Returns all available storages
#[tracing::instrument(level = "debug", skip(self))]
pub async fn storages(&self) -> RepoResult<Vec<Storage>> {
Storage::all(self.db.clone()).await
}
/// Returns a storage by path
#[tracing::instrument(level = "debug", skip(self))]
pub async fn storage_by_path<S: ToString + Debug>(
&self,
path: S,
) -> RepoResult<Option<Storage>> {
Storage::by_path(self.db.clone(), path).await
}
/// Sets the main storage
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_main_storage<S: ToString + Debug>(&mut self, name: S) -> RepoResult<()> {
self.main_storage = Storage::by_name(self.db.clone(), name.to_string()).await?;
Ok(())
}
/// Sets the default thumbnail storage
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_thumbnail_storage(&mut self, path: PathBuf) -> RepoResult<()> {
self.thumbnail_storage = Some(ThumbnailStore::new(path));
Ok(())
}
/// Adds a storage to the repository
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_storage<S1: ToString + Debug, S2: ToString + Debug>(
&self,
name: S1,
path: S2,
) -> RepoResult<Storage> {
Storage::create(self.db.clone(), name, path).await
}
/// Returns a file by its mapped hash /// Returns a file by its mapped hash
#[tracing::instrument(level = "debug", skip(self))] #[tracing::instrument(level = "debug", skip(self))]
pub async fn file_by_hash<S: AsRef<str> + Debug>(&self, hash: S) -> RepoResult<Option<File>> { pub async fn file_by_cd(&self, cd: &[u8]) -> RepoResult<Option<File>> {
File::by_hash(self.db.clone(), hash).await File::by_cd(self.db.clone(), cd).await
} }
/// Returns a file by id /// Returns a file by id
@ -109,23 +81,17 @@ impl Repo {
/// Finds all files by a list of tags /// Finds all files by a list of tags
#[tracing::instrument(level = "debug", skip(self))] #[tracing::instrument(level = "debug", skip(self))]
pub async fn find_files_by_tags( pub async fn find_files_by_filters(
&self, &self,
tags: Vec<Vec<(String, bool)>>, filters: Vec<Vec<FilterProperty>>,
) -> RepoResult<Vec<File>> { ) -> RepoResult<Vec<File>> {
let parsed_tags = tags File::find_by_filters(self.db.clone(), filters).await
.iter() }
.flat_map(|e| e.into_iter().map(|t| parse_namespace_and_tag(t.0.clone())))
.unique()
.collect();
let db_tags = self.tags_by_names(parsed_tags).await?;
let tag_map: HashMap<String, i64> =
HashMap::from_iter(db_tags.into_iter().map(|t| (t.normalized_name(), t.id())));
let tag_ids = process_filters_with_tag_ids(tags, tag_map);
File::find_by_tags(self.db.clone(), tag_ids).await /// Returns all file metadata entries for the given file ids
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_file_metadata_for_ids(&self, ids: Vec<i64>) -> RepoResult<Vec<FileMetadata>> {
FileMetadata::all_by_ids(self.db.clone(), ids).await
} }
/// Adds a file from bytes to the database /// Adds a file from bytes to the database
@ -137,63 +103,69 @@ impl Repo {
creation_time: NaiveDateTime, creation_time: NaiveDateTime,
change_time: NaiveDateTime, change_time: NaiveDateTime,
) -> RepoResult<File> { ) -> RepoResult<File> {
let storage = self.get_main_storage()?; let file_size = content.len();
let reader = Cursor::new(content); let reader = Cursor::new(content);
let hash = storage.store_entry(reader).await?; let cd_binary = self.main_storage.add_file(reader, None).await?;
let cd = ContentDescriptor::add(self.db.clone(), cd_binary).await?;
let (mime_type, file_type) = mime_type let mime_type = mime_type
.and_then(|m| mime::Mime::from_str(&m).ok()) .and_then(|m| mime::Mime::from_str(&m).ok())
.map(|m| (Some(m.to_string()), FileType::from(m))) .unwrap_or_else(|| mime::APPLICATION_OCTET_STREAM)
.unwrap_or((None, FileType::Unknown)); .to_string();
File::add( let file = File::add(self.db.clone(), cd.id(), mime_type).await?;
FileMetadata::add(
self.db.clone(), self.db.clone(),
storage.id(), file.id(),
hash.id(), file_size as i64,
file_type,
mime_type,
creation_time, creation_time,
change_time, change_time,
) )
.await .await?;
Ok(file)
} }
/// Adds a file to the database by its readable path in the file system /// Adds a file to the database by its readable path in the file system
#[tracing::instrument(level = "debug", skip(self))] #[tracing::instrument(level = "debug", skip(self))]
pub async fn add_file_by_path(&self, path: PathBuf) -> RepoResult<File> { pub async fn add_file_by_path(&self, path: PathBuf) -> RepoResult<File> {
let mime_match = mime_guess::from_path(&path).first(); let mime_type = mime_guess::from_path(&path).first().map(|m| m.to_string());
let (mime_type, file_type) = if let Some(mime) = mime_match { let mut os_file = OpenOptions::new().read(true).open(&path).await?;
(Some(mime.clone().to_string()), FileType::from(mime)) let mut buf = Vec::new();
} else { os_file.read_to_end(&mut buf).await?;
(None, FileType::Unknown)
};
let os_file = OpenOptions::new().read(true).open(&path).await?;
let reader = BufReader::new(os_file);
let storage = self.get_main_storage()?; self.add_file(
let hash = storage.store_entry(reader).await?;
File::add(
self.db.clone(),
storage.id(),
hash.id(),
file_type,
mime_type, mime_type,
buf,
Local::now().naive_local(), Local::now().naive_local(),
Local::now().naive_local(), Local::now().naive_local(),
) )
.await .await
} }
/// Deletes a file from the database and disk
#[tracing::instrument(level = "debug", skip(self, file))]
pub async fn delete_file(&self, file: File) -> RepoResult<()> {
let cd = file.cd().to_owned();
let cd_string = file.encoded_cd();
file.delete().await?;
self.main_storage.delete_file(&cd).await?;
self.thumbnail_storage.delete_parent(&cd_string).await?;
Ok(())
}
/// Returns all thumbnails of a file /// Returns all thumbnails of a file
pub async fn get_file_thumbnails(&self, file_hash: String) -> RepoResult<Vec<Thumbnail>> { pub async fn get_file_thumbnails(&self, file_cd: &[u8]) -> RepoResult<Vec<Thumbnail>> {
let thumb_store = self.get_thumbnail_storage()?; let file_cd = encode_content_descriptor(file_cd);
let thumbnails = thumb_store let thumbnails = self
.get_thumbnails(&file_hash) .thumbnail_storage
.get_thumbnails(&file_cd)
.await? .await?
.into_iter() .into_iter()
.map(|(size, path)| Thumbnail { .map(|(size, path)| Thumbnail {
file_hash: file_hash.to_owned(), file_hash: file_cd.to_owned(),
path, path,
size, size,
mime_type: mime::IMAGE_PNG.to_string(), mime_type: mime::IMAGE_PNG.to_string(),
@ -203,18 +175,25 @@ impl Repo {
Ok(thumbnails) Ok(thumbnails)
} }
pub async fn get_file_bytes(&self, file: &File) -> RepoResult<Vec<u8>> {
let mut buf = Vec::new();
let mut reader = file.get_reader(&self.main_storage).await?;
reader.read_to_end(&mut buf).await?;
Ok(buf)
}
/// Creates thumbnails of all sizes for a file /// Creates thumbnails of all sizes for a file
#[tracing::instrument(level = "debug", skip(self, file))] #[tracing::instrument(level = "debug", skip(self, file))]
pub async fn create_thumbnails_for_file(&self, file: &File) -> RepoResult<Vec<Thumbnail>> { pub async fn create_thumbnails_for_file(&self, file: &File) -> RepoResult<Vec<Thumbnail>> {
let thumb_storage = self.get_thumbnail_storage()?;
let size = ThumbnailSize::Medium; let size = ThumbnailSize::Medium;
let (height, width) = size.dimensions(); let (height, width) = size.dimensions();
let thumbs = file.create_thumbnail([size]).await?; let thumbs = file.create_thumbnail(&self.main_storage, [size]).await?;
let mut created_thumbs = Vec::with_capacity(1); let mut created_thumbs = Vec::with_capacity(1);
for thumb in thumbs { for thumb in thumbs {
let entry = self let entry = self
.store_single_thumbnail(file.hash().to_owned(), thumb_storage, height, width, thumb) .store_single_thumbnail(file.encoded_cd(), height, width, thumb)
.await?; .await?;
created_thumbs.push(entry); created_thumbs.push(entry);
} }
@ -228,15 +207,14 @@ impl Repo {
file: &File, file: &File,
size: ThumbnailSize, size: ThumbnailSize,
) -> RepoResult<Thumbnail> { ) -> RepoResult<Thumbnail> {
let thumb_storage = self.get_thumbnail_storage()?;
let (height, width) = size.dimensions(); let (height, width) = size.dimensions();
let thumb = file let thumb = file
.create_thumbnail([size]) .create_thumbnail(&self.main_storage, [size])
.await? .await?
.pop() .pop()
.ok_or_else(|| RepoError::from("Failed to create thumbnail"))?; .ok_or_else(|| RepoError::from("Failed to create thumbnail"))?;
let thumbnail = self let thumbnail = self
.store_single_thumbnail(file.hash().to_owned(), thumb_storage, height, width, thumb) .store_single_thumbnail(file.encoded_cd(), height, width, thumb)
.await?; .await?;
Ok(thumbnail) Ok(thumbnail)
@ -246,7 +224,6 @@ impl Repo {
async fn store_single_thumbnail( async fn store_single_thumbnail(
&self, &self,
file_hash: String, file_hash: String,
thumb_storage: &ThumbnailStore,
height: u32, height: u32,
width: u32, width: u32,
thumb: mediarepo_core::thumbnailer::Thumbnail, thumb: mediarepo_core::thumbnailer::Thumbnail,
@ -254,7 +231,8 @@ impl Repo {
let mut buf = Vec::new(); let mut buf = Vec::new();
thumb.write_png(&mut buf)?; thumb.write_png(&mut buf)?;
let size = Dimensions { height, width }; let size = Dimensions { height, width };
let path = thumb_storage let path = self
.thumbnail_storage
.add_thumbnail(&file_hash, size.clone(), &buf) .add_thumbnail(&file_hash, size.clone(), &buf)
.await?; .await?;
@ -280,6 +258,22 @@ impl Repo {
Namespace::all(self.db.clone()).await Namespace::all(self.db.clone()).await
} }
/// Converts a list of tag names to tag ids
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tag_names_to_ids(&self, tags: Vec<String>) -> RepoResult<HashMap<String, i64>> {
let parsed_tags = tags
.iter()
.map(|tag| parse_namespace_and_tag(tag.clone()))
.unique()
.collect();
let db_tags = self.tags_by_names(parsed_tags).await?;
let tag_map: HashMap<String, i64> =
HashMap::from_iter(db_tags.into_iter().map(|t| (t.normalized_name(), t.id())));
Ok(tag_map)
}
/// Finds all tags by name /// Finds all tags by name
#[tracing::instrument(level = "debug", skip(self))] #[tracing::instrument(level = "debug", skip(self))]
pub async fn tags_by_names(&self, tags: Vec<(Option<String>, String)>) -> RepoResult<Vec<Tag>> { pub async fn tags_by_names(&self, tags: Vec<(Option<String>, String)>) -> RepoResult<Vec<Tag>> {
@ -288,8 +282,8 @@ impl Repo {
/// Finds all tags that are assigned to the given list of hashes /// Finds all tags that are assigned to the given list of hashes
#[tracing::instrument(level = "debug", skip_all)] #[tracing::instrument(level = "debug", skip_all)]
pub async fn find_tags_for_hashes(&self, hashes: Vec<String>) -> RepoResult<Vec<Tag>> { pub async fn find_tags_for_file_identifiers(&self, cds: Vec<Vec<u8>>) -> RepoResult<Vec<Tag>> {
Tag::for_hash_list(self.db.clone(), hashes).await Tag::for_cd_list(self.db.clone(), cds).await
} }
/// Adds all tags that are not in the database to the database and returns the ones already existing as well /// Adds all tags that are not in the database to the database and returns the ones already existing as well
@ -393,16 +387,14 @@ impl Repo {
#[inline] #[inline]
#[tracing::instrument(level = "debug", skip(self))] #[tracing::instrument(level = "debug", skip(self))]
pub async fn get_main_store_size(&self) -> RepoResult<u64> { pub async fn get_main_store_size(&self) -> RepoResult<u64> {
let main_storage = self.get_main_storage()?; self.main_storage.get_size().await
main_storage.get_size().await
} }
/// Returns the size of the thumbnail storage /// Returns the size of the thumbnail storage
#[inline] #[inline]
#[tracing::instrument(level = "debug", skip(self))] #[tracing::instrument(level = "debug", skip(self))]
pub async fn get_thumb_store_size(&self) -> RepoResult<u64> { pub async fn get_thumb_store_size(&self) -> RepoResult<u64> {
let thumb_storage = self.get_thumbnail_storage()?; self.thumbnail_storage.get_size().await
thumb_storage.get_size().await
} }
/// Returns all entity counts /// Returns all entity counts
@ -412,67 +404,29 @@ impl Repo {
get_all_counts(&self.db).await get_all_counts(&self.db).await
} }
#[tracing::instrument(level = "trace", skip(self))] pub async fn migrate(&self) -> RepoResult<()> {
fn get_main_storage(&self) -> RepoResult<&Storage> { let cds = ContentDescriptor::all(self.db.clone()).await?;
if let Some(storage) = &self.main_storage {
Ok(storage) tracing::info!("Converting content descriptors to v2 format...");
} else { let mut converted_count = 0;
Err(RepoError::from("No main storage configured."))
} for mut cd in cds {
} if is_v1_content_descriptor(cd.descriptor()) {
let src_cd = cd.descriptor().to_owned();
#[tracing::instrument(level = "trace", skip(self))] cd.convert_v1_to_v2().await?;
fn get_thumbnail_storage(&self) -> RepoResult<&ThumbnailStore> { let dst_cd = cd.descriptor().to_owned();
if let Some(storage) = &self.thumbnail_storage { self.main_storage.rename_file(&src_cd, &dst_cd).await?;
Ok(storage) self.thumbnail_storage
} else { .rename_parent(
Err(RepoError::from("No thumbnail storage configured.")) encode_content_descriptor(&src_cd),
} encode_content_descriptor(&dst_cd),
} )
} .await?;
converted_count += 1;
fn process_filters_with_tag_ids(
filters: Vec<Vec<(String, bool)>>,
tag_ids: HashMap<String, i64>,
) -> Vec<Vec<(i64, bool)>> {
let mut id_filters = Vec::new();
for expression in filters {
let mut id_sub_filters = Vec::new();
let mut negated_wildcard_filters = Vec::new();
for (tag, negate) in expression {
if tag.ends_with("*") {
let tag_prefix = tag.trim_end_matches('*');
let mut found_tag_ids = tag_ids
.iter()
.filter(|(k, _)| k.starts_with(tag_prefix))
.map(|(_, id)| (*id, negate))
.collect::<Vec<(i64, bool)>>();
if negate {
negated_wildcard_filters.push(found_tag_ids)
} else {
id_sub_filters.append(&mut found_tag_ids);
}
} else {
if let Some(id) = tag_ids.get(&tag) {
id_sub_filters.push((*id, negate));
}
} }
} }
if !negated_wildcard_filters.is_empty() { tracing::info!("Converted {} descriptors", converted_count);
for wildcard_filter in negated_wildcard_filters {
for query in wildcard_filter {
let mut sub_filters = id_sub_filters.clone();
sub_filters.push(query);
id_filters.push(sub_filters)
}
}
} else if !id_sub_filters.is_empty() {
id_filters.push(id_sub_filters);
}
}
id_filters Ok(())
}
} }
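To see the reshaped Repo constructor and the new migration step together, a sketch with an example connection URI and store paths; none of these values come from this commit or the real settings.

// Illustrative only: open a repo with explicit store paths, then run the
// v1 -> v2 content descriptor migration added above.
async fn open_and_migrate() -> mediarepo_core::error::RepoResult<Repo> {
    let repo = Repo::connect(
        "sqlite://repo.db",          // example URI, not the real settings value
        std::path::PathBuf::from("files"),      // assumed file store directory
        std::path::PathBuf::from("thumbnails"), // assumed thumbnail store directory
    )
    .await?;
    repo.migrate().await?;
    Ok(repo)
}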

@ -1,198 +0,0 @@
use crate::hash::Hash;
use mediarepo_core::error::RepoResult;
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_database::entities::storage;
use mediarepo_database::entities::storage::ActiveModel as ActiveStorage;
use mediarepo_database::entities::storage::Model as StorageModel;
use sea_orm::prelude::*;
use sea_orm::{DatabaseConnection, NotSet, Set};
use std::fmt::Debug;
use std::path::PathBuf;
use tokio::fs;
use tokio::io::{AsyncRead, BufReader};
#[derive(Clone)]
pub struct Storage {
db: DatabaseConnection,
model: StorageModel,
store: FileHashStore,
}
impl Storage {
#[tracing::instrument(level = "trace")]
fn new(db: DatabaseConnection, model: StorageModel) -> Self {
let path = PathBuf::from(&model.path);
Self {
store: FileHashStore::new(path),
db,
model,
}
}
/// Returns all available storages
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<Self>> {
let storages: Vec<storage::Model> = storage::Entity::find().all(&db).await?;
let storages = storages
.into_iter()
.map(|s| Self::new(db.clone(), s))
.collect();
Ok(storages)
}
/// Returns the storage by id
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
if let Some(model) = storage::Entity::find_by_id(id).one(&db).await? {
let storage = Self::new(db, model);
Ok(Some(storage))
} else {
Ok(None)
}
}
/// Returns the storage by name
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_name<S: AsRef<str> + Debug>(
db: DatabaseConnection,
name: S,
) -> RepoResult<Option<Self>> {
if let Some(model) = storage::Entity::find()
.filter(storage::Column::Name.eq(name.as_ref()))
.one(&db)
.await?
{
let storage = Self::new(db, model);
Ok(Some(storage))
} else {
Ok(None)
}
}
/// Returns the storage by path
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_path<S: ToString + Debug>(
db: DatabaseConnection,
path: S,
) -> RepoResult<Option<Self>> {
if let Some(model) = storage::Entity::find()
.filter(storage::Column::Path.eq(path.to_string()))
.one(&db)
.await?
{
let storage = Self::new(db, model);
Ok(Some(storage))
} else {
Ok(None)
}
}
/// Creates a new active storage and also creates the associated directory
/// if it doesn't exist yet.
#[tracing::instrument(level = "debug", skip(db))]
pub async fn create<S1: ToString + Debug, S2: ToString + Debug>(
db: DatabaseConnection,
name: S1,
path: S2,
) -> RepoResult<Self> {
let path = path.to_string();
let name = name.to_string();
let path_buf = PathBuf::from(&path);
if !path_buf.exists() {
fs::create_dir(path_buf).await?;
}
let storage = ActiveStorage {
id: NotSet,
name: Set(name),
path: Set(path),
..Default::default()
};
let model = storage.insert(&db).await?;
Ok(Self::new(db, model))
}
/// Returns the unique identifier of this storage
pub fn id(&self) -> i64 {
self.model.id
}
/// Returns the name of the storage
pub fn name(&self) -> &String {
&self.model.name
}
/// Returns the path of the storage
pub fn path(&self) -> &String {
&self.model.path
}
/// Sets a new name for the storage
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_name<S: ToString + Debug>(&self, name: S) -> RepoResult<()> {
let mut active_storage: ActiveStorage = self.get_active_model();
active_storage.name = Set(name.to_string());
active_storage.update(&self.db).await?;
Ok(())
}
/// Sets a new path for the storage. This will only update the database record
/// so if the physical part of the storage is already created it needs to be migrated first
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_path<S: ToString + Debug>(&mut self, path: S) -> RepoResult<()> {
let mut active_storage: ActiveStorage = self.get_active_model();
active_storage.path = Set(path.to_string());
let storage = active_storage.update(&self.db).await?;
self.model = storage;
Ok(())
}
/// Checks if the storage exists on the harddrive
pub fn exists(&self) -> bool {
let path = PathBuf::from(&self.path());
path.exists()
}
/// Adds a thumbnail
#[tracing::instrument(level = "debug", skip(self, reader))]
pub async fn store_entry<R: AsyncRead + Unpin>(&self, reader: R) -> RepoResult<Hash> {
let hash = self.store.add_file(reader, None).await?;
if let Some(hash) = Hash::by_value(self.db.clone(), &hash).await? {
Ok(hash)
} else {
Hash::add(self.db.clone(), hash).await
}
}
/// Returns the buf reader to the given hash
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_file_reader<S: ToString + Debug>(
&self,
hash: S,
) -> RepoResult<BufReader<tokio::fs::File>> {
let (_ext, reader) = self.store.get_file(hash.to_string()).await?;
Ok(reader)
}
/// Returns the size of the storage
#[inline]
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_size(&self) -> RepoResult<u64> {
self.store.get_size().await
}
/// Returns the active model with only the ID filled so saves always perform an update
fn get_active_model(&self) -> ActiveStorage {
ActiveStorage {
id: Set(self.model.id),
..Default::default()
}
}
}

@ -1,8 +1,8 @@
use std::fmt::Debug; use std::fmt::Debug;
use mediarepo_core::error::RepoResult; use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::hash; use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::hash_tag; use mediarepo_database::entities::content_descriptor_tag;
use mediarepo_database::entities::namespace; use mediarepo_database::entities::namespace;
use mediarepo_database::entities::tag; use mediarepo_database::entities::tag;
use sea_orm::prelude::*; use sea_orm::prelude::*;
@ -118,15 +118,18 @@ impl Tag {
/// Returns all tags that are assigned to any of the passed hashes /// Returns all tags that are assigned to any of the passed hashes
#[tracing::instrument(level = "debug", skip_all)] #[tracing::instrument(level = "debug", skip_all)]
pub async fn for_hash_list( pub async fn for_cd_list(db: DatabaseConnection, cds: Vec<Vec<u8>>) -> RepoResult<Vec<Self>> {
db: DatabaseConnection,
hashes: Vec<String>,
) -> RepoResult<Vec<Self>> {
let tags: Vec<Self> = tag::Entity::find() let tags: Vec<Self> = tag::Entity::find()
.find_also_related(namespace::Entity) .find_also_related(namespace::Entity)
.join(JoinType::LeftJoin, hash_tag::Relation::Tag.def().rev()) .join(
.join(JoinType::InnerJoin, hash_tag::Relation::Hash.def()) JoinType::LeftJoin,
.filter(hash::Column::Value.is_in(hashes)) content_descriptor_tag::Relation::Tag.def().rev(),
)
.join(
JoinType::InnerJoin,
content_descriptor_tag::Relation::ContentDescriptorId.def(),
)
.filter(content_descriptor::Column::Descriptor.is_in(cds))
.group_by(tag::Column::Id) .group_by(tag::Column::Id)
.all(&db) .all(&db)
.await? .await?

@ -11,6 +11,7 @@ serde = "^1.0.130"
tracing = "^0.1.29" tracing = "^0.1.29"
compare = "^0.1.0" compare = "^0.1.0"
port_check = "^0.1.5" port_check = "^0.1.5"
rayon = "1.5.1"
[dependencies.mediarepo-core] [dependencies.mediarepo-core]
path = "../mediarepo-core" path = "../mediarepo-core"

@ -1,8 +1,9 @@
use mediarepo_core::mediarepo_api::types::files::{ use mediarepo_core::mediarepo_api::types::files::{
FileMetadataResponse, ThumbnailMetadataResponse, FileBasicDataResponse, FileMetadataResponse, FileStatus, ThumbnailMetadataResponse,
}; };
use mediarepo_core::mediarepo_api::types::tags::{NamespaceResponse, TagResponse}; use mediarepo_core::mediarepo_api::types::tags::{NamespaceResponse, TagResponse};
use mediarepo_model::file::File; use mediarepo_model::file::{File, FileStatus as FileStatusModel};
use mediarepo_model::file_metadata::FileMetadata;
use mediarepo_model::namespace::Namespace; use mediarepo_model::namespace::Namespace;
use mediarepo_model::tag::Tag; use mediarepo_model::tag::Tag;
use mediarepo_model::thumbnail::Thumbnail; use mediarepo_model::thumbnail::Thumbnail;
@ -11,18 +12,36 @@ pub trait FromModel<M> {
fn from_model(model: M) -> Self; fn from_model(model: M) -> Self;
} }
impl FromModel<File> for FileMetadataResponse { impl FromModel<FileMetadata> for FileMetadataResponse {
fn from_model(file: File) -> Self { fn from_model(metadata: FileMetadata) -> Self {
Self { Self {
file_id: metadata.file_id(),
name: metadata.name().to_owned(),
comment: metadata.comment().to_owned(),
creation_time: metadata.creation_time().to_owned(),
change_time: metadata.change_time().to_owned(),
import_time: metadata.import_time().to_owned(),
}
}
}
impl FromModel<File> for FileBasicDataResponse {
fn from_model(file: File) -> Self {
FileBasicDataResponse {
id: file.id(), id: file.id(),
name: file.name().to_owned(), status: FileStatus::from_model(file.status()),
comment: file.comment().to_owned(), cd: file.encoded_cd(),
hash: file.hash().to_owned(),
file_type: file.file_type() as u32,
mime_type: file.mime_type().to_owned(), mime_type: file.mime_type().to_owned(),
creation_time: file.creation_time().to_owned(), }
change_time: file.change_time().to_owned(), }
import_time: file.import_time().to_owned(), }
impl FromModel<FileStatusModel> for FileStatus {
fn from_model(status: FileStatusModel) -> Self {
match status {
FileStatusModel::Imported => FileStatus::Imported,
FileStatusModel::Archived => FileStatus::Archived,
FileStatusModel::Deleted => FileStatus::Deleted,
} }
} }
} }
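A small sketch showing the split response types in use; `file` and `metadata` are assumed to belong to the same file and would come from mediarepo_model.

// Illustrative only: convert a model File plus its FileMetadata into the two
// API responses defined above.
fn to_responses(
    file: File,
    metadata: FileMetadata,
) -> (FileBasicDataResponse, FileMetadataResponse) {
    (
        FileBasicDataResponse::from_model(file),
        FileMetadataResponse::from_model(metadata),
    )
}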

@ -1,11 +1,12 @@
use mediarepo_core::bromine::prelude::*; use mediarepo_core::bromine::prelude::*;
use mediarepo_core::error::{RepoError, RepoResult}; use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::mediarepo_api::types::misc::InfoResponse; use mediarepo_core::mediarepo_api::types::misc::InfoResponse;
use mediarepo_core::settings::Settings; use mediarepo_core::settings::{PortSetting, Settings};
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey}; use mediarepo_core::tokio_graceful_shutdown::SubsystemHandle;
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey, SizeMetadataKey, SubsystemKey};
use mediarepo_model::repo::Repo; use mediarepo_model::repo::Repo;
use mediarepo_model::type_keys::RepoKey; use mediarepo_model::type_keys::RepoKey;
use std::net::{IpAddr, SocketAddr}; use std::net::SocketAddr;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use tokio::net::TcpListener; use tokio::net::TcpListener;
@ -15,16 +16,25 @@ mod from_model;
mod namespaces; mod namespaces;
mod utils; mod utils;
#[tracing::instrument(skip(settings, repo))] #[tracing::instrument(skip(subsystem, settings, repo))]
pub fn start_tcp_server( pub fn start_tcp_server(
ip: IpAddr, subsystem: SubsystemHandle,
port_range: (u16, u16),
repo_path: PathBuf, repo_path: PathBuf,
settings: Settings, settings: Settings,
repo: Repo, repo: Repo,
) -> RepoResult<(String, JoinHandle<()>)> { ) -> RepoResult<(String, JoinHandle<()>)> {
let port = port_check::free_local_port_in_range(port_range.0, port_range.1) let port = match &settings.server.tcp.port {
.ok_or_else(|| RepoError::PortUnavailable)?; PortSetting::Fixed(p) => {
if port_check::is_local_port_free(*p) {
*p
} else {
return Err(RepoError::PortUnavailable);
}
}
PortSetting::Range((l, r)) => port_check::free_local_port_in_range(*l, *r)
.ok_or_else(|| RepoError::PortUnavailable)?,
};
let ip = settings.server.tcp.listen_address.to_owned();
let address = SocketAddr::new(ip, port); let address = SocketAddr::new(ip, port);
let address_string = address.to_string(); let address_string = address.to_string();
@ -32,9 +42,11 @@ pub fn start_tcp_server(
.name("mediarepo_tcp::listen") .name("mediarepo_tcp::listen")
.spawn(async move { .spawn(async move {
get_builder::<TcpListener>(address) get_builder::<TcpListener>(address)
.insert::<SubsystemKey>(subsystem)
.insert::<RepoKey>(Arc::new(repo)) .insert::<RepoKey>(Arc::new(repo))
.insert::<SettingsKey>(settings) .insert::<SettingsKey>(settings)
.insert::<RepoPathKey>(repo_path) .insert::<RepoPathKey>(repo_path)
.insert::<SizeMetadataKey>(Default::default())
.build_server() .build_server()
.await .await
.expect("Failed to start tcp server") .expect("Failed to start tcp server")
@ -44,8 +56,9 @@ pub fn start_tcp_server(
} }
#[cfg(unix)] #[cfg(unix)]
#[tracing::instrument(skip(settings, repo))] #[tracing::instrument(skip(subsystem, settings, repo))]
pub fn create_unix_socket( pub fn create_unix_socket(
subsystem: SubsystemHandle,
path: std::path::PathBuf, path: std::path::PathBuf,
repo_path: PathBuf, repo_path: PathBuf,
settings: Settings, settings: Settings,
@ -61,9 +74,11 @@ pub fn create_unix_socket(
.name("mediarepo_unix_socket::listen") .name("mediarepo_unix_socket::listen")
.spawn(async move { .spawn(async move {
get_builder::<UnixListener>(path) get_builder::<UnixListener>(path)
.insert::<SubsystemKey>(subsystem)
.insert::<RepoKey>(Arc::new(repo)) .insert::<RepoKey>(Arc::new(repo))
.insert::<SettingsKey>(settings) .insert::<SettingsKey>(settings)
.insert::<RepoPathKey>(repo_path) .insert::<RepoPathKey>(repo_path)
.insert::<SizeMetadataKey>(Default::default())
.build_server() .build_server()
.await .await
.expect("Failed to create unix domain socket"); .expect("Failed to create unix domain socket");
@ -73,7 +88,9 @@ pub fn create_unix_socket(
} }
fn get_builder<L: AsyncStreamProtocolListener>(address: L::AddressType) -> IPCBuilder<L> { fn get_builder<L: AsyncStreamProtocolListener>(address: L::AddressType) -> IPCBuilder<L> {
namespaces::build_namespaces(IPCBuilder::new().address(address)).on("info", callback!(info)) namespaces::build_namespaces(IPCBuilder::new().address(address))
.on("info", callback!(info))
.on("shutdown", callback!(shutdown))
} }
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
@ -86,3 +103,17 @@ async fn info(ctx: &Context, _: Event) -> IPCResult<()> {
Ok(()) Ok(())
} }
#[tracing::instrument(skip_all)]
async fn shutdown(ctx: &Context, _: Event) -> IPCResult<()> {
ctx.clone().stop().await?;
{
let data = ctx.data.read().await;
let subsystem = data.get::<SubsystemKey>().unwrap();
subsystem.request_shutdown();
subsystem.on_shutdown_requested().await;
}
ctx.emit("shutdown", ()).await?;
Ok(())
}
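The port selection above either verifies a fixed port or scans a range for the first free one via the port_check crate. A rough, standard-library-only sketch of the same logic; `PortSetting` and the bind probe here are illustrative, not the real types:

use std::net::TcpListener;

// Illustrative stand-in for the PortSetting variants used above.
enum PortSetting {
    Fixed(u16),
    Range((u16, u16)),
}

// port_check does roughly this: a port is free if a listener can be bound to it.
fn port_is_free(port: u16) -> bool {
    TcpListener::bind(("127.0.0.1", port)).is_ok()
}

fn resolve_port(setting: &PortSetting) -> Option<u16> {
    match setting {
        PortSetting::Fixed(p) => port_is_free(*p).then_some(*p),
        PortSetting::Range((low, high)) => (*low..*high).find(|p| port_is_free(*p)),
    }
}

fn main() {
    let port = resolve_port(&PortSetting::Fixed(13400))
        .or_else(|| resolve_port(&PortSetting::Range((13400, 13500))));
    match port {
        Some(port) => println!("would listen on port {}", port),
        None => eprintln!("no free port available"),
    }
}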

@ -1,36 +1,25 @@
mod searching;
mod sorting;
use crate::from_model::FromModel; use crate::from_model::FromModel;
use crate::utils::{file_by_identifier, get_repo_from_context, hash_by_identifier}; use crate::namespaces::files::searching::find_files_for_filters;
use chrono::NaiveDateTime; use crate::namespaces::files::sorting::sort_files_by_properties;
use compare::Compare; use crate::utils::{cd_by_identifier, file_by_identifier, get_repo_from_context};
use mediarepo_core::bromine::prelude::*; use mediarepo_core::bromine::prelude::*;
use mediarepo_core::fs::thumbnail_store::Dimensions; use mediarepo_core::fs::thumbnail_store::Dimensions;
use mediarepo_core::itertools::Itertools; use mediarepo_core::itertools::Itertools;
use mediarepo_core::mediarepo_api::types::files::{ use mediarepo_core::mediarepo_api::types::files::{
AddFileRequestHeader, FileMetadataResponse, FilterExpression, FindFilesRequest, AddFileRequestHeader, FileBasicDataResponse, FileMetadataResponse,
GetFileThumbnailOfSizeRequest, GetFileThumbnailsRequest, ReadFileRequest, SortDirection, GetFileThumbnailOfSizeRequest, GetFileThumbnailsRequest, ReadFileRequest,
SortKey, ThumbnailMetadataResponse, UpdateFileNameRequest, ThumbnailMetadataResponse, UpdateFileNameRequest, UpdateFileStatusRequest,
}; };
use mediarepo_core::mediarepo_api::types::filtering::FindFilesRequest;
use mediarepo_core::mediarepo_api::types::identifier::FileIdentifier; use mediarepo_core::mediarepo_api::types::identifier::FileIdentifier;
use mediarepo_core::thumbnailer::ThumbnailSize; use mediarepo_core::thumbnailer::ThumbnailSize;
use mediarepo_core::utils::parse_namespace_and_tag; use mediarepo_core::utils::parse_namespace_and_tag;
use mediarepo_database::queries::tags::{
get_hashes_with_namespaced_tags, get_hashes_with_tag_count,
};
use std::cmp::Ordering;
use std::collections::HashMap;
use tokio::io::AsyncReadExt; use tokio::io::AsyncReadExt;
pub struct FilesNamespace; pub struct FilesNamespace;
pub struct FileSortContext {
name: Option<String>,
size: u64,
mime_type: Option<String>,
namespaces: HashMap<String, Vec<String>>,
tag_count: u32,
import_time: NaiveDateTime,
create_time: NaiveDateTime,
change_time: NaiveDateTime,
}
impl NamespaceProvider for FilesNamespace { impl NamespaceProvider for FilesNamespace {
fn name() -> &'static str { fn name() -> &'static str {
@ -41,6 +30,7 @@ impl NamespaceProvider for FilesNamespace {
events!(handler, events!(handler,
"all_files" => Self::all_files, "all_files" => Self::all_files,
"get_file" => Self::get_file, "get_file" => Self::get_file,
"get_file_metadata" => Self::get_file_metadata,
"get_files" => Self::get_files, "get_files" => Self::get_files,
"find_files" => Self::find_files, "find_files" => Self::find_files,
"add_file" => Self::add_file, "add_file" => Self::add_file,
@ -48,7 +38,9 @@ impl NamespaceProvider for FilesNamespace {
"get_thumbnails" => Self::thumbnails, "get_thumbnails" => Self::thumbnails,
"get_thumbnail_of_size" => Self::get_thumbnail_of_size, "get_thumbnail_of_size" => Self::get_thumbnail_of_size,
"update_file_name" => Self::update_file_name, "update_file_name" => Self::update_file_name,
"delete_thumbnails" => Self::delete_thumbnails "delete_thumbnails" => Self::delete_thumbnails,
"update_file_status" => Self::update_status,
"delete_file" => Self::delete_file
); );
} }
} }
@ -60,9 +52,9 @@ impl FilesNamespace {
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let files = repo.files().await?; let files = repo.files().await?;
let responses: Vec<FileMetadataResponse> = files let responses: Vec<FileBasicDataResponse> = files
.into_iter() .into_iter()
.map(FileMetadataResponse::from_model) .map(FileBasicDataResponse::from_model)
.collect(); .collect();
ctx.emit_to(Self::name(), "all_files", responses).await?; ctx.emit_to(Self::name(), "all_files", responses).await?;
@ -76,12 +68,29 @@ impl FilesNamespace {
let id = event.payload::<FileIdentifier>()?; let id = event.payload::<FileIdentifier>()?;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let file = file_by_identifier(id, &repo).await?; let file = file_by_identifier(id, &repo).await?;
let response = FileMetadataResponse::from_model(file); let response = FileBasicDataResponse::from_model(file);
ctx.emit_to(Self::name(), "get_file", response).await?; ctx.emit_to(Self::name(), "get_file", response).await?;
Ok(()) Ok(())
} }
/// Returns metadata for a given file
#[tracing::instrument(skip_all)]
async fn get_file_metadata(ctx: &Context, event: Event) -> IPCResult<()> {
let id = event.payload::<FileIdentifier>()?;
let repo = get_repo_from_context(ctx).await;
let file = file_by_identifier(id, &repo).await?;
let metadata = file.metadata().await?;
ctx.emit_to(
Self::name(),
"get_file_metadata",
FileMetadataResponse::from_model(metadata),
)
.await?;
Ok(())
}
/// Returns a list of files by identifier /// Returns a list of files by identifier
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
async fn get_files(ctx: &Context, event: Event) -> IPCResult<()> { async fn get_files(ctx: &Context, event: Event) -> IPCResult<()> {
@ -93,7 +102,7 @@ impl FilesNamespace {
responses.push( responses.push(
file_by_identifier(id, &repo) file_by_identifier(id, &repo)
.await .await
.map(FileMetadataResponse::from_model)?, .map(FileBasicDataResponse::from_model)?,
); );
} }
ctx.emit_to(Self::name(), "get_files", responses).await?; ctx.emit_to(Self::name(), "get_files", responses).await?;
@ -107,57 +116,12 @@ impl FilesNamespace {
let req = event.payload::<FindFilesRequest>()?; let req = event.payload::<FindFilesRequest>()?;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let tags = req let mut files = find_files_for_filters(&repo, req.filters).await?;
.filters sort_files_by_properties(&repo, req.sort_expression, &mut files).await?;
.into_iter()
.map(|e| match e {
FilterExpression::OrExpression(tags) => {
tags.into_iter().map(|t| (t.tag, t.negate)).collect_vec()
}
FilterExpression::Query(tag) => {
vec![(tag.tag, tag.negate)]
}
})
.collect();
let mut files = repo.find_files_by_tags(tags).await?; let responses: Vec<FileBasicDataResponse> = files
let hash_ids: Vec<i64> = files.iter().map(|f| f.hash_id()).collect();
let mut hash_nsp: HashMap<i64, HashMap<String, Vec<String>>> =
get_hashes_with_namespaced_tags(repo.db(), hash_ids.clone()).await?;
let mut hash_tag_counts = get_hashes_with_tag_count(repo.db(), hash_ids).await?;
let mut contexts = HashMap::new();
for file in &files {
let context = FileSortContext {
name: file.name().to_owned(),
size: file.get_size().await?,
mime_type: file.mime_type().to_owned(),
namespaces: hash_nsp
.remove(&file.hash_id())
.unwrap_or(HashMap::with_capacity(0)),
tag_count: hash_tag_counts.remove(&file.hash_id()).unwrap_or(0),
import_time: file.import_time().to_owned(),
create_time: file.import_time().to_owned(),
change_time: file.change_time().to_owned(),
};
contexts.insert(file.id(), context);
}
let sort_expression = req.sort_expression;
tracing::debug!("sort_expression = {:?}", sort_expression);
files.sort_by(|a, b| {
compare_files(
contexts.get(&a.id()).unwrap(),
contexts.get(&b.id()).unwrap(),
&sort_expression,
)
});
let responses: Vec<FileMetadataResponse> = files
.into_iter() .into_iter()
.map(FileMetadataResponse::from_model) .map(FileBasicDataResponse::from_model)
.collect(); .collect();
ctx.emit_to(Self::name(), "find_files", responses).await?; ctx.emit_to(Self::name(), "find_files", responses).await?;
Ok(()) Ok(())
@ -172,7 +136,7 @@ impl FilesNamespace {
let AddFileRequestHeader { metadata, tags } = request; let AddFileRequestHeader { metadata, tags } = request;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let mut file = repo let file = repo
.add_file( .add_file(
metadata.mime_type, metadata.mime_type,
bytes.into_inner(), bytes.into_inner(),
@ -180,7 +144,7 @@ impl FilesNamespace {
metadata.change_time, metadata.change_time,
) )
.await?; .await?;
file.set_name(metadata.name).await?; file.metadata().await?.set_name(metadata.name).await?;
let tags = repo let tags = repo
.add_all_tags(tags.into_iter().map(parse_namespace_and_tag).collect()) .add_all_tags(tags.into_iter().map(parse_namespace_and_tag).collect())
@ -191,7 +155,23 @@ impl FilesNamespace {
ctx.emit_to( ctx.emit_to(
Self::name(), Self::name(),
"add_file", "add_file",
FileMetadataResponse::from_model(file), FileBasicDataResponse::from_model(file),
)
.await?;
Ok(())
}
#[tracing::instrument(skip_all)]
async fn update_status(ctx: &Context, event: Event) -> IPCResult<()> {
let request = event.payload::<UpdateFileStatusRequest>()?;
let repo = get_repo_from_context(ctx).await;
let mut file = file_by_identifier(request.file_id, &repo).await?;
file.set_status(request.status.into()).await?;
ctx.emit_to(
Self::name(),
"update_file_status",
FileBasicDataResponse::from_model(file),
) )
.await?; .await?;
@ -202,26 +182,36 @@ impl FilesNamespace {
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
async fn read_file(ctx: &Context, event: Event) -> IPCResult<()> { async fn read_file(ctx: &Context, event: Event) -> IPCResult<()> {
let request = event.payload::<ReadFileRequest>()?; let request = event.payload::<ReadFileRequest>()?;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let file = file_by_identifier(request.id, &repo).await?; let file = file_by_identifier(request.id, &repo).await?;
let mut reader = file.get_reader().await?; let bytes = repo.get_file_bytes(&file).await?;
let mut buf = Vec::new();
reader.read_to_end(&mut buf).await?;
ctx.emit_to(Self::name(), "read_file", BytePayload::new(buf)) ctx.emit_to(Self::name(), "read_file", BytePayload::new(bytes))
.await?; .await?;
Ok(()) Ok(())
} }
/// Deletes a file
#[tracing::instrument(skip_all)]
async fn delete_file(ctx: &Context, event: Event) -> IPCResult<()> {
let id = event.payload::<FileIdentifier>()?;
let repo = get_repo_from_context(ctx).await;
let file = file_by_identifier(id, &repo).await?;
repo.delete_file(file).await?;
ctx.emit_to(Self::name(), "delete_file", ()).await?;
Ok(())
}
/// Returns a list of available thumbnails of a file /// Returns a list of available thumbnails of a file
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
async fn thumbnails(ctx: &Context, event: Event) -> IPCResult<()> { async fn thumbnails(ctx: &Context, event: Event) -> IPCResult<()> {
let request = event.payload::<GetFileThumbnailsRequest>()?; let request = event.payload::<GetFileThumbnailsRequest>()?;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let file_hash = hash_by_identifier(request.id.clone(), &repo).await?; let file_cd = cd_by_identifier(request.id.clone(), &repo).await?;
let mut thumbnails = repo.get_file_thumbnails(file_hash).await?; let mut thumbnails = repo.get_file_thumbnails(&file_cd).await?;
if thumbnails.is_empty() { if thumbnails.is_empty() {
tracing::debug!("No thumbnails for file found. Creating thumbnails..."); tracing::debug!("No thumbnails for file found. Creating thumbnails...");
@ -245,8 +235,8 @@ impl FilesNamespace {
async fn get_thumbnail_of_size(ctx: &Context, event: Event) -> IPCResult<()> { async fn get_thumbnail_of_size(ctx: &Context, event: Event) -> IPCResult<()> {
let request = event.payload::<GetFileThumbnailOfSizeRequest>()?; let request = event.payload::<GetFileThumbnailOfSizeRequest>()?;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let file_hash = hash_by_identifier(request.id.clone(), &repo).await?; let file_cd = cd_by_identifier(request.id.clone(), &repo).await?;
let thumbnails = repo.get_file_thumbnails(file_hash).await?; let thumbnails = repo.get_file_thumbnails(&file_cd).await?;
let min_size = request.min_size; let min_size = request.min_size;
let max_size = request.max_size; let max_size = request.max_size;
@ -289,12 +279,14 @@ impl FilesNamespace {
async fn update_file_name(ctx: &Context, event: Event) -> IPCResult<()> { async fn update_file_name(ctx: &Context, event: Event) -> IPCResult<()> {
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let request = event.payload::<UpdateFileNameRequest>()?; let request = event.payload::<UpdateFileNameRequest>()?;
let mut file = file_by_identifier(request.file_id, &repo).await?; let file = file_by_identifier(request.file_id, &repo).await?;
file.set_name(request.name).await?; let mut metadata = file.metadata().await?;
metadata.set_name(request.name).await?;
ctx.emit_to( ctx.emit_to(
Self::name(), Self::name(),
"update_file_name", "update_file_name",
FileMetadataResponse::from_model(file), FileMetadataResponse::from_model(metadata),
) )
.await?; .await?;
@ -307,7 +299,7 @@ impl FilesNamespace {
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let id = event.payload::<FileIdentifier>()?; let id = event.payload::<FileIdentifier>()?;
let file = file_by_identifier(id, &repo).await?; let file = file_by_identifier(id, &repo).await?;
let thumbnails = repo.get_file_thumbnails(file.hash().to_owned()).await?; let thumbnails = repo.get_file_thumbnails(file.cd()).await?;
for thumb in thumbnails { for thumb in thumbnails {
thumb.delete().await?; thumb.delete().await?;
@ -316,113 +308,3 @@ impl FilesNamespace {
Ok(()) Ok(())
} }
} }
#[tracing::instrument(level = "trace", skip_all)]
fn compare_files(
ctx_a: &FileSortContext,
ctx_b: &FileSortContext,
expression: &Vec<SortKey>,
) -> Ordering {
let cmp_date = compare::natural();
let cmp_u64 = compare::natural();
let cmp_u32 = compare::natural();
for sort_key in expression {
let ordering = match sort_key {
SortKey::Namespace(namespace) => {
let list_a = ctx_a.namespaces.get(&namespace.name);
let list_b = ctx_b.namespaces.get(&namespace.name);
let cmp_result = if let (Some(list_a), Some(list_b)) = (list_a, list_b) {
compare_tag_lists(list_a, list_b)
} else if list_a.is_some() {
Ordering::Greater
} else if list_b.is_some() {
Ordering::Less
} else {
Ordering::Equal
};
adjust_for_dir(cmp_result, &namespace.direction)
}
SortKey::FileName(direction) => {
adjust_for_dir(compare_opts(&ctx_a.name, &ctx_b.name), direction)
}
SortKey::FileSize(direction) => {
adjust_for_dir(cmp_u64.compare(&ctx_a.size, &ctx_b.size), direction)
}
SortKey::FileImportedTime(direction) => adjust_for_dir(
cmp_date.compare(&ctx_a.import_time, &ctx_b.import_time),
direction,
),
SortKey::FileCreatedTime(direction) => adjust_for_dir(
cmp_date.compare(&ctx_a.create_time, &ctx_b.create_time),
direction,
),
SortKey::FileChangeTime(direction) => adjust_for_dir(
cmp_date.compare(&ctx_a.change_time, &ctx_b.change_time),
direction,
),
SortKey::FileType(direction) => {
adjust_for_dir(compare_opts(&ctx_a.mime_type, &ctx_b.mime_type), direction)
}
SortKey::NumTags(direction) => adjust_for_dir(
cmp_u32.compare(&ctx_a.tag_count, &ctx_b.tag_count),
direction,
),
};
if !ordering.is_eq() {
return ordering;
}
}
Ordering::Equal
}
fn compare_opts<T: Ord + Sized>(opt_a: &Option<T>, opt_b: &Option<T>) -> Ordering {
let cmp = compare::natural();
if let (Some(a), Some(b)) = (opt_a, opt_b) {
cmp.compare(a, b)
} else if opt_a.is_some() {
Ordering::Greater
} else if opt_b.is_some() {
Ordering::Less
} else {
Ordering::Equal
}
}
fn compare_f32(a: f32, b: f32) -> Ordering {
if a > b {
Ordering::Greater
} else if b > a {
Ordering::Less
} else {
Ordering::Equal
}
}
fn adjust_for_dir(ordering: Ordering, direction: &SortDirection) -> Ordering {
if *direction == SortDirection::Descending {
ordering.reverse()
} else {
ordering
}
}
fn compare_tag_lists(list_a: &Vec<String>, list_b: &Vec<String>) -> Ordering {
let first_diff = list_a
.into_iter()
.zip(list_b.into_iter())
.find(|(a, b)| *a != *b);
if let Some(diff) = first_diff {
if let (Some(num_a), Some(num_b)) = (diff.0.parse::<f32>().ok(), diff.1.parse::<f32>().ok())
{
compare_f32(num_a, num_b)
} else {
let cmp = compare::natural();
cmp.compare(diff.0, diff.1)
}
} else {
Ordering::Equal
}
}

@ -0,0 +1,185 @@
use mediarepo_core::content_descriptor::decode_content_descriptor;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::files::FileStatus as ApiFileStatus;
use mediarepo_core::mediarepo_api::types::filtering::{
FilterExpression, FilterQuery, PropertyQuery, TagQuery, ValueComparator,
};
use mediarepo_model::file::filter::NegatableComparator::{Is, IsNot};
use mediarepo_model::file::filter::{FilterFileProperty, FilterProperty, OrderingComparator};
use mediarepo_model::file::{File, FileStatus};
use mediarepo_model::repo::Repo;
use std::collections::HashMap;
#[tracing::instrument(level = "debug", skip(repo))]
pub async fn find_files_for_filters(
repo: &Repo,
expressions: Vec<FilterExpression>,
) -> RepoResult<Vec<File>> {
let tag_names = get_tag_names_from_expressions(&expressions);
let tag_id_map = repo.tag_names_to_ids(tag_names).await?;
let filters = build_filters_from_expressions(expressions, &tag_id_map);
repo.find_files_by_filters(filters).await
}
#[tracing::instrument(level = "debug")]
fn get_tag_names_from_expressions(expressions: &Vec<FilterExpression>) -> Vec<String> {
expressions
.iter()
.flat_map(|f| match f {
FilterExpression::OrExpression(queries) => queries
.iter()
.filter_map(|q| match q {
FilterQuery::Tag(tag) => Some(tag.tag.to_owned()),
_ => None,
})
.collect::<Vec<String>>(),
FilterExpression::Query(q) => match q {
FilterQuery::Tag(tag) => {
vec![tag.tag.to_owned()]
}
FilterQuery::Property(_) => {
vec![]
}
},
})
.collect::<Vec<String>>()
}
#[tracing::instrument(level = "debug")]
fn build_filters_from_expressions(
expressions: Vec<FilterExpression>,
tag_id_map: &HashMap<String, i64>,
) -> Vec<Vec<FilterProperty>> {
expressions
.into_iter()
.filter_map(|e| {
let filters = match e {
FilterExpression::OrExpression(queries) => queries
.into_iter()
.filter_map(|q| map_query_to_filter(q, tag_id_map))
.collect(),
FilterExpression::Query(q) => {
if let Some(filter) = map_query_to_filter(q, tag_id_map) {
vec![filter]
} else {
vec![]
}
}
};
if filters.len() > 0 {
Some(filters)
} else {
None
}
})
.collect()
}
fn map_query_to_filter(
query: FilterQuery,
tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
match query {
FilterQuery::Tag(tag_query) => map_tag_query_to_filter(tag_query, tag_id_map),
FilterQuery::Property(property) => map_property_query_to_filter(property),
}
}
fn map_tag_query_to_filter(
query: TagQuery,
tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
if query.tag.ends_with("*") {
map_wildcard_tag_to_filter(query, tag_id_map)
} else {
map_tag_to_filter(query, tag_id_map)
}
}
fn map_wildcard_tag_to_filter(
query: TagQuery,
tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
let filter_tag = query.tag.trim_end_matches("*");
let relevant_ids = tag_id_map
.iter()
.filter_map(|(name, id)| {
if name.starts_with(filter_tag) {
Some(*id)
} else {
None
}
})
.collect::<Vec<i64>>();
if relevant_ids.len() > 0 {
let comparator = if query.negate {
IsNot(relevant_ids)
} else {
Is(relevant_ids)
};
Some(FilterProperty::TagWildcardIds(comparator))
} else {
None
}
}
fn map_tag_to_filter(query: TagQuery, tag_id_map: &HashMap<String, i64>) -> Option<FilterProperty> {
tag_id_map.get(&query.tag).map(|id| {
let comparator = if query.negate { IsNot(*id) } else { Is(*id) };
FilterProperty::TagId(comparator)
})
}
fn map_property_query_to_filter(query: PropertyQuery) -> Option<FilterProperty> {
match query {
PropertyQuery::Status(s) => Some(FilterProperty::FileProperty(FilterFileProperty::Status(
Is(file_status_to_number(s)),
))),
PropertyQuery::FileSize(s) => Some(FilterProperty::FileProperty(
FilterFileProperty::FileSize(val_comparator_to_order(s, |v| v as i64)),
)),
PropertyQuery::ImportedTime(t) => Some(FilterProperty::FileProperty(
FilterFileProperty::ImportedTime(val_comparator_to_order(t, |t| t)),
)),
PropertyQuery::ChangedTime(t) => Some(FilterProperty::FileProperty(
FilterFileProperty::ChangedTime(val_comparator_to_order(t, |t| t)),
)),
PropertyQuery::CreatedTime(t) => Some(FilterProperty::FileProperty(
FilterFileProperty::CreatedTime(val_comparator_to_order(t, |t| t)),
)),
PropertyQuery::TagCount(c) => {
Some(FilterProperty::TagCount(val_comparator_to_order(c, |v| {
v as i64
})))
}
PropertyQuery::Cd(cd) => decode_content_descriptor(cd)
.ok()
.map(|cd| FilterProperty::ContentDescriptor(Is(cd))),
PropertyQuery::Id(id) => Some(FilterProperty::FileProperty(FilterFileProperty::Id(Is(id)))),
}
}
fn file_status_to_number(status: ApiFileStatus) -> i64 {
match status {
ApiFileStatus::Imported => FileStatus::Imported as i64,
ApiFileStatus::Archived => FileStatus::Archived as i64,
ApiFileStatus::Deleted => FileStatus::Deleted as i64,
}
}
#[inline]
fn val_comparator_to_order<T1, T2, F: Fn(T1) -> T2>(
comp: ValueComparator<T1>,
conv_fn: F,
) -> OrderingComparator<T2> {
match comp {
ValueComparator::Less(v) => OrderingComparator::Less(conv_fn(v)),
ValueComparator::Equal(v) => OrderingComparator::Equal(conv_fn(v)),
ValueComparator::Greater(v) => OrderingComparator::Greater(conv_fn(v)),
ValueComparator::Between((v1, v2)) => {
OrderingComparator::Between((conv_fn(v1), conv_fn(v2)))
}
}
}
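`map_wildcard_tag_to_filter` above expands a trailing-asterisk query into the set of matching tag ids before the filter ever reaches the database. A small standalone sketch of that expansion step (tag names and ids are made up):

use std::collections::HashMap;

// Simplified version of the wildcard handling above: a query like "character:*"
// becomes the ids of every known tag with that prefix.
fn expand_wildcard(query: &str, tag_id_map: &HashMap<String, i64>) -> Vec<i64> {
    let prefix = query.trim_end_matches('*');
    tag_id_map
        .iter()
        .filter(|(name, _)| name.starts_with(prefix))
        .map(|(_, id)| *id)
        .collect()
}

fn main() {
    let mut tags = HashMap::new();
    tags.insert("character:alice".to_string(), 1);
    tags.insert("character:bob".to_string(), 2);
    tags.insert("rating:safe".to_string(), 3);

    let ids = expand_wildcard("character:*", &tags);
    // Two matching ids; in the real filter they become Is(ids) or IsNot(ids).
    assert_eq!(ids.len(), 2);
}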

@ -0,0 +1,193 @@
use chrono::NaiveDateTime;
use compare::Compare;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::filtering::{SortDirection, SortKey};
use mediarepo_database::queries::tags::{
get_cids_with_namespaced_tags, get_content_descriptors_with_tag_count,
};
use mediarepo_model::file::File;
use mediarepo_model::file_metadata::FileMetadata;
use mediarepo_model::repo::Repo;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::cmp::Ordering;
use std::collections::HashMap;
use std::iter::FromIterator;
pub struct FileSortContext {
name: Option<String>,
size: u64,
mime_type: String,
namespaces: HashMap<String, Vec<String>>,
tag_count: u32,
import_time: NaiveDateTime,
create_time: NaiveDateTime,
change_time: NaiveDateTime,
}
#[tracing::instrument(level = "debug", skip(repo, files))]
pub async fn sort_files_by_properties(
repo: &Repo,
sort_expression: Vec<SortKey>,
files: &mut Vec<File>,
) -> RepoResult<()> {
let contexts = build_sort_context(repo, files).await?;
files.sort_by(|a, b| {
compare_files(
contexts.get(&a.id()).unwrap(),
contexts.get(&b.id()).unwrap(),
&sort_expression,
)
});
Ok(())
}
async fn build_sort_context(
repo: &Repo,
files: &Vec<File>,
) -> RepoResult<HashMap<i64, FileSortContext>> {
let hash_ids: Vec<i64> = files.par_iter().map(|f| f.cd_id()).collect();
let file_ids: Vec<i64> = files.par_iter().map(|f| f.id()).collect();
let mut cid_nsp: HashMap<i64, HashMap<String, Vec<String>>> =
get_cids_with_namespaced_tags(repo.db(), hash_ids.clone()).await?;
let mut cid_tag_counts = get_content_descriptors_with_tag_count(repo.db(), hash_ids).await?;
let files_metadata = repo.get_file_metadata_for_ids(file_ids).await?;
let mut file_metadata_map: HashMap<i64, FileMetadata> =
HashMap::from_iter(files_metadata.into_iter().map(|m| (m.file_id(), m)));
let mut contexts = HashMap::new();
for file in files {
if let Some(metadata) = file_metadata_map.remove(&file.id()) {
let context = FileSortContext {
name: metadata.name().to_owned(),
size: metadata.size() as u64,
mime_type: file.mime_type().to_owned(),
namespaces: cid_nsp
.remove(&file.cd_id())
.unwrap_or(HashMap::with_capacity(0)),
tag_count: cid_tag_counts.remove(&file.cd_id()).unwrap_or(0),
import_time: metadata.import_time().to_owned(),
create_time: metadata.import_time().to_owned(),
change_time: metadata.change_time().to_owned(),
};
contexts.insert(file.id(), context);
}
}
Ok(contexts)
}
#[tracing::instrument(level = "trace", skip_all)]
fn compare_files(
ctx_a: &FileSortContext,
ctx_b: &FileSortContext,
expression: &Vec<SortKey>,
) -> Ordering {
let cmp_date = compare::natural();
let cmp_u64 = compare::natural();
let cmp_u32 = compare::natural();
for sort_key in expression {
let ordering = match sort_key {
SortKey::Namespace(namespace) => {
let list_a = ctx_a.namespaces.get(&namespace.name);
let list_b = ctx_b.namespaces.get(&namespace.name);
let cmp_result = if let (Some(list_a), Some(list_b)) = (list_a, list_b) {
compare_tag_lists(list_a, list_b)
} else if list_a.is_some() {
Ordering::Greater
} else if list_b.is_some() {
Ordering::Less
} else {
Ordering::Equal
};
adjust_for_dir(cmp_result, &namespace.direction)
}
SortKey::FileName(direction) => {
adjust_for_dir(compare_opts(&ctx_a.name, &ctx_b.name), direction)
}
SortKey::FileSize(direction) => {
adjust_for_dir(cmp_u64.compare(&ctx_a.size, &ctx_b.size), direction)
}
SortKey::FileImportedTime(direction) => adjust_for_dir(
cmp_date.compare(&ctx_a.import_time, &ctx_b.import_time),
direction,
),
SortKey::FileCreatedTime(direction) => adjust_for_dir(
cmp_date.compare(&ctx_a.create_time, &ctx_b.create_time),
direction,
),
SortKey::FileChangeTime(direction) => adjust_for_dir(
cmp_date.compare(&ctx_a.change_time, &ctx_b.change_time),
direction,
),
SortKey::FileType(direction) => {
adjust_for_dir(ctx_a.mime_type.cmp(&ctx_b.mime_type), direction)
}
SortKey::NumTags(direction) => adjust_for_dir(
cmp_u32.compare(&ctx_a.tag_count, &ctx_b.tag_count),
direction,
),
};
if !ordering.is_eq() {
return ordering;
}
}
Ordering::Equal
}
fn compare_opts<T: Ord + Sized>(opt_a: &Option<T>, opt_b: &Option<T>) -> Ordering {
let cmp = compare::natural();
if let (Some(a), Some(b)) = (opt_a, opt_b) {
cmp.compare(a, b)
} else if opt_a.is_some() {
Ordering::Greater
} else if opt_b.is_some() {
Ordering::Less
} else {
Ordering::Equal
}
}
fn compare_f32(a: f32, b: f32) -> Ordering {
if a > b {
Ordering::Greater
} else if b > a {
Ordering::Less
} else {
Ordering::Equal
}
}
fn adjust_for_dir(ordering: Ordering, direction: &SortDirection) -> Ordering {
if *direction == SortDirection::Descending {
ordering.reverse()
} else {
ordering
}
}
fn compare_tag_lists(list_a: &Vec<String>, list_b: &Vec<String>) -> Ordering {
let first_diff = list_a
.into_iter()
.zip(list_b.into_iter())
.find(|(a, b)| *a != *b);
if let Some(diff) = first_diff {
if let (Some(num_a), Some(num_b)) = (diff.0.parse::<f32>().ok(), diff.1.parse::<f32>().ok())
{
compare_f32(num_a, num_b)
} else {
let cmp = compare::natural();
cmp.compare(diff.0, diff.1)
}
} else {
Ordering::Equal
}
}
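`compare_files` walks the sort expression and returns the first non-equal ordering, with each key adjusted for its direction. The same idea reduced to a two-key standalone example (the `Item` struct and keys are hypothetical):

use std::cmp::Ordering;

#[derive(PartialEq)]
enum SortDirection {
    Ascending,
    Descending,
}

enum SortKey {
    Name(SortDirection),
    Size(SortDirection),
}

struct Item {
    name: String,
    size: u64,
}

fn adjust_for_dir(ordering: Ordering, direction: &SortDirection) -> Ordering {
    if *direction == SortDirection::Descending {
        ordering.reverse()
    } else {
        ordering
    }
}

// First non-equal key wins, exactly like compare_files above.
fn compare_items(a: &Item, b: &Item, expression: &[SortKey]) -> Ordering {
    for key in expression {
        let ordering = match key {
            SortKey::Name(dir) => adjust_for_dir(a.name.cmp(&b.name), dir),
            SortKey::Size(dir) => adjust_for_dir(a.size.cmp(&b.size), dir),
        };
        if !ordering.is_eq() {
            return ordering;
        }
    }
    Ordering::Equal
}

fn main() {
    let mut items = vec![
        Item { name: "b.png".into(), size: 10 },
        Item { name: "a.png".into(), size: 10 },
        Item { name: "c.png".into(), size: 5 },
    ];
    // Sort by size descending first, then by name ascending as a tie breaker.
    let expression = vec![
        SortKey::Size(SortDirection::Descending),
        SortKey::Name(SortDirection::Ascending),
    ];
    items.sort_by(|a, b| compare_items(a, b, &expression));
    assert_eq!(items[0].name, "a.png");
}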

@ -0,0 +1,55 @@
use crate::utils::{calculate_size, get_repo_from_context};
use mediarepo_core::bromine::prelude::*;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::jobs::{JobType, RunJobRequest};
use mediarepo_core::mediarepo_api::types::repo::SizeType;
use mediarepo_core::type_keys::SizeMetadataKey;
pub struct JobsNamespace;
impl NamespaceProvider for JobsNamespace {
fn name() -> &'static str {
"jobs"
}
fn register(handler: &mut EventHandler) {
events!(handler,
"run_job" => Self::run_job
)
}
}
impl JobsNamespace {
#[tracing::instrument(skip_all)]
pub async fn run_job(ctx: &Context, event: Event) -> IPCResult<()> {
let run_request = event.payload::<RunJobRequest>()?;
let repo = get_repo_from_context(ctx).await;
match run_request.job_type {
JobType::MigrateContentDescriptors => repo.migrate().await?,
JobType::CalculateSizes => calculate_all_sizes(ctx).await?,
JobType::CheckIntegrity => {}
}
ctx.emit_to(Self::name(), "run_job", ()).await?;
Ok(())
}
}
async fn calculate_all_sizes(ctx: &Context) -> RepoResult<()> {
let size_types = vec![
SizeType::Total,
SizeType::FileFolder,
SizeType::ThumbFolder,
SizeType::DatabaseFile,
];
for size_type in size_types {
let size = calculate_size(&size_type, ctx).await?;
let mut data = ctx.data.write().await;
let size_map = data.get_mut::<SizeMetadataKey>().unwrap();
size_map.insert(size_type, size);
}
Ok(())
}

@ -2,6 +2,7 @@ use mediarepo_core::bromine::prelude::AsyncStreamProtocolListener;
use mediarepo_core::bromine::{namespace, namespace::Namespace, IPCBuilder}; use mediarepo_core::bromine::{namespace, namespace::Namespace, IPCBuilder};
pub mod files; pub mod files;
pub mod jobs;
pub mod repo; pub mod repo;
pub mod tags; pub mod tags;
@ -10,4 +11,5 @@ pub fn build_namespaces<L: AsyncStreamProtocolListener>(builder: IPCBuilder<L>)
.add_namespace(namespace!(files::FilesNamespace)) .add_namespace(namespace!(files::FilesNamespace))
.add_namespace(namespace!(tags::TagsNamespace)) .add_namespace(namespace!(tags::TagsNamespace))
.add_namespace(namespace!(repo::RepoNamespace)) .add_namespace(namespace!(repo::RepoNamespace))
.add_namespace(namespace!(jobs::JobsNamespace))
} }

@ -6,10 +6,9 @@ use mediarepo_core::bromine::prelude::*;
use mediarepo_core::mediarepo_api::types::repo::{ use mediarepo_core::mediarepo_api::types::repo::{
FrontendState, RepositoryMetadata, SizeMetadata, SizeType, FrontendState, RepositoryMetadata, SizeMetadata, SizeType,
}; };
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey}; use mediarepo_core::type_keys::{RepoPathKey, SettingsKey, SizeMetadataKey};
use mediarepo_core::utils::get_folder_size;
use crate::utils::get_repo_from_context; use crate::utils::{calculate_size, get_repo_from_context};
pub struct RepoNamespace; pub struct RepoNamespace;
@ -40,7 +39,7 @@ impl RepoNamespace {
tag_count: counts.tag_count as u64, tag_count: counts.tag_count as u64,
namespace_count: counts.namespace_count as u64, namespace_count: counts.namespace_count as u64,
mapping_count: counts.mapping_count as u64, mapping_count: counts.mapping_count as u64,
hash_count: counts.hash_count as u64, hash_count: counts.cd_count as u64,
}; };
tracing::debug!("metadata = {:?}", metadata); tracing::debug!("metadata = {:?}", metadata);
@ -53,29 +52,21 @@ impl RepoNamespace {
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
async fn get_size_metadata(ctx: &Context, event: Event) -> IPCResult<()> { async fn get_size_metadata(ctx: &Context, event: Event) -> IPCResult<()> {
let size_type = event.payload::<SizeType>()?; let size_type = event.payload::<SizeType>()?;
let repo = get_repo_from_context(ctx).await; let data = ctx.data.read().await;
let (repo_path, settings) = { let size_cache = data.get::<SizeMetadataKey>().unwrap();
let data = ctx.data.read().await;
( let size = if let Some(size) = size_cache.get(&size_type) {
data.get::<RepoPathKey>().unwrap().clone(), *size
data.get::<SettingsKey>().unwrap().clone(), } else {
) calculate_size(&size_type, ctx).await?
};
let size = match &size_type {
SizeType::Total => get_folder_size(repo_path).await?,
SizeType::FileFolder => repo.get_main_store_size().await?,
SizeType::ThumbFolder => repo.get_thumb_store_size().await?,
SizeType::DatabaseFile => {
let db_path = repo_path.join(settings.database_path);
let database_metadata = fs::metadata(db_path).await?;
database_metadata.len()
}
}; };
let response = SizeMetadata { size, size_type };
tracing::debug!("size response = {:?}", response);
ctx.emit_to(Self::name(), "size_metadata", response).await?; ctx.emit_to(
Self::name(),
"size_metadata",
SizeMetadata { size, size_type },
)
.await?;
Ok(()) Ok(())
} }
@ -118,9 +109,7 @@ async fn get_frontend_state_path(ctx: &Context) -> IPCResult<PathBuf> {
let data = ctx.data.read().await; let data = ctx.data.read().await;
let settings = data.get::<SettingsKey>().unwrap(); let settings = data.get::<SettingsKey>().unwrap();
let repo_path = data.get::<RepoPathKey>().unwrap(); let repo_path = data.get::<RepoPathKey>().unwrap();
let state_path = repo_path let state_path = settings.paths.frontend_state_file_path(&repo_path);
.join(PathBuf::from(&settings.database_path).parent().unwrap())
.join("frontend-state.json");
Ok(state_path) Ok(state_path)
} }
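`get_size_metadata` now reads from the `SizeMetadataKey` map that the `CalculateSizes` job fills, and only falls back to computing a value when the cache has no entry. A minimal synchronous sketch of that cache-or-compute idea (the size values are fabricated; the real `calculate_size` walks the repo folders):

use std::collections::HashMap;

// Hypothetical size types, mirroring the variants used above.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum SizeType {
    Total,
    FileFolder,
    ThumbFolder,
    DatabaseFile,
}

// Pretend computation; the real calculate_size inspects folders and file metadata.
fn calculate_size(size_type: SizeType) -> u64 {
    match size_type {
        SizeType::Total => 4096,
        SizeType::FileFolder => 2048,
        SizeType::ThumbFolder => 1024,
        SizeType::DatabaseFile => 512,
    }
}

// Return the cached value when present, otherwise compute it once.
// Unlike the handler above, this sketch also stores the computed value.
fn size_with_cache(cache: &mut HashMap<SizeType, u64>, size_type: SizeType) -> u64 {
    *cache.entry(size_type).or_insert_with(|| calculate_size(size_type))
}

fn main() {
    let mut cache: HashMap<SizeType, u64> = HashMap::new();

    // Eager refresh, as the CalculateSizes job does for every size type.
    for size_type in [
        SizeType::Total,
        SizeType::FileFolder,
        SizeType::ThumbFolder,
        SizeType::DatabaseFile,
    ] {
        size_with_cache(&mut cache, size_type);
    }

    // Later lookups are served from the cache.
    assert_eq!(size_with_cache(&mut cache, SizeType::Total), 4096);
}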

@ -1,10 +1,12 @@
use crate::from_model::FromModel; use crate::from_model::FromModel;
use crate::utils::{file_by_identifier, get_repo_from_context}; use crate::utils::{file_by_identifier, get_repo_from_context};
use mediarepo_core::bromine::prelude::*; use mediarepo_core::bromine::prelude::*;
use mediarepo_core::content_descriptor::decode_content_descriptor;
use mediarepo_core::mediarepo_api::types::files::{GetFileTagsRequest, GetFilesTagsRequest}; use mediarepo_core::mediarepo_api::types::files::{GetFileTagsRequest, GetFilesTagsRequest};
use mediarepo_core::mediarepo_api::types::tags::{ use mediarepo_core::mediarepo_api::types::tags::{
ChangeFileTagsRequest, NamespaceResponse, TagResponse, ChangeFileTagsRequest, NamespaceResponse, TagResponse,
}; };
use rayon::iter::{IntoParallelIterator, ParallelIterator};
pub struct TagsNamespace; pub struct TagsNamespace;
@ -78,7 +80,13 @@ impl TagsNamespace {
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let request = event.payload::<GetFilesTagsRequest>()?; let request = event.payload::<GetFilesTagsRequest>()?;
let tag_responses: Vec<TagResponse> = repo let tag_responses: Vec<TagResponse> = repo
.find_tags_for_hashes(request.hashes) .find_tags_for_file_identifiers(
request
.cds
.into_par_iter()
.filter_map(|c| decode_content_descriptor(c).ok())
.collect(),
)
.await? .await?
.into_iter() .into_iter()
.map(TagResponse::from_model) .map(TagResponse::from_model)

@ -1,10 +1,15 @@
use mediarepo_core::bromine::ipc::context::Context; use mediarepo_core::bromine::ipc::context::Context;
use mediarepo_core::content_descriptor::decode_content_descriptor;
use mediarepo_core::error::{RepoError, RepoResult}; use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::mediarepo_api::types::identifier::FileIdentifier; use mediarepo_core::mediarepo_api::types::identifier::FileIdentifier;
use mediarepo_core::mediarepo_api::types::repo::SizeType;
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey};
use mediarepo_core::utils::get_folder_size;
use mediarepo_model::file::File; use mediarepo_model::file::File;
use mediarepo_model::repo::Repo; use mediarepo_model::repo::Repo;
use mediarepo_model::type_keys::RepoKey; use mediarepo_model::type_keys::RepoKey;
use std::sync::Arc; use std::sync::Arc;
use tokio::fs;
pub async fn get_repo_from_context(ctx: &Context) -> Arc<Repo> { pub async fn get_repo_from_context(ctx: &Context) -> Arc<Repo> {
let data = ctx.data.read().await; let data = ctx.data.read().await;
@ -15,20 +20,44 @@ pub async fn get_repo_from_context(ctx: &Context) -> Arc<Repo> {
pub async fn file_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoResult<File> { pub async fn file_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoResult<File> {
let file = match identifier { let file = match identifier {
FileIdentifier::ID(id) => repo.file_by_id(id).await, FileIdentifier::ID(id) => repo.file_by_id(id).await,
FileIdentifier::Hash(hash) => repo.file_by_hash(hash).await, FileIdentifier::CD(cd) => repo.file_by_cd(&decode_content_descriptor(cd)?).await,
}?; }?;
file.ok_or_else(|| RepoError::from("Thumbnail not found")) file.ok_or_else(|| RepoError::from("File not found"))
} }
pub async fn hash_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoResult<String> { pub async fn cd_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoResult<Vec<u8>> {
match identifier { match identifier {
FileIdentifier::ID(id) => { FileIdentifier::ID(id) => {
let file = repo let file = repo
.file_by_id(id) .file_by_id(id)
.await? .await?
.ok_or_else(|| "Thumbnail not found")?; .ok_or_else(|| "Thumbnail not found")?;
Ok(file.hash().to_owned()) Ok(file.cd().to_owned())
} }
FileIdentifier::Hash(hash) => Ok(hash), FileIdentifier::CD(cd) => decode_content_descriptor(cd),
} }
} }
pub async fn calculate_size(size_type: &SizeType, ctx: &Context) -> RepoResult<u64> {
let repo = get_repo_from_context(ctx).await;
let (repo_path, settings) = {
let data = ctx.data.read().await;
(
data.get::<RepoPathKey>().unwrap().clone(),
data.get::<SettingsKey>().unwrap().clone(),
)
};
let size = match &size_type {
SizeType::Total => get_folder_size(repo_path).await?,
SizeType::FileFolder => repo.get_main_store_size().await?,
SizeType::ThumbFolder => repo.get_thumb_store_size().await?,
SizeType::DatabaseFile => {
let db_path = settings.paths.db_file_path(&repo_path);
let database_metadata = fs::metadata(db_path).await?;
database_metadata.len()
}
};
Ok(size)
}
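`calculate_size` delegates `SizeType::Total` to `get_folder_size`, which amounts to recursively summing file sizes under a directory. A rough standard-library stand-in (symlinks are not treated specially in this sketch):

use std::fs;
use std::io;
use std::path::Path;

// Rough stand-in for get_folder_size: sum the byte size of every file below `path`.
fn folder_size(path: &Path) -> io::Result<u64> {
    let mut total = 0;
    for entry in fs::read_dir(path)? {
        let entry = entry?;
        let metadata = entry.metadata()?;
        if metadata.is_dir() {
            total += folder_size(&entry.path())?;
        } else {
            total += metadata.len();
        }
    }
    Ok(total)
}

fn main() -> io::Result<()> {
    let size = folder_size(Path::new("."))?;
    println!("current directory uses {} bytes", size);
    Ok(())
}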

@ -1,5 +0,0 @@
pub static SETTINGS_PATH: &str = "./repo.toml";
pub static DEFAULT_STORAGE_NAME: &str = "Main";
pub static DEFAULT_STORAGE_PATH: &str = "files";
pub static THUMBNAIL_STORAGE_NAME: &str = "Thumbnails";
pub static THUMBNAIL_STORAGE_PATH: &str = "./thumbnails";

@ -3,11 +3,12 @@ use rolling_file::RollingConditionBasic;
use std::fs; use std::fs;
use std::path::PathBuf; use std::path::PathBuf;
use mediarepo_core::settings::LoggingSettings;
use tracing::Level; use tracing::Level;
use tracing_appender::non_blocking::{NonBlocking, WorkerGuard}; use tracing_appender::non_blocking::{NonBlocking, WorkerGuard};
use tracing_flame::FlameLayer; use tracing_flame::FlameLayer;
use tracing_log::LogTracer; use tracing_log::LogTracer;
use tracing_subscriber::filter::{self}; use tracing_subscriber::filter::{self, Targets};
use tracing_subscriber::fmt::format::FmtSpan; use tracing_subscriber::fmt::format::FmtSpan;
use tracing_subscriber::layer::SubscriberExt; use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt; use tracing_subscriber::util::SubscriberInitExt;
@ -19,7 +20,7 @@ use tracing_subscriber::{
#[allow(dyn_drop)] #[allow(dyn_drop)]
pub type DropGuard = Box<dyn Drop>; pub type DropGuard = Box<dyn Drop>;
pub fn init_tracing(repo_path: &PathBuf) -> Vec<DropGuard> { pub fn init_tracing(repo_path: &PathBuf, log_cfg: &LoggingSettings) -> Vec<DropGuard> {
LogTracer::init().expect("failed to subscribe to log entries"); LogTracer::init().expect("failed to subscribe to log entries");
let log_path = repo_path.join("logs"); let log_path = repo_path.join("logs");
let mut guards = Vec::new(); let mut guards = Vec::new();
@ -54,12 +55,7 @@ pub fn init_tracing(repo_path: &PathBuf) -> Vec<DropGuard> {
.pretty() .pretty()
.with_ansi(false) .with_ansi(false)
.with_span_events(FmtSpan::NONE) .with_span_events(FmtSpan::NONE)
.with_filter( .with_filter(get_sql_targets(log_cfg.trace_sql));
filter::Targets::new()
.with_target("sqlx", Level::WARN)
.with_target("sea_orm", Level::TRACE)
.with_target("mediarepo_database", Level::TRACE),
);
let (bromine_writer, guard) = get_bromine_log_writer(&log_path); let (bromine_writer, guard) = get_bromine_log_writer(&log_path);
guards.push(Box::new(guard) as DropGuard); guards.push(Box::new(guard) as DropGuard);
@ -69,7 +65,7 @@ pub fn init_tracing(repo_path: &PathBuf) -> Vec<DropGuard> {
.pretty() .pretty()
.with_ansi(false) .with_ansi(false)
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE) .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
.with_filter(filter::Targets::new().with_target("bromine", Level::DEBUG)); .with_filter(get_bromine_targets(log_cfg.trace_api_calls));
let (app_log_writer, guard) = get_application_log_writer(&log_path); let (app_log_writer, guard) = get_application_log_writer(&log_path);
guards.push(Box::new(guard) as DropGuard); guards.push(Box::new(guard) as DropGuard);
@ -79,16 +75,7 @@ pub fn init_tracing(repo_path: &PathBuf) -> Vec<DropGuard> {
.pretty() .pretty()
.with_ansi(false) .with_ansi(false)
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE) .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
.with_filter( .with_filter(get_app_targets(log_cfg.level.clone().into()));
filter::Targets::new()
.with_target("bromine", Level::WARN)
.with_target("sqlx", Level::WARN)
.with_target("sea_orm", Level::INFO)
.with_target("tokio", Level::WARN)
.with_target("console_subscriber", Level::INFO)
.with_target("h2", Level::INFO)
.with_default(Level::DEBUG),
);
let registry = Registry::default() let registry = Registry::default()
.with(stdout_layer) .with(stdout_layer)
@ -144,6 +131,36 @@ fn get_application_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard)
) )
} }
fn get_app_targets(level: Option<Level>) -> Targets {
filter::Targets::new()
.with_target("bromine", Level::WARN)
.with_target("sqlx", Level::WARN)
.with_target("sea_orm", Level::WARN)
.with_target("tokio", Level::WARN)
.with_target("console_subscriber", Level::ERROR)
.with_target("h2", Level::WARN)
.with_default(level)
}
fn get_sql_targets(trace_sql: bool) -> Targets {
if trace_sql {
filter::Targets::new()
.with_target("sqlx", Level::WARN)
.with_target("sea_orm", Level::TRACE)
.with_target("mediarepo_database", Level::TRACE)
} else {
filter::Targets::new().with_default(None)
}
}
fn get_bromine_targets(trace_bromine: bool) -> Targets {
if trace_bromine {
filter::Targets::new().with_target("bromine", Level::DEBUG)
} else {
filter::Targets::new().with_default(None)
}
}
pub fn init_tracing_flame() -> DropGuard { pub fn init_tracing_flame() -> DropGuard {
let fmt_layer = fmt::Layer::default(); let fmt_layer = fmt::Layer::default();
let (flame_layer, guard) = FlameLayer::with_file("./tracing.folded").unwrap(); let (flame_layer, guard) = FlameLayer::with_file("./tracing.folded").unwrap();
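The new `get_*_targets` helpers above gate whole log layers behind config flags by returning either a populated `Targets` filter or one with no default level. A compressed sketch of wiring such a conditional filter into a subscriber (assumes `tracing` and `tracing-subscriber` with default features; the `TRACE_SQL` environment variable is only for the demo):

use tracing::Level;
use tracing_subscriber::filter::{LevelFilter, Targets};
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::Layer;

// SQL tracing is either wired to its own verbose targets or switched off entirely.
fn sql_targets(trace_sql: bool) -> Targets {
    if trace_sql {
        Targets::new()
            .with_target("sqlx", Level::WARN)
            .with_target("sea_orm", Level::TRACE)
    } else {
        Targets::new().with_default(LevelFilter::OFF)
    }
}

fn main() {
    let trace_sql = std::env::var("TRACE_SQL").is_ok();

    let sql_layer = tracing_subscriber::fmt::layer()
        .with_filter(sql_targets(trace_sql));

    tracing_subscriber::registry().with(sql_layer).init();
    tracing::info!(target: "sea_orm", "only visible when TRACE_SQL is set");
}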

@ -6,22 +6,17 @@ use tokio::runtime;
use tokio::runtime::Runtime; use tokio::runtime::Runtime;
use mediarepo_core::error::RepoResult; use mediarepo_core::error::RepoResult;
use mediarepo_core::futures; use mediarepo_core::fs::drop_file::DropFile;
use mediarepo_core::settings::Settings; use mediarepo_core::settings::{PathSettings, Settings};
use mediarepo_core::utils::parse_tags_file; use mediarepo_core::tokio_graceful_shutdown::{SubsystemHandle, Toplevel};
use mediarepo_model::file::{File as RepoFile, File};
use mediarepo_model::repo::Repo; use mediarepo_model::repo::Repo;
use mediarepo_socket::start_tcp_server; use mediarepo_socket::start_tcp_server;
use num_integer::Integer;
use std::env; use std::env;
use std::time::Duration;
use tokio::io::AsyncWriteExt;
use crate::constants::{
DEFAULT_STORAGE_NAME, DEFAULT_STORAGE_PATH, SETTINGS_PATH, THUMBNAIL_STORAGE_NAME,
THUMBNAIL_STORAGE_PATH,
};
use crate::utils::{create_paths_for_repo, get_repo, load_settings}; use crate::utils::{create_paths_for_repo, get_repo, load_settings};
mod constants;
mod logging; mod logging;
mod utils; mod utils;
@ -50,17 +45,6 @@ enum SubCommand {
force: bool, force: bool,
}, },
/// Imports file from a folder (by glob pattern) into the repository
Import {
/// The path to the folder where the files are located
#[structopt()]
folder_path: String,
/// If imported files should be deleted after import
#[structopt(long)]
delete: bool,
},
/// Starts the event server for the selected repository /// Starts the event server for the selected repository
Start, Start,
} }
@ -69,27 +53,34 @@ fn main() -> RepoResult<()> {
let mut opt: Opt = Opt::from_args(); let mut opt: Opt = Opt::from_args();
opt.repo = env::current_dir().unwrap().join(opt.repo); opt.repo = env::current_dir().unwrap().join(opt.repo);
if opt.repo.exists() { let settings = if opt.repo.exists() {
opt.repo = opt.repo.canonicalize().unwrap(); opt.repo = opt.repo.canonicalize().unwrap();
} load_settings(&opt.repo)?
} else {
Settings::default()
};
clean_old_connection_files(&opt.repo)?;
let mut guards = Vec::new(); let mut guards = Vec::new();
if opt.profile { if opt.profile {
guards.push(logging::init_tracing_flame()); guards.push(logging::init_tracing_flame());
} else { } else {
guards.append(&mut logging::init_tracing(&opt.repo)); guards.append(&mut logging::init_tracing(&opt.repo, &settings.logging));
} }
match opt.cmd.clone() { let result = match opt.cmd.clone() {
SubCommand::Init { force } => get_single_thread_runtime().block_on(init(opt, force)), SubCommand::Init { force } => get_single_thread_runtime().block_on(init(opt, force)),
SubCommand::Start => get_multi_thread_runtime().block_on(start_server(opt)), SubCommand::Start => get_multi_thread_runtime().block_on(start_server(opt, settings)),
SubCommand::Import { };
folder_path,
delete,
} => get_single_thread_runtime().block_on(import(opt, folder_path, delete)),
}?;
Ok(()) match result {
Ok(_) => Ok(()),
Err(e) => {
tracing::error!("a critical error occurred when running the daemon: {}", e);
Err(e)
}
}
} }
fn get_single_thread_runtime() -> Runtime { fn get_single_thread_runtime() -> Runtime {
@ -109,43 +100,112 @@ fn get_multi_thread_runtime() -> Runtime {
.unwrap() .unwrap()
} }
async fn init_repo(opt: &Opt) -> RepoResult<(Settings, Repo)> { async fn init_repo(opt: &Opt, paths: &PathSettings) -> RepoResult<Repo> {
let settings = load_settings(&opt.repo.join(SETTINGS_PATH)).await?; let repo = get_repo(&opt.repo, paths).await?;
let mut repo = get_repo(&opt.repo.join(&settings.database_path).to_str().unwrap()).await?;
repo.set_main_storage(&settings.default_file_store).await?; Ok(repo)
repo.set_thumbnail_storage(opt.repo.join(&settings.thumbnail_store))
.await?;
Ok((settings, repo))
} }
/// Starts the server /// Starts the server
async fn start_server(opt: Opt) -> RepoResult<()> { async fn start_server(opt: Opt, settings: Settings) -> RepoResult<()> {
let (settings, repo) = init_repo(&opt).await?; let repo = init_repo(&opt, &settings.paths).await?;
let mut handles = Vec::new(); let mut top_level = Toplevel::new();
#[cfg(unix)] #[cfg(unix)]
{ {
let socket_path = opt.repo.join("repo.sock"); if settings.server.unix_socket.enabled {
let handle = mediarepo_socket::create_unix_socket( let settings = settings.clone();
socket_path, let repo_path = opt.repo.clone();
opt.repo.clone(), let repo = repo.clone();
settings.clone(),
repo.clone(), top_level = top_level.start("mediarepo-unix-socket", |subsystem| {
)?; Box::pin(async move {
handles.push(handle); start_and_await_unix_socket(subsystem, repo_path, settings, repo).await?;
Ok(())
})
})
}
}
if settings.server.tcp.enabled {
top_level = top_level.start("mediarepo-tcp", move |subsystem| {
Box::pin(async move {
start_and_await_tcp_server(subsystem, opt.repo, settings, repo).await?;
Ok(())
})
})
}
if let Err(e) = top_level
.catch_signals()
.handle_shutdown_requests(Duration::from_millis(1000))
.await
{
tracing::error!("an error occurred when running the servers {}", e);
} }
let (address, tcp_handle) = start_tcp_server( tracing::warn!(
settings.listen_address.clone(), r"All servers quit.
settings.port_range, Either they were requested to stop, a fatal error occurred or no servers are enabled in the config.
opt.repo.clone(), Stopping daemon..."
);
Ok(())
}
async fn start_and_await_tcp_server(
subsystem: SubsystemHandle,
repo_path: PathBuf,
settings: Settings,
repo: Repo,
) -> RepoResult<()> {
let (address, handle) = start_tcp_server(subsystem.clone(), repo_path.clone(), settings, repo)?;
let (mut file, _guard) = DropFile::new(repo_path.join("repo.tcp")).await?;
file.write_all(&address.into_bytes()).await?;
tokio::select! {
_ = subsystem.on_shutdown_requested() => {
tracing::info!("shutdown requested")
},
result = handle => {
if let Err(e) = result {
tracing::error!("the tcp server shut down with an error {}", e);
subsystem.request_shutdown();
}
}
}
Ok(())
}
#[cfg(unix)]
async fn start_and_await_unix_socket(
subsystem: SubsystemHandle,
repo_path: PathBuf,
settings: Settings,
repo: Repo,
) -> RepoResult<()> {
let socket_path = repo_path.join("repo.sock");
let handle = mediarepo_socket::create_unix_socket(
subsystem.clone(),
socket_path,
repo_path.clone(),
settings, settings,
repo, repo,
)?; )?;
handles.push(tcp_handle); let _guard = DropFile::from_path(repo_path.join("repo.sock"));
fs::write(opt.repo.join("repo.tcp"), &address.into_bytes()).await?;
futures::future::join_all(handles.into_iter()).await; tokio::select! {
_ = subsystem.on_shutdown_requested() => {
tracing::info!("shutdown requested")
},
result = handle => {
if let Err(e) = result {
tracing::error!("the unix socket shut down with an error {}", e);
subsystem.request_shutdown();
}
}
}
Ok(()) Ok(())
} }
@ -153,148 +213,40 @@ async fn start_server(opt: Opt) -> RepoResult<()> {
/// Initializes an empty repository /// Initializes an empty repository
async fn init(opt: Opt, force: bool) -> RepoResult<()> { async fn init(opt: Opt, force: bool) -> RepoResult<()> {
log::info!("Initializing repository at {:?}", opt.repo); log::info!("Initializing repository at {:?}", opt.repo);
if force { if force {
log::debug!("Removing old repository"); log::debug!("Removing old repository");
fs::remove_dir_all(&opt.repo).await?; fs::remove_dir_all(&opt.repo).await?;
} }
let settings = Settings::default(); let settings = Settings::default();
log::debug!("Creating paths"); log::debug!("Creating paths");
create_paths_for_repo( create_paths_for_repo(&opt.repo, &settings.paths).await?;
&opt.repo,
&settings, if settings.paths.db_file_path(&opt.repo).exists() {
DEFAULT_STORAGE_PATH,
THUMBNAIL_STORAGE_PATH,
)
.await?;
let db_path = opt.repo.join(&settings.database_path);
if db_path.exists() {
panic!("Database already exists in location. Use --force with init to delete everything and start a new repository"); panic!("Database already exists in location. Use --force with init to delete everything and start a new repository");
} }
log::debug!("Creating repo"); log::debug!("Creating repo");
let repo = get_repo(&db_path.to_str().unwrap()).await?; let _repo = get_repo(&opt.repo, &settings.paths).await?;
let storage_path = opt.repo.join(DEFAULT_STORAGE_PATH).canonicalize().unwrap();
log::debug!("Adding storage");
repo.add_storage(DEFAULT_STORAGE_NAME, storage_path.to_str().unwrap())
.await?;
let thumb_storage_path = opt
.repo
.join(THUMBNAIL_STORAGE_PATH)
.canonicalize()
.unwrap();
repo.add_storage(THUMBNAIL_STORAGE_NAME, thumb_storage_path.to_str().unwrap())
.await?;
let settings_string = settings.to_toml_string()?;
log::debug!("Writing settings"); log::debug!("Writing settings");
fs::write(opt.repo.join(SETTINGS_PATH), &settings_string.into_bytes()).await?; settings.save(&opt.repo)?;
log::info!("Repository initialized"); log::info!("Repository initialized");
Ok(()) Ok(())
} }
/// Imports files from a source into the database fn clean_old_connection_files(root: &PathBuf) -> RepoResult<()> {
async fn import(opt: Opt, path: String, delete_files: bool) -> RepoResult<()> { let paths = ["repo.tcp", "repo.sock"];
let (_s, repo) = init_repo(&opt).await?;
log::info!("Importing");
let paths: Vec<PathBuf> = glob::glob(&path)
.unwrap()
.into_iter()
.filter_map(|r| r.ok())
.filter(|e| e.is_file())
.collect();
for path in paths { for path in paths {
if let Err(e) = import_single_image(&path, &repo).await { let path = root.join(path);
log::error!("Import failed: {:?}", e);
if delete_files {
log::info!("Deleting file {:?}", path);
let _ = fs::remove_file(&path).await;
}
} else {
if delete_files {
log::info!("Deleting file {:?}", path);
let _ = fs::remove_file(&path).await;
}
}
}
log::info!("Creating thumbnails...");
let mut files = repo.files().await?;
for _ in 0..(files.len().div_ceil(&64)) {
futures::future::join_all(
(0..64)
.filter_map(|_| files.pop())
.map(|f| create_file_thumbnails(&repo, f)),
)
.await
.into_iter()
.filter_map(|r| r.err())
.for_each(|e| log::error!("Failed to create thumbnail: {:?}", e));
}
Ok(()) if path.exists() {
} std::fs::remove_file(&path)?;
/// Creates thumbnails of all sizes
async fn import_single_image(path: &PathBuf, repo: &Repo) -> RepoResult<()> {
log::info!("Importing file");
let file = repo.add_file_by_path(path.clone()).await?;
log::info!("Adding tags");
let tags_path = PathBuf::from(format!("{}{}", path.to_str().unwrap(), ".txt"));
add_tags_from_tags_file(tags_path, repo, file).await?;
Ok(())
}
async fn add_tags_from_tags_file(
tags_path: PathBuf,
repo: &Repo,
file: RepoFile,
) -> RepoResult<()> {
log::info!("Adding tags");
if tags_path.exists() {
let mut tags = parse_tags_file(tags_path).await?;
log::info!("Found {} tags in the tag file", tags.len());
let resolved_tags = repo.tags_by_names(tags.clone()).await?;
tags.retain(|tag| {
resolved_tags
.iter()
.find(|t| if let (Some(ns1), Some(ns2)) = (t.namespace(), &tag.0) {
*ns1.name() == *ns2
} else { t.namespace().is_none() && tag.0.is_none() } && *t.name() == *tag.1)
.is_none()
});
let mut tag_ids: Vec<i64> = resolved_tags.into_iter().map(|t| t.id()).collect();
log::info!("Existing tag_ids count is {}", tag_ids.len());
log::info!("{} tags need to be created", tags.len());
for (namespace, name) in tags {
let tag = if let Some(namespace) = namespace {
log::info!("Adding namespaced tag '{}:{}'", namespace, name);
repo.add_namespaced_tag(name, namespace).await?
} else {
log::info!("Adding unnamespaced tag '{}'", name);
repo.add_unnamespaced_tag(name).await?
};
tag_ids.push(tag.id());
}
log::info!("Mapping {} tags to the file", tag_ids.len());
if !tag_ids.is_empty() {
file.add_tags(tag_ids).await?;
} }
} else {
log::info!("No tags file '{:?}' found", tags_path);
} }
Ok(())
}
#[tracing::instrument(skip(repo, file))]
async fn create_file_thumbnails(repo: &Repo, file: File) -> RepoResult<()> {
let file_thumbnails = repo.get_file_thumbnails(file.hash().to_owned()).await?;
if file_thumbnails.is_empty() {
repo.create_thumbnails_for_file(&file).await?;
}
Ok(()) Ok(())
} }
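Both `start_and_await_*` functions above race the listener's join handle against the subsystem's shutdown request. The same select pattern stripped down to plain tokio primitives (a `watch` channel stands in for `tokio_graceful_shutdown`'s `SubsystemHandle`; assumes tokio with the full feature set):

use std::time::Duration;
use tokio::sync::watch;
use tokio::task::JoinHandle;

// Run the server task until either it finishes (possibly with an error) or a
// shutdown is requested, whichever comes first.
async fn run_until_shutdown(
    mut shutdown: watch::Receiver<bool>,
    handle: JoinHandle<Result<(), String>>,
) {
    tokio::select! {
        _ = shutdown.changed() => {
            println!("shutdown requested");
        }
        result = handle => {
            match result {
                Ok(Err(e)) => eprintln!("server stopped with an error: {}", e),
                Ok(Ok(())) => println!("server stopped"),
                Err(e) => eprintln!("server task panicked: {}", e),
            }
        }
    }
}

#[tokio::main]
async fn main() {
    let (tx, rx) = watch::channel(false);

    // Stand-in for the spawned listener task.
    let server: JoinHandle<Result<(), String>> = tokio::spawn(async {
        tokio::time::sleep(Duration::from_secs(3600)).await;
        Ok(())
    });

    // Request shutdown shortly after startup, as the "shutdown" IPC handler would.
    tokio::spawn(async move {
        tokio::time::sleep(Duration::from_millis(100)).await;
        let _ = tx.send(true);
    });

    run_until_shutdown(rx, server).await;
}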

@ -1,37 +1,49 @@
use mediarepo_core::error::RepoResult; use mediarepo_core::error::RepoResult;
use mediarepo_core::settings::Settings; use mediarepo_core::settings::v1::SettingsV1;
use mediarepo_core::settings::{PathSettings, Settings};
use mediarepo_model::repo::Repo; use mediarepo_model::repo::Repo;
use std::path::PathBuf; use std::path::PathBuf;
use tokio::fs; use tokio::fs;
/// Loads the settings from a toml path /// Loads the settings from a toml path
pub async fn load_settings(path: &PathBuf) -> RepoResult<Settings> { pub fn load_settings(root_path: &PathBuf) -> RepoResult<Settings> {
let contents = fs::read_to_string(path).await?; let contents = std::fs::read_to_string(root_path.join("repo.toml"))?;
Settings::from_toml_string(&contents)
if let Ok(settings_v1) = SettingsV1::from_toml_string(&contents) {
let settings = Settings::from_v1(settings_v1)?;
settings.save(root_path)?;
Ok(settings)
} else {
Settings::read(root_path)
}
} }
pub async fn get_repo(db_path: &str) -> RepoResult<Repo> { pub async fn get_repo(root_path: &PathBuf, path_settings: &PathSettings) -> RepoResult<Repo> {
Repo::connect(format!("sqlite://{}", db_path)).await Repo::connect(
format!(
"sqlite://{}",
path_settings.db_file_path(root_path).to_string_lossy()
),
path_settings.files_dir(root_path),
path_settings.thumbs_dir(root_path),
)
.await
} }
pub async fn create_paths_for_repo( pub async fn create_paths_for_repo(root: &PathBuf, settings: &PathSettings) -> RepoResult<()> {
root: &PathBuf,
settings: &Settings,
storage_path: &str,
thumbnail_path: &str,
) -> RepoResult<()> {
if !root.exists() { if !root.exists() {
fs::create_dir_all(&root).await?; fs::create_dir_all(&root).await?;
} }
let db_path = root.join(&settings.database_path); let db_path = settings.database_dir(root);
if !db_path.exists() { if !db_path.exists() {
fs::create_dir_all(db_path.parent().unwrap()).await?; fs::create_dir_all(db_path).await?;
} }
let storage_path = root.join(storage_path); let files_path = settings.files_dir(root);
if !storage_path.exists() { if !files_path.exists() {
fs::create_dir_all(storage_path).await?; fs::create_dir_all(files_path).await?;
} }
let thumbnail_path = root.join(thumbnail_path); let thumbnail_path = settings.thumbs_dir(root);
if !thumbnail_path.exists() { if !thumbnail_path.exists() {
fs::create_dir_all(thumbnail_path).await?; fs::create_dir_all(thumbnail_path).await?;
} }
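`load_settings` now tries the old v1 format first and silently migrates it to the current layout. A reduced sketch of that parse-then-migrate fallback (the field names are hypothetical, loosely taken from the old `listen_address`/`port_range` settings; assumes `serde` with derive and `toml`):

use serde::Deserialize;

// Hypothetical reduced versions of the old (v1) and current settings formats;
// the real structs live in mediarepo_core::settings.
#[derive(Debug, Deserialize)]
struct SettingsV1 {
    listen_address: String,
    port_range: (u16, u16),
}

#[derive(Debug, Deserialize)]
struct Settings {
    tcp_listen_address: String,
    tcp_port_range: (u16, u16),
}

impl Settings {
    fn from_v1(v1: SettingsV1) -> Self {
        Settings {
            tcp_listen_address: v1.listen_address,
            tcp_port_range: v1.port_range,
        }
    }
}

// Same shape as load_settings above: if the file still parses as the legacy
// format it is migrated, otherwise it is read as the current format.
fn parse_settings(contents: &str) -> Result<Settings, toml::de::Error> {
    match toml::from_str::<SettingsV1>(contents) {
        Ok(v1) => Ok(Settings::from_v1(v1)),
        Err(_) => toml::from_str(contents),
    }
}

fn main() {
    let legacy = "listen_address = \"127.0.0.1\"\nport_range = [13400, 13500]";
    let settings = parse_settings(legacy).unwrap();
    println!("{:?}", settings);
}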
