Merge pull request #2 from Trivernis/feature/file-status

Feature/file status
Julius Riegel committed via GitHub
commit 3c6599e66f

@ -79,6 +79,17 @@ version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
[[package]]
name = "async-recursion"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7d78656ba01f1b93024b7c3a0467f1608e4be67d725749fdcd7d2c7678fd7a2"
dependencies = [
"proc-macro2 1.0.35",
"quote 1.0.10",
"syn 1.0.84",
]
[[package]]
name = "async-stream"
version = "0.3.2"
@ -117,7 +128,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "616896e05fc0e2649463a93a15183c6a16bf03413a7af88ef1285ddedfa9cda5"
dependencies = [
"num-traits",
"num-traits 0.2.14",
]
[[package]]
@ -168,7 +179,7 @@ version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
dependencies = [
"serde",
"serde 1.0.132",
]
[[package]]
@ -259,7 +270,7 @@ dependencies = [
"byteorder",
"futures 0.3.19",
"lazy_static",
"serde",
"serde 1.0.132",
"thiserror",
"tokio",
"tracing",
@ -328,8 +339,8 @@ checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
dependencies = [
"libc",
"num-integer",
"num-traits",
"serde",
"num-traits 0.2.14",
"serde 1.0.132",
"time 0.1.44",
"winapi",
]
@ -372,6 +383,22 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "120133d4db2ec47efe2e26502ee984747630c67f51974fca0b6c1340cf2368d3"
[[package]]
name = "config"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b1b9d958c2b1368a663f05538fc1b5975adce1e19f435acceae987aceeeb369"
dependencies = [
"lazy_static",
"nom 5.1.2",
"rust-ini",
"serde 1.0.132",
"serde-hjson",
"serde_json",
"toml",
"yaml-rust",
]
[[package]]
name = "console-api"
version = "0.1.0"
@ -395,7 +422,7 @@ dependencies = [
"futures 0.3.19",
"hdrhistogram",
"humantime",
"serde",
"serde 1.0.132",
"serde_json",
"thread_local",
"tokio",
@ -847,7 +874,7 @@ dependencies = [
"byteorder",
"flate2",
"nom 7.1.0",
"num-traits",
"num-traits 0.2.14",
]
[[package]]
@ -974,7 +1001,7 @@ dependencies = [
"jpeg-decoder",
"num-iter",
"num-rational",
"num-traits",
"num-traits 0.2.14",
"png",
"scoped_threadpool",
"tiff",
@ -1065,6 +1092,19 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "lexical-core"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe"
dependencies = [
"arrayvec 0.5.2",
"bitflags",
"cfg-if 1.0.0",
"ryu",
"static_assertions",
]
[[package]]
name = "libc"
version = "0.2.112"
@ -1101,6 +1141,12 @@ dependencies = [
"cc",
]
[[package]]
name = "linked-hash-map"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
[[package]]
name = "lock_api"
version = "0.4.5"
@ -1136,12 +1182,12 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
[[package]]
name = "mediarepo-api"
version = "0.20.0"
source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=0c897acfd959c776fc10bd8fabdd2eb22b437be3#0c897acfd959c776fc10bd8fabdd2eb22b437be3"
version = "0.27.0"
source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=9fd25e4696cdd68886fa98579aef4fa2ca561dc0#9fd25e4696cdd68886fa98579aef4fa2ca561dc0"
dependencies = [
"bromine",
"chrono",
"serde",
"serde 1.0.132",
"serde_piecewise_default",
"thiserror",
"tracing",
@ -1152,6 +1198,8 @@ name = "mediarepo-core"
version = "0.1.0"
dependencies = [
"base64",
"config",
"data-encoding",
"futures 0.3.19",
"glob",
"itertools",
@ -1159,11 +1207,12 @@ dependencies = [
"multibase",
"multihash",
"sea-orm",
"serde",
"serde 1.0.132",
"sqlx",
"thiserror",
"thumbnailer",
"tokio",
"tokio-graceful-shutdown",
"toml",
"tracing",
"typemap_rev",
@ -1213,7 +1262,7 @@ dependencies = [
"mime",
"mime_guess",
"sea-orm",
"serde",
"serde 1.0.132",
"tokio",
"tracing",
"typemap_rev",
@ -1229,7 +1278,8 @@ dependencies = [
"mediarepo-database",
"mediarepo-model",
"port_check",
"serde",
"rayon",
"serde 1.0.132",
"tokio",
"tracing",
"tracing-futures",
@ -1385,6 +1435,7 @@ version = "5.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af"
dependencies = [
"lexical-core",
"memchr",
"version_check",
]
@ -1417,7 +1468,7 @@ checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
"num-traits 0.2.14",
]
[[package]]
@ -1427,7 +1478,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
dependencies = [
"autocfg",
"num-traits",
"num-traits 0.2.14",
]
[[package]]
@ -1438,7 +1489,7 @@ checksum = "b2021c8337a54d21aca0d59a92577a029af9431cb59b909b03252b9c164fad59"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
"num-traits 0.2.14",
]
[[package]]
@ -1449,7 +1500,16 @@ checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
"num-traits 0.2.14",
]
[[package]]
name = "num-traits"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31"
dependencies = [
"num-traits 0.2.14",
]
[[package]]
@ -1890,6 +1950,12 @@ dependencies = [
"chrono",
]
[[package]]
name = "rust-ini"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e52c148ef37f8c375d49d5a73aa70713125b7f19095948a923f80afdeb22ec2"
[[package]]
name = "rust_decimal"
version = "1.19.0"
@ -1897,8 +1963,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c2d4912d369fb95a351c221475657970678d344d70c1a788223f6e74d1e3732"
dependencies = [
"arrayvec 0.7.2",
"num-traits",
"serde",
"num-traits 0.2.14",
"serde 1.0.132",
]
[[package]]
@ -1958,7 +2024,7 @@ dependencies = [
"sea-orm-macros",
"sea-query",
"sea-strum",
"serde",
"serde 1.0.132",
"serde_json",
"sqlx",
"tracing",
@ -2050,6 +2116,12 @@ dependencies = [
"libc",
]
[[package]]
name = "serde"
version = "0.8.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9dad3f759919b92c3068c696c15c3d17238234498bbdcc80f2c469606f948ac8"
[[package]]
name = "serde"
version = "1.0.132"
@ -2059,6 +2131,18 @@ dependencies = [
"serde_derive",
]
[[package]]
name = "serde-hjson"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a3a4e0ea8a88553209f6cc6cfe8724ecad22e1acf372793c27d995290fe74f8"
dependencies = [
"lazy_static",
"num-traits 0.1.43",
"regex",
"serde 0.8.23",
]
[[package]]
name = "serde_derive"
version = "1.0.132"
@ -2079,7 +2163,7 @@ dependencies = [
"indexmap",
"itoa 1.0.1",
"ryu",
"serde",
"serde 1.0.132",
]
[[package]]
@ -2088,7 +2172,7 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91a44b0f51aedd237f8f25c831e1f629982a187a5045c08ce4bccccce17b4b0"
dependencies = [
"serde",
"serde 1.0.132",
"serde_piecewise_default_derive",
]
@ -2100,7 +2184,7 @@ checksum = "19446953e7b22342c23c79ede938c04b1c12f4eb7513db30cda94193ce30ff2a"
dependencies = [
"proc-macro2 0.4.30",
"quote 0.6.13",
"serde",
"serde 1.0.132",
"syn 0.15.44",
]
@ -2144,6 +2228,15 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"
[[package]]
name = "signal-hook-registry"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0"
dependencies = [
"libc",
]
[[package]]
name = "slab"
version = "0.4.5"
@ -2221,7 +2314,7 @@ dependencies = [
"parking_lot",
"percent-encoding",
"rust_decimal",
"serde",
"serde 1.0.132",
"serde_json",
"sha2",
"smallvec",
@ -2273,6 +2366,12 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "stringprep"
version = "0.1.2"
@ -2471,7 +2570,9 @@ dependencies = [
"memchr",
"mio",
"num_cpus",
"once_cell",
"pin-project-lite",
"signal-hook-registry",
"tokio-macros",
"tracing",
"winapi",
@ -2487,6 +2588,20 @@ dependencies = [
"futures 0.1.31",
]
[[package]]
name = "tokio-graceful-shutdown"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d08ebea7dc6b22273290d8ece2ca448f979f836e38ba629b650595c64204b4f2"
dependencies = [
"anyhow",
"async-recursion",
"futures 0.3.19",
"log",
"tokio",
"tokio-util",
]
[[package]]
name = "tokio-io-timeout"
version = "1.1.1"
@ -2549,7 +2664,7 @@ version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa"
dependencies = [
"serde",
"serde 1.0.132",
]
[[package]]
@ -2711,7 +2826,7 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb65ea441fbb84f9f6748fd496cf7f63ec9af5bca94dd86456978d055e8eb28b"
dependencies = [
"serde",
"serde 1.0.132",
"tracing-core",
]
@ -2725,7 +2840,7 @@ dependencies = [
"lazy_static",
"matchers",
"regex",
"serde",
"serde 1.0.132",
"serde_json",
"sharded-slab",
"smallvec",
@ -2833,7 +2948,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
dependencies = [
"getrandom",
"serde",
"serde 1.0.132",
]
[[package]]
@ -2992,3 +3107,12 @@ name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "yaml-rust"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [
"linked-hash-map",
]

@ -17,7 +17,9 @@ typemap_rev = "^0.1.5"
futures = "^0.3.19"
itertools = "^0.10.3"
glob = "^0.3.0"
tracing = "0.1.29"
tracing = "^0.1.29"
data-encoding = "^2.3.2"
tokio-graceful-shutdown = "^0.4.3"
[dependencies.thumbnailer]
version = "^0.2.4"
@ -36,9 +38,13 @@ features = ["migrate"]
version = "^1.15.0"
features = ["fs", "io-util", "io-std"]
[dependencies.config]
version = "^0.11.0"
features = ["toml"]
[dependencies.mediarepo-api]
git = "https://github.com/Trivernis/mediarepo-api.git"
rev = "0c897acfd959c776fc10bd8fabdd2eb22b437be3"
rev = "9fd25e4696cdd68886fa98579aef4fa2ca561dc0"
features = ["bromine"]
[features]

@ -0,0 +1,65 @@
use crate::error::RepoResult;
use multihash::{Code, MultihashDigest};
/// Creates a new content descriptor for the given file
pub fn create_content_descriptor(bytes: &[u8]) -> Vec<u8> {
Code::Sha2_256.digest(bytes).to_bytes()
}
/// Encodes a content descriptor while respecting the version
pub fn encode_content_descriptor(descriptor: &[u8]) -> String {
if is_v1_content_descriptor(descriptor) {
encode_content_descriptor_v1(descriptor)
} else {
encode_content_descriptor_v2(descriptor)
}
}
/// Encodes a v1 descriptor that is already stored encoded in the database (it is simply interpreted as a string)
pub fn encode_content_descriptor_v1(descriptor: &[u8]) -> String {
String::from_utf8_lossy(descriptor).to_string()
}
/// Encodes the content descriptor as base32 lowercase
pub fn encode_content_descriptor_v2(descriptor: &[u8]) -> String {
data_encoding::BASE32_DNSSEC.encode(descriptor)
}
/// Decodes a content descriptor
pub fn decode_content_descriptor<S: AsRef<str>>(descriptor: S) -> RepoResult<Vec<u8>> {
// check for v1 descriptors, which have a fixed length of 56 and start with the prefix of the base and hash
if is_v1_content_descriptor_string(descriptor.as_ref()) {
decode_content_descriptor_v1(descriptor)
} else {
decode_content_descriptor_v2(descriptor)
}
}
/// Decodes the first version of content descriptors (multibase)
pub fn decode_content_descriptor_v1<S: AsRef<str>>(descriptor: S) -> RepoResult<Vec<u8>> {
Ok(descriptor.as_ref().as_bytes().to_vec())
}
/// Decodes the second version of content descriptors (faster fixed base32)
pub fn decode_content_descriptor_v2<S: AsRef<str>>(descriptor: S) -> RepoResult<Vec<u8>> {
let data = data_encoding::BASE32_DNSSEC.decode(descriptor.as_ref().as_bytes())?;
Ok(data)
}
/// Decodes the data stored in the v1 content descriptor into the v2 format
pub fn convert_v1_descriptor_to_v2(descriptor_v1: &[u8]) -> RepoResult<Vec<u8>> {
let (_, data) = multibase::decode(encode_content_descriptor_v1(descriptor_v1))?;
Ok(data)
}
/// Checks if a binary descriptor is v1
pub fn is_v1_content_descriptor(descriptor: &[u8]) -> bool {
descriptor.len() == 56 && descriptor.starts_with(b"bciq")
}
/// Checks if a descriptor string is a v1 descriptor
pub fn is_v1_content_descriptor_string<S: AsRef<str>>(descriptor: S) -> bool {
descriptor.as_ref().len() == 56 && descriptor.as_ref().starts_with("bciq")
}
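
Not part of the diff: a minimal usage sketch of the helpers above, showing how creating, encoding and decoding a descriptor compose. A freshly created descriptor is a 34-byte SHA2-256 multihash, so it never trips the v1 heuristics and round-trips through the v2 (BASE32_DNSSEC) path.

// Illustrative only; assumes the functions defined above are in scope.
fn descriptor_roundtrip(bytes: &[u8]) -> RepoResult<()> {
    let descriptor = create_content_descriptor(bytes); // SHA2-256 multihash bytes
    let encoded = encode_content_descriptor(&descriptor); // v2: BASE32_DNSSEC string
    let decoded = decode_content_descriptor(&encoded)?; // back to the raw bytes
    assert_eq!(descriptor, decoded);
    Ok(())
}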

@ -1,5 +1,5 @@
use sea_orm::DbErr;
use std::fmt::{Display, Formatter};
use std::fmt::{Debug, Formatter};
use thiserror::Error;
pub type RepoResult<T> = Result<T, RepoError>;
@ -31,8 +31,14 @@ pub enum RepoError {
#[error(transparent)]
Thumbnailer(#[from] thumbnailer::error::ThumbError),
#[error("No free tcp port available")]
#[error("no free tcp port available")]
PortUnavailable,
#[error("failed to decode data {0}")]
Decode(#[from] data_encoding::DecodeError),
#[error("Failed to read repo.toml configuration file {0}")]
Config(#[from] config::ConfigError),
}
#[derive(Error, Debug)]
@ -56,9 +62,9 @@ pub enum RepoDatabaseError {
#[derive(Debug)]
pub struct StringError(String);
impl Display for StringError {
impl std::fmt::Display for StringError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
std::fmt::Display::fmt(&self.0, f)
}
}

@ -0,0 +1,35 @@
use std::io::Result;
use std::path::{Path, PathBuf};
use tokio::fs::{File, OpenOptions};
/// A file that only exists while being owned.
/// Will automatically be deleted on Drop
pub struct DropFile {
path: PathBuf,
}
impl DropFile {
pub async fn new<P: AsRef<Path>>(path: P) -> Result<(File, Self)> {
let file = OpenOptions::new()
.write(true)
.read(true)
.create(true)
.open(path.as_ref())
.await?;
Ok((file, Self::from_path(path)))
}
pub fn from_path<P: AsRef<Path>>(path: P) -> Self {
Self {
path: path.as_ref().to_path_buf(),
}
}
}
impl Drop for DropFile {
fn drop(&mut self) {
if let Err(e) = std::fs::remove_file(&self.path) {
tracing::error!("failed to remove drop file '{}'", e);
}
}
}
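
Not from the diff: a hedged usage sketch of DropFile; the path is made up. As long as the returned guard stays alive the file exists, and dropping the guard removes it again.

async fn write_marker() -> std::io::Result<()> {
    use tokio::io::AsyncWriteExt;

    // Hypothetical path, purely for illustration.
    let (mut file, _guard) = DropFile::new("/tmp/mediarepo.marker").await?;
    file.write_all(b"running").await?;
    file.flush().await?;
    Ok(())
    // `_guard` is dropped here and the marker file is deleted again.
}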

@ -1,14 +1,11 @@
use crate::content_descriptor::{create_content_descriptor, encode_content_descriptor};
use crate::error::RepoResult;
use crate::utils::get_folder_size;
use multibase::Base;
use multihash::{Code, MultihashDigest};
use std::path::PathBuf;
use tokio::fs;
use tokio::fs::{File, OpenOptions};
use tokio::io::{AsyncRead, AsyncReadExt, BufReader};
static STORE_BASE: Base = Base::Base32Lower;
#[derive(Clone, Debug)]
pub struct FileHashStore {
path: PathBuf,
@ -24,35 +21,32 @@ impl FileHashStore {
&self,
mut reader: R,
extension: Option<&str>,
) -> RepoResult<String> {
) -> RepoResult<Vec<u8>> {
let mut buf = Vec::new();
reader.read_to_end(&mut buf).await?;
let hash: Vec<u8> = Code::Sha2_256.digest(&buf).to_bytes();
let hash: String = multibase::encode(STORE_BASE, &hash);
let folder_path = self.hash_to_folder_path(&hash);
let descriptor = create_content_descriptor(&buf);
let file_path = self.descriptor_to_file_path(&descriptor);
let folder_path = file_path.parent().unwrap();
if !folder_path.exists() {
fs::create_dir(folder_path).await?;
}
let mut file_path = self.hash_to_file_path(&hash);
let mut file_path = self.descriptor_to_file_path(&descriptor);
if let Some(extension) = extension {
file_path.set_extension(extension);
}
fs::write(file_path, buf).await?;
Ok(hash)
Ok(descriptor)
}
/// Returns the file extension and a reader for the file by content descriptor
pub async fn get_file(
&self,
mut hash: String,
descriptor: &[u8],
) -> RepoResult<(Option<String>, BufReader<File>)> {
let (base, data) = multibase::decode(&hash)?;
if base != STORE_BASE {
hash = multibase::encode(STORE_BASE, data);
}
let file_path = self.hash_to_file_path(&hash);
let file_path = self.descriptor_to_file_path(descriptor);
tracing::debug!("Opening file {:?}", file_path);
let extension = file_path
.extension()
.and_then(|s| s.to_str())
@ -63,23 +57,56 @@ impl FileHashStore {
Ok((extension, reader))
}
/// Renames a file
pub async fn rename_file(
&self,
src_descriptor: &[u8],
dst_descriptor: &[u8],
) -> RepoResult<()> {
let src_path = self.descriptor_to_file_path(src_descriptor);
if !src_path.exists() {
tracing::warn!("file {:?} doesn't exist", src_path);
return Ok(());
}
let dst_path = self.descriptor_to_file_path(dst_descriptor);
let dst_parent = dst_path.parent().unwrap();
if !dst_parent.exists() {
fs::create_dir(dst_parent).await?;
}
fs::rename(src_path, dst_path).await?;
Ok(())
}
pub async fn delete_file(&self, descriptor: &[u8]) -> RepoResult<()> {
let path = self.descriptor_to_file_path(descriptor);
if !path.exists() {
tracing::warn!("file {:?} doesn't exist", path);
return Ok(());
}
fs::remove_file(path).await?;
Ok(())
}
/// Scans the size of the folder
#[inline]
pub async fn get_size(&self) -> RepoResult<u64> {
get_folder_size(self.path.to_owned()).await
}
fn hash_to_file_path(&self, hash: &str) -> PathBuf {
let mut path = self.hash_to_folder_path(hash);
path.push(hash);
fn descriptor_to_file_path(&self, descriptor: &[u8]) -> PathBuf {
let descriptor_string = encode_content_descriptor(descriptor);
let mut path = self.descriptor_string_to_folder_path(&descriptor_string);
path.push(descriptor_string);
path
}
fn hash_to_folder_path(&self, hash: &str) -> PathBuf {
assert!(hash.len() >= 2);
fn descriptor_string_to_folder_path(&self, descriptor: &str) -> PathBuf {
assert!(descriptor.len() >= 3);
let mut path = self.path.clone();
path.push(&hash[hash.len() - 3..hash.len() - 1]);
path.push(&descriptor[descriptor.len() - 3..descriptor.len() - 1]);
path
}
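
Not part of the diff, just to make the bucket layout explicit: the folder name is the two characters before the last character of the encoded descriptor, so a descriptor string ending in "...xyz" lands in "<store root>/xy/...xyz" (plus an optional file extension).

// Illustrative sketch of the bucket logic above (descriptor must be >= 3 chars).
fn bucket_name(descriptor_string: &str) -> &str {
    let len = descriptor_string.len();
    &descriptor_string[len - 3..len - 1]
}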

@ -1,2 +1,3 @@
pub mod drop_file;
pub mod file_hash_store;
pub mod thumbnail_store;

@ -1,5 +1,6 @@
use crate::error::RepoResult;
use crate::utils::get_folder_size;
use std::fmt::Debug;
use std::io::Result;
use std::path::PathBuf;
use tokio::fs;
@ -77,6 +78,37 @@ impl ThumbnailStore {
Ok(entries)
}
/// Renames a thumbnail parent
#[tracing::instrument(level = "debug")]
pub async fn rename_parent<S1: AsRef<str> + Debug, S2: AsRef<str> + Debug>(
&self,
src: S1,
dst: S2,
) -> Result<()> {
let src_dir = self.path.join(src.as_ref());
if !src_dir.exists() {
tracing::warn!("directory {:?} doesn't exist", src_dir);
return Ok(());
}
let dst_dir = self.path.join(dst.as_ref());
fs::rename(src_dir, dst_dir).await
}
/// Deletes all thumbnails of a parent
#[tracing::instrument(level = "debug")]
pub async fn delete_parent<S: AsRef<str> + Debug>(&self, parent: S) -> Result<()> {
let path = self.path.join(parent.as_ref());
if !path.exists() {
tracing::warn!("directory {:?} doesn't exist", path);
return Ok(());
}
fs::remove_dir_all(&path).await?;
Ok(())
}
/// Returns the size of the folder
#[tracing::instrument(level = "debug")]
pub async fn get_size(&self) -> RepoResult<u64> {

@ -3,7 +3,9 @@ pub use itertools;
pub use mediarepo_api;
pub use mediarepo_api::bromine;
pub use thumbnailer;
pub use tokio_graceful_shutdown;
pub mod content_descriptor;
pub mod context;
pub mod error;
pub mod fs;

@ -1,38 +0,0 @@
use crate::error::RepoResult;
use serde::{Deserialize, Serialize};
use std::net::IpAddr;
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Settings {
pub listen_address: IpAddr,
pub port_range: (u16, u16),
pub database_path: String,
pub default_file_store: String,
pub thumbnail_store: String,
}
impl Default for Settings {
fn default() -> Self {
Self {
listen_address: IpAddr::from([127, 0, 0, 1]),
port_range: (3400, 3500),
database_path: "./db/repo.db".to_string(),
default_file_store: "Main".to_string(),
thumbnail_store: "./thumbnails".to_string(),
}
}
}
impl Settings {
/// Parses settings from a string
pub fn from_toml_string(s: &str) -> RepoResult<Self> {
let settings = toml::from_str(s)?;
Ok(settings)
}
/// Converts the settings into a toml string
pub fn to_toml_string(&self) -> RepoResult<String> {
let string = toml::to_string(&self)?;
Ok(string)
}
}

@ -0,0 +1,42 @@
use serde::{Deserialize, Serialize};
use tracing::Level;
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct LoggingSettings {
pub level: LogLevel,
pub trace_sql: bool,
pub trace_api_calls: bool,
}
impl Default for LoggingSettings {
fn default() -> Self {
Self {
level: LogLevel::Info,
trace_sql: false,
trace_api_calls: false,
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum LogLevel {
Off,
Error,
Warn,
Info,
Debug,
Trace,
}
impl Into<Option<Level>> for LogLevel {
fn into(self) -> Option<Level> {
match self {
LogLevel::Off => None,
LogLevel::Error => Some(Level::ERROR),
LogLevel::Warn => Some(Level::WARN),
LogLevel::Info => Some(Level::INFO),
LogLevel::Debug => Some(Level::DEBUG),
LogLevel::Trace => Some(Level::TRACE),
}
}
}

@ -0,0 +1,76 @@
mod logging;
mod paths;
mod server;
pub mod v1;
use crate::error::RepoResult;
use crate::settings::v1::SettingsV1;
use config::{Config, FileFormat};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::PathBuf;
pub use logging::*;
pub use paths::*;
pub use server::*;
#[derive(Clone, Debug, Deserialize, Serialize, Default)]
pub struct Settings {
pub server: ServerSettings,
pub paths: PathSettings,
pub logging: LoggingSettings,
}
impl Settings {
pub fn read(root: &PathBuf) -> RepoResult<Self> {
let mut settings = Config::default();
settings
.merge(config::File::from_str(
&*Settings::default().to_toml_string()?,
FileFormat::Toml,
))?
.merge(config::File::from(root.join("repo")))?
.merge(config::Environment::with_prefix("MEDIAREPO").separator("."))?;
tracing::debug!("Settings are: {:#?}", settings);
Ok(settings.try_into::<Settings>()?)
}
/// Converts the old v1 settings format into the current settings
pub fn from_v1(settings_v1: SettingsV1) -> RepoResult<Self> {
let mut settings_main = Settings::default();
settings_main.server.tcp.enabled = true;
settings_main.server.tcp.port = PortSetting::Range(settings_v1.port_range);
settings_main.server.tcp.listen_address = settings_v1.listen_address;
settings_main.paths.thumbnail_directory = settings_v1.thumbnail_store.into();
settings_main.paths.database_directory = PathBuf::from(settings_v1.database_path)
.parent()
.map(|p| p.to_string_lossy().to_string())
.unwrap_or_else(|| String::from("./"));
let mut settings = Config::default();
settings
.merge(config::File::from_str(
&*settings_main.to_toml_string()?,
FileFormat::Toml,
))?
.merge(config::Environment::with_prefix("MEDIAREPO"))?;
tracing::debug!("Settings are: {:#?}", settings);
Ok(settings.try_into::<Settings>()?)
}
/// Converts the settings into a toml string
pub fn to_toml_string(&self) -> RepoResult<String> {
let string = toml::to_string(&self)?;
Ok(string)
}
pub fn save(&self, root: &PathBuf) -> RepoResult<()> {
let string = toml::to_string_pretty(&self)?;
fs::write(root.join("repo.toml"), string.into_bytes())?;
Ok(())
}
}
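
Not taken from the diff: a sketch of how reading, migrating and saving could be wired together at startup. The v1 file name settings.toml is an assumption here; the real call site lives in the daemon crate.

// Hypothetical helper, assuming RepoError converts from std::io::Error.
fn load_settings(root: &PathBuf) -> RepoResult<Settings> {
    let v1_path = root.join("settings.toml"); // assumed legacy file name
    if v1_path.exists() {
        // convert the old flat settings file and persist it as repo.toml
        let old = SettingsV1::from_toml_string(&fs::read_to_string(v1_path)?)?;
        let settings = Settings::from_v1(old)?;
        settings.save(root)?;
        Ok(settings)
    } else {
        Settings::read(root)
    }
}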

@ -0,0 +1,46 @@
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PathSettings {
pub(crate) database_directory: String,
pub(crate) files_directory: String,
pub(crate) thumbnail_directory: String,
}
impl Default for PathSettings {
fn default() -> Self {
Self {
database_directory: String::from("db"),
files_directory: String::from("files"),
thumbnail_directory: String::from("thumbnails"),
}
}
}
impl PathSettings {
#[inline]
pub fn database_dir(&self, root: &PathBuf) -> PathBuf {
root.join(&self.database_directory)
}
#[inline]
pub fn files_dir(&self, root: &PathBuf) -> PathBuf {
root.join(&self.files_directory)
}
#[inline]
pub fn thumbs_dir(&self, root: &PathBuf) -> PathBuf {
root.join(&self.thumbnail_directory)
}
#[inline]
pub fn db_file_path(&self, root: &PathBuf) -> PathBuf {
self.database_dir(root).join("repo.db")
}
#[inline]
pub fn frontend_state_file_path(&self, root: &PathBuf) -> PathBuf {
self.database_dir(root).join("frontend-state.json")
}
}

@ -0,0 +1,46 @@
use serde::{Deserialize, Serialize};
use std::net::IpAddr;
#[derive(Clone, Debug, Deserialize, Serialize, Default)]
pub struct ServerSettings {
pub tcp: TcpServerSettings,
#[cfg(unix)]
pub unix_socket: UnixSocketServerSettings,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct TcpServerSettings {
pub enabled: bool,
pub listen_address: IpAddr,
pub port: PortSetting,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(untagged)]
pub enum PortSetting {
Fixed(u16),
Range((u16, u16)),
}
impl Default for TcpServerSettings {
fn default() -> Self {
Self {
enabled: cfg!(windows),
listen_address: IpAddr::from([127, 0, 0, 1]),
port: PortSetting::Range((13400, 13500)),
}
}
}
#[cfg(unix)]
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct UnixSocketServerSettings {
pub enabled: bool,
}
#[cfg(unix)]
impl Default for UnixSocketServerSettings {
fn default() -> Self {
Self { enabled: true }
}
}
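
Not in the diff: because PortSetting is #[serde(untagged)], a plain integer and a two-element array both deserialize. A hedged sketch with made-up values:

fn parse_port_settings() -> Result<(), toml::de::Error> {
    // a single port becomes PortSetting::Fixed(..)
    let fixed: TcpServerSettings =
        toml::from_str("enabled = true\nlisten_address = \"127.0.0.1\"\nport = 13400")?;
    // a two-element array becomes PortSetting::Range((start, end))
    let ranged: TcpServerSettings =
        toml::from_str("enabled = true\nlisten_address = \"127.0.0.1\"\nport = [13400, 13500]")?;
    assert!(matches!(fixed.port, PortSetting::Fixed(13400)));
    assert!(matches!(ranged.port, PortSetting::Range((13400, 13500))));
    Ok(())
}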

@ -0,0 +1,20 @@
use crate::error::RepoResult;
use serde::{Deserialize, Serialize};
use std::net::IpAddr;
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct SettingsV1 {
pub listen_address: IpAddr,
pub port_range: (u16, u16),
pub database_path: String,
pub default_file_store: String,
pub thumbnail_store: String,
}
impl SettingsV1 {
/// Parses settings from a string
pub fn from_toml_string(s: &str) -> RepoResult<Self> {
let settings = toml::from_str(s)?;
Ok(settings)
}
}

@ -1,5 +1,8 @@
use crate::settings::Settings;
use mediarepo_api::types::repo::SizeType;
use std::collections::HashMap;
use std::path::PathBuf;
use tokio_graceful_shutdown::SubsystemHandle;
use typemap_rev::TypeMapKey;
pub struct SettingsKey;
@ -13,3 +16,15 @@ pub struct RepoPathKey;
impl TypeMapKey for RepoPathKey {
type Value = PathBuf;
}
pub struct SizeMetadataKey;
impl TypeMapKey for SizeMetadataKey {
type Value = HashMap<SizeType, u64>;
}
pub struct SubsystemKey;
impl TypeMapKey for SubsystemKey {
type Value = SubsystemHandle;
}
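
Not part of the diff: a rough sketch of how these keys are meant to be used with a typemap_rev::TypeMap; the values are placeholders.

fn register_defaults(map: &mut typemap_rev::TypeMap, handle: SubsystemHandle) {
    // stored once at startup so request handlers can look the values up later
    map.insert::<SubsystemKey>(handle);
    map.insert::<SizeMetadataKey>(HashMap::new());
    let _cached_sizes = map.get::<SizeMetadataKey>();
}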

@ -0,0 +1,107 @@
-- Add migration script here
PRAGMA foreign_keys= off;
-- create backup files table
ALTER TABLE files
RENAME TO _files_old;
-- create backup hashes table
ALTER TABLE hashes
RENAME TO _hashes_old;
-- create backup hash_tag_mappings table
ALTER TABLE hash_tag_mappings
RENAME TO _hash_tag_mappings_old;
-- create backup hash_source_mappings table
ALTER TABLE hash_source_mappings
RENAME TO _hash_source_mappings_old;
-- create content id table
CREATE TABLE content_descriptors
(
id INTEGER PRIMARY KEY AUTOINCREMENT,
descriptor BLOB NOT NULL
);
CREATE UNIQUE INDEX content_descriptor_values ON content_descriptors (descriptor);
-- create content-id tag mappings table
CREATE TABLE cd_tag_mappings
(
cd_id INTEGER NOT NULL REFERENCES content_descriptors (id),
tag_id INTEGER NOT NULL REFERENCES tags (id),
PRIMARY KEY (cd_id, tag_id)
);
CREATE UNIQUE INDEX content_descriptor_tag_mapping_unique ON cd_tag_mappings (cd_id, tag_id);
CREATE INDEX content_descriptor_tag_mapping_tag ON cd_tag_mappings (tag_id);
-- create content-id source mappings table
CREATE TABLE cd_source_mappings
(
cd_id INTEGER NOT NULL REFERENCES content_descriptors (id),
source_id INTEGER NOT NULL REFERENCES sources (id),
PRIMARY KEY (cd_id, source_id)
);
CREATE UNIQUE INDEX content_descriptor_source_mapping_unique ON cd_source_mappings (cd_id, source_id);
-- create new files table
CREATE TABLE files
(
id INTEGER PRIMARY KEY AUTOINCREMENT,
status INTEGER NOT NULL DEFAULT 10,
storage_id INTEGER NOT NULL REFERENCES storage_locations (id),
cd_id INTEGER NOT NULL REFERENCES content_descriptors (id),
mime_type VARCHAR(128) NOT NULL DEFAULT 'application/octet-stream'
);
CREATE INDEX files_contend_descriptor ON files (cd_id);
-- create metadata table
CREATE TABLE file_metadata
(
file_id INTEGER PRIMARY KEY REFERENCES files (id),
size INTEGER NOT NULL,
name VARCHAR(128),
comment VARCHAR(1024),
import_time DATETIME NOT NULL,
creation_time DATETIME NOT NULL,
change_time DATETIME NOT NULL
);
CREATE UNIQUE INDEX file_metadata_file_id_unique ON file_metadata (file_id);
-- add content identifiers from hashes table
INSERT INTO content_descriptors
SELECT id, value
FROM _hashes_old;
-- add files from files table
INSERT INTO files (id, storage_id, cd_id, mime_type)
SELECT id, storage_id, hash_id AS content_id, mime_type
FROM _files_old;
-- add metadata from files table
INSERT INTO file_metadata
SELECT id AS file_id, size, name, comment, import_time, creation_time, change_time
FROM _files_old;
-- add content tag mappings
INSERT INTO cd_tag_mappings
SELECT hash_id AS content_id, tag_id
FROM _hash_tag_mappings_old;
-- add content id source mappings
INSERT INTO cd_source_mappings
SELECT hash_id AS content_id, source_id
FROM _hash_source_mappings_old;
-- drop all old tables
DROP TABLE _hash_source_mappings_old;
DROP TABLE _hash_tag_mappings_old;
DROP TABLE _files_old;
DROP TABLE _hashes_old;
PRAGMA foreign_keys= on;

@ -0,0 +1,50 @@
-- Add migration script here
PRAGMA foreign_keys= off;
-- rename old files table
ALTER TABLE files
RENAME TO _files_old;
-- rename metadata value (because of foreign key constraints)
ALTER TABLE file_metadata
RENAME TO _file_metadata_old;
-- create new files table
CREATE TABLE files
(
id INTEGER PRIMARY KEY AUTOINCREMENT,
status INTEGER NOT NULL DEFAULT 10,
cd_id INTEGER NOT NULL REFERENCES content_descriptors (id),
mime_type VARCHAR(128) NOT NULL DEFAULT 'application/octet-stream'
);
-- add data from files table
INSERT INTO files
SELECT id, status, cd_id, mime_type
FROM _files_old;
-- create metadata table
CREATE TABLE file_metadata
(
file_id INTEGER PRIMARY KEY REFERENCES files (id),
size INTEGER NOT NULL,
name VARCHAR(128),
comment VARCHAR(1024),
import_time DATETIME NOT NULL,
creation_time DATETIME NOT NULL,
change_time DATETIME NOT NULL
);
-- add back the old values
INSERT INTO file_metadata
SELECT *
FROM _file_metadata_old;
-- drop old tables
DROP TABLE _file_metadata_old;
DROP TABLE _files_old;
DROP TABLE storage_locations;
-- create indices on new tables
CREATE UNIQUE INDEX file_metadata_file_id_unique ON file_metadata (file_id);
CREATE INDEX files_content_descriptor ON files (cd_id);
PRAGMA foreign_keys= on;

@ -1,11 +1,11 @@
use sea_orm::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "hashes")]
#[sea_orm(table_name = "content_descriptors")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub value: String,
pub descriptor: Vec<u8>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@ -13,27 +13,35 @@ pub enum Relation {}
impl Related<super::file::Entity> for Entity {
fn to() -> RelationDef {
super::file::Relation::Hash.def().rev()
super::file::Relation::ContentDescriptorId.def().rev()
}
}
impl Related<super::tag::Entity> for Entity {
fn to() -> RelationDef {
super::hash_tag::Relation::Tag.def()
super::content_descriptor_tag::Relation::Tag.def()
}
fn via() -> Option<RelationDef> {
Some(super::hash_tag::Relation::Hash.def().rev())
Some(
super::content_descriptor_tag::Relation::ContentDescriptorId
.def()
.rev(),
)
}
}
impl Related<super::source::Entity> for Entity {
fn to() -> RelationDef {
super::hash_source::Relation::Source.def()
super::content_descriptor_source::Relation::Source.def()
}
fn via() -> Option<RelationDef> {
Some(super::hash_source::Relation::Hash.def().rev())
Some(
super::content_descriptor_source::Relation::ContentDescriptorId
.def()
.rev(),
)
}
}

@ -1,10 +1,10 @@
use sea_orm::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "hash_source_mappings")]
#[sea_orm(table_name = "cd_source_mappings")]
pub struct Model {
#[sea_orm(primary_key)]
pub hash_id: i64,
pub cd_id: i64,
#[sea_orm(primary_key)]
pub source_id: i64,
}
@ -12,11 +12,11 @@ pub struct Model {
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::hash::Entity",
from = "Column::HashId",
to = "super::hash::Column::Id"
belongs_to = "super::content_descriptor::Entity",
from = "Column::CdId",
to = "super::content_descriptor::Column::Id"
)]
Hash,
ContentDescriptorId,
#[sea_orm(
belongs_to = "super::source::Entity",
from = "Column::SourceId",
@ -25,9 +25,9 @@ pub enum Relation {
Source,
}
impl Related<super::hash::Entity> for Entity {
impl Related<super::content_descriptor::Entity> for Entity {
fn to() -> RelationDef {
Relation::Hash.def()
Relation::ContentDescriptorId.def()
}
}

@ -1,10 +1,10 @@
use sea_orm::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "hash_tag_mappings")]
#[sea_orm(table_name = "cd_tag_mappings")]
pub struct Model {
#[sea_orm(primary_key)]
pub hash_id: i64,
pub cd_id: i64,
#[sea_orm(primary_key)]
pub tag_id: i64,
}
@ -12,11 +12,11 @@ pub struct Model {
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::hash::Entity",
from = "Column::HashId",
to = "super::hash::Column::Id"
belongs_to = "super::content_descriptor::Entity",
from = "Column::CdId",
to = "super::content_descriptor::Column::Id"
)]
Hash,
ContentDescriptorId,
#[sea_orm(
belongs_to = "super::tag::Entity",
from = "Column::TagId",
@ -25,9 +25,9 @@ pub enum Relation {
Tag,
}
impl Related<super::hash::Entity> for Entity {
impl Related<super::content_descriptor::Entity> for Entity {
fn to() -> RelationDef {
Relation::Hash.def()
Relation::ContentDescriptorId.def()
}
}

@ -1,4 +1,3 @@
use chrono::NaiveDateTime;
use sea_orm::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
@ -6,44 +5,30 @@ use sea_orm::prelude::*;
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub file_type: u32,
pub name: Option<String>,
pub comment: Option<String>,
pub mime_type: Option<String>,
pub size: Option<i64>,
pub storage_id: i64,
pub hash_id: i64,
pub import_time: NaiveDateTime,
pub creation_time: NaiveDateTime,
pub change_time: NaiveDateTime,
pub status: i32,
pub mime_type: String,
pub cd_id: i64,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::hash::Entity",
from = "Column::HashId",
to = "super::hash::Column::Id"
belongs_to = "super::content_descriptor::Entity",
from = "Column::CdId",
to = "super::content_descriptor::Column::Id"
)]
Hash,
#[sea_orm(
belongs_to = "super::storage::Entity",
from = "Column::StorageId",
to = "super::storage::Column::Id"
)]
Storage,
ContentDescriptorId,
}
impl Related<super::hash::Entity> for Entity {
impl Related<super::content_descriptor::Entity> for Entity {
fn to() -> RelationDef {
Relation::Hash.def()
Relation::ContentDescriptorId.def()
}
}
impl Related<super::storage::Entity> for Entity {
impl Related<super::file_metadata::Entity> for Entity {
fn to() -> RelationDef {
Relation::Storage.def()
super::file_metadata::Relation::File.def().rev()
}
}

@ -0,0 +1,32 @@
use chrono::NaiveDateTime;
use sea_orm::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "file_metadata")]
pub struct Model {
#[sea_orm(primary_key)]
pub file_id: i64,
pub name: Option<String>,
pub comment: Option<String>,
pub size: i64,
pub import_time: NaiveDateTime,
pub creation_time: NaiveDateTime,
pub change_time: NaiveDateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::file::Entity",
from = "Column::FileId",
to = "super::file::Column::Id"
)]
File,
}
impl Related<super::file::Entity> for Entity {
fn to() -> RelationDef {
Relation::File.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

@ -1,8 +1,8 @@
pub mod content_descriptor;
pub mod content_descriptor_source;
pub mod content_descriptor_tag;
pub mod file;
pub mod hash;
pub mod hash_source;
pub mod hash_tag;
pub mod file_metadata;
pub mod namespace;
pub mod source;
pub mod storage;
pub mod tag;

@ -11,13 +11,17 @@ pub struct Model {
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl Related<super::hash::Entity> for Entity {
impl Related<super::content_descriptor::Entity> for Entity {
fn to() -> RelationDef {
super::hash_source::Relation::Hash.def()
super::content_descriptor_source::Relation::ContentDescriptorId.def()
}
fn via() -> Option<RelationDef> {
Some(super::hash_source::Relation::Source.def().rev())
Some(
super::content_descriptor_source::Relation::Source
.def()
.rev(),
)
}
}

@ -1,24 +0,0 @@
use sea_orm::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "storage_locations")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub name: String,
pub path: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::file::Entity")]
File,
}
impl Related<super::file::Entity> for Entity {
fn to() -> RelationDef {
Relation::File.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

@ -19,13 +19,13 @@ pub enum Relation {
Namespace,
}
impl Related<super::hash::Entity> for Entity {
impl Related<super::content_descriptor::Entity> for Entity {
fn to() -> RelationDef {
super::hash_tag::Relation::Hash.def()
super::content_descriptor_tag::Relation::ContentDescriptorId.def()
}
fn via() -> Option<RelationDef> {
Some(super::hash_tag::Relation::Tag.def().rev())
Some(super::content_descriptor_tag::Relation::Tag.def().rev())
}
}

@ -1,6 +1,7 @@
use mediarepo_core::error::RepoDatabaseResult;
use sea_orm::{Database, DatabaseConnection};
use sea_orm::{ConnectOptions, Database, DatabaseConnection};
use sqlx::migrate::MigrateDatabase;
use std::time::Duration;
pub mod entities;
pub mod queries;
@ -8,7 +9,11 @@ pub mod queries;
/// Connects to the database, runs migrations and returns the database connection
pub async fn get_database<S: AsRef<str>>(uri: S) -> RepoDatabaseResult<DatabaseConnection> {
migrate(uri.as_ref()).await?;
let conn = Database::connect(uri.as_ref()).await?;
let mut opt = ConnectOptions::new(uri.as_ref().to_string());
opt.connect_timeout(Duration::from_secs(10))
.idle_timeout(Duration::from_secs(10));
let conn = Database::connect(opt).await?;
Ok(conn)
}

@ -7,7 +7,7 @@ use mediarepo_core::error::{RepoError, RepoResult};
#[derive(Debug, FromQueryResult)]
pub struct Counts {
pub file_count: i64,
pub hash_count: i64,
pub cd_count: i64,
pub tag_count: i64,
pub namespace_count: i64,
pub source_count: i64,
@ -20,11 +20,11 @@ pub async fn get_all_counts(db: &DatabaseConnection) -> RepoResult<Counts> {
r#"
SELECT *
FROM (SELECT COUNT(*) AS file_count FROM files),
(SELECT COUNT(*) AS hash_count FROM hashes),
(SELECT COUNT(*) AS cd_count FROM content_descriptors),
(SELECT COUNT(*) AS tag_count FROM tags),
(SELECT COUNT(*) AS namespace_count FROM namespaces),
(SELECT COUNT(*) AS source_count FROM sources),
(SELECT COUNT(*) AS mapping_count FROM hash_tag_mappings)
(SELECT COUNT(*) AS mapping_count FROM cd_tag_mappings)
"#,
vec![],
))

@ -7,27 +7,27 @@ use std::fmt::Display;
use std::iter::FromIterator;
#[derive(Debug, FromQueryResult)]
struct HashNamespaceTags {
hash_id: i64,
struct CIDNamespaceTag {
cd_id: i64,
namespace: String,
tag: String,
}
#[tracing::instrument(level = "debug", skip_all)]
pub async fn get_hashes_with_namespaced_tags(
pub async fn get_cids_with_namespaced_tags(
db: &DatabaseConnection,
hash_ids: Vec<i64>,
) -> RepoResult<HashMap<i64, HashMap<String, Vec<String>>>> {
let hash_namespace_tags: Vec<HashNamespaceTags> =
HashNamespaceTags::find_by_statement(Statement::from_sql_and_values(
let hash_namespace_tags: Vec<CIDNamespaceTag> =
CIDNamespaceTag::find_by_statement(Statement::from_sql_and_values(
DbBackend::Sqlite,
format!(
r#"SELECT htm.hash_id, n.name as namespace, t.name as tag
FROM hash_tag_mappings htm
INNER JOIN tags t on htm.tag_id = t.id
r#"SELECT ctm.cd_id, n.name as namespace, t.name as tag
FROM cd_tag_mappings ctm
INNER JOIN tags t on ctm.tag_id = t.id
JOIN namespaces n on t.namespace_id = n.id
WHERE t.namespace_id IS NOT NULL
AND htm.hash_id IN ({}) ORDER BY t.namespace_id;"#,
AND ctm.cd_id IN ({}) ORDER BY t.namespace_id;"#,
vec_to_query_list(hash_ids)
)
.as_str(),
@ -35,49 +35,49 @@ pub async fn get_hashes_with_namespaced_tags(
))
.all(db)
.await?;
let mut hash_namespaces: HashMap<i64, HashMap<String, Vec<String>>> = HashMap::new();
let mut cd_id_namespaces: HashMap<i64, HashMap<String, Vec<String>>> = HashMap::new();
for hnt in hash_namespace_tags {
if let Some(entry) = hash_namespaces.get_mut(&hnt.hash_id) {
if let Some(entry) = cd_id_namespaces.get_mut(&hnt.cd_id) {
if let Some(nsp_entry) = entry.get_mut(&hnt.namespace) {
nsp_entry.push(hnt.tag);
} else {
entry.insert(hnt.namespace, vec![hnt.tag]);
}
} else {
hash_namespaces.insert(
hnt.hash_id,
cd_id_namespaces.insert(
hnt.cd_id,
HashMap::from_iter(vec![(hnt.namespace, vec![hnt.tag])].into_iter()),
);
}
}
Ok(hash_namespaces)
Ok(cd_id_namespaces)
}
#[derive(Debug, FromQueryResult)]
struct HashTagCount {
hash_id: i64,
struct CIDTagCount {
cd_id: i64,
tag_count: i32,
}
#[tracing::instrument(level = "debug", skip_all)]
pub async fn get_hashes_with_tag_count(
pub async fn get_content_descriptors_with_tag_count(
db: &DatabaseConnection,
hash_ids: Vec<i64>,
cd_ids: Vec<i64>,
) -> RepoResult<HashMap<i64, u32>> {
if hash_ids.is_empty() {
if cd_ids.is_empty() {
return Ok(HashMap::new());
}
let hash_tag_counts: Vec<HashTagCount> =
HashTagCount::find_by_statement(Statement::from_sql_and_values(
let hash_tag_counts: Vec<CIDTagCount> =
CIDTagCount::find_by_statement(Statement::from_sql_and_values(
DbBackend::Sqlite,
format!(
r#"
SELECT htm.hash_id, COUNT(htm.tag_id) AS "tag_count" from hash_tag_mappings htm
WHERE htm.hash_id IN ({})
GROUP BY hash_id
SELECT ctm.cd_id, COUNT(ctm.tag_id) AS "tag_count" from cd_tag_mappings ctm
WHERE ctm.cd_id IN ({})
GROUP BY cd_id
"#,
vec_to_query_list(hash_ids)
vec_to_query_list(cd_ids)
)
.as_str(),
vec![],
@ -87,7 +87,12 @@ pub async fn get_hashes_with_tag_count(
let mappings = hash_tag_counts
.into_iter()
.map(|HashTagCount { hash_id, tag_count }| (hash_id, tag_count as u32))
.map(
|CIDTagCount {
cd_id: hash_id,
tag_count,
}| (hash_id, tag_count as u32),
)
.collect::<HashMap<i64, u32>>();
Ok(mappings)

@ -15,7 +15,6 @@ mime = "^0.3.16"
tracing = "^0.1.29"
async-trait = "^0.1.51"
[dependencies.mediarepo-core]
path = "../mediarepo-core"

@ -1,26 +1,38 @@
use crate::file::File;
use mediarepo_core::content_descriptor::convert_v1_descriptor_to_v2;
use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::file;
use mediarepo_database::entities::hash;
use sea_orm::prelude::*;
use sea_orm::{DatabaseConnection, Set};
use std::fmt::Debug;
pub struct Hash {
pub struct ContentDescriptor {
db: DatabaseConnection,
model: hash::Model,
model: content_descriptor::Model,
}
impl Hash {
impl ContentDescriptor {
#[tracing::instrument(level = "trace")]
pub(crate) fn new(db: DatabaseConnection, model: hash::Model) -> Self {
pub(crate) fn new(db: DatabaseConnection, model: content_descriptor::Model) -> Self {
Self { db, model }
}
pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<Self>> {
let descriptors = content_descriptor::Entity::find()
.all(&db)
.await?
.into_iter()
.map(|model| Self::new(db.clone(), model))
.collect();
Ok(descriptors)
}
/// Searches for the hash by id
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
let hash = hash::Entity::find_by_id(id)
let hash = content_descriptor::Entity::find_by_id(id)
.one(&db)
.await?
.map(|model| Self::new(db, model));
@ -30,24 +42,24 @@ impl Hash {
/// Returns the hash by value
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_value<S: AsRef<str> + Debug>(
pub async fn by_value<D: AsRef<[u8]> + Debug>(
db: DatabaseConnection,
value: S,
descriptor: D,
) -> RepoResult<Option<Self>> {
let hash = hash::Entity::find()
.filter(hash::Column::Value.eq(value.as_ref()))
let cid = content_descriptor::Entity::find()
.filter(content_descriptor::Column::Descriptor.eq(descriptor.as_ref()))
.one(&db)
.await?
.map(|model| Self::new(db, model));
Ok(hash)
Ok(cid)
}
/// Adds a new hash to the database
#[tracing::instrument(level = "debug", skip(db))]
pub async fn add(db: DatabaseConnection, value: String) -> RepoResult<Self> {
let active_model = hash::ActiveModel {
value: Set(value),
pub async fn add(db: DatabaseConnection, descriptor: Vec<u8>) -> RepoResult<Self> {
let active_model = content_descriptor::ActiveModel {
descriptor: Set(descriptor),
..Default::default()
};
let model = active_model.insert(&db).await?;
@ -59,8 +71,8 @@ impl Hash {
self.model.id
}
pub fn value(&self) -> &String {
&self.model.value
pub fn descriptor(&self) -> &[u8] {
&self.model.descriptor[..]
}
/// Returns the file associated with the hash
@ -75,4 +87,15 @@ impl Hash {
Ok(file)
}
pub async fn convert_v1_to_v2(&mut self) -> RepoResult<()> {
let descriptor = convert_v1_descriptor_to_v2(&self.model.descriptor)?;
let active_model = content_descriptor::ActiveModel {
id: Set(self.id()),
descriptor: Set(descriptor),
};
self.model = active_model.update(&self.db).await?;
Ok(())
}
}
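
Not from the diff: a hedged sketch combining the lookup and insert methods above; the function name is made up.

async fn get_or_add_descriptor(
    db: DatabaseConnection,
    descriptor: Vec<u8>,
) -> RepoResult<ContentDescriptor> {
    if let Some(existing) = ContentDescriptor::by_value(db.clone(), &descriptor).await? {
        Ok(existing)
    } else {
        ContentDescriptor::add(db, descriptor).await
    }
}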

@ -1,395 +0,0 @@
use std::fmt::Debug;
use std::io::Cursor;
use std::str::FromStr;
use chrono::{Local, NaiveDateTime};
use sea_orm::prelude::*;
use sea_orm::sea_query::{Expr, Query};
use sea_orm::{Condition, DatabaseConnection, Set};
use sea_orm::{JoinType, QuerySelect};
use tokio::io::{AsyncReadExt, BufReader};
use mediarepo_core::error::RepoResult;
use mediarepo_core::thumbnailer::{self, Thumbnail as ThumbnailerThumb, ThumbnailSize};
use mediarepo_database::entities::file;
use mediarepo_database::entities::hash;
use mediarepo_database::entities::hash_tag;
use mediarepo_database::entities::namespace;
use mediarepo_database::entities::tag;
use crate::file_type::FileType;
use crate::storage::Storage;
use crate::tag::Tag;
#[derive(Clone)]
pub struct File {
db: DatabaseConnection,
model: file::Model,
hash: hash::Model,
}
impl File {
#[tracing::instrument(level = "trace")]
pub(crate) fn new(db: DatabaseConnection, model: file::Model, hash: hash::Model) -> Self {
Self { db, model, hash }
}
/// Returns a list of all known stored files
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<File>> {
let files: Vec<(file::Model, Option<hash::Model>)> = file::Entity::find()
.find_also_related(hash::Entity)
.all(&db)
.await?;
let files = files
.into_iter()
.filter_map(|(f, h)| {
let h = h?;
Some(Self::new(db.clone(), f, h))
})
.collect();
Ok(files)
}
/// Fetches the file by id
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
if let Some((model, Some(hash))) = file::Entity::find_by_id(id)
.find_also_related(hash::Entity)
.one(&db)
.await?
{
let file = File::new(db, model, hash);
Ok(Some(file))
} else {
Ok(None)
}
}
/// Finds the file by hash
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_hash<S: AsRef<str> + Debug>(
db: DatabaseConnection,
hash: S,
) -> RepoResult<Option<Self>> {
if let Some((hash, Some(model))) = hash::Entity::find()
.filter(hash::Column::Value.eq(hash.as_ref()))
.find_also_related(file::Entity)
.one(&db)
.await?
{
let file = File::new(db, model, hash);
Ok(Some(file))
} else {
Ok(None)
}
}
/// Finds the file by tags
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn find_by_tags(
db: DatabaseConnection,
tag_ids: Vec<Vec<(i64, bool)>>,
) -> RepoResult<Vec<Self>> {
let main_condition = build_find_filter_conditions(tag_ids);
let results: Vec<(hash::Model, Option<file::Model>)> = hash::Entity::find()
.find_also_related(file::Entity)
.filter(main_condition)
.group_by(file::Column::Id)
.all(&db)
.await?;
let files: Vec<Self> = results
.into_iter()
.filter_map(|(hash, tag)| Some(Self::new(db.clone(), tag?, hash)))
.collect();
Ok(files)
}
/// Adds a file with its hash to the database
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn add(
db: DatabaseConnection,
storage_id: i64,
hash_id: i64,
file_type: FileType,
mime_type: Option<String>,
creation_time: NaiveDateTime,
change_time: NaiveDateTime,
) -> RepoResult<Self> {
let file = file::ActiveModel {
hash_id: Set(hash_id),
file_type: Set(file_type as u32),
mime_type: Set(mime_type),
storage_id: Set(storage_id),
import_time: Set(Local::now().naive_local()),
creation_time: Set(creation_time),
change_time: Set(change_time),
..Default::default()
};
let file: file::ActiveModel = file.insert(&db).await?.into();
let file = Self::by_id(db, file.id.unwrap())
.await?
.expect("Inserted file does not exist");
Ok(file)
}
/// Returns the unique identifier of the file
pub fn id(&self) -> i64 {
self.model.id
}
/// Returns the hash of the file (content identifier)
pub fn hash(&self) -> &String {
&self.hash.value
}
/// Returns the hash id of the file
pub fn hash_id(&self) -> i64 {
self.hash.id
}
/// Returns the type of the file
pub fn file_type(&self) -> FileType {
match self.model.file_type {
1 => FileType::Image,
2 => FileType::Video,
3 => FileType::Audio,
_ => FileType::Unknown,
}
}
/// Returns the optional mime type of the file
pub fn mime_type(&self) -> &Option<String> {
&self.model.mime_type
}
/// Returns the optional name of the file
pub fn name(&self) -> &Option<String> {
&self.model.name
}
/// Returns the comment of the file
pub fn comment(&self) -> &Option<String> {
&self.model.comment
}
/// Returns the import time of the file
pub fn import_time(&self) -> &NaiveDateTime {
&self.model.import_time
}
/// Returns the datetime when the file was created
pub fn creation_time(&self) -> &NaiveDateTime {
&self.model.creation_time
}
/// Returns the last time the file was changed
pub fn change_time(&self) -> &NaiveDateTime {
&self.model.change_time
}
/// Returns the storage where the file is stored
pub async fn storage(&self) -> RepoResult<Storage> {
let storage = Storage::by_id(self.db.clone(), self.model.storage_id)
.await?
.expect("The FK storage_id doesn't exist?!");
Ok(storage)
}
/// Returns the list of tags of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tags(&self) -> RepoResult<Vec<Tag>> {
let tags: Vec<(tag::Model, Option<namespace::Model>)> = tag::Entity::find()
.find_also_related(namespace::Entity)
.join(JoinType::LeftJoin, hash_tag::Relation::Tag.def().rev())
.join(JoinType::InnerJoin, hash_tag::Relation::Hash.def())
.filter(hash::Column::Id.eq(self.hash.id))
.all(&self.db)
.await?;
let tags = tags
.into_iter()
.map(|(tag, namespace)| Tag::new(self.db.clone(), tag, namespace))
.collect();
Ok(tags)
}
/// Changes the name of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_name<S: ToString + Debug>(&mut self, name: S) -> RepoResult<()> {
let mut active_file = self.get_active_model();
active_file.name = Set(Some(name.to_string()));
let active_file = active_file.update(&self.db).await?;
self.model.name = active_file.name;
Ok(())
}
/// Changes the comment of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_comment<S: ToString + Debug>(&mut self, comment: S) -> RepoResult<()> {
let mut active_file = self.get_active_model();
active_file.comment = Set(Some(comment.to_string()));
let active_file = active_file.update(&self.db).await?;
self.model.comment = active_file.comment;
Ok(())
}
/// Changes the type of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_file_type(&mut self, file_type: FileType) -> RepoResult<()> {
let mut active_file = self.get_active_model();
active_file.file_type = Set(file_type as u32);
let active_file = active_file.update(&self.db).await?;
self.model.file_type = active_file.file_type;
Ok(())
}
/// Adds a single tag to the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_tag(&mut self, tag_id: i64) -> RepoResult<()> {
let hash_id = self.hash.id;
let active_model = hash_tag::ActiveModel {
hash_id: Set(hash_id),
tag_id: Set(tag_id),
};
active_model.insert(&self.db).await?;
Ok(())
}
/// Adds multiple tags to the file at once
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
if tag_ids.is_empty() {
return Ok(());
}
let hash_id = self.hash.id;
let models: Vec<hash_tag::ActiveModel> = tag_ids
.into_iter()
.map(|tag_id| hash_tag::ActiveModel {
hash_id: Set(hash_id),
tag_id: Set(tag_id),
})
.collect();
hash_tag::Entity::insert_many(models).exec(&self.db).await?;
Ok(())
}
/// Removes multiple tags from the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn remove_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
let hash_id = self.hash.id;
hash_tag::Entity::delete_many()
.filter(hash_tag::Column::HashId.eq(hash_id))
.filter(hash_tag::Column::TagId.is_in(tag_ids))
.exec(&self.db)
.await?;
Ok(())
}
/// Returns the reader for the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_reader(&self) -> RepoResult<BufReader<tokio::fs::File>> {
let storage = self.storage().await?;
storage.get_file_reader(&self.hash.value).await
}
/// Retrieves the size of the file from its content
#[tracing::instrument(level = "trace", skip(self))]
pub async fn get_size(&self) -> RepoResult<u64> {
if let Some(size) = self.model.size {
Ok(size as u64)
} else {
let mut reader = self.get_reader().await?;
let size = {
let mut buf = Vec::new();
reader.read_to_end(&mut buf).await
}?;
let mut model = self.get_active_model();
model.size = Set(Some(size as i64));
model.update(&self.db).await?;
Ok(size as u64)
}
}
/// Creates a thumbnail for the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn create_thumbnail<I: IntoIterator<Item = ThumbnailSize> + Debug>(
&self,
sizes: I,
) -> RepoResult<Vec<ThumbnailerThumb>> {
let mut buf = Vec::new();
self.get_reader().await?.read_to_end(&mut buf).await?;
let mime_type = self
.model
.mime_type
.clone()
.map(|mime_type| mime::Mime::from_str(&mime_type).unwrap())
.unwrap_or(mime::IMAGE_STAR);
let thumbs = thumbnailer::create_thumbnails(Cursor::new(buf), mime_type, sizes)?;
Ok(thumbs)
}
/// Returns the active model of the file with only the id set
fn get_active_model(&self) -> file::ActiveModel {
file::ActiveModel {
id: Set(self.id()),
..Default::default()
}
}
}
fn build_find_filter_conditions(tag_ids: Vec<Vec<(i64, bool)>>) -> Condition {
let mut main_condition = Condition::all();
for mut expression in tag_ids {
if expression.len() == 1 {
let (tag_id, negated) = expression.pop().unwrap();
main_condition = add_single_filter_expression(main_condition, tag_id, negated)
} else if !expression.is_empty() {
let mut sub_condition = Condition::any();
for (tag, negated) in expression {
sub_condition = add_single_filter_expression(sub_condition, tag, negated);
}
main_condition = main_condition.add(sub_condition);
}
}
main_condition
}
fn add_single_filter_expression(condition: Condition, tag_id: i64, negated: bool) -> Condition {
if negated {
condition.add(
hash::Column::Id.not_in_subquery(
Query::select()
.expr(Expr::col(hash_tag::Column::HashId))
.from(hash_tag::Entity)
.cond_where(hash_tag::Column::TagId.eq(tag_id))
.to_owned(),
),
)
} else {
condition.add(
hash::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(hash_tag::Column::HashId))
.from(hash_tag::Entity)
.cond_where(hash_tag::Column::TagId.eq(tag_id))
.to_owned(),
),
)
}
}

@ -0,0 +1,223 @@
use chrono::NaiveDateTime;
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::content_descriptor_tag;
use mediarepo_database::entities::file;
use mediarepo_database::entities::file_metadata;
use sea_orm::sea_query::{Alias, Expr, Query, SimpleExpr};
use sea_orm::ColumnTrait;
use sea_orm::Condition;
macro_rules! apply_ordering_comparator {
($column:expr, $filter:expr) => {
match $filter {
OrderingComparator::Less(value) => $column.lt(value),
OrderingComparator::Equal(value) => $column.eq(value),
OrderingComparator::Greater(value) => $column.gt(value),
OrderingComparator::Between((min_value, max_value)) => {
$column.between(min_value, max_value)
}
}
};
}
#[derive(Clone, Debug)]
pub enum FilterProperty {
TagId(NegatableComparator<i64>),
TagWildcardIds(NegatableComparator<Vec<i64>>),
ContentDescriptor(NegatableComparator<Vec<u8>>),
TagCount(OrderingComparator<i64>),
FileProperty(FilterFileProperty),
}
#[derive(Clone, Debug)]
pub enum FilterFileProperty {
Id(NegatableComparator<i64>),
Status(NegatableComparator<i64>),
FileSize(OrderingComparator<i64>),
ImportedTime(OrderingComparator<NaiveDateTime>),
ChangedTime(OrderingComparator<NaiveDateTime>),
CreatedTime(OrderingComparator<NaiveDateTime>),
}
#[derive(Clone, Debug)]
pub enum OrderingComparator<T> {
Less(T),
Equal(T),
Greater(T),
Between((T, T)),
}
#[derive(Clone, Debug)]
pub enum NegatableComparator<T> {
Is(T),
IsNot(T),
}
#[tracing::instrument(level = "debug")]
pub fn build_find_filter_conditions(filters: Vec<Vec<FilterProperty>>) -> Condition {
filters
.into_iter()
.fold(Condition::all(), |all_cond, mut expression| {
if expression.len() == 1 {
let property = expression.pop().unwrap();
all_cond.add(build_single_filter(property))
} else if !expression.is_empty() {
let sub_condition = expression.into_iter().fold(Condition::any(), |cond, prop| {
cond.add(build_single_filter(prop))
});
all_cond.add(sub_condition)
} else {
all_cond
}
})
}
#[inline]
fn build_single_filter(property: FilterProperty) -> SimpleExpr {
match property {
FilterProperty::TagId(tag_filter) => build_tag_id_filter(tag_filter),
FilterProperty::TagWildcardIds(wildcard_filter) => {
build_tag_wildcard_ids_filter(wildcard_filter)
}
FilterProperty::ContentDescriptor(cd_filter) => build_content_descriptor_filter(cd_filter),
FilterProperty::TagCount(count_filter) => build_tag_count_filter(count_filter),
FilterProperty::FileProperty(property_filter) => {
build_file_property_filter(property_filter)
}
}
}
fn build_tag_id_filter(filter: NegatableComparator<i64>) -> SimpleExpr {
match filter {
NegatableComparator::Is(tag_id) => content_descriptor::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.eq(tag_id))
.to_owned(),
),
NegatableComparator::IsNot(tag_id) => content_descriptor::Column::Id.not_in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.eq(tag_id))
.to_owned(),
),
}
}
fn build_tag_wildcard_ids_filter(filter: NegatableComparator<Vec<i64>>) -> SimpleExpr {
match filter {
NegatableComparator::Is(tag_ids) => content_descriptor::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.is_in(tag_ids))
.to_owned(),
),
NegatableComparator::IsNot(tag_ids) => content_descriptor::Column::Id.not_in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.is_in(tag_ids))
.to_owned(),
),
}
}
fn build_content_descriptor_filter(filter: NegatableComparator<Vec<u8>>) -> SimpleExpr {
match filter {
NegatableComparator::Is(cd) => content_descriptor::Column::Descriptor.eq(cd),
NegatableComparator::IsNot(cd) => content_descriptor::Column::Descriptor.ne(cd),
}
}
fn build_tag_count_filter(filter: OrderingComparator<i64>) -> SimpleExpr {
let count_column = Alias::new("count");
let cd_id_column = Alias::new("cd_id");
let count_subquery = Query::select()
.expr_as(
Expr::col(content_descriptor_tag::Column::CdId),
cd_id_column.clone(),
)
.expr_as(
content_descriptor_tag::Column::TagId.count(),
count_column.clone(),
)
.from(content_descriptor_tag::Entity)
.group_by_col(cd_id_column.clone())
.to_owned();
let count_expression = apply_ordering_comparator!(Expr::col(count_column), filter);
content_descriptor::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(cd_id_column))
.from_subquery(count_subquery, Alias::new("tag_counts"))
.cond_where(count_expression)
.to_owned(),
)
}
#[inline]
fn build_file_property_filter(property: FilterFileProperty) -> SimpleExpr {
match property {
FilterFileProperty::Id(id_filter) => build_file_id_filter(id_filter),
FilterFileProperty::Status(status_filter) => build_file_status_filter(status_filter),
FilterFileProperty::FileSize(size_filter) => {
build_file_metadata_filter(build_file_size_filter(size_filter))
}
FilterFileProperty::ImportedTime(time_filter) => {
build_file_metadata_filter(build_file_import_time_filter(time_filter))
}
FilterFileProperty::ChangedTime(time_filter) => {
build_file_metadata_filter(build_file_changed_time_filter(time_filter))
}
FilterFileProperty::CreatedTime(time_filter) => {
build_file_metadata_filter(build_file_created_time_filter(time_filter))
}
}
}
fn build_file_id_filter(filter: NegatableComparator<i64>) -> SimpleExpr {
match filter {
NegatableComparator::Is(id) => file::Column::Id.eq(id),
NegatableComparator::IsNot(id) => file::Column::Id.ne(id),
}
}
fn build_file_status_filter(filter: NegatableComparator<i64>) -> SimpleExpr {
match filter {
NegatableComparator::Is(status) => file::Column::Status.eq(status),
NegatableComparator::IsNot(status) => file::Column::Status.ne(status),
}
}
fn build_file_metadata_filter(property_condition: SimpleExpr) -> SimpleExpr {
file::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(file_metadata::Column::FileId))
.from(file_metadata::Entity)
.cond_where(property_condition)
.to_owned(),
)
}
fn build_file_size_filter(filter: OrderingComparator<i64>) -> SimpleExpr {
apply_ordering_comparator!(file_metadata::Column::Size, filter)
}
fn build_file_import_time_filter(filter: OrderingComparator<NaiveDateTime>) -> SimpleExpr {
apply_ordering_comparator!(file_metadata::Column::ImportTime, filter)
}
fn build_file_changed_time_filter(filter: OrderingComparator<NaiveDateTime>) -> SimpleExpr {
apply_ordering_comparator!(file_metadata::Column::ChangeTime, filter)
}
fn build_file_created_time_filter(filter: OrderingComparator<NaiveDateTime>) -> SimpleExpr {
apply_ordering_comparator!(file_metadata::Column::CreationTime, filter)
}
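The nested Vec<Vec<FilterProperty>> consumed above encodes a conjunction of disjunctions: each inner vector is OR-combined and the resulting groups are AND-combined. A minimal call-site sketch (editor's illustration, not part of the commit), assuming it lives inside the mediarepo-model crate and uses placeholder tag ids:

use crate::file::filter::{build_find_filter_conditions, FilterProperty, NegatableComparator};
use sea_orm::Condition;

// Builds the condition for "tag 1 AND (tag 2 OR NOT tag 3)" with placeholder ids.
fn example_condition() -> Condition {
    let filters = vec![
        vec![FilterProperty::TagId(NegatableComparator::Is(1))],
        vec![
            FilterProperty::TagId(NegatableComparator::Is(2)),
            FilterProperty::TagId(NegatableComparator::IsNot(3)),
        ],
    ];
    build_find_filter_conditions(filters)
}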

@ -0,0 +1,334 @@
pub mod filter;
use std::fmt::Debug;
use std::io::Cursor;
use std::str::FromStr;
use mediarepo_core::content_descriptor::encode_content_descriptor;
use sea_orm::prelude::*;
use sea_orm::{ConnectionTrait, DatabaseConnection, Set};
use sea_orm::{JoinType, QuerySelect};
use tokio::io::{AsyncReadExt, BufReader};
use crate::file::filter::FilterProperty;
use crate::file_metadata::FileMetadata;
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_core::mediarepo_api::types::files::FileStatus as ApiFileStatus;
use mediarepo_core::thumbnailer::{self, Thumbnail as ThumbnailerThumb, ThumbnailSize};
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::content_descriptor_tag;
use mediarepo_database::entities::file;
use mediarepo_database::entities::file_metadata;
use mediarepo_database::entities::namespace;
use mediarepo_database::entities::tag;
use crate::tag::Tag;
pub enum FileStatus {
Imported = 10,
Archived = 20,
Deleted = 30,
}
impl From<ApiFileStatus> for FileStatus {
fn from(s: ApiFileStatus) -> Self {
match s {
ApiFileStatus::Imported => Self::Imported,
ApiFileStatus::Archived => Self::Archived,
ApiFileStatus::Deleted => Self::Deleted,
}
}
}
#[derive(Clone)]
pub struct File {
db: DatabaseConnection,
model: file::Model,
content_descriptor: content_descriptor::Model,
}
impl File {
#[tracing::instrument(level = "trace")]
pub(crate) fn new(
db: DatabaseConnection,
model: file::Model,
hash: content_descriptor::Model,
) -> Self {
Self {
db,
model,
content_descriptor: hash,
}
}
/// Returns a list of all known stored files
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<File>> {
let files: Vec<(file::Model, Option<content_descriptor::Model>)> = file::Entity::find()
.find_also_related(content_descriptor::Entity)
.all(&db)
.await?;
let files = files
.into_iter()
.filter_map(|(f, h)| {
let h = h?;
Some(Self::new(db.clone(), f, h))
})
.collect();
Ok(files)
}
/// Fetches the file by id
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
if let Some((model, Some(hash))) = file::Entity::find_by_id(id)
.find_also_related(content_descriptor::Entity)
.one(&db)
.await?
{
let file = File::new(db, model, hash);
Ok(Some(file))
} else {
Ok(None)
}
}
/// Finds the file by its content descriptor
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_cd(db: DatabaseConnection, cd: &[u8]) -> RepoResult<Option<Self>> {
if let Some((hash, Some(model))) = content_descriptor::Entity::find()
.filter(content_descriptor::Column::Descriptor.eq(cd))
.find_also_related(file::Entity)
.one(&db)
.await?
{
let file = File::new(db, model, hash);
Ok(Some(file))
} else {
Ok(None)
}
}
/// Finds files matching the given filter properties
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn find_by_filters(
db: DatabaseConnection,
filters: Vec<Vec<FilterProperty>>,
) -> RepoResult<Vec<Self>> {
let main_condition = filter::build_find_filter_conditions(filters);
let results: Vec<(content_descriptor::Model, Option<file::Model>)> =
content_descriptor::Entity::find()
.find_also_related(file::Entity)
.filter(main_condition)
.group_by(file::Column::Id)
.all(&db)
.await?;
let files: Vec<Self> = results
.into_iter()
.filter_map(|(hash, tag)| Some(Self::new(db.clone(), tag?, hash)))
.collect();
Ok(files)
}
/// Adds a file with the given content descriptor id and mime type to the database
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn add(
db: DatabaseConnection,
cd_id: i64,
mime_type: String,
) -> RepoResult<Self> {
let file = file::ActiveModel {
cd_id: Set(cd_id),
mime_type: Set(mime_type),
..Default::default()
};
let file: file::ActiveModel = file.insert(&db).await?.into();
let file = Self::by_id(db, file.id.unwrap())
.await?
.expect("Inserted file does not exist");
Ok(file)
}
/// Returns the unique identifier of the file
pub fn id(&self) -> i64 {
self.model.id
}
/// Returns the content descriptor (cd) of the file
pub fn cd(&self) -> &[u8] {
&self.content_descriptor.descriptor
}
/// Returns the encoded content descriptor
pub fn encoded_cd(&self) -> String {
encode_content_descriptor(self.cd())
}
/// Returns the id of the content descriptor entry of the file
pub fn cd_id(&self) -> i64 {
self.content_descriptor.id
}
/// Returns the mime type of the file
pub fn mime_type(&self) -> &String {
&self.model.mime_type
}
/// Returns the status of the file
pub fn status(&self) -> FileStatus {
match self.model.status {
10 => FileStatus::Imported,
20 => FileStatus::Archived,
30 => FileStatus::Deleted,
_ => FileStatus::Imported,
}
}
/// Sets the status of the file and persists the change to the database
pub async fn set_status(&mut self, status: FileStatus) -> RepoResult<()> {
let active_model = file::ActiveModel {
id: Set(self.model.id),
status: Set(status as i32),
..Default::default()
};
self.model = active_model.update(&self.db).await?;
Ok(())
}
/// Returns the metadata associated with this file
/// A file MUST always have metadata associated
pub async fn metadata(&self) -> RepoResult<FileMetadata> {
FileMetadata::by_id(self.db.clone(), self.model.id)
.await
.and_then(|f| f.ok_or_else(|| RepoError::from("missing file metadata")))
}
/// Returns the list of tags of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tags(&self) -> RepoResult<Vec<Tag>> {
let tags: Vec<(tag::Model, Option<namespace::Model>)> = tag::Entity::find()
.find_also_related(namespace::Entity)
.join(
JoinType::LeftJoin,
content_descriptor_tag::Relation::Tag.def().rev(),
)
.join(
JoinType::InnerJoin,
content_descriptor_tag::Relation::ContentDescriptorId.def(),
)
.filter(content_descriptor::Column::Id.eq(self.content_descriptor.id))
.all(&self.db)
.await?;
let tags = tags
.into_iter()
.map(|(tag, namespace)| Tag::new(self.db.clone(), tag, namespace))
.collect();
Ok(tags)
}
/// Adds a single tag to the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_tag(&mut self, tag_id: i64) -> RepoResult<()> {
let cd_id = self.content_descriptor.id;
let active_model = content_descriptor_tag::ActiveModel {
cd_id: Set(cd_id),
tag_id: Set(tag_id),
};
active_model.insert(&self.db).await?;
Ok(())
}
/// Adds multiple tags to the file at once
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
if tag_ids.is_empty() {
return Ok(());
}
let cd_id = self.content_descriptor.id;
let models: Vec<content_descriptor_tag::ActiveModel> = tag_ids
.into_iter()
.map(|tag_id| content_descriptor_tag::ActiveModel {
cd_id: Set(cd_id),
tag_id: Set(tag_id),
})
.collect();
content_descriptor_tag::Entity::insert_many(models)
.exec(&self.db)
.await?;
Ok(())
}
/// Removes multiple tags from the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn remove_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
let hash_id = self.content_descriptor.id;
content_descriptor_tag::Entity::delete_many()
.filter(content_descriptor_tag::Column::CdId.eq(hash_id))
.filter(content_descriptor_tag::Column::TagId.is_in(tag_ids))
.exec(&self.db)
.await?;
Ok(())
}
/// Returns the reader for the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_reader(
&self,
storage: &FileHashStore,
) -> RepoResult<BufReader<tokio::fs::File>> {
storage
.get_file(&self.content_descriptor.descriptor)
.await
.map(|(_, f)| f)
}
/// Creates thumbnails of the given sizes for the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn create_thumbnail<I: IntoIterator<Item = ThumbnailSize> + Debug>(
&self,
storage: &FileHashStore,
sizes: I,
) -> RepoResult<Vec<ThumbnailerThumb>> {
let mut buf = Vec::new();
self.get_reader(storage)
.await?
.read_to_end(&mut buf)
.await?;
let mime_type = self.model.mime_type.clone();
let mime_type =
mime::Mime::from_str(&mime_type).unwrap_or_else(|_| mime::APPLICATION_OCTET_STREAM);
let thumbs = thumbnailer::create_thumbnails(Cursor::new(buf), mime_type, sizes)?;
Ok(thumbs)
}
/// Deletes the file as well as the content descriptor, tag mappings and metadata about the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn delete(self) -> RepoResult<()> {
let trx = self.db.begin().await?;
file_metadata::Entity::delete_many()
.filter(file_metadata::Column::FileId.eq(self.model.id))
.exec(&trx)
.await?;
self.model.delete(&trx).await?;
content_descriptor_tag::Entity::delete_many()
.filter(content_descriptor_tag::Column::CdId.eq(self.content_descriptor.id))
.exec(&trx)
.await?;
content_descriptor::Entity::delete_many()
.filter(content_descriptor::Column::Id.eq(self.content_descriptor.id))
.exec(&trx)
.await?;
trx.commit().await?;
Ok(())
}
}
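Editor's sketch of the changed reader API above: get_reader now takes the FileHashStore explicitly instead of resolving a storage record from the database. It mirrors Repo::get_file_bytes further down; the helper name is illustrative only and the sketch assumes it lives inside the mediarepo-model crate.

use crate::file::File;
use mediarepo_core::error::RepoResult;
use mediarepo_core::fs::file_hash_store::FileHashStore;
use tokio::io::AsyncReadExt;

// Reads the full content of a file through the hash store (illustrative sketch).
async fn read_file_bytes(file: &File, storage: &FileHashStore) -> RepoResult<Vec<u8>> {
    let mut reader = file.get_reader(storage).await?;
    let mut buf = Vec::new();
    reader.read_to_end(&mut buf).await?;
    Ok(buf)
}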

@ -0,0 +1,124 @@
use std::fmt::Debug;
use chrono::{Local, NaiveDateTime};
use sea_orm::prelude::*;
use sea_orm::{DatabaseConnection, Set};
use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::file_metadata;
#[derive(Clone)]
pub struct FileMetadata {
db: DatabaseConnection,
model: file_metadata::Model,
}
impl FileMetadata {
#[tracing::instrument(level = "trace")]
pub(crate) fn new(db: DatabaseConnection, model: file_metadata::Model) -> Self {
Self { db, model }
}
/// Fetches the metadata entry for the given file id
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
let file_metadata = file_metadata::Entity::find_by_id(id)
.one(&db)
.await?
.map(|m| FileMetadata::new(db, m));
Ok(file_metadata)
}
/// Fetches metadata for all given file ids
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all_by_ids(db: DatabaseConnection, ids: Vec<i64>) -> RepoResult<Vec<Self>> {
let file_metadata = file_metadata::Entity::find()
.filter(file_metadata::Column::FileId.is_in(ids))
.all(&db)
.await?
.into_iter()
.map(|m| FileMetadata::new(db.clone(), m))
.collect();
Ok(file_metadata)
}
/// Adds a metadata entry for the given file to the database
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn add(
db: DatabaseConnection,
file_id: i64,
size: i64,
creation_time: NaiveDateTime,
change_time: NaiveDateTime,
) -> RepoResult<Self> {
let file = file_metadata::ActiveModel {
file_id: Set(file_id),
size: Set(size),
import_time: Set(Local::now().naive_local()),
creation_time: Set(creation_time),
change_time: Set(change_time),
..Default::default()
};
let model = file.insert(&db).await?;
Ok(Self::new(db, model))
}
pub fn file_id(&self) -> i64 {
self.model.file_id
}
pub fn size(&self) -> i64 {
self.model.size
}
pub fn name(&self) -> &Option<String> {
&self.model.name
}
pub fn comment(&self) -> &Option<String> {
&self.model.comment
}
pub fn import_time(&self) -> &NaiveDateTime {
&self.model.import_time
}
pub fn creation_time(&self) -> &NaiveDateTime {
&self.model.creation_time
}
pub fn change_time(&self) -> &NaiveDateTime {
&self.model.change_time
}
/// Changes the name of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_name<S: ToString + Debug>(&mut self, name: S) -> RepoResult<()> {
let mut active_model = self.get_active_model();
active_model.name = Set(Some(name.to_string()));
self.model = active_model.update(&self.db).await?;
Ok(())
}
/// Changes the comment of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_comment<S: ToString + Debug>(&mut self, comment: S) -> RepoResult<()> {
let mut active_file = self.get_active_model();
active_file.comment = Set(Some(comment.to_string()));
self.model = active_file.update(&self.db).await?;
Ok(())
}
/// Returns the active model of the file metadata with only the file id set
fn get_active_model(&self) -> file_metadata::ActiveModel {
file_metadata::ActiveModel {
file_id: Set(self.file_id()),
..Default::default()
}
}
}
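A short editor's sketch of the setters above; it assumes a FileMetadata value already fetched via by_id and uses placeholder strings.

use crate::file_metadata::FileMetadata;
use mediarepo_core::error::RepoResult;

// Renames a file and attaches a comment through its metadata record (sketch).
async fn rename_file(mut metadata: FileMetadata) -> RepoResult<()> {
    metadata.set_name("renamed-file").await?;
    metadata.set_comment("name updated during import").await?;
    Ok(())
}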

@ -1,22 +0,0 @@
use mime::Mime;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, Serialize, Deserialize, PartialOrd, PartialEq)]
pub enum FileType {
Other = -1,
Unknown = 0,
Image = 1,
Video = 2,
Audio = 3,
}
impl From<Mime> for FileType {
fn from(mime_type: Mime) -> Self {
match mime_type.type_() {
mime::IMAGE => Self::Image,
mime::VIDEO => Self::Video,
mime::AUDIO => Self::Audio,
_ => Self::Other,
}
}
}

@ -1,16 +0,0 @@
pub mod tag_handle;
use async_trait::async_trait;
use mediarepo_core::error::RepoResult;
use sea_orm::DatabaseConnection;
#[async_trait]
pub trait EntityHandle {
type Model;
/// Returns the ID that is stored in the handle
fn id(&self) -> i64;
/// Returns the model associated with the handle
async fn model(&self, db: DatabaseConnection) -> RepoResult<Self::Model>;
}

@ -1,24 +0,0 @@
use crate::handles::EntityHandle;
use crate::tag::Tag;
use async_trait::async_trait;
use mediarepo_core::error::{RepoDatabaseError, RepoResult};
use sea_orm::DatabaseConnection;
pub struct TagHandle(pub(crate) i64);
#[async_trait]
impl EntityHandle for TagHandle {
type Model = Tag;
fn id(&self) -> i64 {
self.0
}
async fn model(&self, db: DatabaseConnection) -> RepoResult<Self::Model> {
let tag = Tag::by_id(db, self.0)
.await?
.ok_or_else(|| RepoDatabaseError::InvalidHandle(self.id()))?;
Ok(tag)
}
}

@ -1,10 +1,8 @@
pub mod content_descriptor;
pub mod file;
pub mod file_type;
pub mod handles;
pub mod hash;
pub mod file_metadata;
pub mod namespace;
pub mod repo;
pub mod storage;
pub mod tag;
pub mod thumbnail;
pub mod type_keys;

@ -1,11 +1,14 @@
use crate::content_descriptor::ContentDescriptor;
use crate::file::filter::FilterProperty;
use crate::file::File;
use crate::file_type::FileType;
use crate::file_metadata::FileMetadata;
use crate::namespace::Namespace;
use crate::storage::Storage;
use crate::tag::Tag;
use crate::thumbnail::Thumbnail;
use chrono::{Local, NaiveDateTime};
use mediarepo_core::content_descriptor::{encode_content_descriptor, is_v1_content_descriptor};
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_core::fs::thumbnail_store::{Dimensions, ThumbnailStore};
use mediarepo_core::itertools::Itertools;
use mediarepo_core::thumbnailer::ThumbnailSize;
@ -20,29 +23,37 @@ use std::iter::FromIterator;
use std::path::PathBuf;
use std::str::FromStr;
use tokio::fs::OpenOptions;
use tokio::io::BufReader;
use tokio::io::AsyncReadExt;
#[derive(Clone)]
pub struct Repo {
db: DatabaseConnection,
main_storage: Option<Storage>,
thumbnail_storage: Option<ThumbnailStore>,
main_storage: FileHashStore,
thumbnail_storage: ThumbnailStore,
}
impl Repo {
pub(crate) fn new(db: DatabaseConnection) -> Self {
pub(crate) fn new(
db: DatabaseConnection,
file_store_path: PathBuf,
thumb_store_path: PathBuf,
) -> Self {
Self {
db,
main_storage: None,
thumbnail_storage: None,
main_storage: FileHashStore::new(file_store_path),
thumbnail_storage: ThumbnailStore::new(thumb_store_path),
}
}
/// Connects to the database with the given uri
#[tracing::instrument(level = "debug")]
pub async fn connect<S: AsRef<str> + Debug>(uri: S) -> RepoResult<Self> {
pub async fn connect<S: AsRef<str> + Debug>(
uri: S,
file_store_path: PathBuf,
thumb_store_path: PathBuf,
) -> RepoResult<Self> {
let db = get_database(uri).await?;
Ok(Self::new(db))
Ok(Self::new(db, file_store_path, thumb_store_path))
}
/// Returns the database of the repo for raw sql queries
@ -50,49 +61,10 @@ impl Repo {
&self.db
}
/// Returns all available storages
#[tracing::instrument(level = "debug", skip(self))]
pub async fn storages(&self) -> RepoResult<Vec<Storage>> {
Storage::all(self.db.clone()).await
}
/// Returns a storage by path
#[tracing::instrument(level = "debug", skip(self))]
pub async fn storage_by_path<S: ToString + Debug>(
&self,
path: S,
) -> RepoResult<Option<Storage>> {
Storage::by_path(self.db.clone(), path).await
}
/// Sets the main storage
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_main_storage<S: ToString + Debug>(&mut self, name: S) -> RepoResult<()> {
self.main_storage = Storage::by_name(self.db.clone(), name.to_string()).await?;
Ok(())
}
/// Sets the default thumbnail storage
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_thumbnail_storage(&mut self, path: PathBuf) -> RepoResult<()> {
self.thumbnail_storage = Some(ThumbnailStore::new(path));
Ok(())
}
/// Adds a storage to the repository
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_storage<S1: ToString + Debug, S2: ToString + Debug>(
&self,
name: S1,
path: S2,
) -> RepoResult<Storage> {
Storage::create(self.db.clone(), name, path).await
}
/// Returns a file by its content descriptor
#[tracing::instrument(level = "debug", skip(self))]
pub async fn file_by_hash<S: AsRef<str> + Debug>(&self, hash: S) -> RepoResult<Option<File>> {
File::by_hash(self.db.clone(), hash).await
pub async fn file_by_cd(&self, cd: &[u8]) -> RepoResult<Option<File>> {
File::by_cd(self.db.clone(), cd).await
}
/// Returns a file by id
@ -109,23 +81,17 @@ impl Repo {
/// Finds all files matching the given list of filters
#[tracing::instrument(level = "debug", skip(self))]
pub async fn find_files_by_tags(
pub async fn find_files_by_filters(
&self,
tags: Vec<Vec<(String, bool)>>,
filters: Vec<Vec<FilterProperty>>,
) -> RepoResult<Vec<File>> {
let parsed_tags = tags
.iter()
.flat_map(|e| e.into_iter().map(|t| parse_namespace_and_tag(t.0.clone())))
.unique()
.collect();
let db_tags = self.tags_by_names(parsed_tags).await?;
let tag_map: HashMap<String, i64> =
HashMap::from_iter(db_tags.into_iter().map(|t| (t.normalized_name(), t.id())));
let tag_ids = process_filters_with_tag_ids(tags, tag_map);
File::find_by_filters(self.db.clone(), filters).await
}
File::find_by_tags(self.db.clone(), tag_ids).await
/// Returns all file metadata entries for the given file ids
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_file_metadata_for_ids(&self, ids: Vec<i64>) -> RepoResult<Vec<FileMetadata>> {
FileMetadata::all_by_ids(self.db.clone(), ids).await
}
/// Adds a file from bytes to the database
@ -137,63 +103,69 @@ impl Repo {
creation_time: NaiveDateTime,
change_time: NaiveDateTime,
) -> RepoResult<File> {
let storage = self.get_main_storage()?;
let file_size = content.len();
let reader = Cursor::new(content);
let hash = storage.store_entry(reader).await?;
let cd_binary = self.main_storage.add_file(reader, None).await?;
let cd = ContentDescriptor::add(self.db.clone(), cd_binary).await?;
let (mime_type, file_type) = mime_type
let mime_type = mime_type
.and_then(|m| mime::Mime::from_str(&m).ok())
.map(|m| (Some(m.to_string()), FileType::from(m)))
.unwrap_or((None, FileType::Unknown));
.unwrap_or_else(|| mime::APPLICATION_OCTET_STREAM)
.to_string();
File::add(
let file = File::add(self.db.clone(), cd.id(), mime_type).await?;
FileMetadata::add(
self.db.clone(),
storage.id(),
hash.id(),
file_type,
mime_type,
file.id(),
file_size as i64,
creation_time,
change_time,
)
.await
.await?;
Ok(file)
}
/// Adds a file to the database by its readable path in the file system
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_file_by_path(&self, path: PathBuf) -> RepoResult<File> {
let mime_match = mime_guess::from_path(&path).first();
let mime_type = mime_guess::from_path(&path).first().map(|m| m.to_string());
let (mime_type, file_type) = if let Some(mime) = mime_match {
(Some(mime.clone().to_string()), FileType::from(mime))
} else {
(None, FileType::Unknown)
};
let os_file = OpenOptions::new().read(true).open(&path).await?;
let reader = BufReader::new(os_file);
let mut os_file = OpenOptions::new().read(true).open(&path).await?;
let mut buf = Vec::new();
os_file.read_to_end(&mut buf).await?;
let storage = self.get_main_storage()?;
let hash = storage.store_entry(reader).await?;
File::add(
self.db.clone(),
storage.id(),
hash.id(),
file_type,
self.add_file(
mime_type,
buf,
Local::now().naive_local(),
Local::now().naive_local(),
)
.await
}
/// Deletes a file from the database and disk
#[tracing::instrument(level = "debug", skip(self, file))]
pub async fn delete_file(&self, file: File) -> RepoResult<()> {
let cd = file.cd().to_owned();
let cd_string = file.encoded_cd();
file.delete().await?;
self.main_storage.delete_file(&cd).await?;
self.thumbnail_storage.delete_parent(&cd_string).await?;
Ok(())
}
/// Returns all thumbnails of a file
pub async fn get_file_thumbnails(&self, file_hash: String) -> RepoResult<Vec<Thumbnail>> {
let thumb_store = self.get_thumbnail_storage()?;
let thumbnails = thumb_store
.get_thumbnails(&file_hash)
pub async fn get_file_thumbnails(&self, file_cd: &[u8]) -> RepoResult<Vec<Thumbnail>> {
let file_cd = encode_content_descriptor(file_cd);
let thumbnails = self
.thumbnail_storage
.get_thumbnails(&file_cd)
.await?
.into_iter()
.map(|(size, path)| Thumbnail {
file_hash: file_hash.to_owned(),
file_hash: file_cd.to_owned(),
path,
size,
mime_type: mime::IMAGE_PNG.to_string(),
@ -203,18 +175,25 @@ impl Repo {
Ok(thumbnails)
}
pub async fn get_file_bytes(&self, file: &File) -> RepoResult<Vec<u8>> {
let mut buf = Vec::new();
let mut reader = file.get_reader(&self.main_storage).await?;
reader.read_to_end(&mut buf).await?;
Ok(buf)
}
/// Creates thumbnails of all sizes for a file
#[tracing::instrument(level = "debug", skip(self, file))]
pub async fn create_thumbnails_for_file(&self, file: &File) -> RepoResult<Vec<Thumbnail>> {
let thumb_storage = self.get_thumbnail_storage()?;
let size = ThumbnailSize::Medium;
let (height, width) = size.dimensions();
let thumbs = file.create_thumbnail([size]).await?;
let thumbs = file.create_thumbnail(&self.main_storage, [size]).await?;
let mut created_thumbs = Vec::with_capacity(1);
for thumb in thumbs {
let entry = self
.store_single_thumbnail(file.hash().to_owned(), thumb_storage, height, width, thumb)
.store_single_thumbnail(file.encoded_cd(), height, width, thumb)
.await?;
created_thumbs.push(entry);
}
@ -228,15 +207,14 @@ impl Repo {
file: &File,
size: ThumbnailSize,
) -> RepoResult<Thumbnail> {
let thumb_storage = self.get_thumbnail_storage()?;
let (height, width) = size.dimensions();
let thumb = file
.create_thumbnail([size])
.create_thumbnail(&self.main_storage, [size])
.await?
.pop()
.ok_or_else(|| RepoError::from("Failed to create thumbnail"))?;
let thumbnail = self
.store_single_thumbnail(file.hash().to_owned(), thumb_storage, height, width, thumb)
.store_single_thumbnail(file.encoded_cd(), height, width, thumb)
.await?;
Ok(thumbnail)
@ -246,7 +224,6 @@ impl Repo {
async fn store_single_thumbnail(
&self,
file_hash: String,
thumb_storage: &ThumbnailStore,
height: u32,
width: u32,
thumb: mediarepo_core::thumbnailer::Thumbnail,
@ -254,7 +231,8 @@ impl Repo {
let mut buf = Vec::new();
thumb.write_png(&mut buf)?;
let size = Dimensions { height, width };
let path = thumb_storage
let path = self
.thumbnail_storage
.add_thumbnail(&file_hash, size.clone(), &buf)
.await?;
@ -280,6 +258,22 @@ impl Repo {
Namespace::all(self.db.clone()).await
}
/// Converts a list of tag names to tag ids
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tag_names_to_ids(&self, tags: Vec<String>) -> RepoResult<HashMap<String, i64>> {
let parsed_tags = tags
.iter()
.map(|tag| parse_namespace_and_tag(tag.clone()))
.unique()
.collect();
let db_tags = self.tags_by_names(parsed_tags).await?;
let tag_map: HashMap<String, i64> =
HashMap::from_iter(db_tags.into_iter().map(|t| (t.normalized_name(), t.id())));
Ok(tag_map)
}
/// Finds all tags by name
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tags_by_names(&self, tags: Vec<(Option<String>, String)>) -> RepoResult<Vec<Tag>> {
@ -288,8 +282,8 @@ impl Repo {
/// Finds all tags that are assigned to the given list of content descriptors
#[tracing::instrument(level = "debug", skip_all)]
pub async fn find_tags_for_hashes(&self, hashes: Vec<String>) -> RepoResult<Vec<Tag>> {
Tag::for_hash_list(self.db.clone(), hashes).await
pub async fn find_tags_for_file_identifiers(&self, cds: Vec<Vec<u8>>) -> RepoResult<Vec<Tag>> {
Tag::for_cd_list(self.db.clone(), cds).await
}
/// Adds all tags that are missing from the database and returns them together with the ones that already exist
@ -393,16 +387,14 @@ impl Repo {
#[inline]
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_main_store_size(&self) -> RepoResult<u64> {
let main_storage = self.get_main_storage()?;
main_storage.get_size().await
self.main_storage.get_size().await
}
/// Returns the size of the thumbnail storage
#[inline]
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_thumb_store_size(&self) -> RepoResult<u64> {
let thumb_storage = self.get_thumbnail_storage()?;
thumb_storage.get_size().await
self.thumbnail_storage.get_size().await
}
/// Returns all entity counts
@ -412,67 +404,29 @@ impl Repo {
get_all_counts(&self.db).await
}
#[tracing::instrument(level = "trace", skip(self))]
fn get_main_storage(&self) -> RepoResult<&Storage> {
if let Some(storage) = &self.main_storage {
Ok(storage)
} else {
Err(RepoError::from("No main storage configured."))
}
}
#[tracing::instrument(level = "trace", skip(self))]
fn get_thumbnail_storage(&self) -> RepoResult<&ThumbnailStore> {
if let Some(storage) = &self.thumbnail_storage {
Ok(storage)
} else {
Err(RepoError::from("No thumbnail storage configured."))
}
}
}
fn process_filters_with_tag_ids(
filters: Vec<Vec<(String, bool)>>,
tag_ids: HashMap<String, i64>,
) -> Vec<Vec<(i64, bool)>> {
let mut id_filters = Vec::new();
for expression in filters {
let mut id_sub_filters = Vec::new();
let mut negated_wildcard_filters = Vec::new();
for (tag, negate) in expression {
if tag.ends_with("*") {
let tag_prefix = tag.trim_end_matches('*');
let mut found_tag_ids = tag_ids
.iter()
.filter(|(k, _)| k.starts_with(tag_prefix))
.map(|(_, id)| (*id, negate))
.collect::<Vec<(i64, bool)>>();
if negate {
negated_wildcard_filters.push(found_tag_ids)
} else {
id_sub_filters.append(&mut found_tag_ids);
}
} else {
if let Some(id) = tag_ids.get(&tag) {
id_sub_filters.push((*id, negate));
}
pub async fn migrate(&self) -> RepoResult<()> {
let cds = ContentDescriptor::all(self.db.clone()).await?;
tracing::info!("Converting content descriptors to v2 format...");
let mut converted_count = 0;
for mut cd in cds {
if is_v1_content_descriptor(cd.descriptor()) {
let src_cd = cd.descriptor().to_owned();
cd.convert_v1_to_v2().await?;
let dst_cd = cd.descriptor().to_owned();
self.main_storage.rename_file(&src_cd, &dst_cd).await?;
self.thumbnail_storage
.rename_parent(
encode_content_descriptor(&src_cd),
encode_content_descriptor(&dst_cd),
)
.await?;
converted_count += 1;
}
}
if !negated_wildcard_filters.is_empty() {
for wildcard_filter in negated_wildcard_filters {
for query in wildcard_filter {
let mut sub_filters = id_sub_filters.clone();
sub_filters.push(query);
id_filters.push(sub_filters)
}
}
} else if !id_sub_filters.is_empty() {
id_filters.push(id_sub_filters);
}
}
tracing::info!("Converted {} descriptors", converted_count);
id_filters
Ok(())
}
}
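Editor's sketch of the changed constructor path: Repo::connect now receives the file and thumbnail store locations up front, so no storage record has to be configured afterwards. The connection URI and paths below are placeholders.

use mediarepo_core::error::RepoResult;
use mediarepo_model::repo::Repo;
use std::path::PathBuf;

// Opens a repository with explicit storage paths (placeholder values).
async fn open_repo() -> RepoResult<Repo> {
    Repo::connect(
        "sqlite://repo.db",
        PathBuf::from("files"),
        PathBuf::from("thumbnails"),
    )
    .await
}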

@ -1,198 +0,0 @@
use crate::hash::Hash;
use mediarepo_core::error::RepoResult;
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_database::entities::storage;
use mediarepo_database::entities::storage::ActiveModel as ActiveStorage;
use mediarepo_database::entities::storage::Model as StorageModel;
use sea_orm::prelude::*;
use sea_orm::{DatabaseConnection, NotSet, Set};
use std::fmt::Debug;
use std::path::PathBuf;
use tokio::fs;
use tokio::io::{AsyncRead, BufReader};
#[derive(Clone)]
pub struct Storage {
db: DatabaseConnection,
model: StorageModel,
store: FileHashStore,
}
impl Storage {
#[tracing::instrument(level = "trace")]
fn new(db: DatabaseConnection, model: StorageModel) -> Self {
let path = PathBuf::from(&model.path);
Self {
store: FileHashStore::new(path),
db,
model,
}
}
/// Returns all available storages
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<Self>> {
let storages: Vec<storage::Model> = storage::Entity::find().all(&db).await?;
let storages = storages
.into_iter()
.map(|s| Self::new(db.clone(), s))
.collect();
Ok(storages)
}
/// Returns the storage by id
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
if let Some(model) = storage::Entity::find_by_id(id).one(&db).await? {
let storage = Self::new(db, model);
Ok(Some(storage))
} else {
Ok(None)
}
}
/// Returns the storage by name
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_name<S: AsRef<str> + Debug>(
db: DatabaseConnection,
name: S,
) -> RepoResult<Option<Self>> {
if let Some(model) = storage::Entity::find()
.filter(storage::Column::Name.eq(name.as_ref()))
.one(&db)
.await?
{
let storage = Self::new(db, model);
Ok(Some(storage))
} else {
Ok(None)
}
}
/// Returns the storage by path
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_path<S: ToString + Debug>(
db: DatabaseConnection,
path: S,
) -> RepoResult<Option<Self>> {
if let Some(model) = storage::Entity::find()
.filter(storage::Column::Path.eq(path.to_string()))
.one(&db)
.await?
{
let storage = Self::new(db, model);
Ok(Some(storage))
} else {
Ok(None)
}
}
/// Creates a new active storage and also creates the associated directory
/// if it doesn't exist yet.
#[tracing::instrument(level = "debug", skip(db))]
pub async fn create<S1: ToString + Debug, S2: ToString + Debug>(
db: DatabaseConnection,
name: S1,
path: S2,
) -> RepoResult<Self> {
let path = path.to_string();
let name = name.to_string();
let path_buf = PathBuf::from(&path);
if !path_buf.exists() {
fs::create_dir(path_buf).await?;
}
let storage = ActiveStorage {
id: NotSet,
name: Set(name),
path: Set(path),
..Default::default()
};
let model = storage.insert(&db).await?;
Ok(Self::new(db, model))
}
/// Returns the unique identifier of this storage
pub fn id(&self) -> i64 {
self.model.id
}
/// Returns the name of the storage
pub fn name(&self) -> &String {
&self.model.name
}
/// Returns the path of the storage
pub fn path(&self) -> &String {
&self.model.path
}
/// Sets a new name for the storage
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_name<S: ToString + Debug>(&self, name: S) -> RepoResult<()> {
let mut active_storage: ActiveStorage = self.get_active_model();
active_storage.name = Set(name.to_string());
active_storage.update(&self.db).await?;
Ok(())
}
/// Sets a new path for the storage. This will only update the database record
/// so if the physical part of the storage is already created it needs to be migrated first
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_path<S: ToString + Debug>(&mut self, path: S) -> RepoResult<()> {
let mut active_storage: ActiveStorage = self.get_active_model();
active_storage.path = Set(path.to_string());
let storage = active_storage.update(&self.db).await?;
self.model = storage;
Ok(())
}
/// Checks if the storage exists on the harddrive
pub fn exists(&self) -> bool {
let path = PathBuf::from(&self.path());
path.exists()
}
/// Adds a thumbnail
#[tracing::instrument(level = "debug", skip(self, reader))]
pub async fn store_entry<R: AsyncRead + Unpin>(&self, reader: R) -> RepoResult<Hash> {
let hash = self.store.add_file(reader, None).await?;
if let Some(hash) = Hash::by_value(self.db.clone(), &hash).await? {
Ok(hash)
} else {
Hash::add(self.db.clone(), hash).await
}
}
/// Returns the buf reader to the given hash
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_file_reader<S: ToString + Debug>(
&self,
hash: S,
) -> RepoResult<BufReader<tokio::fs::File>> {
let (_ext, reader) = self.store.get_file(hash.to_string()).await?;
Ok(reader)
}
/// Returns the size of the storage
#[inline]
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_size(&self) -> RepoResult<u64> {
self.store.get_size().await
}
/// Returns the active model with only the ID filled so saves always perform an update
fn get_active_model(&self) -> ActiveStorage {
ActiveStorage {
id: Set(self.model.id),
..Default::default()
}
}
}

@ -1,8 +1,8 @@
use std::fmt::Debug;
use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::hash;
use mediarepo_database::entities::hash_tag;
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::content_descriptor_tag;
use mediarepo_database::entities::namespace;
use mediarepo_database::entities::tag;
use sea_orm::prelude::*;
@ -118,15 +118,18 @@ impl Tag {
/// Returns all tags that are assigned to any of the passed content descriptors
#[tracing::instrument(level = "debug", skip_all)]
pub async fn for_hash_list(
db: DatabaseConnection,
hashes: Vec<String>,
) -> RepoResult<Vec<Self>> {
pub async fn for_cd_list(db: DatabaseConnection, cds: Vec<Vec<u8>>) -> RepoResult<Vec<Self>> {
let tags: Vec<Self> = tag::Entity::find()
.find_also_related(namespace::Entity)
.join(JoinType::LeftJoin, hash_tag::Relation::Tag.def().rev())
.join(JoinType::InnerJoin, hash_tag::Relation::Hash.def())
.filter(hash::Column::Value.is_in(hashes))
.join(
JoinType::LeftJoin,
content_descriptor_tag::Relation::Tag.def().rev(),
)
.join(
JoinType::InnerJoin,
content_descriptor_tag::Relation::ContentDescriptorId.def(),
)
.filter(content_descriptor::Column::Descriptor.is_in(cds))
.group_by(tag::Column::Id)
.all(&db)
.await?

@ -11,6 +11,7 @@ serde = "^1.0.130"
tracing = "^0.1.29"
compare = "^0.1.0"
port_check = "^0.1.5"
rayon = "1.5.1"
[dependencies.mediarepo-core]
path = "../mediarepo-core"

@ -1,8 +1,9 @@
use mediarepo_core::mediarepo_api::types::files::{
FileMetadataResponse, ThumbnailMetadataResponse,
FileBasicDataResponse, FileMetadataResponse, FileStatus, ThumbnailMetadataResponse,
};
use mediarepo_core::mediarepo_api::types::tags::{NamespaceResponse, TagResponse};
use mediarepo_model::file::File;
use mediarepo_model::file::{File, FileStatus as FileStatusModel};
use mediarepo_model::file_metadata::FileMetadata;
use mediarepo_model::namespace::Namespace;
use mediarepo_model::tag::Tag;
use mediarepo_model::thumbnail::Thumbnail;
@ -11,18 +12,36 @@ pub trait FromModel<M> {
fn from_model(model: M) -> Self;
}
impl FromModel<File> for FileMetadataResponse {
fn from_model(file: File) -> Self {
impl FromModel<FileMetadata> for FileMetadataResponse {
fn from_model(metadata: FileMetadata) -> Self {
Self {
file_id: metadata.file_id(),
name: metadata.name().to_owned(),
comment: metadata.comment().to_owned(),
creation_time: metadata.creation_time().to_owned(),
change_time: metadata.change_time().to_owned(),
import_time: metadata.import_time().to_owned(),
}
}
}
impl FromModel<File> for FileBasicDataResponse {
fn from_model(file: File) -> Self {
FileBasicDataResponse {
id: file.id(),
name: file.name().to_owned(),
comment: file.comment().to_owned(),
hash: file.hash().to_owned(),
file_type: file.file_type() as u32,
status: FileStatus::from_model(file.status()),
cd: file.encoded_cd(),
mime_type: file.mime_type().to_owned(),
creation_time: file.creation_time().to_owned(),
change_time: file.change_time().to_owned(),
import_time: file.import_time().to_owned(),
}
}
}
impl FromModel<FileStatusModel> for FileStatus {
fn from_model(status: FileStatusModel) -> Self {
match status {
FileStatusModel::Imported => FileStatus::Imported,
FileStatusModel::Archived => FileStatus::Archived,
FileStatusModel::Deleted => FileStatus::Deleted,
}
}
}
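Editor's sketch of how the handlers below consume these conversions; the file value is assumed to come from the repo layer.

use crate::from_model::FromModel;
use mediarepo_core::mediarepo_api::types::files::FileBasicDataResponse;
use mediarepo_model::file::File;

// Converts a database model into the IPC response sent to clients (sketch).
fn to_basic_response(file: File) -> FileBasicDataResponse {
    FileBasicDataResponse::from_model(file)
}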

@ -1,11 +1,12 @@
use mediarepo_core::bromine::prelude::*;
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::mediarepo_api::types::misc::InfoResponse;
use mediarepo_core::settings::Settings;
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey};
use mediarepo_core::settings::{PortSetting, Settings};
use mediarepo_core::tokio_graceful_shutdown::SubsystemHandle;
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey, SizeMetadataKey, SubsystemKey};
use mediarepo_model::repo::Repo;
use mediarepo_model::type_keys::RepoKey;
use std::net::{IpAddr, SocketAddr};
use std::net::SocketAddr;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::net::TcpListener;
@ -15,16 +16,25 @@ mod from_model;
mod namespaces;
mod utils;
#[tracing::instrument(skip(settings, repo))]
#[tracing::instrument(skip(subsystem, settings, repo))]
pub fn start_tcp_server(
ip: IpAddr,
port_range: (u16, u16),
subsystem: SubsystemHandle,
repo_path: PathBuf,
settings: Settings,
repo: Repo,
) -> RepoResult<(String, JoinHandle<()>)> {
let port = port_check::free_local_port_in_range(port_range.0, port_range.1)
.ok_or_else(|| RepoError::PortUnavailable)?;
let port = match &settings.server.tcp.port {
PortSetting::Fixed(p) => {
if port_check::is_local_port_free(*p) {
*p
} else {
return Err(RepoError::PortUnavailable);
}
}
PortSetting::Range((l, r)) => port_check::free_local_port_in_range(*l, *r)
.ok_or_else(|| RepoError::PortUnavailable)?,
};
let ip = settings.server.tcp.listen_address.to_owned();
let address = SocketAddr::new(ip, port);
let address_string = address.to_string();
@ -32,9 +42,11 @@ pub fn start_tcp_server(
.name("mediarepo_tcp::listen")
.spawn(async move {
get_builder::<TcpListener>(address)
.insert::<SubsystemKey>(subsystem)
.insert::<RepoKey>(Arc::new(repo))
.insert::<SettingsKey>(settings)
.insert::<RepoPathKey>(repo_path)
.insert::<SizeMetadataKey>(Default::default())
.build_server()
.await
.expect("Failed to start tcp server")
@ -44,8 +56,9 @@ pub fn start_tcp_server(
}
#[cfg(unix)]
#[tracing::instrument(skip(settings, repo))]
#[tracing::instrument(skip(subsystem, settings, repo))]
pub fn create_unix_socket(
subsystem: SubsystemHandle,
path: std::path::PathBuf,
repo_path: PathBuf,
settings: Settings,
@ -61,9 +74,11 @@ pub fn create_unix_socket(
.name("mediarepo_unix_socket::listen")
.spawn(async move {
get_builder::<UnixListener>(path)
.insert::<SubsystemKey>(subsystem)
.insert::<RepoKey>(Arc::new(repo))
.insert::<SettingsKey>(settings)
.insert::<RepoPathKey>(repo_path)
.insert::<SizeMetadataKey>(Default::default())
.build_server()
.await
.expect("Failed to create unix domain socket");
@ -73,7 +88,9 @@ pub fn create_unix_socket(
}
fn get_builder<L: AsyncStreamProtocolListener>(address: L::AddressType) -> IPCBuilder<L> {
namespaces::build_namespaces(IPCBuilder::new().address(address)).on("info", callback!(info))
namespaces::build_namespaces(IPCBuilder::new().address(address))
.on("info", callback!(info))
.on("shutdown", callback!(shutdown))
}
#[tracing::instrument(skip_all)]
@ -86,3 +103,17 @@ async fn info(ctx: &Context, _: Event) -> IPCResult<()> {
Ok(())
}
#[tracing::instrument(skip_all)]
async fn shutdown(ctx: &Context, _: Event) -> IPCResult<()> {
ctx.clone().stop().await?;
{
let data = ctx.data.read().await;
let subsystem = data.get::<SubsystemKey>().unwrap();
subsystem.request_shutdown();
subsystem.on_shutdown_requested().await;
}
ctx.emit("shutdown", ()).await?;
Ok(())
}

@ -1,36 +1,25 @@
mod searching;
mod sorting;
use crate::from_model::FromModel;
use crate::utils::{file_by_identifier, get_repo_from_context, hash_by_identifier};
use chrono::NaiveDateTime;
use compare::Compare;
use crate::namespaces::files::searching::find_files_for_filters;
use crate::namespaces::files::sorting::sort_files_by_properties;
use crate::utils::{cd_by_identifier, file_by_identifier, get_repo_from_context};
use mediarepo_core::bromine::prelude::*;
use mediarepo_core::fs::thumbnail_store::Dimensions;
use mediarepo_core::itertools::Itertools;
use mediarepo_core::mediarepo_api::types::files::{
AddFileRequestHeader, FileMetadataResponse, FilterExpression, FindFilesRequest,
GetFileThumbnailOfSizeRequest, GetFileThumbnailsRequest, ReadFileRequest, SortDirection,
SortKey, ThumbnailMetadataResponse, UpdateFileNameRequest,
AddFileRequestHeader, FileBasicDataResponse, FileMetadataResponse,
GetFileThumbnailOfSizeRequest, GetFileThumbnailsRequest, ReadFileRequest,
ThumbnailMetadataResponse, UpdateFileNameRequest, UpdateFileStatusRequest,
};
use mediarepo_core::mediarepo_api::types::filtering::FindFilesRequest;
use mediarepo_core::mediarepo_api::types::identifier::FileIdentifier;
use mediarepo_core::thumbnailer::ThumbnailSize;
use mediarepo_core::utils::parse_namespace_and_tag;
use mediarepo_database::queries::tags::{
get_hashes_with_namespaced_tags, get_hashes_with_tag_count,
};
use std::cmp::Ordering;
use std::collections::HashMap;
use tokio::io::AsyncReadExt;
pub struct FilesNamespace;
pub struct FileSortContext {
name: Option<String>,
size: u64,
mime_type: Option<String>,
namespaces: HashMap<String, Vec<String>>,
tag_count: u32,
import_time: NaiveDateTime,
create_time: NaiveDateTime,
change_time: NaiveDateTime,
}
impl NamespaceProvider for FilesNamespace {
fn name() -> &'static str {
@ -41,6 +30,7 @@ impl NamespaceProvider for FilesNamespace {
events!(handler,
"all_files" => Self::all_files,
"get_file" => Self::get_file,
"get_file_metadata" => Self::get_file_metadata,
"get_files" => Self::get_files,
"find_files" => Self::find_files,
"add_file" => Self::add_file,
@ -48,7 +38,9 @@ impl NamespaceProvider for FilesNamespace {
"get_thumbnails" => Self::thumbnails,
"get_thumbnail_of_size" => Self::get_thumbnail_of_size,
"update_file_name" => Self::update_file_name,
"delete_thumbnails" => Self::delete_thumbnails
"delete_thumbnails" => Self::delete_thumbnails,
"update_file_status" => Self::update_status,
"delete_file" => Self::delete_file
);
}
}
@ -60,9 +52,9 @@ impl FilesNamespace {
let repo = get_repo_from_context(ctx).await;
let files = repo.files().await?;
let responses: Vec<FileMetadataResponse> = files
let responses: Vec<FileBasicDataResponse> = files
.into_iter()
.map(FileMetadataResponse::from_model)
.map(FileBasicDataResponse::from_model)
.collect();
ctx.emit_to(Self::name(), "all_files", responses).await?;
@ -76,12 +68,29 @@ impl FilesNamespace {
let id = event.payload::<FileIdentifier>()?;
let repo = get_repo_from_context(ctx).await;
let file = file_by_identifier(id, &repo).await?;
let response = FileMetadataResponse::from_model(file);
let response = FileBasicDataResponse::from_model(file);
ctx.emit_to(Self::name(), "get_file", response).await?;
Ok(())
}
/// Returns metadata for a given file
#[tracing::instrument(skip_all)]
async fn get_file_metadata(ctx: &Context, event: Event) -> IPCResult<()> {
let id = event.payload::<FileIdentifier>()?;
let repo = get_repo_from_context(ctx).await;
let file = file_by_identifier(id, &repo).await?;
let metadata = file.metadata().await?;
ctx.emit_to(
Self::name(),
"get_file_metadata",
FileMetadataResponse::from_model(metadata),
)
.await?;
Ok(())
}
/// Returns a list of files by identifier
#[tracing::instrument(skip_all)]
async fn get_files(ctx: &Context, event: Event) -> IPCResult<()> {
@ -93,7 +102,7 @@ impl FilesNamespace {
responses.push(
file_by_identifier(id, &repo)
.await
.map(FileMetadataResponse::from_model)?,
.map(FileBasicDataResponse::from_model)?,
);
}
ctx.emit_to(Self::name(), "get_files", responses).await?;
@ -107,57 +116,12 @@ impl FilesNamespace {
let req = event.payload::<FindFilesRequest>()?;
let repo = get_repo_from_context(ctx).await;
let tags = req
.filters
.into_iter()
.map(|e| match e {
FilterExpression::OrExpression(tags) => {
tags.into_iter().map(|t| (t.tag, t.negate)).collect_vec()
}
FilterExpression::Query(tag) => {
vec![(tag.tag, tag.negate)]
}
})
.collect();
let mut files = find_files_for_filters(&repo, req.filters).await?;
sort_files_by_properties(&repo, req.sort_expression, &mut files).await?;
let mut files = repo.find_files_by_tags(tags).await?;
let hash_ids: Vec<i64> = files.iter().map(|f| f.hash_id()).collect();
let mut hash_nsp: HashMap<i64, HashMap<String, Vec<String>>> =
get_hashes_with_namespaced_tags(repo.db(), hash_ids.clone()).await?;
let mut hash_tag_counts = get_hashes_with_tag_count(repo.db(), hash_ids).await?;
let mut contexts = HashMap::new();
for file in &files {
let context = FileSortContext {
name: file.name().to_owned(),
size: file.get_size().await?,
mime_type: file.mime_type().to_owned(),
namespaces: hash_nsp
.remove(&file.hash_id())
.unwrap_or(HashMap::with_capacity(0)),
tag_count: hash_tag_counts.remove(&file.hash_id()).unwrap_or(0),
import_time: file.import_time().to_owned(),
create_time: file.import_time().to_owned(),
change_time: file.change_time().to_owned(),
};
contexts.insert(file.id(), context);
}
let sort_expression = req.sort_expression;
tracing::debug!("sort_expression = {:?}", sort_expression);
files.sort_by(|a, b| {
compare_files(
contexts.get(&a.id()).unwrap(),
contexts.get(&b.id()).unwrap(),
&sort_expression,
)
});
let responses: Vec<FileMetadataResponse> = files
let responses: Vec<FileBasicDataResponse> = files
.into_iter()
.map(FileMetadataResponse::from_model)
.map(FileBasicDataResponse::from_model)
.collect();
ctx.emit_to(Self::name(), "find_files", responses).await?;
Ok(())
@ -172,7 +136,7 @@ impl FilesNamespace {
let AddFileRequestHeader { metadata, tags } = request;
let repo = get_repo_from_context(ctx).await;
let mut file = repo
let file = repo
.add_file(
metadata.mime_type,
bytes.into_inner(),
@ -180,7 +144,7 @@ impl FilesNamespace {
metadata.change_time,
)
.await?;
file.set_name(metadata.name).await?;
file.metadata().await?.set_name(metadata.name).await?;
let tags = repo
.add_all_tags(tags.into_iter().map(parse_namespace_and_tag).collect())
@ -191,7 +155,23 @@ impl FilesNamespace {
ctx.emit_to(
Self::name(),
"add_file",
FileMetadataResponse::from_model(file),
FileBasicDataResponse::from_model(file),
)
.await?;
Ok(())
}
#[tracing::instrument(skip_all)]
async fn update_status(ctx: &Context, event: Event) -> IPCResult<()> {
let request = event.payload::<UpdateFileStatusRequest>()?;
let repo = get_repo_from_context(ctx).await;
let mut file = file_by_identifier(request.file_id, &repo).await?;
file.set_status(request.status.into()).await?;
ctx.emit_to(
Self::name(),
"update_file_status",
FileBasicDataResponse::from_model(file),
)
.await?;
@ -202,26 +182,36 @@ impl FilesNamespace {
#[tracing::instrument(skip_all)]
async fn read_file(ctx: &Context, event: Event) -> IPCResult<()> {
let request = event.payload::<ReadFileRequest>()?;
let repo = get_repo_from_context(ctx).await;
let file = file_by_identifier(request.id, &repo).await?;
let mut reader = file.get_reader().await?;
let mut buf = Vec::new();
reader.read_to_end(&mut buf).await?;
let bytes = repo.get_file_bytes(&file).await?;
ctx.emit_to(Self::name(), "read_file", BytePayload::new(buf))
ctx.emit_to(Self::name(), "read_file", BytePayload::new(bytes))
.await?;
Ok(())
}
/// Deletes a file
#[tracing::instrument(skip_all)]
async fn delete_file(ctx: &Context, event: Event) -> IPCResult<()> {
let id = event.payload::<FileIdentifier>()?;
let repo = get_repo_from_context(ctx).await;
let file = file_by_identifier(id, &repo).await?;
repo.delete_file(file).await?;
ctx.emit_to(Self::name(), "delete_file", ()).await?;
Ok(())
}
/// Returns a list of available thumbnails of a file
#[tracing::instrument(skip_all)]
async fn thumbnails(ctx: &Context, event: Event) -> IPCResult<()> {
let request = event.payload::<GetFileThumbnailsRequest>()?;
let repo = get_repo_from_context(ctx).await;
let file_hash = hash_by_identifier(request.id.clone(), &repo).await?;
let mut thumbnails = repo.get_file_thumbnails(file_hash).await?;
let file_cd = cd_by_identifier(request.id.clone(), &repo).await?;
let mut thumbnails = repo.get_file_thumbnails(&file_cd).await?;
if thumbnails.is_empty() {
tracing::debug!("No thumbnails for file found. Creating thumbnails...");
@ -245,8 +235,8 @@ impl FilesNamespace {
async fn get_thumbnail_of_size(ctx: &Context, event: Event) -> IPCResult<()> {
let request = event.payload::<GetFileThumbnailOfSizeRequest>()?;
let repo = get_repo_from_context(ctx).await;
let file_hash = hash_by_identifier(request.id.clone(), &repo).await?;
let thumbnails = repo.get_file_thumbnails(file_hash).await?;
let file_cd = cd_by_identifier(request.id.clone(), &repo).await?;
let thumbnails = repo.get_file_thumbnails(&file_cd).await?;
let min_size = request.min_size;
let max_size = request.max_size;
@ -289,12 +279,14 @@ impl FilesNamespace {
async fn update_file_name(ctx: &Context, event: Event) -> IPCResult<()> {
let repo = get_repo_from_context(ctx).await;
let request = event.payload::<UpdateFileNameRequest>()?;
let mut file = file_by_identifier(request.file_id, &repo).await?;
file.set_name(request.name).await?;
let file = file_by_identifier(request.file_id, &repo).await?;
let mut metadata = file.metadata().await?;
metadata.set_name(request.name).await?;
ctx.emit_to(
Self::name(),
"update_file_name",
FileMetadataResponse::from_model(file),
FileMetadataResponse::from_model(metadata),
)
.await?;
@ -307,7 +299,7 @@ impl FilesNamespace {
let repo = get_repo_from_context(ctx).await;
let id = event.payload::<FileIdentifier>()?;
let file = file_by_identifier(id, &repo).await?;
let thumbnails = repo.get_file_thumbnails(file.hash().to_owned()).await?;
let thumbnails = repo.get_file_thumbnails(file.cd()).await?;
for thumb in thumbnails {
thumb.delete().await?;
@ -316,113 +308,3 @@ impl FilesNamespace {
Ok(())
}
}
#[tracing::instrument(level = "trace", skip_all)]
fn compare_files(
ctx_a: &FileSortContext,
ctx_b: &FileSortContext,
expression: &Vec<SortKey>,
) -> Ordering {
let cmp_date = compare::natural();
let cmp_u64 = compare::natural();
let cmp_u32 = compare::natural();
for sort_key in expression {
let ordering = match sort_key {
SortKey::Namespace(namespace) => {
let list_a = ctx_a.namespaces.get(&namespace.name);
let list_b = ctx_b.namespaces.get(&namespace.name);
let cmp_result = if let (Some(list_a), Some(list_b)) = (list_a, list_b) {
compare_tag_lists(list_a, list_b)
} else if list_a.is_some() {
Ordering::Greater
} else if list_b.is_some() {
Ordering::Less
} else {
Ordering::Equal
};
adjust_for_dir(cmp_result, &namespace.direction)
}
SortKey::FileName(direction) => {
adjust_for_dir(compare_opts(&ctx_a.name, &ctx_b.name), direction)
}
SortKey::FileSize(direction) => {
adjust_for_dir(cmp_u64.compare(&ctx_a.size, &ctx_b.size), direction)
}
SortKey::FileImportedTime(direction) => adjust_for_dir(
cmp_date.compare(&ctx_a.import_time, &ctx_b.import_time),
direction,
),
SortKey::FileCreatedTime(direction) => adjust_for_dir(
cmp_date.compare(&ctx_a.create_time, &ctx_b.create_time),
direction,
),
SortKey::FileChangeTime(direction) => adjust_for_dir(
cmp_date.compare(&ctx_a.change_time, &ctx_b.change_time),
direction,
),
SortKey::FileType(direction) => {
adjust_for_dir(compare_opts(&ctx_a.mime_type, &ctx_b.mime_type), direction)
}
SortKey::NumTags(direction) => adjust_for_dir(
cmp_u32.compare(&ctx_a.tag_count, &ctx_b.tag_count),
direction,
),
};
if !ordering.is_eq() {
return ordering;
}
}
Ordering::Equal
}
fn compare_opts<T: Ord + Sized>(opt_a: &Option<T>, opt_b: &Option<T>) -> Ordering {
let cmp = compare::natural();
if let (Some(a), Some(b)) = (opt_a, opt_b) {
cmp.compare(a, b)
} else if opt_a.is_some() {
Ordering::Greater
} else if opt_b.is_some() {
Ordering::Less
} else {
Ordering::Equal
}
}
fn compare_f32(a: f32, b: f32) -> Ordering {
if a > b {
Ordering::Greater
} else if b > a {
Ordering::Less
} else {
Ordering::Equal
}
}
fn adjust_for_dir(ordering: Ordering, direction: &SortDirection) -> Ordering {
if *direction == SortDirection::Descending {
ordering.reverse()
} else {
ordering
}
}
fn compare_tag_lists(list_a: &Vec<String>, list_b: &Vec<String>) -> Ordering {
let first_diff = list_a
.into_iter()
.zip(list_b.into_iter())
.find(|(a, b)| *a != *b);
if let Some(diff) = first_diff {
if let (Some(num_a), Some(num_b)) = (diff.0.parse::<f32>().ok(), diff.1.parse::<f32>().ok())
{
compare_f32(num_a, num_b)
} else {
let cmp = compare::natural();
cmp.compare(diff.0, diff.1)
}
} else {
Ordering::Equal
}
}

@ -0,0 +1,185 @@
use mediarepo_core::content_descriptor::decode_content_descriptor;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::files::FileStatus as ApiFileStatus;
use mediarepo_core::mediarepo_api::types::filtering::{
FilterExpression, FilterQuery, PropertyQuery, TagQuery, ValueComparator,
};
use mediarepo_model::file::filter::NegatableComparator::{Is, IsNot};
use mediarepo_model::file::filter::{FilterFileProperty, FilterProperty, OrderingComparator};
use mediarepo_model::file::{File, FileStatus};
use mediarepo_model::repo::Repo;
use std::collections::HashMap;
#[tracing::instrument(level = "debug", skip(repo))]
pub async fn find_files_for_filters(
repo: &Repo,
expressions: Vec<FilterExpression>,
) -> RepoResult<Vec<File>> {
let tag_names = get_tag_names_from_expressions(&expressions);
let tag_id_map = repo.tag_names_to_ids(tag_names).await?;
let filters = build_filters_from_expressions(expressions, &tag_id_map);
repo.find_files_by_filters(filters).await
}
#[tracing::instrument(level = "debug")]
fn get_tag_names_from_expressions(expressions: &Vec<FilterExpression>) -> Vec<String> {
expressions
.iter()
.flat_map(|f| match f {
FilterExpression::OrExpression(queries) => queries
.iter()
.filter_map(|q| match q {
FilterQuery::Tag(tag) => Some(tag.tag.to_owned()),
_ => None,
})
.collect::<Vec<String>>(),
FilterExpression::Query(q) => match q {
FilterQuery::Tag(tag) => {
vec![tag.tag.to_owned()]
}
FilterQuery::Property(_) => {
vec![]
}
},
})
.collect::<Vec<String>>()
}
#[tracing::instrument(level = "debug")]
fn build_filters_from_expressions(
expressions: Vec<FilterExpression>,
tag_id_map: &HashMap<String, i64>,
) -> Vec<Vec<FilterProperty>> {
expressions
.into_iter()
.filter_map(|e| {
let filters = match e {
FilterExpression::OrExpression(queries) => queries
.into_iter()
.filter_map(|q| map_query_to_filter(q, tag_id_map))
.collect(),
FilterExpression::Query(q) => {
if let Some(filter) = map_query_to_filter(q, tag_id_map) {
vec![filter]
} else {
vec![]
}
}
};
if filters.len() > 0 {
Some(filters)
} else {
None
}
})
.collect()
}
fn map_query_to_filter(
query: FilterQuery,
tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
match query {
FilterQuery::Tag(tag_query) => map_tag_query_to_filter(tag_query, tag_id_map),
FilterQuery::Property(property) => map_property_query_to_filter(property),
}
}
fn map_tag_query_to_filter(
query: TagQuery,
tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
if query.tag.ends_with("*") {
map_wildcard_tag_to_filter(query, tag_id_map)
} else {
map_tag_to_filter(query, tag_id_map)
}
}
fn map_wildcard_tag_to_filter(
query: TagQuery,
tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
let filter_tag = query.tag.trim_end_matches("*");
let relevant_ids = tag_id_map
.iter()
.filter_map(|(name, id)| {
if name.starts_with(filter_tag) {
Some(*id)
} else {
None
}
})
.collect::<Vec<i64>>();
if relevant_ids.len() > 0 {
let comparator = if query.negate {
IsNot(relevant_ids)
} else {
Is(relevant_ids)
};
Some(FilterProperty::TagWildcardIds(comparator))
} else {
None
}
}
fn map_tag_to_filter(query: TagQuery, tag_id_map: &HashMap<String, i64>) -> Option<FilterProperty> {
tag_id_map.get(&query.tag).map(|id| {
let comparator = if query.negate { IsNot(*id) } else { Is(*id) };
FilterProperty::TagId(comparator)
})
}
fn map_property_query_to_filter(query: PropertyQuery) -> Option<FilterProperty> {
match query {
PropertyQuery::Status(s) => Some(FilterProperty::FileProperty(FilterFileProperty::Status(
Is(file_status_to_number(s)),
))),
PropertyQuery::FileSize(s) => Some(FilterProperty::FileProperty(
FilterFileProperty::FileSize(val_comparator_to_order(s, |v| v as i64)),
)),
PropertyQuery::ImportedTime(t) => Some(FilterProperty::FileProperty(
FilterFileProperty::ImportedTime(val_comparator_to_order(t, |t| t)),
)),
PropertyQuery::ChangedTime(t) => Some(FilterProperty::FileProperty(
FilterFileProperty::ChangedTime(val_comparator_to_order(t, |t| t)),
)),
PropertyQuery::CreatedTime(t) => Some(FilterProperty::FileProperty(
FilterFileProperty::CreatedTime(val_comparator_to_order(t, |t| t)),
)),
PropertyQuery::TagCount(c) => {
Some(FilterProperty::TagCount(val_comparator_to_order(c, |v| {
v as i64
})))
}
PropertyQuery::Cd(cd) => decode_content_descriptor(cd)
.ok()
.map(|cd| FilterProperty::ContentDescriptor(Is(cd))),
PropertyQuery::Id(id) => Some(FilterProperty::FileProperty(FilterFileProperty::Id(Is(id)))),
}
}
fn file_status_to_number(status: ApiFileStatus) -> i64 {
match status {
ApiFileStatus::Imported => FileStatus::Imported as i64,
ApiFileStatus::Archived => FileStatus::Archived as i64,
ApiFileStatus::Deleted => FileStatus::Deleted as i64,
}
}
#[inline]
fn val_comparator_to_order<T1, T2, F: Fn(T1) -> T2>(
comp: ValueComparator<T1>,
conv_fn: F,
) -> OrderingComparator<T2> {
match comp {
ValueComparator::Less(v) => OrderingComparator::Less(conv_fn(v)),
ValueComparator::Equal(v) => OrderingComparator::Equal(conv_fn(v)),
ValueComparator::Greater(v) => OrderingComparator::Greater(conv_fn(v)),
ValueComparator::Between((v1, v2)) => {
OrderingComparator::Between((conv_fn(v1), conv_fn(v2)))
}
}
}
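Aside for readers skimming the hunk above: build_filters_from_expressions turns the expression list into an AND of OR-groups (a Vec<Vec<FilterProperty>>), and the wildcard branch reduces to a prefix lookup over the tag-name-to-id map. A minimal standalone sketch of that lookup, using only std types (function and variable names here are illustrative, not part of the mediarepo API):

use std::collections::HashMap;

// Collect the ids of every tag whose name starts with the wildcard prefix,
// mirroring map_wildcard_tag_to_filter above.
fn wildcard_tag_ids(query: &str, tag_id_map: &HashMap<String, i64>) -> Vec<i64> {
    let prefix = query.trim_end_matches('*');
    let mut ids: Vec<i64> = tag_id_map
        .iter()
        .filter(|(name, _)| name.starts_with(prefix))
        .map(|(_, id)| *id)
        .collect();
    ids.sort_unstable();
    ids
}

fn main() {
    let tags: HashMap<String, i64> = [
        ("character:alice".to_string(), 1),
        ("character:bob".to_string(), 2),
        ("creator:carol".to_string(), 3),
    ]
    .into_iter()
    .collect();
    // "character:*" matches both character tags but not the creator tag
    assert_eq!(wildcard_tag_ids("character:*", &tags), vec![1, 2]);
}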

@ -0,0 +1,193 @@
use chrono::NaiveDateTime;
use compare::Compare;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::filtering::{SortDirection, SortKey};
use mediarepo_database::queries::tags::{
get_cids_with_namespaced_tags, get_content_descriptors_with_tag_count,
};
use mediarepo_model::file::File;
use mediarepo_model::file_metadata::FileMetadata;
use mediarepo_model::repo::Repo;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::cmp::Ordering;
use std::collections::HashMap;
use std::iter::FromIterator;
pub struct FileSortContext {
name: Option<String>,
size: u64,
mime_type: String,
namespaces: HashMap<String, Vec<String>>,
tag_count: u32,
import_time: NaiveDateTime,
create_time: NaiveDateTime,
change_time: NaiveDateTime,
}
#[tracing::instrument(level = "debug", skip(repo, files))]
pub async fn sort_files_by_properties(
repo: &Repo,
sort_expression: Vec<SortKey>,
files: &mut Vec<File>,
) -> RepoResult<()> {
let contexts = build_sort_context(repo, files).await?;
files.sort_by(|a, b| {
compare_files(
contexts.get(&a.id()).unwrap(),
contexts.get(&b.id()).unwrap(),
&sort_expression,
)
});
Ok(())
}
async fn build_sort_context(
repo: &Repo,
files: &Vec<File>,
) -> RepoResult<HashMap<i64, FileSortContext>> {
let hash_ids: Vec<i64> = files.par_iter().map(|f| f.cd_id()).collect();
let file_ids: Vec<i64> = files.par_iter().map(|f| f.id()).collect();
let mut cid_nsp: HashMap<i64, HashMap<String, Vec<String>>> =
get_cids_with_namespaced_tags(repo.db(), hash_ids.clone()).await?;
let mut cid_tag_counts = get_content_descriptors_with_tag_count(repo.db(), hash_ids).await?;
let files_metadata = repo.get_file_metadata_for_ids(file_ids).await?;
let mut file_metadata_map: HashMap<i64, FileMetadata> =
HashMap::from_iter(files_metadata.into_iter().map(|m| (m.file_id(), m)));
let mut contexts = HashMap::new();
for file in files {
if let Some(metadata) = file_metadata_map.remove(&file.id()) {
let context = FileSortContext {
name: metadata.name().to_owned(),
size: metadata.size() as u64,
mime_type: file.mime_type().to_owned(),
namespaces: cid_nsp
.remove(&file.cd_id())
.unwrap_or(HashMap::with_capacity(0)),
tag_count: cid_tag_counts.remove(&file.cd_id()).unwrap_or(0),
import_time: metadata.import_time().to_owned(),
create_time: metadata.import_time().to_owned(),
change_time: metadata.change_time().to_owned(),
};
contexts.insert(file.id(), context);
}
}
Ok(contexts)
}
#[tracing::instrument(level = "trace", skip_all)]
fn compare_files(
ctx_a: &FileSortContext,
ctx_b: &FileSortContext,
expression: &Vec<SortKey>,
) -> Ordering {
let cmp_date = compare::natural();
let cmp_u64 = compare::natural();
let cmp_u32 = compare::natural();
for sort_key in expression {
let ordering = match sort_key {
SortKey::Namespace(namespace) => {
let list_a = ctx_a.namespaces.get(&namespace.name);
let list_b = ctx_b.namespaces.get(&namespace.name);
let cmp_result = if let (Some(list_a), Some(list_b)) = (list_a, list_b) {
compare_tag_lists(list_a, list_b)
} else if list_a.is_some() {
Ordering::Greater
} else if list_b.is_some() {
Ordering::Less
} else {
Ordering::Equal
};
adjust_for_dir(cmp_result, &namespace.direction)
}
SortKey::FileName(direction) => {
adjust_for_dir(compare_opts(&ctx_a.name, &ctx_b.name), direction)
}
SortKey::FileSize(direction) => {
adjust_for_dir(cmp_u64.compare(&ctx_a.size, &ctx_b.size), direction)
}
SortKey::FileImportedTime(direction) => adjust_for_dir(
cmp_date.compare(&ctx_a.import_time, &ctx_b.import_time),
direction,
),
SortKey::FileCreatedTime(direction) => adjust_for_dir(
cmp_date.compare(&ctx_a.create_time, &ctx_b.create_time),
direction,
),
SortKey::FileChangeTime(direction) => adjust_for_dir(
cmp_date.compare(&ctx_a.change_time, &ctx_b.change_time),
direction,
),
SortKey::FileType(direction) => {
adjust_for_dir(ctx_a.mime_type.cmp(&ctx_b.mime_type), direction)
}
SortKey::NumTags(direction) => adjust_for_dir(
cmp_u32.compare(&ctx_a.tag_count, &ctx_b.tag_count),
direction,
),
};
if !ordering.is_eq() {
return ordering;
}
}
Ordering::Equal
}
fn compare_opts<T: Ord + Sized>(opt_a: &Option<T>, opt_b: &Option<T>) -> Ordering {
let cmp = compare::natural();
if let (Some(a), Some(b)) = (opt_a, opt_b) {
cmp.compare(a, b)
} else if opt_a.is_some() {
Ordering::Greater
} else if opt_b.is_some() {
Ordering::Less
} else {
Ordering::Equal
}
}
fn compare_f32(a: f32, b: f32) -> Ordering {
if a > b {
Ordering::Greater
} else if b > a {
Ordering::Less
} else {
Ordering::Equal
}
}
fn adjust_for_dir(ordering: Ordering, direction: &SortDirection) -> Ordering {
if *direction == SortDirection::Descending {
ordering.reverse()
} else {
ordering
}
}
fn compare_tag_lists(list_a: &Vec<String>, list_b: &Vec<String>) -> Ordering {
let first_diff = list_a
.into_iter()
.zip(list_b.into_iter())
.find(|(a, b)| *a != *b);
if let Some(diff) = first_diff {
if let (Some(num_a), Some(num_b)) = (diff.0.parse::<f32>().ok(), diff.1.parse::<f32>().ok())
{
compare_f32(num_a, num_b)
} else {
let cmp = compare::natural();
cmp.compare(diff.0, diff.1)
}
} else {
Ordering::Equal
}
}
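A quick illustration of compare_tag_lists above, using only std (the compare crate's natural comparator is replaced by plain Ord; this is a sketch, not the repo's code): the first differing pair decides, numerically when both values parse as f32, lexicographically otherwise.

use std::cmp::Ordering;

fn compare_tag_values(list_a: &[String], list_b: &[String]) -> Ordering {
    match list_a.iter().zip(list_b.iter()).find(|(a, b)| a != b) {
        Some((a, b)) => match (a.parse::<f32>(), b.parse::<f32>()) {
            (Ok(na), Ok(nb)) => na.partial_cmp(&nb).unwrap_or(Ordering::Equal),
            _ => a.cmp(b),
        },
        None => Ordering::Equal,
    }
}

fn main() {
    let a = vec!["2".to_string()];
    let b = vec!["10".to_string()];
    // numeric comparison kicks in: "2" sorts before "10"
    assert_eq!(compare_tag_values(&a, &b), Ordering::Less);
    let x = vec!["alpha".to_string()];
    let y = vec!["beta".to_string()];
    // non-numeric values fall back to string ordering
    assert_eq!(compare_tag_values(&x, &y), Ordering::Less);
}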

@ -0,0 +1,55 @@
use crate::utils::{calculate_size, get_repo_from_context};
use mediarepo_core::bromine::prelude::*;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::jobs::{JobType, RunJobRequest};
use mediarepo_core::mediarepo_api::types::repo::SizeType;
use mediarepo_core::type_keys::SizeMetadataKey;
pub struct JobsNamespace;
impl NamespaceProvider for JobsNamespace {
fn name() -> &'static str {
"jobs"
}
fn register(handler: &mut EventHandler) {
events!(handler,
"run_job" => Self::run_job
)
}
}
impl JobsNamespace {
#[tracing::instrument(skip_all)]
pub async fn run_job(ctx: &Context, event: Event) -> IPCResult<()> {
let run_request = event.payload::<RunJobRequest>()?;
let repo = get_repo_from_context(ctx).await;
match run_request.job_type {
JobType::MigrateContentDescriptors => repo.migrate().await?,
JobType::CalculateSizes => calculate_all_sizes(ctx).await?,
JobType::CheckIntegrity => {}
}
ctx.emit_to(Self::name(), "run_job", ()).await?;
Ok(())
}
}
async fn calculate_all_sizes(ctx: &Context) -> RepoResult<()> {
let size_types = vec![
SizeType::Total,
SizeType::FileFolder,
SizeType::ThumbFolder,
SizeType::DatabaseFile,
];
for size_type in size_types {
let size = calculate_size(&size_type, ctx).await?;
let mut data = ctx.data.write().await;
let size_map = data.get_mut::<SizeMetadataKey>().unwrap();
size_map.insert(size_type, size);
}
Ok(())
}

@ -2,6 +2,7 @@ use mediarepo_core::bromine::prelude::AsyncStreamProtocolListener;
use mediarepo_core::bromine::{namespace, namespace::Namespace, IPCBuilder};
pub mod files;
pub mod jobs;
pub mod repo;
pub mod tags;
@ -10,4 +11,5 @@ pub fn build_namespaces<L: AsyncStreamProtocolListener>(builder: IPCBuilder<L>)
.add_namespace(namespace!(files::FilesNamespace))
.add_namespace(namespace!(tags::TagsNamespace))
.add_namespace(namespace!(repo::RepoNamespace))
.add_namespace(namespace!(jobs::JobsNamespace))
}

@ -6,10 +6,9 @@ use mediarepo_core::bromine::prelude::*;
use mediarepo_core::mediarepo_api::types::repo::{
FrontendState, RepositoryMetadata, SizeMetadata, SizeType,
};
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey};
use mediarepo_core::utils::get_folder_size;
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey, SizeMetadataKey};
use crate::utils::get_repo_from_context;
use crate::utils::{calculate_size, get_repo_from_context};
pub struct RepoNamespace;
@ -40,7 +39,7 @@ impl RepoNamespace {
tag_count: counts.tag_count as u64,
namespace_count: counts.namespace_count as u64,
mapping_count: counts.mapping_count as u64,
hash_count: counts.hash_count as u64,
hash_count: counts.cd_count as u64,
};
tracing::debug!("metadata = {:?}", metadata);
@ -53,29 +52,21 @@ impl RepoNamespace {
#[tracing::instrument(skip_all)]
async fn get_size_metadata(ctx: &Context, event: Event) -> IPCResult<()> {
let size_type = event.payload::<SizeType>()?;
let repo = get_repo_from_context(ctx).await;
let (repo_path, settings) = {
let data = ctx.data.read().await;
(
data.get::<RepoPathKey>().unwrap().clone(),
data.get::<SettingsKey>().unwrap().clone(),
)
};
let size = match &size_type {
SizeType::Total => get_folder_size(repo_path).await?,
SizeType::FileFolder => repo.get_main_store_size().await?,
SizeType::ThumbFolder => repo.get_thumb_store_size().await?,
SizeType::DatabaseFile => {
let db_path = repo_path.join(settings.database_path);
let database_metadata = fs::metadata(db_path).await?;
database_metadata.len()
}
let data = ctx.data.read().await;
let size_cache = data.get::<SizeMetadataKey>().unwrap();
let size = if let Some(size) = size_cache.get(&size_type) {
*size
} else {
calculate_size(&size_type, ctx).await?
};
let response = SizeMetadata { size, size_type };
tracing::debug!("size response = {:?}", response);
ctx.emit_to(Self::name(), "size_metadata", response).await?;
ctx.emit_to(
Self::name(),
"size_metadata",
SizeMetadata { size, size_type },
)
.await?;
Ok(())
}
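The size lookup above is a read-or-compute cache keyed by SizeType: the CalculateSizes job fills the map stored behind SizeMetadataKey, and get_size_metadata falls back to calculate_size on a miss. A toy sketch of that pattern with a plain HashMap (the key and value types are stand-ins, not the repo's):

use std::collections::HashMap;

// Return the cached value when present, otherwise compute it on the spot.
fn cached_or_computed(
    cache: &HashMap<&'static str, u64>,
    key: &'static str,
    compute: impl Fn() -> u64,
) -> u64 {
    cache.get(key).copied().unwrap_or_else(compute)
}

fn main() {
    let mut cache = HashMap::new();
    cache.insert("Total", 42u64);
    assert_eq!(cached_or_computed(&cache, "Total", || 0), 42); // hit
    assert_eq!(cached_or_computed(&cache, "DatabaseFile", || 7), 7); // miss -> computed
}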
@ -118,9 +109,7 @@ async fn get_frontend_state_path(ctx: &Context) -> IPCResult<PathBuf> {
let data = ctx.data.read().await;
let settings = data.get::<SettingsKey>().unwrap();
let repo_path = data.get::<RepoPathKey>().unwrap();
let state_path = repo_path
.join(PathBuf::from(&settings.database_path).parent().unwrap())
.join("frontend-state.json");
let state_path = settings.paths.frontend_state_file_path(&repo_path);
Ok(state_path)
}

@ -1,10 +1,12 @@
use crate::from_model::FromModel;
use crate::utils::{file_by_identifier, get_repo_from_context};
use mediarepo_core::bromine::prelude::*;
use mediarepo_core::content_descriptor::decode_content_descriptor;
use mediarepo_core::mediarepo_api::types::files::{GetFileTagsRequest, GetFilesTagsRequest};
use mediarepo_core::mediarepo_api::types::tags::{
ChangeFileTagsRequest, NamespaceResponse, TagResponse,
};
use rayon::iter::{IntoParallelIterator, ParallelIterator};
pub struct TagsNamespace;
@ -78,7 +80,13 @@ impl TagsNamespace {
let repo = get_repo_from_context(ctx).await;
let request = event.payload::<GetFilesTagsRequest>()?;
let tag_responses: Vec<TagResponse> = repo
.find_tags_for_hashes(request.hashes)
.find_tags_for_file_identifiers(
request
.cds
.into_par_iter()
.filter_map(|c| decode_content_descriptor(c).ok())
.collect(),
)
.await?
.into_iter()
.map(TagResponse::from_model)

@ -1,10 +1,15 @@
use mediarepo_core::bromine::ipc::context::Context;
use mediarepo_core::content_descriptor::decode_content_descriptor;
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::mediarepo_api::types::identifier::FileIdentifier;
use mediarepo_core::mediarepo_api::types::repo::SizeType;
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey};
use mediarepo_core::utils::get_folder_size;
use mediarepo_model::file::File;
use mediarepo_model::repo::Repo;
use mediarepo_model::type_keys::RepoKey;
use std::sync::Arc;
use tokio::fs;
pub async fn get_repo_from_context(ctx: &Context) -> Arc<Repo> {
let data = ctx.data.read().await;
@ -15,20 +20,44 @@ pub async fn get_repo_from_context(ctx: &Context) -> Arc<Repo> {
pub async fn file_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoResult<File> {
let file = match identifier {
FileIdentifier::ID(id) => repo.file_by_id(id).await,
FileIdentifier::Hash(hash) => repo.file_by_hash(hash).await,
FileIdentifier::CD(cd) => repo.file_by_cd(&decode_content_descriptor(cd)?).await,
}?;
file.ok_or_else(|| RepoError::from("Thumbnail not found"))
file.ok_or_else(|| RepoError::from("File not found"))
}
pub async fn hash_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoResult<String> {
pub async fn cd_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoResult<Vec<u8>> {
match identifier {
FileIdentifier::ID(id) => {
let file = repo
.file_by_id(id)
.await?
.ok_or_else(|| "Thumbnail not found")?;
Ok(file.hash().to_owned())
Ok(file.cd().to_owned())
}
FileIdentifier::Hash(hash) => Ok(hash),
FileIdentifier::CD(cd) => decode_content_descriptor(cd),
}
}
pub async fn calculate_size(size_type: &SizeType, ctx: &Context) -> RepoResult<u64> {
let repo = get_repo_from_context(ctx).await;
let (repo_path, settings) = {
let data = ctx.data.read().await;
(
data.get::<RepoPathKey>().unwrap().clone(),
data.get::<SettingsKey>().unwrap().clone(),
)
};
let size = match &size_type {
SizeType::Total => get_folder_size(repo_path).await?,
SizeType::FileFolder => repo.get_main_store_size().await?,
SizeType::ThumbFolder => repo.get_thumb_store_size().await?,
SizeType::DatabaseFile => {
let db_path = settings.paths.db_file_path(&repo_path);
let database_metadata = fs::metadata(db_path).await?;
database_metadata.len()
}
};
Ok(size)
}

@ -1,5 +0,0 @@
pub static SETTINGS_PATH: &str = "./repo.toml";
pub static DEFAULT_STORAGE_NAME: &str = "Main";
pub static DEFAULT_STORAGE_PATH: &str = "files";
pub static THUMBNAIL_STORAGE_NAME: &str = "Thumbnails";
pub static THUMBNAIL_STORAGE_PATH: &str = "./thumbnails";

@ -3,11 +3,12 @@ use rolling_file::RollingConditionBasic;
use std::fs;
use std::path::PathBuf;
use mediarepo_core::settings::LoggingSettings;
use tracing::Level;
use tracing_appender::non_blocking::{NonBlocking, WorkerGuard};
use tracing_flame::FlameLayer;
use tracing_log::LogTracer;
use tracing_subscriber::filter::{self};
use tracing_subscriber::filter::{self, Targets};
use tracing_subscriber::fmt::format::FmtSpan;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
@ -19,7 +20,7 @@ use tracing_subscriber::{
#[allow(dyn_drop)]
pub type DropGuard = Box<dyn Drop>;
pub fn init_tracing(repo_path: &PathBuf) -> Vec<DropGuard> {
pub fn init_tracing(repo_path: &PathBuf, log_cfg: &LoggingSettings) -> Vec<DropGuard> {
LogTracer::init().expect("failed to subscribe to log entries");
let log_path = repo_path.join("logs");
let mut guards = Vec::new();
@ -54,12 +55,7 @@ pub fn init_tracing(repo_path: &PathBuf) -> Vec<DropGuard> {
.pretty()
.with_ansi(false)
.with_span_events(FmtSpan::NONE)
.with_filter(
filter::Targets::new()
.with_target("sqlx", Level::WARN)
.with_target("sea_orm", Level::TRACE)
.with_target("mediarepo_database", Level::TRACE),
);
.with_filter(get_sql_targets(log_cfg.trace_sql));
let (bromine_writer, guard) = get_bromine_log_writer(&log_path);
guards.push(Box::new(guard) as DropGuard);
@ -69,7 +65,7 @@ pub fn init_tracing(repo_path: &PathBuf) -> Vec<DropGuard> {
.pretty()
.with_ansi(false)
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
.with_filter(filter::Targets::new().with_target("bromine", Level::DEBUG));
.with_filter(get_bromine_targets(log_cfg.trace_api_calls));
let (app_log_writer, guard) = get_application_log_writer(&log_path);
guards.push(Box::new(guard) as DropGuard);
@ -79,16 +75,7 @@ pub fn init_tracing(repo_path: &PathBuf) -> Vec<DropGuard> {
.pretty()
.with_ansi(false)
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
.with_filter(
filter::Targets::new()
.with_target("bromine", Level::WARN)
.with_target("sqlx", Level::WARN)
.with_target("sea_orm", Level::INFO)
.with_target("tokio", Level::WARN)
.with_target("console_subscriber", Level::INFO)
.with_target("h2", Level::INFO)
.with_default(Level::DEBUG),
);
.with_filter(get_app_targets(log_cfg.level.clone().into()));
let registry = Registry::default()
.with(stdout_layer)
@ -144,6 +131,36 @@ fn get_application_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard)
)
}
fn get_app_targets(level: Option<Level>) -> Targets {
filter::Targets::new()
.with_target("bromine", Level::WARN)
.with_target("sqlx", Level::WARN)
.with_target("sea_orm", Level::WARN)
.with_target("tokio", Level::WARN)
.with_target("console_subscriber", Level::ERROR)
.with_target("h2", Level::WARN)
.with_default(level)
}
fn get_sql_targets(trace_sql: bool) -> Targets {
if trace_sql {
filter::Targets::new()
.with_target("sqlx", Level::WARN)
.with_target("sea_orm", Level::TRACE)
.with_target("mediarepo_database", Level::TRACE)
} else {
filter::Targets::new().with_default(None)
}
}
fn get_bromine_targets(trace_bromine: bool) -> Targets {
if trace_bromine {
filter::Targets::new().with_target("bromine", Level::DEBUG)
} else {
filter::Targets::new().with_default(None)
}
}
pub fn init_tracing_flame() -> DropGuard {
let fmt_layer = fmt::Layer::default();
let (flame_layer, guard) = FlameLayer::with_file("./tracing.folded").unwrap();
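The logging changes above make the per-target filters config-driven: a disabled option becomes a Targets filter with no default level, i.e. that layer logs nothing. A small usage sketch of how such a Targets filter behaves when attached to a layer, assuming tracing and tracing-subscriber 0.3 with default features (not the daemon's actual setup):

use tracing::Level;
use tracing_subscriber::filter::Targets;
use tracing_subscriber::prelude::*;
use tracing_subscriber::{fmt, Registry};

fn main() {
    // Everything is silenced except the "bromine" target at DEBUG and up,
    // roughly what get_bromine_targets(true) produces above.
    let bromine_only = Targets::new().with_target("bromine", Level::DEBUG);
    Registry::default()
        .with(fmt::layer().with_filter(bromine_only))
        .init();
    tracing::debug!(target: "bromine", "this line is logged");
    tracing::debug!("this line is filtered out");
}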

@ -6,22 +6,17 @@ use tokio::runtime;
use tokio::runtime::Runtime;
use mediarepo_core::error::RepoResult;
use mediarepo_core::futures;
use mediarepo_core::settings::Settings;
use mediarepo_core::utils::parse_tags_file;
use mediarepo_model::file::{File as RepoFile, File};
use mediarepo_core::fs::drop_file::DropFile;
use mediarepo_core::settings::{PathSettings, Settings};
use mediarepo_core::tokio_graceful_shutdown::{SubsystemHandle, Toplevel};
use mediarepo_model::repo::Repo;
use mediarepo_socket::start_tcp_server;
use num_integer::Integer;
use std::env;
use std::time::Duration;
use tokio::io::AsyncWriteExt;
use crate::constants::{
DEFAULT_STORAGE_NAME, DEFAULT_STORAGE_PATH, SETTINGS_PATH, THUMBNAIL_STORAGE_NAME,
THUMBNAIL_STORAGE_PATH,
};
use crate::utils::{create_paths_for_repo, get_repo, load_settings};
mod constants;
mod logging;
mod utils;
@ -50,17 +45,6 @@ enum SubCommand {
force: bool,
},
/// Imports file from a folder (by glob pattern) into the repository
Import {
/// The path to the folder where the files are located
#[structopt()]
folder_path: String,
/// If imported files should be deleted after import
#[structopt(long)]
delete: bool,
},
/// Starts the event server for the selected repository
Start,
}
@ -69,27 +53,34 @@ fn main() -> RepoResult<()> {
let mut opt: Opt = Opt::from_args();
opt.repo = env::current_dir().unwrap().join(opt.repo);
if opt.repo.exists() {
let settings = if opt.repo.exists() {
opt.repo = opt.repo.canonicalize().unwrap();
}
load_settings(&opt.repo)?
} else {
Settings::default()
};
clean_old_connection_files(&opt.repo)?;
let mut guards = Vec::new();
if opt.profile {
guards.push(logging::init_tracing_flame());
} else {
guards.append(&mut logging::init_tracing(&opt.repo));
guards.append(&mut logging::init_tracing(&opt.repo, &settings.logging));
}
match opt.cmd.clone() {
let result = match opt.cmd.clone() {
SubCommand::Init { force } => get_single_thread_runtime().block_on(init(opt, force)),
SubCommand::Start => get_multi_thread_runtime().block_on(start_server(opt)),
SubCommand::Import {
folder_path,
delete,
} => get_single_thread_runtime().block_on(import(opt, folder_path, delete)),
}?;
SubCommand::Start => get_multi_thread_runtime().block_on(start_server(opt, settings)),
};
Ok(())
match result {
Ok(_) => Ok(()),
Err(e) => {
tracing::error!("a critical error occurred when running the daemon: {}", e);
Err(e)
}
}
}
fn get_single_thread_runtime() -> Runtime {
@ -109,43 +100,112 @@ fn get_multi_thread_runtime() -> Runtime {
.unwrap()
}
async fn init_repo(opt: &Opt) -> RepoResult<(Settings, Repo)> {
let settings = load_settings(&opt.repo.join(SETTINGS_PATH)).await?;
let mut repo = get_repo(&opt.repo.join(&settings.database_path).to_str().unwrap()).await?;
async fn init_repo(opt: &Opt, paths: &PathSettings) -> RepoResult<Repo> {
let repo = get_repo(&opt.repo, paths).await?;
repo.set_main_storage(&settings.default_file_store).await?;
repo.set_thumbnail_storage(opt.repo.join(&settings.thumbnail_store))
.await?;
Ok((settings, repo))
Ok(repo)
}
/// Starts the server
async fn start_server(opt: Opt) -> RepoResult<()> {
let (settings, repo) = init_repo(&opt).await?;
let mut handles = Vec::new();
async fn start_server(opt: Opt, settings: Settings) -> RepoResult<()> {
let repo = init_repo(&opt, &settings.paths).await?;
let mut top_level = Toplevel::new();
#[cfg(unix)]
{
let socket_path = opt.repo.join("repo.sock");
let handle = mediarepo_socket::create_unix_socket(
socket_path,
opt.repo.clone(),
settings.clone(),
repo.clone(),
)?;
handles.push(handle);
if settings.server.unix_socket.enabled {
let settings = settings.clone();
let repo_path = opt.repo.clone();
let repo = repo.clone();
top_level = top_level.start("mediarepo-unix-socket", |subsystem| {
Box::pin(async move {
start_and_await_unix_socket(subsystem, repo_path, settings, repo).await?;
Ok(())
})
})
}
}
if settings.server.tcp.enabled {
top_level = top_level.start("mediarepo-tcp", move |subsystem| {
Box::pin(async move {
start_and_await_tcp_server(subsystem, opt.repo, settings, repo).await?;
Ok(())
})
})
}
if let Err(e) = top_level
.catch_signals()
.handle_shutdown_requests(Duration::from_millis(1000))
.await
{
tracing::error!("an error occurred when running the servers {}", e);
}
let (address, tcp_handle) = start_tcp_server(
settings.listen_address.clone(),
settings.port_range,
opt.repo.clone(),
tracing::warn!(
r"All servers quit.
Either they were requested to stop, a fatal error occurred, or no servers are enabled in the config.
Stopping daemon..."
);
Ok(())
}
async fn start_and_await_tcp_server(
subsystem: SubsystemHandle,
repo_path: PathBuf,
settings: Settings,
repo: Repo,
) -> RepoResult<()> {
let (address, handle) = start_tcp_server(subsystem.clone(), repo_path.clone(), settings, repo)?;
let (mut file, _guard) = DropFile::new(repo_path.join("repo.tcp")).await?;
file.write_all(&address.into_bytes()).await?;
tokio::select! {
_ = subsystem.on_shutdown_requested() => {
tracing::info!("shutdown requested")
},
result = handle => {
if let Err(e) = result {
tracing::error!("the tcp server shut down with an error {}", e);
subsystem.request_shutdown();
}
}
}
Ok(())
}
#[cfg(unix)]
async fn start_and_await_unix_socket(
subsystem: SubsystemHandle,
repo_path: PathBuf,
settings: Settings,
repo: Repo,
) -> RepoResult<()> {
let socket_path = repo_path.join("repo.sock");
let handle = mediarepo_socket::create_unix_socket(
subsystem.clone(),
socket_path,
repo_path.clone(),
settings,
repo,
)?;
handles.push(tcp_handle);
fs::write(opt.repo.join("repo.tcp"), &address.into_bytes()).await?;
futures::future::join_all(handles.into_iter()).await;
let _guard = DropFile::from_path(repo_path.join("repo.sock"));
tokio::select! {
_ = subsystem.on_shutdown_requested() => {
tracing::info!("shutdown requested")
},
result = handle => {
if let Err(e) = result {
tracing::error!("the unix socket shut down with an error {}", e);
subsystem.request_shutdown();
}
}
}
Ok(())
}
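Both server wrappers above follow the same shape: race the subsystem's shutdown signal against the server's join handle and request a shutdown if the server dies on its own. A reduced sketch of that pattern with plain tokio instead of tokio_graceful_shutdown (names are illustrative; assumes tokio with the macros and rt features):

use std::future::Future;
use tokio::sync::oneshot;

// Run the server future until it finishes or a shutdown is requested,
// whichever happens first.
async fn run_until_shutdown(
    shutdown: oneshot::Receiver<()>,
    server: impl Future<Output = Result<(), String>>,
) {
    tokio::select! {
        _ = shutdown => println!("shutdown requested"),
        result = server => {
            if let Err(e) = result {
                eprintln!("the server shut down with an error: {}", e);
            }
        }
    }
}

#[tokio::main]
async fn main() {
    let (_shutdown_tx, shutdown_rx) = oneshot::channel::<()>();
    // the server fails immediately, so the error branch runs
    let server = async { Err::<(), String>("bind failed".into()) };
    run_until_shutdown(shutdown_rx, server).await;
}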
@ -153,148 +213,40 @@ async fn start_server(opt: Opt) -> RepoResult<()> {
/// Initializes an empty repository
async fn init(opt: Opt, force: bool) -> RepoResult<()> {
log::info!("Initializing repository at {:?}", opt.repo);
if force {
log::debug!("Removing old repository");
fs::remove_dir_all(&opt.repo).await?;
}
let settings = Settings::default();
log::debug!("Creating paths");
create_paths_for_repo(
&opt.repo,
&settings,
DEFAULT_STORAGE_PATH,
THUMBNAIL_STORAGE_PATH,
)
.await?;
let db_path = opt.repo.join(&settings.database_path);
if db_path.exists() {
create_paths_for_repo(&opt.repo, &settings.paths).await?;
if settings.paths.db_file_path(&opt.repo).exists() {
panic!("Database already exists in location. Use --force with init to delete everything and start a new repository");
}
log::debug!("Creating repo");
let repo = get_repo(&db_path.to_str().unwrap()).await?;
let storage_path = opt.repo.join(DEFAULT_STORAGE_PATH).canonicalize().unwrap();
log::debug!("Adding storage");
repo.add_storage(DEFAULT_STORAGE_NAME, storage_path.to_str().unwrap())
.await?;
let thumb_storage_path = opt
.repo
.join(THUMBNAIL_STORAGE_PATH)
.canonicalize()
.unwrap();
repo.add_storage(THUMBNAIL_STORAGE_NAME, thumb_storage_path.to_str().unwrap())
.await?;
let settings_string = settings.to_toml_string()?;
let _repo = get_repo(&opt.repo, &settings.paths).await?;
log::debug!("Writing settings");
fs::write(opt.repo.join(SETTINGS_PATH), &settings_string.into_bytes()).await?;
settings.save(&opt.repo)?;
log::info!("Repository initialized");
Ok(())
}
/// Imports files from a source into the database
async fn import(opt: Opt, path: String, delete_files: bool) -> RepoResult<()> {
let (_s, repo) = init_repo(&opt).await?;
log::info!("Importing");
let paths: Vec<PathBuf> = glob::glob(&path)
.unwrap()
.into_iter()
.filter_map(|r| r.ok())
.filter(|e| e.is_file())
.collect();
fn clean_old_connection_files(root: &PathBuf) -> RepoResult<()> {
let paths = ["repo.tcp", "repo.sock"];
for path in paths {
if let Err(e) = import_single_image(&path, &repo).await {
log::error!("Import failed: {:?}", e);
if delete_files {
log::info!("Deleting file {:?}", path);
let _ = fs::remove_file(&path).await;
}
} else {
if delete_files {
log::info!("Deleting file {:?}", path);
let _ = fs::remove_file(&path).await;
}
}
}
log::info!("Creating thumbnails...");
let mut files = repo.files().await?;
for _ in 0..(files.len().div_ceil(&64)) {
futures::future::join_all(
(0..64)
.filter_map(|_| files.pop())
.map(|f| create_file_thumbnails(&repo, f)),
)
.await
.into_iter()
.filter_map(|r| r.err())
.for_each(|e| log::error!("Failed to create thumbnail: {:?}", e));
}
let path = root.join(path);
Ok(())
}
/// Creates thumbnails of all sizes
async fn import_single_image(path: &PathBuf, repo: &Repo) -> RepoResult<()> {
log::info!("Importing file");
let file = repo.add_file_by_path(path.clone()).await?;
log::info!("Adding tags");
let tags_path = PathBuf::from(format!("{}{}", path.to_str().unwrap(), ".txt"));
add_tags_from_tags_file(tags_path, repo, file).await?;
Ok(())
}
async fn add_tags_from_tags_file(
tags_path: PathBuf,
repo: &Repo,
file: RepoFile,
) -> RepoResult<()> {
log::info!("Adding tags");
if tags_path.exists() {
let mut tags = parse_tags_file(tags_path).await?;
log::info!("Found {} tags in the tag file", tags.len());
let resolved_tags = repo.tags_by_names(tags.clone()).await?;
tags.retain(|tag| {
resolved_tags
.iter()
.find(|t| if let (Some(ns1), Some(ns2)) = (t.namespace(), &tag.0) {
*ns1.name() == *ns2
} else { t.namespace().is_none() && tag.0.is_none() } && *t.name() == *tag.1)
.is_none()
});
let mut tag_ids: Vec<i64> = resolved_tags.into_iter().map(|t| t.id()).collect();
log::info!("Existing tag_ids count is {}", tag_ids.len());
log::info!("{} tags need to be created", tags.len());
for (namespace, name) in tags {
let tag = if let Some(namespace) = namespace {
log::info!("Adding namespaced tag '{}:{}'", namespace, name);
repo.add_namespaced_tag(name, namespace).await?
} else {
log::info!("Adding unnamespaced tag '{}'", name);
repo.add_unnamespaced_tag(name).await?
};
tag_ids.push(tag.id());
}
log::info!("Mapping {} tags to the file", tag_ids.len());
if !tag_ids.is_empty() {
file.add_tags(tag_ids).await?;
if path.exists() {
std::fs::remove_file(&path)?;
}
} else {
log::info!("No tags file '{:?}' found", tags_path);
}
Ok(())
}
#[tracing::instrument(skip(repo, file))]
async fn create_file_thumbnails(repo: &Repo, file: File) -> RepoResult<()> {
let file_thumbnails = repo.get_file_thumbnails(file.hash().to_owned()).await?;
if file_thumbnails.is_empty() {
repo.create_thumbnails_for_file(&file).await?;
}
Ok(())
}

@ -1,37 +1,49 @@
use mediarepo_core::error::RepoResult;
use mediarepo_core::settings::Settings;
use mediarepo_core::settings::v1::SettingsV1;
use mediarepo_core::settings::{PathSettings, Settings};
use mediarepo_model::repo::Repo;
use std::path::PathBuf;
use tokio::fs;
/// Loads the settings from a toml path
pub async fn load_settings(path: &PathBuf) -> RepoResult<Settings> {
let contents = fs::read_to_string(path).await?;
Settings::from_toml_string(&contents)
pub fn load_settings(root_path: &PathBuf) -> RepoResult<Settings> {
let contents = std::fs::read_to_string(root_path.join("repo.toml"))?;
if let Ok(settings_v1) = SettingsV1::from_toml_string(&contents) {
let settings = Settings::from_v1(settings_v1)?;
settings.save(root_path)?;
Ok(settings)
} else {
Settings::read(root_path)
}
}
pub async fn get_repo(db_path: &str) -> RepoResult<Repo> {
Repo::connect(format!("sqlite://{}", db_path)).await
pub async fn get_repo(root_path: &PathBuf, path_settings: &PathSettings) -> RepoResult<Repo> {
Repo::connect(
format!(
"sqlite://{}",
path_settings.db_file_path(root_path).to_string_lossy()
),
path_settings.files_dir(root_path),
path_settings.thumbs_dir(root_path),
)
.await
}
pub async fn create_paths_for_repo(
root: &PathBuf,
settings: &Settings,
storage_path: &str,
thumbnail_path: &str,
) -> RepoResult<()> {
pub async fn create_paths_for_repo(root: &PathBuf, settings: &PathSettings) -> RepoResult<()> {
if !root.exists() {
fs::create_dir_all(&root).await?;
}
let db_path = root.join(&settings.database_path);
let db_path = settings.database_dir(root);
if !db_path.exists() {
fs::create_dir_all(db_path.parent().unwrap()).await?;
fs::create_dir_all(db_path).await?;
}
let storage_path = root.join(storage_path);
if !storage_path.exists() {
fs::create_dir_all(storage_path).await?;
let files_path = settings.files_dir(root);
if !files_path.exists() {
fs::create_dir_all(files_path).await?;
}
let thumbnail_path = root.join(thumbnail_path);
let thumbnail_path = settings.thumbs_dir(root);
if !thumbnail_path.exists() {
fs::create_dir_all(thumbnail_path).await?;
}
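load_settings above is a migrate-on-read: it tries the legacy v1 format first and, when that parses, converts and persists the upgraded settings before falling back to the current reader. A toy sketch of the pattern (SettingsV1/Settings stand-ins and the field are invented for illustration; the real code also saves the upgraded settings to disk):

struct SettingsV1 { listen_address: String }
struct Settings { listen_address: String }

fn parse_v1(contents: &str) -> Option<SettingsV1> {
    contents
        .strip_prefix("listen_address=")
        .map(|v| SettingsV1 { listen_address: v.trim().to_string() })
}

fn parse_current(contents: &str) -> Option<Settings> {
    contents
        .strip_prefix("listen_address = ")
        .map(|v| Settings { listen_address: v.trim().to_string() })
}

// Prefer the legacy format: if it parses, upgrade it; otherwise read the
// current format.
fn load(contents: &str) -> Option<Settings> {
    if let Some(v1) = parse_v1(contents) {
        Some(Settings { listen_address: v1.listen_address })
    } else {
        parse_current(contents)
    }
}

fn main() {
    let old = load("listen_address=127.0.0.1").map(|s| s.listen_address);
    let new = load("listen_address = 127.0.0.1").map(|s| s.listen_address);
    assert_eq!(old.as_deref(), Some("127.0.0.1"));
    assert_eq!(new.as_deref(), Some("127.0.0.1"));
}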
