Implement new settings format and remove storage table

TG-16 #done
TG-67 #closed

Signed-off-by: trivernis <trivernis@protonmail.com>
pull/4/head
trivernis 3 years ago
parent 796eb56a62
commit f77a45e963

@ -117,7 +117,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "616896e05fc0e2649463a93a15183c6a16bf03413a7af88ef1285ddedfa9cda5"
dependencies = [
"num-traits",
"num-traits 0.2.14",
]
[[package]]
@ -168,7 +168,7 @@ version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
dependencies = [
"serde",
"serde 1.0.132",
]
[[package]]
@ -259,7 +259,7 @@ dependencies = [
"byteorder",
"futures 0.3.19",
"lazy_static",
"serde",
"serde 1.0.132",
"thiserror",
"tokio",
"tracing",
@ -328,8 +328,8 @@ checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
dependencies = [
"libc",
"num-integer",
"num-traits",
"serde",
"num-traits 0.2.14",
"serde 1.0.132",
"time 0.1.44",
"winapi",
]
@ -372,6 +372,22 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "120133d4db2ec47efe2e26502ee984747630c67f51974fca0b6c1340cf2368d3"
[[package]]
name = "config"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b1b9d958c2b1368a663f05538fc1b5975adce1e19f435acceae987aceeeb369"
dependencies = [
"lazy_static",
"nom 5.1.2",
"rust-ini",
"serde 1.0.132",
"serde-hjson",
"serde_json",
"toml",
"yaml-rust",
]
[[package]]
name = "console-api"
version = "0.1.0"
@ -395,7 +411,7 @@ dependencies = [
"futures 0.3.19",
"hdrhistogram",
"humantime",
"serde",
"serde 1.0.132",
"serde_json",
"thread_local",
"tokio",
@ -847,7 +863,7 @@ dependencies = [
"byteorder",
"flate2",
"nom 7.1.0",
"num-traits",
"num-traits 0.2.14",
]
[[package]]
@ -974,7 +990,7 @@ dependencies = [
"jpeg-decoder",
"num-iter",
"num-rational",
"num-traits",
"num-traits 0.2.14",
"png",
"scoped_threadpool",
"tiff",
@ -1065,6 +1081,19 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "lexical-core"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe"
dependencies = [
"arrayvec 0.5.2",
"bitflags",
"cfg-if 1.0.0",
"ryu",
"static_assertions",
]
[[package]]
name = "libc"
version = "0.2.112"
@ -1101,6 +1130,12 @@ dependencies = [
"cc",
]
[[package]]
name = "linked-hash-map"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
[[package]]
name = "lock_api"
version = "0.4.5"
@ -1141,7 +1176,7 @@ source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=7b210251f0986e3
dependencies = [
"bromine",
"chrono",
"serde",
"serde 1.0.132",
"serde_piecewise_default",
"thiserror",
"tracing",
@ -1152,6 +1187,7 @@ name = "mediarepo-core"
version = "0.1.0"
dependencies = [
"base64",
"config",
"data-encoding",
"futures 0.3.19",
"glob",
@ -1160,7 +1196,7 @@ dependencies = [
"multibase",
"multihash",
"sea-orm",
"serde",
"serde 1.0.132",
"sqlx",
"thiserror",
"thumbnailer",
@ -1214,7 +1250,7 @@ dependencies = [
"mime",
"mime_guess",
"sea-orm",
"serde",
"serde 1.0.132",
"tokio",
"tracing",
"typemap_rev",
@ -1231,7 +1267,7 @@ dependencies = [
"mediarepo-model",
"port_check",
"rayon",
"serde",
"serde 1.0.132",
"tokio",
"tracing",
"tracing-futures",
@ -1387,6 +1423,7 @@ version = "5.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af"
dependencies = [
"lexical-core",
"memchr",
"version_check",
]
@ -1419,7 +1456,7 @@ checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
"num-traits 0.2.14",
]
[[package]]
@ -1429,7 +1466,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
dependencies = [
"autocfg",
"num-traits",
"num-traits 0.2.14",
]
[[package]]
@ -1440,7 +1477,7 @@ checksum = "b2021c8337a54d21aca0d59a92577a029af9431cb59b909b03252b9c164fad59"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
"num-traits 0.2.14",
]
[[package]]
@ -1451,7 +1488,16 @@ checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
"num-traits 0.2.14",
]
[[package]]
name = "num-traits"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31"
dependencies = [
"num-traits 0.2.14",
]
[[package]]
@ -1892,6 +1938,12 @@ dependencies = [
"chrono",
]
[[package]]
name = "rust-ini"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e52c148ef37f8c375d49d5a73aa70713125b7f19095948a923f80afdeb22ec2"
[[package]]
name = "rust_decimal"
version = "1.19.0"
@ -1899,8 +1951,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c2d4912d369fb95a351c221475657970678d344d70c1a788223f6e74d1e3732"
dependencies = [
"arrayvec 0.7.2",
"num-traits",
"serde",
"num-traits 0.2.14",
"serde 1.0.132",
]
[[package]]
@ -1960,7 +2012,7 @@ dependencies = [
"sea-orm-macros",
"sea-query",
"sea-strum",
"serde",
"serde 1.0.132",
"serde_json",
"sqlx",
"tracing",
@ -2052,6 +2104,12 @@ dependencies = [
"libc",
]
[[package]]
name = "serde"
version = "0.8.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9dad3f759919b92c3068c696c15c3d17238234498bbdcc80f2c469606f948ac8"
[[package]]
name = "serde"
version = "1.0.132"
@ -2061,6 +2119,18 @@ dependencies = [
"serde_derive",
]
[[package]]
name = "serde-hjson"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a3a4e0ea8a88553209f6cc6cfe8724ecad22e1acf372793c27d995290fe74f8"
dependencies = [
"lazy_static",
"num-traits 0.1.43",
"regex",
"serde 0.8.23",
]
[[package]]
name = "serde_derive"
version = "1.0.132"
@ -2081,7 +2151,7 @@ dependencies = [
"indexmap",
"itoa 1.0.1",
"ryu",
"serde",
"serde 1.0.132",
]
[[package]]
@ -2090,7 +2160,7 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91a44b0f51aedd237f8f25c831e1f629982a187a5045c08ce4bccccce17b4b0"
dependencies = [
"serde",
"serde 1.0.132",
"serde_piecewise_default_derive",
]
@ -2102,7 +2172,7 @@ checksum = "19446953e7b22342c23c79ede938c04b1c12f4eb7513db30cda94193ce30ff2a"
dependencies = [
"proc-macro2 0.4.30",
"quote 0.6.13",
"serde",
"serde 1.0.132",
"syn 0.15.44",
]
@ -2223,7 +2293,7 @@ dependencies = [
"parking_lot",
"percent-encoding",
"rust_decimal",
"serde",
"serde 1.0.132",
"serde_json",
"sha2",
"smallvec",
@ -2275,6 +2345,12 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "stringprep"
version = "0.1.2"
@ -2551,7 +2627,7 @@ version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa"
dependencies = [
"serde",
"serde 1.0.132",
]
[[package]]
@ -2713,7 +2789,7 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb65ea441fbb84f9f6748fd496cf7f63ec9af5bca94dd86456978d055e8eb28b"
dependencies = [
"serde",
"serde 1.0.132",
"tracing-core",
]
@ -2727,7 +2803,7 @@ dependencies = [
"lazy_static",
"matchers",
"regex",
"serde",
"serde 1.0.132",
"serde_json",
"sharded-slab",
"smallvec",
@ -2835,7 +2911,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
dependencies = [
"getrandom",
"serde",
"serde 1.0.132",
]
[[package]]
@ -2994,3 +3070,12 @@ name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "yaml-rust"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [
"linked-hash-map",
]

@ -17,8 +17,8 @@ typemap_rev = "^0.1.5"
futures = "^0.3.19"
itertools = "^0.10.3"
glob = "^0.3.0"
tracing = "0.1.29"
data-encoding = "2.3.2"
tracing = "^0.1.29"
data-encoding = "^2.3.2"
[dependencies.thumbnailer]
version = "^0.2.4"
@ -37,6 +37,10 @@ features = ["migrate"]
version = "^1.15.0"
features = ["fs", "io-util", "io-std"]
[dependencies.config]
version = "^0.11.0"
features = ["toml"]
[dependencies.mediarepo-api]
git = "https://github.com/Trivernis/mediarepo-api.git"
rev = "7b210251f0986e3be060bcfd69cfddcec4e45466"

@ -1,5 +1,5 @@
use sea_orm::DbErr;
use std::fmt::{Display, Formatter};
use std::fmt::{Debug, Formatter};
use thiserror::Error;
pub type RepoResult<T> = Result<T, RepoError>;
@ -36,6 +36,9 @@ pub enum RepoError {
#[error("failed to decode data {0}")]
Decode(#[from] data_encoding::DecodeError),
#[error("Failed to read repo.toml configuration file {0}")]
Config(#[from] config::ConfigError),
}
#[derive(Error, Debug)]
@ -59,9 +62,9 @@ pub enum RepoDatabaseError {
#[derive(Debug)]
pub struct StringError(String);
impl Display for StringError {
impl std::fmt::Display for StringError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
std::fmt::Display::fmt(&self.0, f)
}
}

@ -1,38 +0,0 @@
use crate::error::RepoResult;
use serde::{Deserialize, Serialize};
use std::net::IpAddr;
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Settings {
pub listen_address: IpAddr,
pub port_range: (u16, u16),
pub database_path: String,
pub default_file_store: String,
pub thumbnail_store: String,
}
impl Default for Settings {
fn default() -> Self {
Self {
listen_address: IpAddr::from([127, 0, 0, 1]),
port_range: (3400, 3500),
database_path: "./db/repo.db".to_string(),
default_file_store: "Main".to_string(),
thumbnail_store: "./thumbnails".to_string(),
}
}
}
impl Settings {
/// Parses settings from a string
pub fn from_toml_string(s: &str) -> RepoResult<Self> {
let settings = toml::from_str(s)?;
Ok(settings)
}
/// Converts the settings into a toml string
pub fn to_toml_string(&self) -> RepoResult<String> {
let string = toml::to_string(&self)?;
Ok(string)
}
}

@ -0,0 +1,42 @@
use serde::{Deserialize, Serialize};
use tracing::Level;
/// Logging-related configuration of the repository daemon.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct LoggingSettings {
    /// Global log verbosity; see [`LogLevel`].
    pub level: LogLevel,
    /// Switch for tracing SQL statements (consumed by the logging setup
    /// elsewhere; not read in this module).
    pub trace_sql: bool,
    /// Switch for tracing incoming API calls (consumed elsewhere).
    pub trace_api_calls: bool,
}

impl Default for LoggingSettings {
    /// Defaults to `Info` verbosity with both tracing switches disabled.
    fn default() -> Self {
        Self {
            level: LogLevel::Info,
            trace_sql: false,
            trace_api_calls: false,
        }
    }
}
/// Log verbosity levels as they appear in the configuration file.
///
/// Mirrors the levels of `tracing::Level` plus an explicit `Off` variant
/// that disables logging entirely (mapped to `None` on conversion).
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum LogLevel {
    Off,
    Error,
    Warn,
    Info,
    Debug,
    Trace,
}

/// Conversion into the `tracing` level; `Off` maps to `None`.
///
/// Implemented as `From` rather than a hand-written `Into`
/// (clippy `from_over_into`); the standard library's blanket impl still
/// provides `Into<Option<Level>> for LogLevel`, so existing `.into()`
/// call sites keep working unchanged.
impl From<LogLevel> for Option<Level> {
    fn from(level: LogLevel) -> Self {
        match level {
            LogLevel::Off => None,
            LogLevel::Error => Some(Level::ERROR),
            LogLevel::Warn => Some(Level::WARN),
            LogLevel::Info => Some(Level::INFO),
            LogLevel::Debug => Some(Level::DEBUG),
            LogLevel::Trace => Some(Level::TRACE),
        }
    }
}

@ -0,0 +1,76 @@
mod logging;
mod paths;
mod server;
pub mod v1;
use crate::error::RepoResult;
use crate::settings::v1::SettingsV1;
use config::{Config, FileFormat};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::PathBuf;
pub use logging::*;
pub use paths::*;
pub use server::*;
/// Root of the repository configuration, persisted as `repo.toml`.
#[derive(Clone, Debug, Deserialize, Serialize, Default)]
pub struct Settings {
    /// Network listener configuration (TCP and, on unix, socket).
    pub server: ServerSettings,
    /// Directory layout below the repository root.
    pub paths: PathSettings,
    /// Log verbosity and tracing switches.
    pub logging: LoggingSettings,
}
impl Settings {
    /// Reads the effective settings for the repository at `root`.
    ///
    /// Later sources override earlier ones: the serialized
    /// `Settings::default()` seeds every key, a `repo.*` file in `root`
    /// (resolved by the `config` crate; typically `repo.toml`) overrides the
    /// defaults, and `MEDIAREPO_*` environment variables override both.
    pub fn read(root: &PathBuf) -> RepoResult<Self> {
        let mut settings = Config::default();
        settings
            .merge(config::File::from_str(
                &*Settings::default().to_toml_string()?,
                FileFormat::Toml,
            ))?
            .merge(config::File::from(root.join("repo")))?
            .merge(config::Environment::with_prefix("MEDIAREPO"))?;
        tracing::debug!("Settings are: {:#?}", settings);

        Ok(settings.try_into::<Settings>()?)
    }

    /// Builds settings from the legacy v1 format.
    ///
    /// Carries over the listen address, port range, thumbnail directory and
    /// the parent directory of the old database path; all other keys take
    /// their default values. TCP is force-enabled because v1 repositories
    /// only supported TCP. `MEDIAREPO_*` environment variables still
    /// override the result.
    pub fn from_v1(settings_v1: SettingsV1) -> RepoResult<Self> {
        let mut settings_main = Settings::default();
        settings_main.server.tcp.enabled = true;
        settings_main.server.tcp.port = PortSetting::Range(settings_v1.port_range);
        settings_main.server.tcp.listen_address = settings_v1.listen_address;
        settings_main.paths.thumbnail_directory = settings_v1.thumbnail_store.into();
        // v1 stored a full database file path; the new format stores the
        // containing directory. Fall back to "./" for a bare filename.
        settings_main.paths.database_directory = PathBuf::from(settings_v1.database_path)
            .parent()
            .map(|p| p.to_string_lossy().to_string())
            .unwrap_or_else(|| String::from("./"));

        let mut settings = Config::default();
        settings
            .merge(config::File::from_str(
                &*settings_main.to_toml_string()?,
                FileFormat::Toml,
            ))?
            .merge(config::Environment::with_prefix("MEDIAREPO"))?;
        tracing::debug!("Settings are: {:#?}", settings);

        Ok(settings.try_into::<Settings>()?)
    }

    /// Serializes the settings into a TOML string.
    pub fn to_toml_string(&self) -> RepoResult<String> {
        let string = toml::to_string(&self)?;
        Ok(string)
    }

    /// Writes the settings as pretty-printed TOML to `<root>/repo.toml`.
    pub fn save(&self, root: &PathBuf) -> RepoResult<()> {
        let string = toml::to_string_pretty(&self)?;
        fs::write(root.join("repo.toml"), string.into_bytes())?;
        Ok(())
    }
}

@ -0,0 +1,46 @@
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PathSettings {
pub(crate) database_directory: String,
pub(crate) files_directory: String,
pub(crate) thumbnail_directory: String,
}
impl Default for PathSettings {
fn default() -> Self {
Self {
database_directory: String::from("db"),
files_directory: String::from("files"),
thumbnail_directory: String::from("thumbnails"),
}
}
}
impl PathSettings {
#[inline]
pub fn database_dir(&self, root: &PathBuf) -> PathBuf {
root.join(&self.database_directory)
}
#[inline]
pub fn files_dir(&self, root: &PathBuf) -> PathBuf {
root.join(&self.files_directory)
}
#[inline]
pub fn thumbs_dir(&self, root: &PathBuf) -> PathBuf {
root.join(&self.thumbnail_directory)
}
#[inline]
pub fn db_file_path(&self, root: &PathBuf) -> PathBuf {
self.database_dir(root).join("repo.db")
}
#[inline]
pub fn frontend_state_file_path(&self, root: &PathBuf) -> PathBuf {
self.database_dir(root).join("frontend-state.json")
}
}

@ -0,0 +1,46 @@
use serde::{Deserialize, Serialize};
use std::net::IpAddr;
/// Socket configuration for the daemon.
#[derive(Clone, Debug, Deserialize, Serialize, Default)]
pub struct ServerSettings {
    /// TCP listener settings (available on every target).
    pub tcp: TcpServerSettings,
    /// Unix domain socket settings; only compiled on unix targets.
    #[cfg(unix)]
    pub unix_socket: UnixSocketServerSettings,
}
/// Settings of the TCP listener.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct TcpServerSettings {
    /// Whether the TCP listener is started at all.
    pub enabled: bool,
    /// Address the listener binds to.
    pub listen_address: IpAddr,
    /// Fixed port or port range to bind; see [`PortSetting`].
    pub port: PortSetting,
}

/// A port specification: either one fixed port or an inclusive range.
///
/// `#[serde(untagged)]` lets the config file hold either a bare number
/// (`port = 13400`) or a two-element range (`port = [13400, 13500]`).
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(untagged)]
pub enum PortSetting {
    Fixed(u16),
    Range((u16, u16)),
}

impl Default for TcpServerSettings {
    /// TCP is enabled by default only on windows builds (unix builds default
    /// to the unix socket instead); binds loopback with ports 13400-13500.
    fn default() -> Self {
        Self {
            enabled: cfg!(windows),
            listen_address: IpAddr::from([127, 0, 0, 1]),
            port: PortSetting::Range((13400, 13500)),
        }
    }
}
/// Settings of the unix domain socket listener (unix targets only).
#[cfg(unix)]
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct UnixSocketServerSettings {
    /// Whether the unix socket listener is started.
    pub enabled: bool,
}

#[cfg(unix)]
impl Default for UnixSocketServerSettings {
    /// The unix socket is on by default wherever it is available.
    fn default() -> Self {
        Self { enabled: true }
    }
}

@ -0,0 +1,20 @@
use crate::error::RepoResult;
use serde::{Deserialize, Serialize};
use std::net::IpAddr;
/// The legacy (v1) on-disk settings format.
///
/// Kept only so existing repositories can be migrated to the new format
/// via `Settings::from_v1`.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct SettingsV1 {
    pub listen_address: IpAddr,
    pub port_range: (u16, u16),
    pub database_path: String,
    pub default_file_store: String,
    pub thumbnail_store: String,
}

impl SettingsV1 {
    /// Parses v1 settings from a TOML string.
    ///
    /// # Errors
    /// Returns a `RepoError` when the input is not valid TOML for this
    /// structure.
    pub fn from_toml_string(s: &str) -> RepoResult<Self> {
        let settings = toml::from_str(s)?;
        Ok(settings)
    }
}

@ -0,0 +1,50 @@
-- Add migration script here
-- Rebuilds `files` (dropping the storage_id column) and `file_metadata`,
-- and removes the now-unused storage_locations table.
-- FK enforcement is disabled while tables are renamed and recreated so the
-- intermediate states don't trip constraint checks.
PRAGMA foreign_keys= off;

-- rename old files table
ALTER TABLE files
    RENAME TO _files_old;

-- rename metadata value (because of foreign key constraints)
ALTER TABLE file_metadata
    RENAME TO _file_metadata_old;

-- create new files table (same as before, minus storage_id)
CREATE TABLE files
(
    id        INTEGER PRIMARY KEY AUTOINCREMENT,
    status    INTEGER NOT NULL DEFAULT 10,
    cd_id     INTEGER NOT NULL REFERENCES content_descriptors (id),
    mime_type VARCHAR(128) NOT NULL DEFAULT 'application/octet-stream'
);

-- add data from files table (explicit column list; ids are preserved)
INSERT INTO files
SELECT id, status, cd_id, mime_type
FROM _files_old;

-- create metadata table
CREATE TABLE file_metadata
(
    file_id       INTEGER PRIMARY KEY REFERENCES files (id),
    size          INTEGER NOT NULL,
    name          VARCHAR(128),
    comment       VARCHAR(1024),
    import_time   DATETIME NOT NULL,
    creation_time DATETIME NOT NULL,
    change_time   DATETIME NOT NULL
);

-- add back the old values
-- NOTE(review): SELECT * relies on _file_metadata_old's column order matching
-- the new table definition exactly; confirm against the previous schema.
INSERT INTO file_metadata
SELECT *
FROM _file_metadata_old;

-- drop old tables
DROP TABLE _file_metadata_old;
DROP TABLE _files_old;
DROP TABLE storage_locations;

-- create indices on new tables
CREATE UNIQUE INDEX file_metadata_file_id_unique ON file_metadata (file_id);
CREATE INDEX files_content_descriptor ON files (cd_id);

PRAGMA foreign_keys= on;

@ -7,7 +7,6 @@ pub struct Model {
pub id: i64,
pub status: i32,
pub mime_type: String,
pub storage_id: i64,
pub cd_id: i64,
}
@ -19,13 +18,6 @@ pub enum Relation {
to = "super::content_descriptor::Column::Id"
)]
ContentDescriptorId,
#[sea_orm(
belongs_to = "super::storage::Entity",
from = "Column::StorageId",
to = "super::storage::Column::Id"
)]
Storage,
}
impl Related<super::content_descriptor::Entity> for Entity {
@ -34,12 +26,6 @@ impl Related<super::content_descriptor::Entity> for Entity {
}
}
impl Related<super::storage::Entity> for Entity {
fn to() -> RelationDef {
Relation::Storage.def()
}
}
impl Related<super::file_metadata::Entity> for Entity {
fn to() -> RelationDef {
super::file_metadata::Relation::File.def().rev()

@ -5,5 +5,4 @@ pub mod file;
pub mod file_metadata;
pub mod namespace;
pub mod source;
pub mod storage;
pub mod tag;

@ -1,24 +0,0 @@
use sea_orm::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "storage_locations")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub name: String,
pub path: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::file::Entity")]
File,
}
impl Related<super::file::Entity> for Entity {
fn to() -> RelationDef {
Relation::File.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

@ -11,6 +11,7 @@ use tokio::io::{AsyncReadExt, BufReader};
use crate::file_metadata::FileMetadata;
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_core::thumbnailer::{self, Thumbnail as ThumbnailerThumb, ThumbnailSize};
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::content_descriptor_tag;
@ -18,7 +19,6 @@ use mediarepo_database::entities::file;
use mediarepo_database::entities::namespace;
use mediarepo_database::entities::tag;
use crate::storage::Storage;
use crate::tag::Tag;
pub enum FileStatus {
@ -124,14 +124,12 @@ impl File {
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn add(
db: DatabaseConnection,
storage_id: i64,
cd_id: i64,
mime_type: String,
) -> RepoResult<Self> {
let file = file::ActiveModel {
cd_id: Set(cd_id),
mime_type: Set(mime_type),
storage_id: Set(storage_id),
..Default::default()
};
let file: file::ActiveModel = file.insert(&db).await?.into();
@ -185,15 +183,6 @@ impl File {
.and_then(|f| f.ok_or_else(|| RepoError::from("missing file metadata")))
}
/// Returns the storage where the file is stored
pub async fn storage(&self) -> RepoResult<Storage> {
let storage = Storage::by_id(self.db.clone(), self.model.storage_id)
.await?
.expect("The FK storage_id doesn't exist?!");
Ok(storage)
}
/// Returns the list of tags of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tags(&self) -> RepoResult<Vec<Tag>> {
@ -266,22 +255,28 @@ impl File {
/// Returns the reader for the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_reader(&self) -> RepoResult<BufReader<tokio::fs::File>> {
let storage = self.storage().await?;
pub async fn get_reader(
&self,
storage: &FileHashStore,
) -> RepoResult<BufReader<tokio::fs::File>> {
storage
.get_file_reader(&self.content_descriptor.descriptor)
.get_file(&self.content_descriptor.descriptor)
.await
.map(|(_, f)| f)
}
/// Creates a thumbnail for the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn create_thumbnail<I: IntoIterator<Item = ThumbnailSize> + Debug>(
&self,
storage: &FileHashStore,
sizes: I,
) -> RepoResult<Vec<ThumbnailerThumb>> {
let mut buf = Vec::new();
self.get_reader().await?.read_to_end(&mut buf).await?;
self.get_reader(storage)
.await?
.read_to_end(&mut buf)
.await?;
let mime_type = self.model.mime_type.clone();
let mime_type =
mime::Mime::from_str(&mime_type).unwrap_or_else(|_| mime::APPLICATION_OCTET_STREAM);

@ -1,16 +0,0 @@
pub mod tag_handle;
use async_trait::async_trait;
use mediarepo_core::error::RepoResult;
use sea_orm::DatabaseConnection;
#[async_trait]
pub trait EntityHandle {
type Model;
/// Returns the ID that is stored in the handle
fn id(&self) -> i64;
/// Returns the model associated with the handle
async fn model(&self, db: DatabaseConnection) -> RepoResult<Self::Model>;
}

@ -1,24 +0,0 @@
use crate::handles::EntityHandle;
use crate::tag::Tag;
use async_trait::async_trait;
use mediarepo_core::error::{RepoDatabaseError, RepoResult};
use sea_orm::DatabaseConnection;
pub struct TagHandle(pub(crate) i64);
#[async_trait]
impl EntityHandle for TagHandle {
type Model = Tag;
fn id(&self) -> i64 {
self.0
}
async fn model(&self, db: DatabaseConnection) -> RepoResult<Self::Model> {
let tag = Tag::by_id(db, self.0)
.await?
.ok_or_else(|| RepoDatabaseError::InvalidHandle(self.id()))?;
Ok(tag)
}
}

@ -1,10 +1,8 @@
pub mod content_descriptor;
pub mod file;
pub mod file_metadata;
pub mod handles;
pub mod namespace;
pub mod repo;
pub mod storage;
pub mod tag;
pub mod thumbnail;
pub mod type_keys;

@ -2,12 +2,12 @@ use crate::content_descriptor::ContentDescriptor;
use crate::file::File;
use crate::file_metadata::FileMetadata;
use crate::namespace::Namespace;
use crate::storage::Storage;
use crate::tag::Tag;
use crate::thumbnail::Thumbnail;
use chrono::{Local, NaiveDateTime};
use mediarepo_core::content_descriptor::{encode_content_descriptor, is_v1_content_descriptor};
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_core::fs::thumbnail_store::{Dimensions, ThumbnailStore};
use mediarepo_core::itertools::Itertools;
use mediarepo_core::thumbnailer::ThumbnailSize;
@ -27,24 +27,32 @@ use tokio::io::AsyncReadExt;
#[derive(Clone)]
pub struct Repo {
db: DatabaseConnection,
main_storage: Option<Storage>,
thumbnail_storage: Option<ThumbnailStore>,
main_storage: FileHashStore,
thumbnail_storage: ThumbnailStore,
}
impl Repo {
pub(crate) fn new(db: DatabaseConnection) -> Self {
pub(crate) fn new(
db: DatabaseConnection,
file_store_path: PathBuf,
thumb_store_path: PathBuf,
) -> Self {
Self {
db,
main_storage: None,
thumbnail_storage: None,
main_storage: FileHashStore::new(file_store_path),
thumbnail_storage: ThumbnailStore::new(thumb_store_path),
}
}
/// Connects to the database with the given uri
#[tracing::instrument(level = "debug")]
pub async fn connect<S: AsRef<str> + Debug>(uri: S) -> RepoResult<Self> {
pub async fn connect<S: AsRef<str> + Debug>(
uri: S,
file_store_path: PathBuf,
thumb_store_path: PathBuf,
) -> RepoResult<Self> {
let db = get_database(uri).await?;
Ok(Self::new(db))
Ok(Self::new(db, file_store_path, thumb_store_path))
}
/// Returns the database of the repo for raw sql queries
@ -52,45 +60,6 @@ impl Repo {
&self.db
}
/// Returns all available storages
#[tracing::instrument(level = "debug", skip(self))]
pub async fn storages(&self) -> RepoResult<Vec<Storage>> {
Storage::all(self.db.clone()).await
}
/// Returns a storage by path
#[tracing::instrument(level = "debug", skip(self))]
pub async fn storage_by_path<S: ToString + Debug>(
&self,
path: S,
) -> RepoResult<Option<Storage>> {
Storage::by_path(self.db.clone(), path).await
}
/// Sets the main storage
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_main_storage<S: ToString + Debug>(&mut self, name: S) -> RepoResult<()> {
self.main_storage = Storage::by_name(self.db.clone(), name.to_string()).await?;
Ok(())
}
/// Sets the default thumbnail storage
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_thumbnail_storage(&mut self, path: PathBuf) -> RepoResult<()> {
self.thumbnail_storage = Some(ThumbnailStore::new(path));
Ok(())
}
/// Adds a storage to the repository
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_storage<S1: ToString + Debug, S2: ToString + Debug>(
&self,
name: S1,
path: S2,
) -> RepoResult<Storage> {
Storage::create(self.db.clone(), name, path).await
}
/// Returns a file by its mapped hash
#[tracing::instrument(level = "debug", skip(self))]
pub async fn file_by_cd(&self, cd: &[u8]) -> RepoResult<Option<File>> {
@ -145,17 +114,17 @@ impl Repo {
creation_time: NaiveDateTime,
change_time: NaiveDateTime,
) -> RepoResult<File> {
let storage = self.get_main_storage()?;
let file_size = content.len();
let reader = Cursor::new(content);
let hash = storage.store_entry(reader).await?;
let cd_binary = self.main_storage.add_file(reader, None).await?;
let cd = ContentDescriptor::add(self.db.clone(), cd_binary).await?;
let mime_type = mime_type
.and_then(|m| mime::Mime::from_str(&m).ok())
.unwrap_or_else(|| mime::APPLICATION_OCTET_STREAM)
.to_string();
let file = File::add(self.db.clone(), storage.id(), hash.id(), mime_type).await?;
let file = File::add(self.db.clone(), cd.id(), mime_type).await?;
FileMetadata::add(
self.db.clone(),
file.id(),
@ -188,9 +157,9 @@ impl Repo {
/// Returns all thumbnails of a file
pub async fn get_file_thumbnails(&self, file_cd: &[u8]) -> RepoResult<Vec<Thumbnail>> {
let thumb_store = self.get_thumbnail_storage()?;
let file_cd = encode_content_descriptor(file_cd);
let thumbnails = thumb_store
let thumbnails = self
.thumbnail_storage
.get_thumbnails(&file_cd)
.await?
.into_iter()
@ -205,18 +174,25 @@ impl Repo {
Ok(thumbnails)
}
pub async fn get_file_bytes(&self, file: &File) -> RepoResult<Vec<u8>> {
let mut buf = Vec::new();
let mut reader = file.get_reader(&self.main_storage).await?;
reader.read_to_end(&mut buf).await?;
Ok(buf)
}
/// Creates thumbnails of all sizes for a file
#[tracing::instrument(level = "debug", skip(self, file))]
pub async fn create_thumbnails_for_file(&self, file: &File) -> RepoResult<Vec<Thumbnail>> {
let thumb_storage = self.get_thumbnail_storage()?;
let size = ThumbnailSize::Medium;
let (height, width) = size.dimensions();
let thumbs = file.create_thumbnail([size]).await?;
let thumbs = file.create_thumbnail(&self.main_storage, [size]).await?;
let mut created_thumbs = Vec::with_capacity(1);
for thumb in thumbs {
let entry = self
.store_single_thumbnail(file.encoded_cd(), thumb_storage, height, width, thumb)
.store_single_thumbnail(file.encoded_cd(), height, width, thumb)
.await?;
created_thumbs.push(entry);
}
@ -230,15 +206,14 @@ impl Repo {
file: &File,
size: ThumbnailSize,
) -> RepoResult<Thumbnail> {
let thumb_storage = self.get_thumbnail_storage()?;
let (height, width) = size.dimensions();
let thumb = file
.create_thumbnail([size])
.create_thumbnail(&self.main_storage, [size])
.await?
.pop()
.ok_or_else(|| RepoError::from("Failed to create thumbnail"))?;
let thumbnail = self
.store_single_thumbnail(file.encoded_cd(), thumb_storage, height, width, thumb)
.store_single_thumbnail(file.encoded_cd(), height, width, thumb)
.await?;
Ok(thumbnail)
@ -248,7 +223,6 @@ impl Repo {
async fn store_single_thumbnail(
&self,
file_hash: String,
thumb_storage: &ThumbnailStore,
height: u32,
width: u32,
thumb: mediarepo_core::thumbnailer::Thumbnail,
@ -256,7 +230,8 @@ impl Repo {
let mut buf = Vec::new();
thumb.write_png(&mut buf)?;
let size = Dimensions { height, width };
let path = thumb_storage
let path = self
.thumbnail_storage
.add_thumbnail(&file_hash, size.clone(), &buf)
.await?;
@ -395,16 +370,14 @@ impl Repo {
#[inline]
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_main_store_size(&self) -> RepoResult<u64> {
let main_storage = self.get_main_storage()?;
main_storage.get_size().await
self.main_storage.get_size().await
}
/// Returns the size of the thumbnail storage
#[inline]
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_thumb_store_size(&self) -> RepoResult<u64> {
let thumb_storage = self.get_thumbnail_storage()?;
thumb_storage.get_size().await
self.thumbnail_storage.get_size().await
}
/// Returns all entity counts
@ -419,16 +392,14 @@ impl Repo {
tracing::info!("Converting content descriptors to v2 format...");
let mut converted_count = 0;
let thumb_store = self.get_thumbnail_storage()?;
let file_store = self.get_main_storage()?;
for mut cd in cds {
if is_v1_content_descriptor(cd.descriptor()) {
let src_cd = cd.descriptor().to_owned();
cd.convert_v1_to_v2().await?;
let dst_cd = cd.descriptor().to_owned();
file_store.rename_entry(&src_cd, &dst_cd).await?;
thumb_store
self.main_storage.rename_file(&src_cd, &dst_cd).await?;
self.thumbnail_storage
.rename_parent(
encode_content_descriptor(&src_cd),
encode_content_descriptor(&dst_cd),
@ -441,24 +412,6 @@ impl Repo {
Ok(())
}
#[tracing::instrument(level = "trace", skip(self))]
fn get_main_storage(&self) -> RepoResult<&Storage> {
if let Some(storage) = &self.main_storage {
Ok(storage)
} else {
Err(RepoError::from("No main storage configured."))
}
}
#[tracing::instrument(level = "trace", skip(self))]
fn get_thumbnail_storage(&self) -> RepoResult<&ThumbnailStore> {
if let Some(storage) = &self.thumbnail_storage {
Ok(storage)
} else {
Err(RepoError::from("No thumbnail storage configured."))
}
}
}
fn process_filters_with_tag_ids(

@ -1,211 +0,0 @@
use crate::content_descriptor::ContentDescriptor;
use mediarepo_core::error::RepoResult;
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_database::entities::storage;
use mediarepo_database::entities::storage::ActiveModel as ActiveStorage;
use mediarepo_database::entities::storage::Model as StorageModel;
use sea_orm::prelude::*;
use sea_orm::{DatabaseConnection, NotSet, Set};
use std::fmt::Debug;
use std::path::PathBuf;
use tokio::fs;
use tokio::io::{AsyncRead, BufReader};
#[derive(Clone)]
pub struct Storage {
db: DatabaseConnection,
model: StorageModel,
store: FileHashStore,
}
impl Storage {
    /// Wraps a storage database row in a `Storage` handle, creating the
    /// backing `FileHashStore` rooted at the row's configured path.
    #[tracing::instrument(level = "trace")]
    fn new(db: DatabaseConnection, model: StorageModel) -> Self {
        let path = PathBuf::from(&model.path);
        Self {
            store: FileHashStore::new(path),
            db,
            model,
        }
    }

    /// Returns all available storages
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<Self>> {
        let storages: Vec<storage::Model> = storage::Entity::find().all(&db).await?;
        let storages = storages
            .into_iter()
            .map(|s| Self::new(db.clone(), s))
            .collect();

        Ok(storages)
    }

    /// Returns the storage by id
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
        if let Some(model) = storage::Entity::find_by_id(id).one(&db).await? {
            let storage = Self::new(db, model);
            Ok(Some(storage))
        } else {
            Ok(None)
        }
    }

    /// Returns the storage by name
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn by_name<S: AsRef<str> + Debug>(
        db: DatabaseConnection,
        name: S,
    ) -> RepoResult<Option<Self>> {
        if let Some(model) = storage::Entity::find()
            .filter(storage::Column::Name.eq(name.as_ref()))
            .one(&db)
            .await?
        {
            let storage = Self::new(db, model);
            Ok(Some(storage))
        } else {
            Ok(None)
        }
    }

    /// Returns the storage by path
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn by_path<S: ToString + Debug>(
        db: DatabaseConnection,
        path: S,
    ) -> RepoResult<Option<Self>> {
        if let Some(model) = storage::Entity::find()
            .filter(storage::Column::Path.eq(path.to_string()))
            .one(&db)
            .await?
        {
            let storage = Self::new(db, model);
            Ok(Some(storage))
        } else {
            Ok(None)
        }
    }

    /// Creates a new active storage and also creates the associated directory
    /// if it doesn't exist yet.
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn create<S1: ToString + Debug, S2: ToString + Debug>(
        db: DatabaseConnection,
        name: S1,
        path: S2,
    ) -> RepoResult<Self> {
        let path = path.to_string();
        let name = name.to_string();
        let path_buf = PathBuf::from(&path);

        if !path_buf.exists() {
            fs::create_dir(path_buf).await?;
        }
        // id stays NotSet so the database assigns the next free id on insert
        let storage = ActiveStorage {
            id: NotSet,
            name: Set(name),
            path: Set(path),
            ..Default::default()
        };
        let model = storage.insert(&db).await?;

        Ok(Self::new(db, model))
    }

    /// Returns the unique identifier of this storage
    pub fn id(&self) -> i64 {
        self.model.id
    }

    /// Returns the name of the storage
    pub fn name(&self) -> &String {
        &self.model.name
    }

    /// Returns the path of the storage
    pub fn path(&self) -> &String {
        &self.model.path
    }

    /// Sets a new name for the storage
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn set_name<S: ToString + Debug>(&self, name: S) -> RepoResult<()> {
        let mut active_storage: ActiveStorage = self.get_active_model();
        active_storage.name = Set(name.to_string());
        active_storage.update(&self.db).await?;

        Ok(())
    }

    /// Sets a new path for the storage. This will only update the database record
    /// so if the physical part of the storage is already created it needs to be migrated first
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn set_path<S: ToString + Debug>(&mut self, path: S) -> RepoResult<()> {
        let mut active_storage: ActiveStorage = self.get_active_model();
        active_storage.path = Set(path.to_string());
        let storage = active_storage.update(&self.db).await?;
        // keep the cached model in sync with the freshly updated row
        self.model = storage;

        Ok(())
    }

    /// Checks if the storage exists on the harddrive
    pub fn exists(&self) -> bool {
        let path = PathBuf::from(&self.path());
        path.exists()
    }

    /// Adds a thumbnail
    // NOTE(review): despite the doc above, this stores any file entry in the
    // hash store and returns its content descriptor, reusing an existing
    // descriptor row when the content already exists — confirm intended name.
    #[tracing::instrument(level = "debug", skip(self, reader))]
    pub async fn store_entry<R: AsyncRead + Unpin>(
        &self,
        reader: R,
    ) -> RepoResult<ContentDescriptor> {
        let descriptor = self.store.add_file(reader, None).await?;
        if let Some(hash) = ContentDescriptor::by_value(self.db.clone(), &descriptor).await? {
            Ok(hash)
        } else {
            ContentDescriptor::add(self.db.clone(), descriptor).await
        }
    }

    /// Renames an entry
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn rename_entry(
        &self,
        src_descriptor: &[u8],
        dst_descriptor: &[u8],
    ) -> RepoResult<()> {
        self.store.rename_file(src_descriptor, dst_descriptor).await
    }

    /// Returns the buf reader to the given hash
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn get_file_reader(
        &self,
        descriptor: &[u8],
    ) -> RepoResult<BufReader<tokio::fs::File>> {
        let (_ext, reader) = self.store.get_file(descriptor).await?;

        Ok(reader)
    }

    /// Returns the size of the storage
    #[inline]
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn get_size(&self) -> RepoResult<u64> {
        self.store.get_size().await
    }

    /// Returns the active model with only the ID filled so saves always perform an update
    fn get_active_model(&self) -> ActiveStorage {
        ActiveStorage {
            id: Set(self.model.id),
            ..Default::default()
        }
    }
}

@ -1,11 +1,11 @@
use mediarepo_core::bromine::prelude::*;
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::mediarepo_api::types::misc::InfoResponse;
use mediarepo_core::settings::Settings;
use mediarepo_core::settings::{PortSetting, Settings};
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey, SizeMetadataKey};
use mediarepo_model::repo::Repo;
use mediarepo_model::type_keys::RepoKey;
use std::net::{IpAddr, SocketAddr};
use std::net::SocketAddr;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::net::TcpListener;
@ -17,14 +17,22 @@ mod utils;
#[tracing::instrument(skip(settings, repo))]
pub fn start_tcp_server(
ip: IpAddr,
port_range: (u16, u16),
repo_path: PathBuf,
settings: Settings,
repo: Repo,
) -> RepoResult<(String, JoinHandle<()>)> {
let port = port_check::free_local_port_in_range(port_range.0, port_range.1)
.ok_or_else(|| RepoError::PortUnavailable)?;
let port = match &settings.server.tcp.port {
PortSetting::Fixed(p) => {
if port_check::is_local_port_free(*p) {
*p
} else {
return Err(RepoError::PortUnavailable);
}
}
PortSetting::Range((l, r)) => port_check::free_local_port_in_range(*l, *r)
.ok_or_else(|| RepoError::PortUnavailable)?,
};
let ip = settings.server.tcp.listen_address.to_owned();
let address = SocketAddr::new(ip, port);
let address_string = address.to_string();

@ -234,11 +234,9 @@ impl FilesNamespace {
let repo = get_repo_from_context(ctx).await;
let file = file_by_identifier(request.id, &repo).await?;
let mut reader = file.get_reader().await?;
let mut buf = Vec::new();
reader.read_to_end(&mut buf).await?;
let bytes = repo.get_file_bytes(&file).await?;
ctx.emit_to(Self::name(), "read_file", BytePayload::new(buf))
ctx.emit_to(Self::name(), "read_file", BytePayload::new(bytes))
.await?;
Ok(())

@ -109,9 +109,7 @@ async fn get_frontend_state_path(ctx: &Context) -> IPCResult<PathBuf> {
let data = ctx.data.read().await;
let settings = data.get::<SettingsKey>().unwrap();
let repo_path = data.get::<RepoPathKey>().unwrap();
let state_path = repo_path
.join(PathBuf::from(&settings.database_path).parent().unwrap())
.join("frontend-state.json");
let state_path = settings.paths.frontend_state_file_path(&repo_path);
Ok(state_path)
}

@ -52,7 +52,7 @@ pub async fn calculate_size(size_type: &SizeType, ctx: &Context) -> RepoResult<u
SizeType::FileFolder => repo.get_main_store_size().await?,
SizeType::ThumbFolder => repo.get_thumb_store_size().await?,
SizeType::DatabaseFile => {
let db_path = repo_path.join(settings.database_path);
let db_path = settings.paths.db_file_path(&repo_path);
let database_metadata = fs::metadata(db_path).await?;
database_metadata.len()

@ -1,5 +0,0 @@
/// Settings file location, relative to the repository root.
pub const SETTINGS_PATH: &str = "./repo.toml";
/// Name of the default (main) file storage entry.
pub const DEFAULT_STORAGE_NAME: &str = "Main";
/// Directory of the main file store, relative to the repository root.
pub const DEFAULT_STORAGE_PATH: &str = "files";
/// Name of the thumbnail storage entry.
pub const THUMBNAIL_STORAGE_NAME: &str = "Thumbnails";
/// Directory of the thumbnail store, relative to the repository root.
pub const THUMBNAIL_STORAGE_PATH: &str = "./thumbnails";

@ -3,11 +3,12 @@ use rolling_file::RollingConditionBasic;
use std::fs;
use std::path::PathBuf;
use mediarepo_core::settings::LoggingSettings;
use tracing::Level;
use tracing_appender::non_blocking::{NonBlocking, WorkerGuard};
use tracing_flame::FlameLayer;
use tracing_log::LogTracer;
use tracing_subscriber::filter::{self};
use tracing_subscriber::filter::{self, Targets};
use tracing_subscriber::fmt::format::FmtSpan;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
@ -19,7 +20,7 @@ use tracing_subscriber::{
#[allow(dyn_drop)]
pub type DropGuard = Box<dyn Drop>;
pub fn init_tracing(repo_path: &PathBuf) -> Vec<DropGuard> {
pub fn init_tracing(repo_path: &PathBuf, log_cfg: &LoggingSettings) -> Vec<DropGuard> {
LogTracer::init().expect("failed to subscribe to log entries");
let log_path = repo_path.join("logs");
let mut guards = Vec::new();
@ -54,12 +55,7 @@ pub fn init_tracing(repo_path: &PathBuf) -> Vec<DropGuard> {
.pretty()
.with_ansi(false)
.with_span_events(FmtSpan::NONE)
.with_filter(
filter::Targets::new()
.with_target("sqlx", Level::WARN)
.with_target("sea_orm", Level::TRACE)
.with_target("mediarepo_database", Level::TRACE),
);
.with_filter(get_sql_targets(log_cfg.trace_sql));
let (bromine_writer, guard) = get_bromine_log_writer(&log_path);
guards.push(Box::new(guard) as DropGuard);
@ -69,7 +65,7 @@ pub fn init_tracing(repo_path: &PathBuf) -> Vec<DropGuard> {
.pretty()
.with_ansi(false)
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
.with_filter(filter::Targets::new().with_target("bromine", Level::DEBUG));
.with_filter(get_bromine_targets(log_cfg.trace_api_calls));
let (app_log_writer, guard) = get_application_log_writer(&log_path);
guards.push(Box::new(guard) as DropGuard);
@ -79,16 +75,7 @@ pub fn init_tracing(repo_path: &PathBuf) -> Vec<DropGuard> {
.pretty()
.with_ansi(false)
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
.with_filter(
filter::Targets::new()
.with_target("bromine", Level::WARN)
.with_target("sqlx", Level::WARN)
.with_target("sea_orm", Level::INFO)
.with_target("tokio", Level::WARN)
.with_target("console_subscriber", Level::INFO)
.with_target("h2", Level::INFO)
.with_default(Level::DEBUG),
);
.with_filter(get_app_targets(log_cfg.level.clone().into()));
let registry = Registry::default()
.with(stdout_layer)
@ -144,6 +131,36 @@ fn get_application_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard)
)
}
fn get_app_targets(level: Option<Level>) -> Targets {
filter::Targets::new()
.with_target("bromine", Level::WARN)
.with_target("sqlx", Level::WARN)
.with_target("sea_orm", Level::WARN)
.with_target("tokio", Level::WARN)
.with_target("console_subscriber", Level::ERROR)
.with_target("h2", Level::WARN)
.with_default(level)
}
/// Builds the filter targets for the SQL log. With `trace_sql` disabled an
/// empty filter (no default level) is returned so nothing gets logged.
fn get_sql_targets(trace_sql: bool) -> Targets {
    if !trace_sql {
        return filter::Targets::new().with_default(None);
    }

    filter::Targets::new()
        .with_target("sqlx", Level::WARN)
        .with_target("sea_orm", Level::TRACE)
        .with_target("mediarepo_database", Level::TRACE)
}
/// Builds the filter targets for the IPC (bromine) log. With
/// `trace_bromine` disabled an empty filter silences the log entirely.
fn get_bromine_targets(trace_bromine: bool) -> Targets {
    if !trace_bromine {
        return filter::Targets::new().with_default(None);
    }

    filter::Targets::new().with_target("bromine", Level::DEBUG)
}
pub fn init_tracing_flame() -> DropGuard {
let fmt_layer = fmt::Layer::default();
let (flame_layer, guard) = FlameLayer::with_file("./tracing.folded").unwrap();

@ -7,21 +7,13 @@ use tokio::runtime::Runtime;
use mediarepo_core::error::RepoResult;
use mediarepo_core::futures;
use mediarepo_core::settings::Settings;
use mediarepo_core::utils::parse_tags_file;
use mediarepo_model::file::{File as RepoFile, File};
use mediarepo_core::settings::{PathSettings, Settings};
use mediarepo_model::repo::Repo;
use mediarepo_socket::start_tcp_server;
use num_integer::Integer;
use std::env;
use crate::constants::{
DEFAULT_STORAGE_NAME, DEFAULT_STORAGE_PATH, SETTINGS_PATH, THUMBNAIL_STORAGE_NAME,
THUMBNAIL_STORAGE_PATH,
};
use crate::utils::{create_paths_for_repo, get_repo, load_settings};
mod constants;
mod logging;
mod utils;
@ -50,17 +42,6 @@ enum SubCommand {
force: bool,
},
/// Imports file from a folder (by glob pattern) into the repository
Import {
/// The path to the folder where the files are located
#[structopt()]
folder_path: String,
/// If imported files should be deleted after import
#[structopt(long)]
delete: bool,
},
/// Starts the event server for the selected repository
Start,
}
@ -69,27 +50,33 @@ fn main() -> RepoResult<()> {
let mut opt: Opt = Opt::from_args();
opt.repo = env::current_dir().unwrap().join(opt.repo);
if opt.repo.exists() {
let settings = if opt.repo.exists() {
opt.repo = opt.repo.canonicalize().unwrap();
}
load_settings(&opt.repo)?
} else {
Settings::default()
};
let mut guards = Vec::new();
if opt.profile {
guards.push(logging::init_tracing_flame());
} else {
guards.append(&mut logging::init_tracing(&opt.repo));
guards.append(&mut logging::init_tracing(&opt.repo, &settings.logging));
}
match opt.cmd.clone() {
let result = match opt.cmd.clone() {
SubCommand::Init { force } => get_single_thread_runtime().block_on(init(opt, force)),
SubCommand::Start => get_multi_thread_runtime().block_on(start_server(opt)),
SubCommand::Import {
folder_path,
delete,
} => get_single_thread_runtime().block_on(import(opt, folder_path, delete)),
}?;
SubCommand::Start => get_multi_thread_runtime().block_on(start_server(opt, settings)),
};
Ok(())
match result {
Ok(_) => Ok(()),
Err(e) => {
tracing::error!("a critical error occurred when running the daemon: {}", e);
Err(e)
}
}
}
fn get_single_thread_runtime() -> Runtime {
@ -109,19 +96,15 @@ fn get_multi_thread_runtime() -> Runtime {
.unwrap()
}
async fn init_repo(opt: &Opt) -> RepoResult<(Settings, Repo)> {
let settings = load_settings(&opt.repo.join(SETTINGS_PATH)).await?;
let mut repo = get_repo(&opt.repo.join(&settings.database_path).to_str().unwrap()).await?;
async fn init_repo(opt: &Opt, paths: &PathSettings) -> RepoResult<Repo> {
let repo = get_repo(&opt.repo, paths).await?;
repo.set_main_storage(&settings.default_file_store).await?;
repo.set_thumbnail_storage(opt.repo.join(&settings.thumbnail_store))
.await?;
Ok((settings, repo))
Ok(repo)
}
/// Starts the server
async fn start_server(opt: Opt) -> RepoResult<()> {
let (settings, repo) = init_repo(&opt).await?;
async fn start_server(opt: Opt, settings: Settings) -> RepoResult<()> {
let repo = init_repo(&opt, &settings.paths).await?;
let mut handles = Vec::new();
#[cfg(unix)]
@ -136,13 +119,7 @@ async fn start_server(opt: Opt) -> RepoResult<()> {
handles.push(handle);
}
let (address, tcp_handle) = start_tcp_server(
settings.listen_address.clone(),
settings.port_range,
opt.repo.clone(),
settings,
repo,
)?;
let (address, tcp_handle) = start_tcp_server(opt.repo.clone(), settings, repo)?;
handles.push(tcp_handle);
fs::write(opt.repo.join("repo.tcp"), &address.into_bytes()).await?;
futures::future::join_all(handles.into_iter()).await;
@ -153,148 +130,26 @@ async fn start_server(opt: Opt) -> RepoResult<()> {
/// Initializes an empty repository
async fn init(opt: Opt, force: bool) -> RepoResult<()> {
log::info!("Initializing repository at {:?}", opt.repo);
if force {
log::debug!("Removing old repository");
fs::remove_dir_all(&opt.repo).await?;
}
let settings = Settings::default();
log::debug!("Creating paths");
create_paths_for_repo(
&opt.repo,
&settings,
DEFAULT_STORAGE_PATH,
THUMBNAIL_STORAGE_PATH,
)
.await?;
let db_path = opt.repo.join(&settings.database_path);
if db_path.exists() {
create_paths_for_repo(&opt.repo, &settings.paths).await?;
if settings.paths.db_file_path(&opt.repo).exists() {
panic!("Database already exists in location. Use --force with init to delete everything and start a new repository");
}
log::debug!("Creating repo");
let repo = get_repo(&db_path.to_str().unwrap()).await?;
let storage_path = opt.repo.join(DEFAULT_STORAGE_PATH).canonicalize().unwrap();
log::debug!("Adding storage");
repo.add_storage(DEFAULT_STORAGE_NAME, storage_path.to_str().unwrap())
.await?;
let thumb_storage_path = opt
.repo
.join(THUMBNAIL_STORAGE_PATH)
.canonicalize()
.unwrap();
repo.add_storage(THUMBNAIL_STORAGE_NAME, thumb_storage_path.to_str().unwrap())
.await?;
let settings_string = settings.to_toml_string()?;
log::debug!("Writing settings");
fs::write(opt.repo.join(SETTINGS_PATH), &settings_string.into_bytes()).await?;
log::info!("Repository initialized");
Ok(())
}
let _repo = get_repo(&opt.repo, &settings.paths).await?;
/// Imports files from a source into the database
async fn import(opt: Opt, path: String, delete_files: bool) -> RepoResult<()> {
let (_s, repo) = init_repo(&opt).await?;
log::info!("Importing");
let paths: Vec<PathBuf> = glob::glob(&path)
.unwrap()
.into_iter()
.filter_map(|r| r.ok())
.filter(|e| e.is_file())
.collect();
for path in paths {
if let Err(e) = import_single_image(&path, &repo).await {
log::error!("Import failed: {:?}", e);
if delete_files {
log::info!("Deleting file {:?}", path);
let _ = fs::remove_file(&path).await;
}
} else {
if delete_files {
log::info!("Deleting file {:?}", path);
let _ = fs::remove_file(&path).await;
}
}
}
log::info!("Creating thumbnails...");
let mut files = repo.files().await?;
for _ in 0..(files.len().div_ceil(&64)) {
futures::future::join_all(
(0..64)
.filter_map(|_| files.pop())
.map(|f| create_file_thumbnails(&repo, f)),
)
.await
.into_iter()
.filter_map(|r| r.err())
.for_each(|e| log::error!("Failed to create thumbnail: {:?}", e));
}
Ok(())
}
/// Creates thumbnails of all sizes
async fn import_single_image(path: &PathBuf, repo: &Repo) -> RepoResult<()> {
log::info!("Importing file");
let file = repo.add_file_by_path(path.clone()).await?;
log::info!("Adding tags");
let tags_path = PathBuf::from(format!("{}{}", path.to_str().unwrap(), ".txt"));
add_tags_from_tags_file(tags_path, repo, file).await?;
Ok(())
}
async fn add_tags_from_tags_file(
tags_path: PathBuf,
repo: &Repo,
file: RepoFile,
) -> RepoResult<()> {
log::info!("Adding tags");
if tags_path.exists() {
let mut tags = parse_tags_file(tags_path).await?;
log::info!("Found {} tags in the tag file", tags.len());
let resolved_tags = repo.tags_by_names(tags.clone()).await?;
tags.retain(|tag| {
resolved_tags
.iter()
.find(|t| if let (Some(ns1), Some(ns2)) = (t.namespace(), &tag.0) {
*ns1.name() == *ns2
} else { t.namespace().is_none() && tag.0.is_none() } && *t.name() == *tag.1)
.is_none()
});
let mut tag_ids: Vec<i64> = resolved_tags.into_iter().map(|t| t.id()).collect();
log::info!("Existing tag_ids count is {}", tag_ids.len());
log::info!("{} tags need to be created", tags.len());
for (namespace, name) in tags {
let tag = if let Some(namespace) = namespace {
log::info!("Adding namespaced tag '{}:{}'", namespace, name);
repo.add_namespaced_tag(name, namespace).await?
} else {
log::info!("Adding unnamespaced tag '{}'", name);
repo.add_unnamespaced_tag(name).await?
};
tag_ids.push(tag.id());
}
log::info!("Mapping {} tags to the file", tag_ids.len());
if !tag_ids.is_empty() {
file.add_tags(tag_ids).await?;
}
} else {
log::info!("No tags file '{:?}' found", tags_path);
}
Ok(())
}
log::debug!("Writing settings");
settings.save(&opt.repo)?;
#[tracing::instrument(skip(repo, file))]
async fn create_file_thumbnails(repo: &Repo, file: File) -> RepoResult<()> {
let file_thumbnails = repo.get_file_thumbnails(file.cd()).await?;
log::info!("Repository initialized");
if file_thumbnails.is_empty() {
repo.create_thumbnails_for_file(&file).await?;
}
Ok(())
}

@ -1,37 +1,49 @@
use mediarepo_core::error::RepoResult;
use mediarepo_core::settings::Settings;
use mediarepo_core::settings::v1::SettingsV1;
use mediarepo_core::settings::{PathSettings, Settings};
use mediarepo_model::repo::Repo;
use std::path::PathBuf;
use tokio::fs;
/// Loads the settings from a toml path
pub async fn load_settings(path: &PathBuf) -> RepoResult<Settings> {
let contents = fs::read_to_string(path).await?;
Settings::from_toml_string(&contents)
/// Loads the settings from the repository root, transparently migrating a
/// legacy v1 `repo.toml` to the current format and persisting the result.
pub fn load_settings(root_path: &PathBuf) -> RepoResult<Settings> {
    let toml_contents = std::fs::read_to_string(root_path.join("repo.toml"))?;

    match SettingsV1::from_toml_string(&toml_contents) {
        Ok(legacy_settings) => {
            // Legacy format detected: convert and write back the new layout.
            let migrated = Settings::from_v1(legacy_settings)?;
            migrated.save(root_path)?;
            Ok(migrated)
        }
        Err(_) => Settings::read(root_path),
    }
}
pub async fn get_repo(db_path: &str) -> RepoResult<Repo> {
Repo::connect(format!("sqlite://{}", db_path)).await
/// Opens the repository: connects to the sqlite database and attaches the
/// file and thumbnail directories derived from the path settings.
pub async fn get_repo(root_path: &PathBuf, path_settings: &PathSettings) -> RepoResult<Repo> {
    let db_file = path_settings.db_file_path(root_path);
    let database_url = format!("sqlite://{}", db_file.to_string_lossy());
    let files_dir = path_settings.files_dir(root_path);
    let thumbs_dir = path_settings.thumbs_dir(root_path);

    Repo::connect(database_url, files_dir, thumbs_dir).await
}
pub async fn create_paths_for_repo(
root: &PathBuf,
settings: &Settings,
storage_path: &str,
thumbnail_path: &str,
) -> RepoResult<()> {
pub async fn create_paths_for_repo(root: &PathBuf, settings: &PathSettings) -> RepoResult<()> {
if !root.exists() {
fs::create_dir_all(&root).await?;
}
let db_path = root.join(&settings.database_path);
let db_path = settings.database_dir(root);
if !db_path.exists() {
fs::create_dir_all(db_path.parent().unwrap()).await?;
fs::create_dir_all(db_path).await?;
}
let storage_path = root.join(storage_path);
if !storage_path.exists() {
fs::create_dir_all(storage_path).await?;
let files_path = settings.files_dir(root);
if !files_path.exists() {
fs::create_dir_all(files_path).await?;
}
let thumbnail_path = root.join(thumbnail_path);
let thumbnail_path = settings.thumbs_dir(root);
if !thumbnail_path.exists() {
fs::create_dir_all(thumbnail_path).await?;
}

Loading…
Cancel
Save