Merge pull request #8 from Trivernis/develop

Version 0.13.2
Julius Riegel 3 years ago committed by GitHub
commit b1ca9a9c57

@@ -55,7 +55,7 @@ jobs:
       - uses: vimtor/action-zip@v1
         with:
-          files: target/release/mediarepo-daemon target/release/mediarepo-daemon.exe
+          files: mediarepo-daemon/target/release/mediarepo-daemon mediarepo-daemon/target/release/mediarepo-daemon.exe
           dest: mediarepo-daemon-${{ runner.os }}.zip
       - name: Upload Release Asset
@@ -128,7 +128,7 @@ jobs:
       - uses: vimtor/action-zip@v1
         with:
-          files: src-tauri/target/release/bundle src-tauri/target/release/mediarepo-ui src-tauri/target/release/mediarepo-ui.exe
+          files: mediarepo-ui/src-tauri/target/release/bundle mediarepo-ui/src-tauri/target/release/mediarepo-ui mediarepo-ui/src-tauri/target/release/mediarepo-ui.exe
           dest: mediarepo-ui-${{ runner.os }}.zip
       - name: Upload Release Asset

@ -1,6 +1,6 @@
[package] [package]
name = "mediarepo-api" name = "mediarepo-api"
version = "0.27.0" version = "0.28.0"
edition = "2018" edition = "2018"
license = "gpl-3" license = "gpl-3"

@@ -1,6 +1,7 @@
+use std::collections::HashMap;
 use crate::client_api::error::ApiResult;
 use crate::client_api::IPCApi;
-use crate::types::files::{GetFileTagsRequest, GetFilesTagsRequest};
+use crate::types::files::{GetFileTagsRequest, GetFilesTagsRequest, GetFileTagMapRequest};
 use crate::types::identifier::FileIdentifier;
 use crate::types::tags::{ChangeFileTagsRequest, NamespaceResponse, TagResponse};
 use async_trait::async_trait;
@@ -72,6 +73,12 @@ impl TagApi {
         .await
     }

+    /// Returns a map from files to assigned tags
+    #[tracing::instrument(level = "debug", skip_all)]
+    pub async fn get_file_tag_map(&self, cds: Vec<String>) -> ApiResult<HashMap<String, Vec<TagResponse>>> {
+        self.emit_and_get("file_tag_map", GetFileTagMapRequest { cds }, Some(Duration::from_secs(10))).await
+    }
+
     /// Creates a new tag and returns the created tag object
     #[tracing::instrument(level = "debug", skip(self))]
     pub async fn create_tags(&self, tags: Vec<String>) -> ApiResult<Vec<TagResponse>> {
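
For reference, a minimal caller sketch for the new file_tag_map endpoint. The helper name is hypothetical and the module paths are assumed from this diff's use statements:

use std::collections::HashMap;

use crate::client_api::error::ApiResult;
use crate::client_api::tag::TagApi;
use crate::types::tags::TagResponse;

/// Hypothetical helper: prints how many tags map to each content descriptor.
/// `tag_api` is assumed to come from an already-connected client.
pub async fn print_tag_counts(tag_api: &TagApi, cds: Vec<String>) -> ApiResult<()> {
    let map: HashMap<String, Vec<TagResponse>> = tag_api.get_file_tag_map(cds).await?;
    for (cd, tags) in map {
        println!("{}: {} tags", cd, tags.len());
    }
    Ok(())
}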

@@ -1,3 +1,4 @@
+use std::collections::HashMap;
 use crate::tauri_plugin::commands::ApiAccess;
 use crate::tauri_plugin::error::PluginResult;
 use crate::types::identifier::FileIdentifier;
@@ -41,6 +42,14 @@ pub async fn get_tags_for_files(
     Ok(tags)
 }

+#[tauri::command]
+pub async fn get_file_tag_map(cds: Vec<String>, api_state: ApiAccess<'_>) -> PluginResult<HashMap<String, Vec<TagResponse>>> {
+    let api = api_state.api().await?;
+    let mappings = api.tag.get_file_tag_map(cds).await?;
+
+    Ok(mappings)
+}
+
 #[tauri::command]
 pub async fn create_tags(
     api_state: ApiAccess<'_>,

@@ -69,7 +69,8 @@ impl<R: Runtime> MediarepoPlugin<R> {
                 get_file_metadata,
                 run_job,
                 update_file_status,
-                delete_file
+                delete_file,
+                get_file_tag_map
             ]),
         }
     }

@@ -30,6 +30,11 @@ pub struct GetFilesTagsRequest {
     pub cds: Vec<String>,
 }

+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct GetFileTagMapRequest {
+    pub cds: Vec<String>,
+}
+
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct FileBasicDataResponse {
     pub id: i64,

@@ -11,4 +11,5 @@ pub enum JobType {
     MigrateContentDescriptors,
     CalculateSizes,
     CheckIntegrity,
+    Vacuum,
 }
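
The new Vacuum variant is served by the JobDao::vacuum routine added further down in this commit. A hedged dispatch sketch; the real handler lives in the suppressed mediarepo-socket diff, so every name here outside the diff is an assumption:

use mediarepo_api::types::jobs::JobType; // module path assumed
use mediarepo_core::error::RepoResult;
use mediarepo_logic::dao::repo::Repo;
use mediarepo_logic::dao::DaoProvider;

/// Hypothetical dispatcher: runs the maintenance routine matching a job type.
async fn run_job(repo: &Repo, job_type: JobType) -> RepoResult<()> {
    match job_type {
        JobType::MigrateContentDescriptors => repo.job().migrate_content_descriptors().await,
        JobType::CheckIntegrity => repo.job().check_integrity().await,
        JobType::Vacuum => repo.job().vacuum().await,
        _ => Ok(()), // remaining variants are handled elsewhere
    }
}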

File diff suppressed because it is too large.

@@ -1,10 +1,10 @@
 [workspace]
-members = ["mediarepo-core", "mediarepo-database", "mediarepo-model", "mediarepo-socket", "."]
-default-members = ["mediarepo-core", "mediarepo-database", "mediarepo-model", "mediarepo-socket", "."]
+members = ["mediarepo-core", "mediarepo-database", "mediarepo-logic", "mediarepo-socket", "."]
+default-members = ["mediarepo-core", "mediarepo-database", "mediarepo-logic", "mediarepo-socket", "."]

 [package]
 name = "mediarepo-daemon"
-version = "0.13.1"
+version = "0.13.2"
 edition = "2018"
 license = "gpl-3"
 repository = "https://github.com/Trivernis/mediarepo-daemon"
@@ -31,8 +31,8 @@ log = "^0.4.14"
 [dependencies.mediarepo-core]
 path = "./mediarepo-core"

-[dependencies.mediarepo-model]
-path = "./mediarepo-model"
+[dependencies.mediarepo-logic]
+path = "mediarepo-logic"

 [dependencies.mediarepo-socket]
 path = "./mediarepo-socket"
@@ -47,4 +47,4 @@ features = ["env-filter", "ansi", "json"]

 [features]
 default = ["ffmpeg"]
-ffmpeg = ["mediarepo-core/ffmpeg", "mediarepo-model/ffmpeg"]
+ffmpeg = ["mediarepo-core/ffmpeg", "mediarepo-logic/ffmpeg"]

@@ -22,7 +22,7 @@ data-encoding = "^2.3.2"
 tokio-graceful-shutdown = "^0.4.3"

 [dependencies.thumbnailer]
-version = "^0.2.4"
+version = "^0.2.5"
 default-features = false

 [dependencies.sea-orm]

@@ -1,6 +1,7 @@
-use crate::error::RepoResult;
 use multihash::{Code, MultihashDigest};
+
+use crate::error::RepoResult;

 /// Creates a new content descriptor for the given file
 pub fn create_content_descriptor(bytes: &[u8]) -> Vec<u8> {
     Code::Sha2_256.digest(bytes).to_bytes()

@@ -1,8 +1,10 @@
-use crate::settings::Settings;
-use sea_orm::DatabaseConnection;
 use std::sync::Arc;
+
+use sea_orm::DatabaseConnection;
 use tokio::sync::Mutex;
+
+use crate::settings::Settings;

 #[derive(Clone, Default)]
 pub struct Context {
     pub settings: Arc<Mutex<Settings>>,

@@ -1,5 +1,6 @@
-use sea_orm::DbErr;
 use std::fmt::{Debug, Formatter};
+
+use sea_orm::DbErr;
 use thiserror::Error;

 pub type RepoResult<T> = Result<T, RepoError>;
@@ -37,8 +38,11 @@ pub enum RepoError {
     #[error("failed to decode data {0}")]
     Decode(#[from] data_encoding::DecodeError),

-    #[error("Failed to read repo.toml configuration file {0}")]
+    #[error("failed to read repo.toml configuration file {0}")]
     Config(#[from] config::ConfigError),
+
+    #[error("the database file is corrupted {0}")]
+    Corrupted(String),
 }

 #[derive(Error, Debug)]
#[derive(Error, Debug)] #[derive(Error, Debug)]

@@ -1,5 +1,6 @@
 use std::io::Result;
 use std::path::{Path, PathBuf};
+
 use tokio::fs::{File, OpenOptions};

 /// A file that only exists while being owned.

@@ -1,11 +1,13 @@
-use crate::content_descriptor::{create_content_descriptor, encode_content_descriptor};
-use crate::error::RepoResult;
-use crate::utils::get_folder_size;
 use std::path::PathBuf;
+
 use tokio::fs;
 use tokio::fs::{File, OpenOptions};
 use tokio::io::{AsyncRead, AsyncReadExt, BufReader};
+
+use crate::content_descriptor::{create_content_descriptor, encode_content_descriptor};
+use crate::error::RepoResult;
+use crate::utils::get_folder_size;

 #[derive(Clone, Debug)]
 pub struct FileHashStore {
     path: PathBuf,

@@ -1,12 +1,14 @@
-use crate::error::RepoResult;
-use crate::utils::get_folder_size;
 use std::fmt::Debug;
 use std::io::Result;
 use std::path::PathBuf;
+
 use tokio::fs;
 use tokio::fs::OpenOptions;
 use tokio::io::{AsyncWriteExt, BufWriter};
+
+use crate::error::RepoResult;
+use crate::utils::get_folder_size;

 #[derive(Clone, Debug)]
 pub struct ThumbnailStore {
     path: PathBuf,

@@ -1,19 +1,21 @@
-mod logging;
-mod paths;
-mod server;
-pub mod v1;
-
-use crate::error::RepoResult;
-use crate::settings::v1::SettingsV1;
+use std::fs;
+use std::path::PathBuf;
+
 use config::{Config, FileFormat};
 use serde::{Deserialize, Serialize};
-use std::fs;
-use std::path::PathBuf;

 pub use logging::*;
 pub use paths::*;
 pub use server::*;
+
+use crate::error::RepoResult;
+use crate::settings::v1::SettingsV1;
+
+mod logging;
+mod paths;
+mod server;
+pub mod v1;

 #[derive(Clone, Debug, Deserialize, Serialize, Default)]
 pub struct Settings {
     pub server: ServerSettings,

@@ -1,6 +1,7 @@
-use serde::{Deserialize, Serialize};
 use std::path::PathBuf;
+
+use serde::{Deserialize, Serialize};

 #[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct PathSettings {
     pub(crate) database_directory: String,

@@ -1,6 +1,7 @@
-use serde::{Deserialize, Serialize};
 use std::net::IpAddr;
+
+use serde::{Deserialize, Serialize};

 #[derive(Clone, Debug, Deserialize, Serialize, Default)]
 pub struct ServerSettings {
     pub tcp: TcpServerSettings,

@@ -1,7 +1,9 @@
-use crate::error::RepoResult;
-use serde::{Deserialize, Serialize};
 use std::net::IpAddr;
+
+use serde::{Deserialize, Serialize};
+
+use crate::error::RepoResult;

 #[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct SettingsV1 {
     pub listen_address: IpAddr,

@@ -1,10 +1,12 @@
-use crate::settings::Settings;
-use mediarepo_api::types::repo::SizeType;
 use std::collections::HashMap;
 use std::path::PathBuf;
+
+use mediarepo_api::types::repo::SizeType;
 use tokio_graceful_shutdown::SubsystemHandle;
 use typemap_rev::TypeMapKey;
+
+use crate::settings::Settings;

 pub struct SettingsKey;

 impl TypeMapKey for SettingsKey {

@@ -1,9 +1,11 @@
-use crate::error::RepoResult;
-use futures::future;
 use std::path::PathBuf;
+
+use futures::future;
 use tokio::fs::{self, OpenOptions};
 use tokio::io::{AsyncBufReadExt, BufReader};
+
+use crate::error::RepoResult;

 /// Parses a normalized tag into its two components of namespace and tag
 pub fn parse_namespace_and_tag(norm_tag: String) -> (Option<String>, String) {
     norm_tag

@@ -1,7 +1,9 @@
-use mediarepo_core::error::RepoDatabaseResult;
+use std::time::Duration;
+
 use sea_orm::{ConnectOptions, Database, DatabaseConnection};
 use sqlx::migrate::MigrateDatabase;
-use std::time::Duration;
+
+use mediarepo_core::error::RepoDatabaseResult;

 pub mod entities;
 pub mod queries;

@@ -1,11 +1,13 @@
-use mediarepo_core::error::RepoResult;
-use sea_orm::DbBackend;
-use sea_orm::FromQueryResult;
-use sea_orm::{DatabaseConnection, Statement};
 use std::collections::HashMap;
 use std::fmt::Display;
 use std::iter::FromIterator;
+
+use sea_orm::{DatabaseConnection, Statement};
+use sea_orm::DbBackend;
+use sea_orm::FromQueryResult;
+
+use mediarepo_core::error::RepoResult;

 #[derive(Debug, FromQueryResult)]
 struct CIDNamespaceTag {
     cd_id: i64,

@@ -1,5 +1,5 @@
 [package]
-name = "mediarepo-model"
+name = "mediarepo-logic"
 version = "0.1.0"
 edition = "2018"
 workspace = ".."

@@ -0,0 +1,73 @@
use std::io::Cursor;

use chrono::{Local, NaiveDateTime};
use sea_orm::ActiveValue::Set;
use sea_orm::{ActiveModelTrait, ConnectionTrait, DatabaseTransaction};

use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::{content_descriptor, file, file_metadata};

use crate::dao::file::FileDao;
use crate::dto::{AddFileDto, FileDto};

impl FileDao {
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn add(&self, add_dto: AddFileDto) -> RepoResult<FileDto> {
        let trx = self.ctx.db.begin().await?;
        let file_size = add_dto.content.len();
        let cd_bin = self
            .ctx
            .main_storage
            .add_file(Cursor::new(add_dto.content), None)
            .await?;
        let cd_model = content_descriptor::ActiveModel {
            descriptor: Set(cd_bin),
            ..Default::default()
        };
        let cd = cd_model.insert(&trx).await?;

        let model = file::ActiveModel {
            cd_id: Set(cd.id),
            mime_type: Set(add_dto.mime_type),
            ..Default::default()
        };
        let file: file::Model = model.insert(&trx).await?;
        let metadata = add_file_metadata(
            &trx,
            file.id,
            file_size as i64,
            add_dto.creation_time,
            add_dto.change_time,
            add_dto.name,
        )
        .await?;
        trx.commit().await?;

        Ok(FileDto::new(file, cd, Some(metadata)))
    }
}

async fn add_file_metadata(
    trx: &DatabaseTransaction,
    file_id: i64,
    size: i64,
    creation_time: NaiveDateTime,
    change_time: NaiveDateTime,
    name: Option<String>,
) -> RepoResult<file_metadata::Model> {
    let metadata_model = file_metadata::ActiveModel {
        file_id: Set(file_id),
        size: Set(size),
        import_time: Set(Local::now().naive_local()),
        creation_time: Set(creation_time),
        change_time: Set(change_time),
        name: Set(name),
        ..Default::default()
    };
    let metadata = metadata_model.insert(trx).await?;

    Ok(metadata)
}
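
A short usage sketch for the new DAO-based import path; the helper name and placeholder values are assumptions, everything else follows the signatures above:

use chrono::Local;

use mediarepo_core::error::RepoResult;
use mediarepo_logic::dao::repo::Repo;
use mediarepo_logic::dao::DaoProvider;
use mediarepo_logic::dto::AddFileDto;

/// Hypothetical import helper: stores raw bytes and returns the new file id.
async fn import_bytes(repo: &Repo, bytes: Vec<u8>) -> RepoResult<i64> {
    let now = Local::now().naive_local();
    let file = repo
        .file()
        .add(AddFileDto {
            content: bytes,
            mime_type: String::from("application/octet-stream"),
            creation_time: now,
            change_time: now,
            name: Some(String::from("example.bin")),
        })
        .await?;
    Ok(file.id())
}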

@@ -0,0 +1,43 @@
use sea_orm::ConnectionTrait;
use sea_orm::prelude::*;

use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::{
    content_descriptor, content_descriptor_tag, file, file_metadata,
};

use crate::dao::file::FileDao;
use crate::dto::FileDto;

impl FileDao {
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn delete(&self, file: FileDto) -> RepoResult<()> {
        let trx = self.ctx.db.begin().await?;

        file_metadata::Entity::delete_many()
            .filter(file_metadata::Column::FileId.eq(file.id()))
            .exec(&trx)
            .await?;
        file::Entity::delete_many()
            .filter(file::Column::Id.eq(file.id()))
            .exec(&trx)
            .await?;
        content_descriptor_tag::Entity::delete_many()
            .filter(content_descriptor_tag::Column::CdId.eq(file.cd_id()))
            .exec(&trx)
            .await?;
        content_descriptor::Entity::delete_many()
            .filter(content_descriptor::Column::Id.eq(file.cd_id()))
            .exec(&trx)
            .await?;
        self.ctx
            .thumbnail_storage
            .delete_parent(&file.encoded_cd())
            .await?;
        self.ctx.main_storage.delete_file(file.cd()).await?;
        trx.commit().await?;

        Ok(())
    }
}

@@ -1,11 +1,16 @@
 use chrono::NaiveDateTime;
+use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, QuerySelect};
+use sea_orm::Condition;
+use sea_orm::sea_query::{Alias, Expr, Query, SimpleExpr};
+
+use mediarepo_core::error::RepoResult;
 use mediarepo_database::entities::content_descriptor;
 use mediarepo_database::entities::content_descriptor_tag;
 use mediarepo_database::entities::file;
 use mediarepo_database::entities::file_metadata;
-use sea_orm::sea_query::{Alias, Expr, Query, SimpleExpr};
-use sea_orm::ColumnTrait;
-use sea_orm::Condition;
+
+use crate::dao::file::{FileDao, map_cd_and_file};
+use crate::dto::FileDto;

 macro_rules! apply_ordering_comparator {
     ($column:expr, $filter:expr) => {
@@ -53,8 +58,28 @@ pub enum NegatableComparator<T> {
     IsNot(T),
 }

+impl FileDao {
+    /// Finds files by filters
+    #[tracing::instrument(level = "debug", skip(self))]
+    pub async fn find(&self, filters: Vec<Vec<FilterProperty>>) -> RepoResult<Vec<FileDto>> {
+        let main_condition = build_find_filter_conditions(filters);
+
+        let files = content_descriptor::Entity::find()
+            .find_also_related(file::Entity)
+            .filter(main_condition)
+            .group_by(file::Column::Id)
+            .all(&self.ctx.db)
+            .await?
+            .into_iter()
+            .filter_map(map_cd_and_file)
+            .collect();
+
+        Ok(files)
+    }
+}
+
 #[tracing::instrument(level = "debug")]
-pub fn build_find_filter_conditions(filters: Vec<Vec<FilterProperty>>) -> Condition {
+fn build_find_filter_conditions(filters: Vec<Vec<FilterProperty>>) -> Condition {
     filters
         .into_iter()
         .fold(Condition::all(), |all_cond, mut expression| {

@@ -0,0 +1,151 @@
use sea_orm::prelude::*;
use tokio::io::AsyncReadExt;

use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::{content_descriptor, file, file_metadata};

use crate::dao::{DaoContext, DaoProvider};
use crate::dto::{FileDto, FileMetadataDto, ThumbnailDto};

pub mod add;
pub mod delete;
pub mod find;
pub mod update;

pub struct FileDao {
    ctx: DaoContext,
}

impl DaoProvider for FileDao {
    fn dao_ctx(&self) -> DaoContext {
        self.ctx.clone()
    }
}

impl FileDao {
    pub fn new(ctx: DaoContext) -> Self {
        Self { ctx }
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn all(&self) -> RepoResult<Vec<FileDto>> {
        let files = file::Entity::find()
            .find_also_related(content_descriptor::Entity)
            .all(&self.ctx.db)
            .await?
            .into_iter()
            .filter_map(map_file_and_cd)
            .collect();

        Ok(files)
    }

    #[tracing::instrument(level = "debug", skip(self))]
    #[inline]
    pub async fn by_id(&self, id: i64) -> RepoResult<Option<FileDto>> {
        self.all_by_id(vec![id]).await.map(|f| f.into_iter().next())
    }

    #[tracing::instrument(level = "debug", skip(self))]
    #[inline]
    pub async fn by_cd(&self, cd: Vec<u8>) -> RepoResult<Option<FileDto>> {
        self.all_by_cd(vec![cd]).await.map(|f| f.into_iter().next())
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn all_by_cd(&self, cds: Vec<Vec<u8>>) -> RepoResult<Vec<FileDto>> {
        if cds.is_empty() {
            return Ok(vec![]);
        }

        let files = file::Entity::find()
            .find_also_related(content_descriptor::Entity)
            .filter(content_descriptor::Column::Descriptor.is_in(cds))
            .all(&self.ctx.db)
            .await?
            .into_iter()
            .filter_map(map_file_and_cd)
            .collect();

        Ok(files)
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn all_by_id(&self, ids: Vec<i64>) -> RepoResult<Vec<FileDto>> {
        if ids.is_empty() {
            return Ok(vec![]);
        }

        let files = file::Entity::find()
            .find_also_related(content_descriptor::Entity)
            .filter(file::Column::Id.is_in(ids))
            .all(&self.ctx.db)
            .await?
            .into_iter()
            .filter_map(map_file_and_cd)
            .collect();

        Ok(files)
    }

    pub async fn metadata(&self, file_id: i64) -> RepoResult<Option<FileMetadataDto>> {
        self.all_metadata(vec![file_id])
            .await
            .map(|m| m.into_iter().next())
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn all_metadata(&self, file_ids: Vec<i64>) -> RepoResult<Vec<FileMetadataDto>> {
        if file_ids.is_empty() {
            return Ok(vec![]);
        }

        let metadata = file_metadata::Entity::find()
            .filter(file_metadata::Column::FileId.is_in(file_ids))
            .all(&self.ctx.db)
            .await?
            .into_iter()
            .map(|m| FileMetadataDto::new(m))
            .collect();

        Ok(metadata)
    }

    /// Returns all thumbnails for a cd
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn thumbnails(&self, encoded_cd: String) -> RepoResult<Vec<ThumbnailDto>> {
        let thumbnails = self
            .ctx
            .thumbnail_storage
            .get_thumbnails(&encoded_cd)
            .await?
            .into_iter()
            .map(|(size, path)| {
                ThumbnailDto::new(path, encoded_cd.clone(), size, String::from("image/png"))
            })
            .collect();

        Ok(thumbnails)
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn get_bytes(&self, cd: &[u8]) -> RepoResult<Vec<u8>> {
        let mut buf = Vec::new();
        let mut reader = self.ctx.main_storage.get_file(cd).await?.1;
        reader.read_to_end(&mut buf).await?;

        Ok(buf)
    }
}

fn map_file_and_cd((file, cd): (file::Model, Option<content_descriptor::Model>)) -> Option<FileDto> {
    cd.map(|c| FileDto::new(file, c, None))
}

fn map_cd_and_file((cd, file): (content_descriptor::Model, Option<file::Model>)) -> Option<FileDto> {
    file.map(|f| FileDto::new(f, cd, None))
}
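
A lookup sketch against the new accessors; the helper name is hypothetical, the calls match the signatures above:

use mediarepo_core::error::RepoResult;
use mediarepo_logic::dao::repo::Repo;
use mediarepo_logic::dao::DaoProvider;

/// Hypothetical helper: resolves a file by content descriptor and counts its thumbnails.
async fn show_file(repo: &Repo, cd: Vec<u8>) -> RepoResult<()> {
    if let Some(file) = repo.file().by_cd(cd).await? {
        let thumbs = repo.file().thumbnails(file.encoded_cd()).await?;
        println!("file {} has {} thumbnails", file.id(), thumbs.len());
    }
    Ok(())
}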

@@ -0,0 +1,95 @@
use std::fmt::Debug;
use std::io::Cursor;
use std::str::FromStr;

use sea_orm::prelude::*;
use sea_orm::ActiveValue::{Set, Unchanged};
use sea_orm::{ConnectionTrait, NotSet};

use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::fs::thumbnail_store::Dimensions;
use mediarepo_core::thumbnailer;
use mediarepo_core::thumbnailer::ThumbnailSize;
use mediarepo_database::entities::{content_descriptor, file, file_metadata};

use crate::dao::file::FileDao;
use crate::dao::opt_to_active_val;
use crate::dto::{FileDto, FileMetadataDto, ThumbnailDto, UpdateFileDto, UpdateFileMetadataDto};

impl FileDao {
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn update(&self, update_dto: UpdateFileDto) -> RepoResult<FileDto> {
        let trx = self.ctx.db.begin().await?;
        let model = file::ActiveModel {
            id: Set(update_dto.id),
            cd_id: update_dto.cd_id.map(|v| Set(v)).unwrap_or(NotSet),
            mime_type: update_dto.mime_type.map(|v| Set(v)).unwrap_or(NotSet),
            status: update_dto.status.map(|v| Set(v as i32)).unwrap_or(NotSet),
        };
        let file_model = model.update(&trx).await?;
        let cd = file_model
            .find_related(content_descriptor::Entity)
            .one(&trx)
            .await?
            .ok_or_else(|| RepoError::from("Content descriptor not found"))?;
        trx.commit().await?;

        Ok(FileDto::new(file_model, cd, None))
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn update_metadata(
        &self,
        update_dto: UpdateFileMetadataDto,
    ) -> RepoResult<FileMetadataDto> {
        let model = file_metadata::ActiveModel {
            file_id: Unchanged(update_dto.file_id),
            name: opt_to_active_val(update_dto.name),
            comment: opt_to_active_val(update_dto.comment),
            size: opt_to_active_val(update_dto.size),
            change_time: opt_to_active_val(update_dto.change_time),
            ..Default::default()
        };
        let metadata = model.update(&self.ctx.db).await?;

        Ok(FileMetadataDto::new(metadata))
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn create_thumbnails<I: IntoIterator<Item = ThumbnailSize> + Debug>(
        &self,
        file: FileDto,
        sizes: I,
    ) -> RepoResult<Vec<ThumbnailDto>> {
        let bytes = self.get_bytes(file.cd()).await?;
        let mime_type = mime::Mime::from_str(file.mime_type())
            .unwrap_or_else(|_| mime::APPLICATION_OCTET_STREAM);
        let thumbnails =
            thumbnailer::create_thumbnails(Cursor::new(bytes), mime_type.clone(), sizes)?;
        let mut dtos = Vec::new();

        for thumbnail in thumbnails {
            let mut buf = Vec::new();
            let size = thumbnail.size();
            let size = Dimensions {
                height: size.1,
                width: size.0,
            };
            thumbnail.write_png(&mut buf)?;
            let path = self
                .ctx
                .thumbnail_storage
                .add_thumbnail(file.encoded_cd(), size.clone(), &buf)
                .await?;
            dtos.push(ThumbnailDto::new(
                path,
                file.encoded_cd(),
                size,
                mime_type.to_string(),
            ))
        }

        Ok(dtos)
    }
}
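
Because UpdateFileDto implements Default with all optional fields unset, partial updates stay terse. A hedged sketch (helper name assumed):

use mediarepo_core::error::RepoResult;
use mediarepo_logic::dao::repo::Repo;
use mediarepo_logic::dao::DaoProvider;
use mediarepo_logic::dto::{FileStatus, UpdateFileDto};

/// Hypothetical helper: archives a file, leaving every other column untouched.
async fn archive_file(repo: &Repo, file_id: i64) -> RepoResult<()> {
    repo.file()
        .update(UpdateFileDto {
            id: file_id,
            status: Some(FileStatus::Archived),
            ..Default::default()
        })
        .await?;
    Ok(())
}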

@@ -0,0 +1,46 @@
use crate::dao::job::JobDao;
use mediarepo_core::content_descriptor::{
    convert_v1_descriptor_to_v2, encode_content_descriptor, is_v1_content_descriptor,
};
use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::content_descriptor;
use sea_orm::prelude::*;
use sea_orm::ActiveValue::Set;
use sea_orm::ConnectionTrait;

impl JobDao {
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn migrate_content_descriptors(&self) -> RepoResult<()> {
        let cds: Vec<content_descriptor::Model> =
            content_descriptor::Entity::find().all(&self.ctx.db).await?;

        tracing::info!("Converting content descriptors to v2 format...");
        let mut converted_count = 0;

        for cd in cds {
            if is_v1_content_descriptor(&cd.descriptor) {
                let trx = self.ctx.db.begin().await?;
                let src_cd = cd.descriptor;
                let dst_cd = convert_v1_descriptor_to_v2(&src_cd)?;

                let _active_model = content_descriptor::ActiveModel {
                    id: Set(cd.id),
                    descriptor: Set(dst_cd.clone()),
                };
                self.ctx.main_storage.rename_file(&src_cd, &dst_cd).await?;
                self.ctx
                    .thumbnail_storage
                    .rename_parent(
                        encode_content_descriptor(&src_cd),
                        encode_content_descriptor(&dst_cd),
                    )
                    .await?;
                trx.commit().await?;
                converted_count += 1;
            }
        }
        tracing::info!("Converted {} descriptors", converted_count);

        Ok(())
    }
}

@@ -0,0 +1,20 @@
pub mod migrate_content_descriptors;
pub mod sqlite_operations;

use crate::dao::{DaoContext, DaoProvider};

pub struct JobDao {
    ctx: DaoContext,
}

impl DaoProvider for JobDao {
    fn dao_ctx(&self) -> DaoContext {
        self.ctx.clone()
    }
}

impl JobDao {
    pub fn new(ctx: DaoContext) -> JobDao {
        Self { ctx }
    }
}

@@ -0,0 +1,44 @@
use crate::dao::job::JobDao;
use mediarepo_core::error::RepoError::Corrupted;
use mediarepo_core::error::RepoResult;
use sea_orm::DatabaseBackend::Sqlite;
use sea_orm::{ConnectionTrait, FromQueryResult, Statement};

#[derive(Debug, FromQueryResult)]
struct IntegrityCheckResult {
    integrity_check: String,
}

impl JobDao {
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn check_integrity(&self) -> RepoResult<()> {
        let check_result: Option<IntegrityCheckResult> = IntegrityCheckResult::find_by_statement(
            Statement::from_string(Sqlite, String::from("PRAGMA integrity_check;")),
        )
        .one(&self.ctx.db)
        .await?;
        tracing::debug!("check result = {:?}", check_result);

        check_result
            .ok_or_else(|| Corrupted(String::from("no check result")))
            .and_then(map_check_result)
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn vacuum(&self) -> RepoResult<()> {
        self.ctx
            .db
            .execute(Statement::from_string(Sqlite, String::from("VACUUM;")))
            .await?;

        Ok(())
    }
}

fn map_check_result(result: IntegrityCheckResult) -> RepoResult<()> {
    if result.integrity_check == "ok" {
        Ok(())
    } else {
        Err(Corrupted(result.integrity_check))
    }
}
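
A hedged maintenance sketch showing how the Corrupted variant added to RepoError surfaces; running the integrity check before VACUUM means a corrupted database aborts before any rewrite of the file (helper name assumed):

use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_logic::dao::repo::Repo;
use mediarepo_logic::dao::DaoProvider;

/// Hypothetical maintenance pass: verify first, then reclaim space.
async fn verify_then_vacuum(repo: &Repo) -> RepoResult<()> {
    if let Err(RepoError::Corrupted(report)) = repo.job().check_integrity().await {
        tracing::error!("integrity check failed: {}", report);
        return Err(RepoError::Corrupted(report));
    }
    repo.job().vacuum().await
}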

@@ -0,0 +1,41 @@
use sea_orm::{ActiveValue, DatabaseConnection};

use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_core::fs::thumbnail_store::ThumbnailStore;

use crate::dao::file::FileDao;
use crate::dao::job::JobDao;
use crate::dao::tag::TagDao;

pub mod file;
pub mod job;
pub mod repo;
pub mod tag;

#[derive(Clone)]
pub struct DaoContext {
    pub db: DatabaseConnection,
    pub main_storage: FileHashStore,
    pub thumbnail_storage: ThumbnailStore,
}

pub trait DaoProvider {
    fn dao_ctx(&self) -> DaoContext;

    fn file(&self) -> FileDao {
        FileDao::new(self.dao_ctx())
    }

    fn tag(&self) -> TagDao {
        TagDao::new(self.dao_ctx())
    }

    fn job(&self) -> JobDao {
        JobDao::new(self.dao_ctx())
    }
}

fn opt_to_active_val<T: Into<sea_orm::Value>>(opt: Option<T>) -> ActiveValue<T> {
    opt.map(|v| ActiveValue::Set(v))
        .unwrap_or(ActiveValue::NotSet)
}

@@ -0,0 +1,81 @@
use std::fmt::Debug;
use std::path::PathBuf;

use sea_orm::DatabaseConnection;

use mediarepo_core::error::RepoResult;
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_core::fs::thumbnail_store::ThumbnailStore;
use mediarepo_database::get_database;
use mediarepo_database::queries::analysis::{get_all_counts, Counts};

use crate::dao::{DaoContext, DaoProvider};

#[derive(Clone)]
pub struct Repo {
    db: DatabaseConnection,
    main_storage: FileHashStore,
    thumbnail_storage: ThumbnailStore,
}

impl DaoProvider for Repo {
    fn dao_ctx(&self) -> DaoContext {
        DaoContext {
            db: self.db.clone(),
            main_storage: self.main_storage.clone(),
            thumbnail_storage: self.thumbnail_storage.clone(),
        }
    }
}

impl Repo {
    pub(crate) fn new(
        db: DatabaseConnection,
        file_store_path: PathBuf,
        thumb_store_path: PathBuf,
    ) -> Self {
        Self {
            db,
            main_storage: FileHashStore::new(file_store_path),
            thumbnail_storage: ThumbnailStore::new(thumb_store_path),
        }
    }

    /// Connects to the database with the given uri
    #[tracing::instrument(level = "debug")]
    pub async fn connect<S: AsRef<str> + Debug>(
        uri: S,
        file_store_path: PathBuf,
        thumb_store_path: PathBuf,
    ) -> RepoResult<Self> {
        let db = get_database(uri).await?;
        Ok(Self::new(db, file_store_path, thumb_store_path))
    }

    /// Returns the database of the repo for raw sql queries
    pub fn db(&self) -> &DatabaseConnection {
        &self.db
    }

    /// Returns the size of the main storage
    #[inline]
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn get_main_store_size(&self) -> RepoResult<u64> {
        self.main_storage.get_size().await
    }

    /// Returns the size of the thumbnail storage
    #[inline]
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn get_thumb_store_size(&self) -> RepoResult<u64> {
        self.thumbnail_storage.get_size().await
    }

    /// Returns all entity counts
    #[inline]
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn get_counts(&self) -> RepoResult<Counts> {
        get_all_counts(&self.db).await
    }
}
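
Bootstrapping the new Repo facade; the URI and store paths below are placeholders:

use std::path::PathBuf;

use mediarepo_core::error::RepoResult;
use mediarepo_logic::dao::repo::Repo;

/// Hypothetical bootstrap: connect the database and open the two stores.
async fn open_repo() -> RepoResult<Repo> {
    Repo::connect(
        "sqlite://repo.db",
        PathBuf::from("files"),
        PathBuf::from("thumbnails"),
    )
    .await
}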

@@ -0,0 +1,137 @@
use crate::dao::tag::{map_tag_dto, TagDao};
use crate::dto::{AddTagDto, NamespaceDto, TagDto};
use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::{namespace, tag};
use sea_orm::prelude::*;
use sea_orm::ActiveValue::Set;
use sea_orm::{Condition, ConnectionTrait, DatabaseTransaction};
use std::collections::HashMap;
use std::iter::FromIterator;

impl TagDao {
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn add_all(&self, mut tags: Vec<AddTagDto>) -> RepoResult<Vec<TagDto>> {
        let namespaces = tags.iter().filter_map(|t| t.namespace.clone()).collect();
        let trx = self.ctx.db.begin().await?;
        let existing_tags = tags_by_name(&trx, tags.clone()).await?;

        if existing_tags.len() == tags.len() {
            return Ok(existing_tags);
        }

        let existing_tag_map: HashMap<String, TagDto> =
            HashMap::from_iter(existing_tags.into_iter().map(|t| (t.normalized_name(), t)));
        tags.retain(|dto| !existing_tag_map.contains_key(&dto.normalized_name()));
        let namespace_map = add_or_get_all_namespaces(&trx, namespaces).await?;

        if tags.is_empty() {
            return Ok(existing_tag_map.into_values().collect());
        }

        let tag_models: Vec<tag::ActiveModel> = tags
            .iter()
            .map(|t| tag::ActiveModel {
                name: Set(t.name.to_owned()),
                namespace_id: Set(t
                    .namespace
                    .as_ref()
                    .and_then(|n| namespace_map.get(n))
                    .map(|n| n.id())),
                ..Default::default()
            })
            .collect();
        tag::Entity::insert_many(tag_models).exec(&trx).await?;
        let mut tag_dtos = tags_by_name(&trx, tags).await?;
        trx.commit().await?;
        tag_dtos.append(&mut existing_tag_map.into_values().collect());

        Ok(tag_dtos)
    }
}

async fn add_or_get_all_namespaces(
    trx: &DatabaseTransaction,
    mut namespaces: Vec<String>,
) -> RepoResult<HashMap<String, NamespaceDto>> {
    if namespaces.is_empty() {
        return Ok(HashMap::with_capacity(0));
    }

    let existing_namespaces = namespaces_by_name(trx, namespaces.clone()).await?;
    let mut namespace_map = HashMap::from_iter(
        existing_namespaces
            .into_iter()
            .map(|nsp| (nsp.name().to_owned(), nsp)),
    );
    if namespaces.len() == namespace_map.len() {
        return Ok(namespace_map);
    }

    namespaces.retain(|nsp| !namespace_map.contains_key(nsp));
    if namespaces.is_empty() {
        return Ok(namespace_map);
    }

    let namespace_models: Vec<namespace::ActiveModel> = namespaces
        .iter()
        .map(|nsp| namespace::ActiveModel {
            name: Set(nsp.to_owned()),
            ..Default::default()
        })
        .collect();
    namespace::Entity::insert_many(namespace_models)
        .exec(trx)
        .await?;
    let additional_namespaces = namespaces_by_name(trx, namespaces.clone()).await?;

    for nsp in additional_namespaces {
        namespace_map.insert(nsp.name().to_owned(), nsp);
    }

    Ok(namespace_map)
}

async fn namespaces_by_name(
    trx: &DatabaseTransaction,
    names: Vec<String>,
) -> RepoResult<Vec<NamespaceDto>> {
    if names.is_empty() {
        return Ok(vec![]);
    }

    let namespaces: Vec<NamespaceDto> = namespace::Entity::find()
        .filter(namespace::Column::Name.is_in(names))
        .all(trx)
        .await?
        .into_iter()
        .map(NamespaceDto::new)
        .collect();

    Ok(namespaces)
}

async fn tags_by_name(trx: &DatabaseTransaction, tags: Vec<AddTagDto>) -> RepoResult<Vec<TagDto>> {
    if tags.is_empty() {
        return Ok(vec![]);
    }

    let condition = tags
        .into_iter()
        .map(build_tag_condition)
        .fold(Condition::any(), Condition::add);
    let tags = tag::Entity::find()
        .find_also_related(namespace::Entity)
        .filter(condition)
        .all(trx)
        .await?
        .into_iter()
        .map(map_tag_dto)
        .collect();

    Ok(tags)
}

fn build_tag_condition(tag: AddTagDto) -> Condition {
    if let Some(namespace) = tag.namespace {
        Condition::all()
            .add(tag::Column::Name.eq(tag.name))
            .add(namespace::Column::Name.eq(namespace))
    } else {
        Condition::all().add(tag::Column::Name.eq(tag.name))
    }
}
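
An upsert sketch built on parse_namespace_and_tag and AddTagDto::from_tuple; the helper name is an assumption. Tags that already exist come back unchanged rather than being duplicated:

use mediarepo_core::error::RepoResult;
use mediarepo_core::utils::parse_namespace_and_tag;
use mediarepo_logic::dao::repo::Repo;
use mediarepo_logic::dao::DaoProvider;
use mediarepo_logic::dto::{AddTagDto, TagDto};

/// Hypothetical helper: normalizes raw "namespace:name" strings and upserts them.
async fn ensure_tags(repo: &Repo, raw: Vec<String>) -> RepoResult<Vec<TagDto>> {
    let dtos: Vec<AddTagDto> = raw
        .into_iter()
        .map(parse_namespace_and_tag)
        .map(AddTagDto::from_tuple)
        .collect();
    repo.tag().add_all(dtos).await
}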

@@ -0,0 +1,64 @@
use crate::dao::tag::{map_tag_dto, TagDao};
use crate::dto::TagDto;
use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::{namespace, tag};
use sea_orm::prelude::*;
use sea_orm::sea_query::Expr;
use sea_orm::{Condition, QuerySelect};

#[derive(Clone, Debug)]
pub struct TagByNameQuery {
    pub namespace: Option<String>,
    pub name: String,
}

impl TagDao {
    /// Filters all tags by names
    /// wildcards are supported
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn all_by_name(&self, names: Vec<TagByNameQuery>) -> RepoResult<Vec<TagDto>> {
        let mut condition_count = 0;
        let condition = names
            .into_iter()
            .filter_map(name_query_to_condition)
            .inspect(|_| condition_count += 1)
            .fold(Condition::any(), Condition::add);
        if condition_count == 0 {
            return Ok(vec![]);
        }

        let tags = tag::Entity::find()
            .find_also_related(namespace::Entity)
            .filter(condition)
            .group_by(tag::Column::Id)
            .all(&self.ctx.db)
            .await?
            .into_iter()
            .map(map_tag_dto)
            .collect();

        Ok(tags)
    }
}

fn name_query_to_condition(query: TagByNameQuery) -> Option<Condition> {
    let TagByNameQuery { namespace, name } = query;
    let mut condition = Condition::all();

    if !name.ends_with('*') {
        condition = condition.add(tag::Column::Name.eq(name))
    } else if name.len() > 1 {
        condition =
            condition.add(tag::Column::Name.like(&*format!("{}%", name.trim_end_matches("*"))))
    } else if namespace.is_none() {
        return None;
    }

    condition = if let Some(namespace) = namespace {
        condition.add(namespace::Column::Name.eq(namespace))
    } else {
        condition.add(Expr::tbl(tag::Entity, tag::Column::NamespaceId).is_null())
    };

    Some(condition)
}
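
A wildcard-query sketch; per the handling above, a trailing `*` becomes a prefix match and a bare `*` with no namespace is dropped entirely (helper name and values are assumptions):

use mediarepo_core::error::RepoResult;
use mediarepo_logic::dao::repo::Repo;
use mediarepo_logic::dao::tag::by_name::TagByNameQuery;
use mediarepo_logic::dao::DaoProvider;
use mediarepo_logic::dto::TagDto;

/// Hypothetical search: every "person" tag whose name starts with "a".
async fn find_person_tags(repo: &Repo) -> RepoResult<Vec<TagDto>> {
    let query = TagByNameQuery {
        namespace: Some(String::from("person")),
        name: String::from("a*"),
    };
    repo.tag().all_by_name(vec![query]).await
}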

@@ -0,0 +1,66 @@
use sea_orm::{ConnectionTrait, DatabaseTransaction};
use sea_orm::ActiveValue::Set;
use sea_orm::prelude::*;

use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::content_descriptor_tag;

use crate::dao::tag::TagDao;

impl TagDao {
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn upsert_mappings(&self, cd_ids: Vec<i64>, tag_ids: Vec<i64>) -> RepoResult<()> {
        let trx = self.ctx.db.begin().await?;

        let existing_mappings = get_existing_mappings(&trx, &cd_ids, &tag_ids).await?;

        let active_models: Vec<content_descriptor_tag::ActiveModel> = cd_ids
            .into_iter()
            .flat_map(|cd_id: i64| {
                tag_ids
                    .iter()
                    .filter(|tag_id| !existing_mappings.contains(&(cd_id, **tag_id)))
                    .map(move |tag_id| content_descriptor_tag::ActiveModel {
                        cd_id: Set(cd_id),
                        tag_id: Set(*tag_id),
                    })
                    .collect::<Vec<content_descriptor_tag::ActiveModel>>()
            })
            .collect();
        content_descriptor_tag::Entity::insert_many(active_models)
            .exec(&trx)
            .await?;
        trx.commit().await?;

        Ok(())
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn remove_mappings(&self, cd_ids: Vec<i64>, tag_ids: Vec<i64>) -> RepoResult<()> {
        content_descriptor_tag::Entity::delete_many()
            .filter(content_descriptor_tag::Column::CdId.is_in(cd_ids))
            .filter(content_descriptor_tag::Column::TagId.is_in(tag_ids))
            .exec(&self.ctx.db)
            .await?;

        Ok(())
    }
}

async fn get_existing_mappings(
    trx: &DatabaseTransaction,
    cd_ids: &Vec<i64>,
    tag_ids: &Vec<i64>,
) -> RepoResult<Vec<(i64, i64)>> {
    let existing_mappings: Vec<(i64, i64)> = content_descriptor_tag::Entity::find()
        .filter(content_descriptor_tag::Column::CdId.is_in(cd_ids.clone()))
        .filter(content_descriptor_tag::Column::TagId.is_in(tag_ids.clone()))
        .all(trx)
        .await?
        .into_iter()
        // (cd_id, tag_id) order must match the `contains(&(cd_id, tag_id))`
        // lookup in upsert_mappings above
        .map(|model: content_descriptor_tag::Model| (model.cd_id, model.tag_id))
        .collect();

    Ok(existing_mappings)
}
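
An end-to-end tagging sketch combining this with normalized_tags_to_ids from tag/mod.rs below; the helper name is an assumption, and existing (cd, tag) pairs are skipped by the upsert:

use mediarepo_core::error::RepoResult;
use mediarepo_logic::dao::repo::Repo;
use mediarepo_logic::dao::DaoProvider;

/// Hypothetical helper: resolves tag names to ids and attaches them to files.
async fn tag_files_by_name(repo: &Repo, cd_ids: Vec<i64>, names: Vec<String>) -> RepoResult<()> {
    let tag_map = repo.tag().normalized_tags_to_ids(names).await?;
    let tag_ids: Vec<i64> = tag_map.into_values().collect();
    repo.tag().upsert_mappings(cd_ids, tag_ids).await
}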

@@ -0,0 +1,174 @@
use sea_orm::prelude::*;
use sea_orm::JoinType;
use sea_orm::QuerySelect;
use std::collections::HashMap;
use std::iter::FromIterator;

use mediarepo_core::error::RepoResult;
use mediarepo_core::itertools::Itertools;
use mediarepo_core::utils::parse_namespace_and_tag;
use mediarepo_database::entities::{content_descriptor, content_descriptor_tag, namespace, tag};

use crate::dao::tag::by_name::TagByNameQuery;
use crate::dao::{DaoContext, DaoProvider};
use crate::dto::{NamespaceDto, TagDto};

pub mod add;
pub mod by_name;
pub mod mappings;

pub struct TagDao {
    ctx: DaoContext,
}

impl DaoProvider for TagDao {
    fn dao_ctx(&self) -> DaoContext {
        self.ctx.clone()
    }
}

impl TagDao {
    pub fn new(ctx: DaoContext) -> Self {
        Self { ctx }
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn all(&self) -> RepoResult<Vec<TagDto>> {
        let tags = tag::Entity::find()
            .find_also_related(namespace::Entity)
            .all(&self.ctx.db)
            .await?
            .into_iter()
            .map(map_tag_dto)
            .collect();

        Ok(tags)
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn all_namespaces(&self) -> RepoResult<Vec<NamespaceDto>> {
        let namespaces = namespace::Entity::find()
            .all(&self.ctx.db)
            .await?
            .into_iter()
            .map(NamespaceDto::new)
            .collect();

        Ok(namespaces)
    }

    #[tracing::instrument(level = "debug", skip(self, cds))]
    pub async fn all_for_cds(&self, cds: Vec<Vec<u8>>) -> RepoResult<Vec<TagDto>> {
        let tags = tag::Entity::find()
            .find_also_related(namespace::Entity)
            .join(
                JoinType::LeftJoin,
                content_descriptor_tag::Relation::Tag.def().rev(),
            )
            .join(
                JoinType::InnerJoin,
                content_descriptor_tag::Relation::ContentDescriptorId.def(),
            )
            .filter(content_descriptor::Column::Descriptor.is_in(cds))
            .group_by(tag::Column::Id)
            .all(&self.ctx.db)
            .await?
            .into_iter()
            .map(map_tag_dto)
            .collect();

        Ok(tags)
    }

    #[tracing::instrument(level = "debug", skip(self, cds))]
    pub async fn all_for_cds_map(
        &self,
        cds: Vec<Vec<u8>>,
    ) -> RepoResult<HashMap<Vec<u8>, Vec<TagDto>>> {
        let tag_cd_entries: Vec<(
            content_descriptor_tag::Model,
            Option<content_descriptor::Model>,
        )> = content_descriptor_tag::Entity::find()
            .find_also_related(content_descriptor::Entity)
            .filter(content_descriptor::Column::Descriptor.is_in(cds))
            .all(&self.ctx.db)
            .await?;

        let tag_ids: Vec<i64> = tag_cd_entries
            .iter()
            .map(|(t, _)| t.tag_id)
            .unique()
            .collect();

        let tags: Vec<TagDto> = tag::Entity::find()
            .find_also_related(namespace::Entity)
            .filter(tag::Column::Id.is_in(tag_ids))
            .all(&self.ctx.db)
            .await?
            .into_iter()
            .map(map_tag_dto)
            .collect();

        let tag_id_map = tags
            .into_iter()
            .map(|t| (t.id(), t))
            .collect::<HashMap<i64, TagDto>>();

        let cd_tag_map = tag_cd_entries
            .into_iter()
            .filter_map(|(t, cd)| Some((cd?, tag_id_map.get(&t.tag_id)?.clone())))
            .sorted_by_key(|(cd, _)| cd.id)
            .group_by(|(cd, _)| cd.descriptor.to_owned())
            .into_iter()
            .map(|(key, group)| (key, group.map(|(_, t)| t).collect::<Vec<TagDto>>()))
            .collect();

        Ok(cd_tag_map)
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn tags_for_cd(&self, cd_id: i64) -> RepoResult<Vec<TagDto>> {
        let tags = tag::Entity::find()
            .find_also_related(namespace::Entity)
            .join(
                JoinType::LeftJoin,
                content_descriptor_tag::Relation::Tag.def().rev(),
            )
            .join(
                JoinType::InnerJoin,
                content_descriptor_tag::Relation::ContentDescriptorId.def(),
            )
            .filter(content_descriptor::Column::Id.eq(cd_id))
            .all(&self.ctx.db)
            .await?
            .into_iter()
            .map(map_tag_dto)
            .collect();

        Ok(tags)
    }

    /// Returns a map mapping tag names to ids
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn normalized_tags_to_ids(
        &self,
        names: Vec<String>,
    ) -> RepoResult<HashMap<String, i64>> {
        let queries = names
            .into_iter()
            .map(parse_namespace_and_tag)
            .map(|(namespace, name)| TagByNameQuery { namespace, name })
            .collect();
        let tags = self.all_by_name(queries).await?;

        let tag_map = HashMap::from_iter(
            tags.into_iter()
                .map(|tag| (tag.normalized_name(), tag.id())),
        );

        Ok(tag_map)
    }
}

fn map_tag_dto(result: (tag::Model, Option<namespace::Model>)) -> TagDto {
    TagDto::new(result.0, result.1)
}

@@ -0,0 +1,112 @@
use chrono::NaiveDateTime;

use mediarepo_core::content_descriptor::encode_content_descriptor;
use mediarepo_core::mediarepo_api::types::files::FileStatus as ApiFileStatus;
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::file;
use mediarepo_database::entities::file_metadata;

use crate::dto::FileMetadataDto;

#[derive(Clone, Debug)]
pub struct FileDto {
    model: file::Model,
    content_descriptor: content_descriptor::Model,
    metadata: Option<FileMetadataDto>,
}

impl FileDto {
    pub(crate) fn new(
        model: file::Model,
        content_descriptor: content_descriptor::Model,
        metadata: Option<file_metadata::Model>,
    ) -> Self {
        Self {
            model,
            content_descriptor,
            metadata: metadata.map(FileMetadataDto::new),
        }
    }

    pub fn id(&self) -> i64 {
        self.model.id
    }

    pub fn cd_id(&self) -> i64 {
        self.model.cd_id
    }

    pub fn cd(&self) -> &[u8] {
        &self.content_descriptor.descriptor
    }

    pub fn encoded_cd(&self) -> String {
        encode_content_descriptor(&self.content_descriptor.descriptor)
    }

    pub fn status(&self) -> FileStatus {
        match self.model.status {
            10 => FileStatus::Imported,
            20 => FileStatus::Archived,
            30 => FileStatus::Deleted,
            _ => FileStatus::Imported,
        }
    }

    pub fn mime_type(&self) -> &String {
        &self.model.mime_type
    }

    pub fn metadata(&self) -> Option<&FileMetadataDto> {
        self.metadata.as_ref()
    }

    pub fn into_metadata(self) -> Option<FileMetadataDto> {
        self.metadata
    }
}

#[derive(Clone, Debug)]
pub struct AddFileDto {
    pub content: Vec<u8>,
    pub mime_type: String,
    pub creation_time: NaiveDateTime,
    pub change_time: NaiveDateTime,
    pub name: Option<String>,
}

#[derive(Clone, Debug)]
pub struct UpdateFileDto {
    pub id: i64,
    pub cd_id: Option<i64>,
    pub mime_type: Option<String>,
    pub status: Option<FileStatus>,
}

impl Default for UpdateFileDto {
    fn default() -> Self {
        Self {
            id: 0,
            cd_id: None,
            mime_type: None,
            status: None,
        }
    }
}

#[derive(Copy, Clone, Debug)]
pub enum FileStatus {
    Imported = 10,
    Archived = 20,
    Deleted = 30,
}

impl From<ApiFileStatus> for FileStatus {
    fn from(s: ApiFileStatus) -> Self {
        match s {
            ApiFileStatus::Imported => Self::Imported,
            ApiFileStatus::Archived => Self::Archived,
            ApiFileStatus::Deleted => Self::Deleted,
        }
    }
}

@@ -0,0 +1,51 @@
use chrono::NaiveDateTime;

use mediarepo_database::entities::file_metadata;

#[derive(Clone, Debug)]
pub struct FileMetadataDto {
    model: file_metadata::Model,
}

impl FileMetadataDto {
    pub(crate) fn new(model: file_metadata::Model) -> Self {
        Self { model }
    }

    pub fn file_id(&self) -> i64 {
        self.model.file_id
    }

    pub fn name(&self) -> Option<&String> {
        self.model.name.as_ref()
    }

    pub fn comment(&self) -> Option<&String> {
        self.model.comment.as_ref()
    }

    pub fn size(&self) -> i64 {
        self.model.size
    }

    pub fn import_time(&self) -> NaiveDateTime {
        self.model.import_time
    }

    pub fn creation_time(&self) -> NaiveDateTime {
        self.model.creation_time
    }

    pub fn change_time(&self) -> NaiveDateTime {
        self.model.change_time
    }
}

#[derive(Clone, Debug, Default)]
pub struct UpdateFileMetadataDto {
    pub file_id: i64,
    pub name: Option<Option<String>>,
    pub comment: Option<Option<String>>,
    pub size: Option<i64>,
    pub change_time: Option<NaiveDateTime>,
}

@@ -0,0 +1,12 @@
pub use file::*;
pub use file_metadata::*;
pub use namespace::*;
pub use tag::*;
pub use thumbnail::*;

mod file;
mod file_metadata;
mod tag;
mod namespace;
mod thumbnail;

@@ -0,0 +1,20 @@
use mediarepo_database::entities::namespace;

#[derive(Clone, Debug)]
pub struct NamespaceDto {
    model: namespace::Model,
}

impl NamespaceDto {
    pub(crate) fn new(model: namespace::Model) -> Self {
        Self { model }
    }

    pub fn id(&self) -> i64 {
        self.model.id
    }

    pub fn name(&self) -> &String {
        &self.model.name
    }
}

@@ -0,0 +1,62 @@
pub use mediarepo_database::entities::namespace;
pub use mediarepo_database::entities::tag;

use crate::dto::NamespaceDto;

#[derive(Clone, Debug)]
pub struct TagDto {
    model: tag::Model,
    namespace: Option<NamespaceDto>,
}

impl TagDto {
    pub(crate) fn new(model: tag::Model, namespace_model: Option<namespace::Model>) -> Self {
        Self {
            model,
            namespace: namespace_model.map(NamespaceDto::new),
        }
    }

    pub fn id(&self) -> i64 {
        self.model.id
    }

    pub fn name(&self) -> &String {
        &self.model.name
    }

    pub fn namespace(&self) -> Option<&NamespaceDto> {
        self.namespace.as_ref()
    }

    /// Returns the normalized name of the tag (namespace:tag)
    pub fn normalized_name(&self) -> String {
        if let Some(namespace) = &self.namespace {
            format!("{}:{}", namespace.name(), self.name())
        } else {
            self.name().to_owned()
        }
    }
}

#[derive(Clone, Debug)]
pub struct AddTagDto {
    pub namespace: Option<String>,
    pub name: String,
}

impl AddTagDto {
    pub fn from_tuple(tuple: (Option<String>, String)) -> Self {
        let (namespace, name) = tuple;
        Self { namespace, name }
    }

    /// Returns the normalized name of the tag (namespace:tag)
    pub fn normalized_name(&self) -> String {
        if let Some(namespace) = &self.namespace {
            format!("{}:{}", namespace, &self.name)
        } else {
            self.name.to_owned()
        }
    }
}

@@ -0,0 +1,53 @@
use std::path::PathBuf;

use tokio::fs;
use tokio::fs::{File, OpenOptions};
use tokio::io::BufReader;

use mediarepo_core::error::RepoResult;
use mediarepo_core::fs::thumbnail_store::Dimensions;

#[derive(Clone, Debug)]
pub struct ThumbnailDto {
    path: PathBuf,
    parent_cd: String,
    size: Dimensions,
    mime_type: String,
}

impl ThumbnailDto {
    pub fn new(path: PathBuf, parent_cd: String, size: Dimensions, mime_type: String) -> Self {
        Self {
            path,
            parent_cd,
            size,
            mime_type,
        }
    }

    pub fn parent_cd(&self) -> &String {
        &self.parent_cd
    }

    pub fn size(&self) -> &Dimensions {
        &self.size
    }

    pub fn mime_type(&self) -> &String {
        &self.mime_type
    }

    #[tracing::instrument(level = "debug")]
    pub async fn get_reader(&self) -> RepoResult<BufReader<File>> {
        let file = OpenOptions::new().read(true).open(&self.path).await?;
        Ok(BufReader::new(file))
    }

    /// Deletes the thumbnail
    #[tracing::instrument(level = "debug")]
    pub async fn delete(self) -> RepoResult<()> {
        fs::remove_file(&self.path).await?;

        Ok(())
    }
}

@@ -0,0 +1,3 @@
pub mod dao;
pub mod dto;
pub mod type_keys;

@@ -1,7 +1,9 @@
-use crate::repo::Repo;
 use std::sync::Arc;
+
 use typemap_rev::TypeMapKey;

+use crate::dao::repo::Repo;
+
 pub struct RepoKey;

 impl TypeMapKey for RepoKey {

@@ -1,101 +0,0 @@
use crate::file::File;
use mediarepo_core::content_descriptor::convert_v1_descriptor_to_v2;
use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::file;
use sea_orm::prelude::*;
use sea_orm::{DatabaseConnection, Set};
use std::fmt::Debug;

pub struct ContentDescriptor {
    db: DatabaseConnection,
    model: content_descriptor::Model,
}

impl ContentDescriptor {
    #[tracing::instrument(level = "trace")]
    pub(crate) fn new(db: DatabaseConnection, model: content_descriptor::Model) -> Self {
        Self { db, model }
    }

    pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<Self>> {
        let descriptors = content_descriptor::Entity::find()
            .all(&db)
            .await?
            .into_iter()
            .map(|model| Self::new(db.clone(), model))
            .collect();

        Ok(descriptors)
    }

    /// Searches for the hash by id
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
        let hash = content_descriptor::Entity::find_by_id(id)
            .one(&db)
            .await?
            .map(|model| Self::new(db, model));

        Ok(hash)
    }

    /// Returns the hash by value
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn by_value<D: AsRef<[u8]> + Debug>(
        db: DatabaseConnection,
        descriptor: D,
    ) -> RepoResult<Option<Self>> {
        let cid = content_descriptor::Entity::find()
            .filter(content_descriptor::Column::Descriptor.eq(descriptor.as_ref()))
            .one(&db)
            .await?
            .map(|model| Self::new(db, model));

        Ok(cid)
    }

    /// Adds a new hash to the database
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn add(db: DatabaseConnection, descriptor: Vec<u8>) -> RepoResult<Self> {
        let active_model = content_descriptor::ActiveModel {
            descriptor: Set(descriptor),
            ..Default::default()
        };
        let model = active_model.insert(&db).await?;

        Ok(Self::new(db, model))
    }

    pub fn id(&self) -> i64 {
        self.model.id
    }

    pub fn descriptor(&self) -> &[u8] {
        &self.model.descriptor[..]
    }

    /// Returns the file associated with the hash
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn file(&self) -> RepoResult<Option<File>> {
        let file = self
            .model
            .find_related(file::Entity)
            .one(&self.db)
            .await?
            .map(|file_model| File::new(self.db.clone(), file_model, self.model.clone()));

        Ok(file)
    }

    pub async fn convert_v1_to_v2(&mut self) -> RepoResult<()> {
        let descriptor = convert_v1_descriptor_to_v2(&self.model.descriptor)?;
        let active_model = content_descriptor::ActiveModel {
            id: Set(self.id()),
            descriptor: Set(descriptor),
        };
        self.model = active_model.update(&self.db).await?;

        Ok(())
    }
}

@ -1,334 +0,0 @@
pub mod filter;
use std::fmt::Debug;
use std::io::Cursor;
use std::str::FromStr;
use mediarepo_core::content_descriptor::encode_content_descriptor;
use sea_orm::prelude::*;
use sea_orm::{ConnectionTrait, DatabaseConnection, Set};
use sea_orm::{JoinType, QuerySelect};
use tokio::io::{AsyncReadExt, BufReader};
use crate::file::filter::FilterProperty;
use crate::file_metadata::FileMetadata;
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_core::mediarepo_api::types::files::FileStatus as ApiFileStatus;
use mediarepo_core::thumbnailer::{self, Thumbnail as ThumbnailerThumb, ThumbnailSize};
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::content_descriptor_tag;
use mediarepo_database::entities::file;
use mediarepo_database::entities::file_metadata;
use mediarepo_database::entities::namespace;
use mediarepo_database::entities::tag;
use crate::tag::Tag;
pub enum FileStatus {
Imported = 10,
Archived = 20,
Deleted = 30,
}
impl From<ApiFileStatus> for FileStatus {
fn from(s: ApiFileStatus) -> Self {
match s {
ApiFileStatus::Imported => Self::Imported,
ApiFileStatus::Archived => Self::Archived,
ApiFileStatus::Deleted => Self::Deleted,
}
}
}
#[derive(Clone)]
pub struct File {
db: DatabaseConnection,
model: file::Model,
content_descriptor: content_descriptor::Model,
}
impl File {
#[tracing::instrument(level = "trace")]
pub(crate) fn new(
db: DatabaseConnection,
model: file::Model,
hash: content_descriptor::Model,
) -> Self {
Self {
db,
model,
content_descriptor: hash,
}
}
/// Returns a list of all known stored files
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<File>> {
let files: Vec<(file::Model, Option<content_descriptor::Model>)> = file::Entity::find()
.find_also_related(content_descriptor::Entity)
.all(&db)
.await?;
let files = files
.into_iter()
.filter_map(|(f, h)| {
let h = h?;
Some(Self::new(db.clone(), f, h))
})
.collect();
Ok(files)
}
/// Fetches the file by id
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
if let Some((model, Some(hash))) = file::Entity::find_by_id(id)
.find_also_related(content_descriptor::Entity)
.one(&db)
.await?
{
let file = File::new(db, model, hash);
Ok(Some(file))
} else {
Ok(None)
}
}
/// Finds the file by hash
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_cd(db: DatabaseConnection, cd: &[u8]) -> RepoResult<Option<Self>> {
if let Some((hash, Some(model))) = content_descriptor::Entity::find()
.filter(content_descriptor::Column::Descriptor.eq(cd))
.find_also_related(file::Entity)
.one(&db)
.await?
{
let file = File::new(db, model, hash);
Ok(Some(file))
} else {
Ok(None)
}
}
/// Finds the file by tags
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn find_by_filters(
db: DatabaseConnection,
filters: Vec<Vec<FilterProperty>>,
) -> RepoResult<Vec<Self>> {
let main_condition = filter::build_find_filter_conditions(filters);
let results: Vec<(content_descriptor::Model, Option<file::Model>)> =
content_descriptor::Entity::find()
.find_also_related(file::Entity)
.filter(main_condition)
.group_by(file::Column::Id)
.all(&db)
.await?;
let files: Vec<Self> = results
.into_iter()
.filter_map(|(hash, tag)| Some(Self::new(db.clone(), tag?, hash)))
.collect();
Ok(files)
}
/// Adds a file with its hash to the database
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn add(
db: DatabaseConnection,
cd_id: i64,
mime_type: String,
) -> RepoResult<Self> {
let file = file::ActiveModel {
cd_id: Set(cd_id),
mime_type: Set(mime_type),
..Default::default()
};
let file: file::ActiveModel = file.insert(&db).await?.into();
let file = Self::by_id(db, file.id.unwrap())
.await?
.expect("Inserted file does not exist");
Ok(file)
}
/// Returns the unique identifier of the file
pub fn id(&self) -> i64 {
self.model.id
}
/// Returns the hash of the file (content identifier)
pub fn cd(&self) -> &[u8] {
&self.content_descriptor.descriptor
}
/// Returns the encoded content descriptor
pub fn encoded_cd(&self) -> String {
encode_content_descriptor(self.cd())
}
/// Returns the id of the civ (content identifier value) of the file
pub fn cd_id(&self) -> i64 {
self.content_descriptor.id
}
/// Returns the mime type of the file
pub fn mime_type(&self) -> &String {
&self.model.mime_type
}
/// Returns the status of the file
pub fn status(&self) -> FileStatus {
match self.model.status {
10 => FileStatus::Imported,
20 => FileStatus::Archived,
30 => FileStatus::Deleted,
_ => FileStatus::Imported,
}
}
pub async fn set_status(&mut self, status: FileStatus) -> RepoResult<()> {
let active_model = file::ActiveModel {
id: Set(self.model.id),
status: Set(status as i32),
..Default::default()
};
self.model = active_model.update(&self.db).await?;
Ok(())
}
/// Returns the metadata associated with this file
/// A file MUST always have metadata associated
pub async fn metadata(&self) -> RepoResult<FileMetadata> {
FileMetadata::by_id(self.db.clone(), self.model.id)
.await
.and_then(|f| f.ok_or_else(|| RepoError::from("missing file metadata")))
}
/// Returns the list of tags of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tags(&self) -> RepoResult<Vec<Tag>> {
let tags: Vec<(tag::Model, Option<namespace::Model>)> = tag::Entity::find()
.find_also_related(namespace::Entity)
.join(
JoinType::LeftJoin,
content_descriptor_tag::Relation::Tag.def().rev(),
)
.join(
JoinType::InnerJoin,
content_descriptor_tag::Relation::ContentDescriptorId.def(),
)
.filter(content_descriptor::Column::Id.eq(self.content_descriptor.id))
.all(&self.db)
.await?;
let tags = tags
.into_iter()
.map(|(tag, namespace)| Tag::new(self.db.clone(), tag, namespace))
.collect();
Ok(tags)
}
/// Adds a single tag to the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_tag(&mut self, tag_id: i64) -> RepoResult<()> {
let cd_id = self.content_descriptor.id;
let active_model = content_descriptor_tag::ActiveModel {
cd_id: Set(cd_id),
tag_id: Set(tag_id),
};
active_model.insert(&self.db).await?;
Ok(())
}
/// Adds multiple tags to the file at once
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
if tag_ids.is_empty() {
return Ok(());
}
let cd_id = self.content_descriptor.id;
let models: Vec<content_descriptor_tag::ActiveModel> = tag_ids
.into_iter()
.map(|tag_id| content_descriptor_tag::ActiveModel {
cd_id: Set(cd_id),
tag_id: Set(tag_id),
})
.collect();
content_descriptor_tag::Entity::insert_many(models)
.exec(&self.db)
.await?;
Ok(())
}
/// Removes multiple tags from the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn remove_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
let hash_id = self.content_descriptor.id;
content_descriptor_tag::Entity::delete_many()
.filter(content_descriptor_tag::Column::CdId.eq(hash_id))
.filter(content_descriptor_tag::Column::TagId.is_in(tag_ids))
.exec(&self.db)
.await?;
Ok(())
}
/// Returns the reader for the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_reader(
&self,
storage: &FileHashStore,
) -> RepoResult<BufReader<tokio::fs::File>> {
storage
.get_file(&self.content_descriptor.descriptor)
.await
.map(|(_, f)| f)
}
/// Creates a thumbnail for the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn create_thumbnail<I: IntoIterator<Item = ThumbnailSize> + Debug>(
&self,
storage: &FileHashStore,
sizes: I,
) -> RepoResult<Vec<ThumbnailerThumb>> {
let mut buf = Vec::new();
self.get_reader(storage)
.await?
.read_to_end(&mut buf)
.await?;
let mime_type = self.model.mime_type.clone();
let mime_type =
mime::Mime::from_str(&mime_type).unwrap_or_else(|_| mime::APPLICATION_OCTET_STREAM);
let thumbs = thumbnailer::create_thumbnails(Cursor::new(buf), mime_type, sizes)?;
Ok(thumbs)
}
/// Deletes the file as well as the content descriptor, tag mappings and metadata about the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn delete(self) -> RepoResult<()> {
let trx = self.db.begin().await?;
file_metadata::Entity::delete_many()
.filter(file_metadata::Column::FileId.eq(self.model.id))
.exec(&trx)
.await?;
self.model.delete(&trx).await?;
content_descriptor_tag::Entity::delete_many()
.filter(content_descriptor_tag::Column::CdId.eq(self.content_descriptor.id))
.exec(&trx)
.await?;
content_descriptor::Entity::delete_many()
.filter(content_descriptor::Column::Id.eq(self.content_descriptor.id))
.exec(&trx)
.await?;
trx.commit().await?;
Ok(())
}
}
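// Hedged usage sketch for the model above (crate-internal; assumes a connected
// `DatabaseConnection` named `db` and an existing content descriptor row with id
// `cd_id`; the mime type, status and tag ids are illustrative):
async fn import_and_tag(db: DatabaseConnection, cd_id: i64) -> RepoResult<()> {
    let mut file = File::add(db, cd_id, String::from("image/png")).await?;
    file.set_status(FileStatus::Archived).await?; // status column changes 10 -> 20
    file.add_tags(vec![1, 2, 3]).await?; // bulk insert into the cd <-> tag mapping
    Ok(())
}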

@ -1,124 +0,0 @@
use std::fmt::Debug;
use chrono::{Local, NaiveDateTime};
use sea_orm::prelude::*;
use sea_orm::{DatabaseConnection, Set};
use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::file_metadata;
#[derive(Clone)]
pub struct FileMetadata {
db: DatabaseConnection,
model: file_metadata::Model,
}
impl FileMetadata {
#[tracing::instrument(level = "trace")]
pub(crate) fn new(db: DatabaseConnection, model: file_metadata::Model) -> Self {
Self { db, model }
}
/// Fetches the metadata of a file by the id of the file
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
let file_metadata = file_metadata::Entity::find_by_id(id)
.one(&db)
.await?
.map(|m| FileMetadata::new(db, m));
Ok(file_metadata)
}
/// Fetches metadata for all given file ids
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all_by_ids(db: DatabaseConnection, ids: Vec<i64>) -> RepoResult<Vec<Self>> {
let file_metadata = file_metadata::Entity::find()
.filter(file_metadata::Column::FileId.is_in(ids))
.all(&db)
.await?
.into_iter()
.map(|m| FileMetadata::new(db.clone(), m))
.collect();
Ok(file_metadata)
}
/// Adds metadata (size, creation time and change time) for a file to the database
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn add(
db: DatabaseConnection,
file_id: i64,
size: i64,
creation_time: NaiveDateTime,
change_time: NaiveDateTime,
) -> RepoResult<Self> {
let file = file_metadata::ActiveModel {
file_id: Set(file_id),
size: Set(size),
import_time: Set(Local::now().naive_local()),
creation_time: Set(creation_time),
change_time: Set(change_time),
..Default::default()
};
let model = file.insert(&db).await?;
Ok(Self::new(db, model))
}
pub fn file_id(&self) -> i64 {
self.model.file_id
}
pub fn size(&self) -> i64 {
self.model.size
}
pub fn name(&self) -> &Option<String> {
&self.model.name
}
pub fn comment(&self) -> &Option<String> {
&self.model.comment
}
pub fn import_time(&self) -> &NaiveDateTime {
&self.model.import_time
}
pub fn creation_time(&self) -> &NaiveDateTime {
&self.model.creation_time
}
pub fn change_time(&self) -> &NaiveDateTime {
&self.model.change_time
}
/// Changes the name of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_name<S: ToString + Debug>(&mut self, name: S) -> RepoResult<()> {
let mut active_model = self.get_active_model();
active_model.name = Set(Some(name.to_string()));
self.model = active_model.update(&self.db).await?;
Ok(())
}
/// Changes the comment of the file
#[tracing::instrument(level = "debug", skip(self))]
pub async fn set_comment<S: ToString + Debug>(&mut self, comment: S) -> RepoResult<()> {
let mut active_file = self.get_active_model();
active_file.comment = Set(Some(comment.to_string()));
self.model = active_file.update(&self.db).await?;
Ok(())
}
/// Returns the active model of the file metadata with only the file id set
fn get_active_model(&self) -> file_metadata::ActiveModel {
file_metadata::ActiveModel {
file_id: Set(self.file_id()),
..Default::default()
}
}
}
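// Hedged sketch: updating the metadata record of a file (assumes a connected `db`
// and an existing metadata row for `file_id`; name and comment are illustrative):
async fn relabel(db: DatabaseConnection, file_id: i64) -> RepoResult<()> {
    if let Some(mut metadata) = FileMetadata::by_id(db, file_id).await? {
        metadata.set_name("cover.png").await?;
        metadata.set_comment("imported from the local disk").await?;
    }
    Ok(())
}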

@ -1,8 +0,0 @@
pub mod content_descriptor;
pub mod file;
pub mod file_metadata;
pub mod namespace;
pub mod repo;
pub mod tag;
pub mod thumbnail;
pub mod type_keys;

@ -1,141 +0,0 @@
use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::namespace;
use sea_orm::prelude::*;
use sea_orm::{
Condition, ConnectionTrait, DatabaseBackend, DatabaseConnection, InsertResult, Set, Statement,
};
use std::fmt::Debug;
#[derive(Clone)]
pub struct Namespace {
#[allow(dead_code)]
db: DatabaseConnection,
model: namespace::Model,
}
impl Namespace {
#[tracing::instrument(level = "trace")]
pub(crate) fn new(db: DatabaseConnection, model: namespace::Model) -> Self {
Self { db, model }
}
/// Retrieves a list of all namespaces
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<Self>> {
let namespaces = namespace::Entity::find()
.all(&db)
.await?
.into_iter()
.map(|model| Self::new(db.clone(), model))
.collect();
Ok(namespaces)
}
/// Retrieves the namespace by id
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
let namespace = namespace::Entity::find_by_id(id)
.one(&db)
.await?
.map(|model| Self::new(db, model));
Ok(namespace)
}
/// Retrieves a namespace by its name
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_name<S: AsRef<str> + Debug>(
db: DatabaseConnection,
name: S,
) -> RepoResult<Option<Self>> {
let namespace = namespace::Entity::find()
.filter(namespace::Column::Name.eq(name.as_ref()))
.one(&db)
.await?
.map(|model| Self::new(db, model));
Ok(namespace)
}
/// Returns all namespaces by name
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all_by_name(db: DatabaseConnection, names: Vec<String>) -> RepoResult<Vec<Self>> {
if names.is_empty() {
return Ok(Vec::with_capacity(0));
}
let mut condition = Condition::any();
for name in names {
condition = condition.add(namespace::Column::Name.eq(name));
}
let namespaces = namespace::Entity::find()
.filter(condition)
.all(&db)
.await?
.into_iter()
.map(|model| Self::new(db.clone(), model))
.collect();
Ok(namespaces)
}
/// Adds all namespaces to the database
#[tracing::instrument(level = "debug", skip(db))]
pub async fn add_all(db: DatabaseConnection, names: Vec<String>) -> RepoResult<Vec<Self>> {
if names.is_empty() {
return Ok(vec![]);
}
let models: Vec<namespace::ActiveModel> = names
.into_iter()
.map(|name| namespace::ActiveModel {
name: Set(name),
..Default::default()
})
.collect();
let txn = db.begin().await?;
let last_id = txn
.query_one(Statement::from_string(
DatabaseBackend::Sqlite,
r#"SELECT MAX(id) AS "max_id" FROM namespaces;"#.to_owned(),
))
.await?
.and_then(|result| result.try_get("", "max_id").ok())
.unwrap_or(-1);
let result: InsertResult<namespace::ActiveModel> =
namespace::Entity::insert_many(models).exec(&txn).await?;
let namespaces = namespace::Entity::find()
.filter(namespace::Column::Id.between(last_id, result.last_insert_id + 1))
.all(&txn)
.await?
.into_iter()
.map(|model| Self::new(db.clone(), model))
.collect();
txn.commit().await?;
Ok(namespaces)
}
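// Note on `add_all`: SQLite's `insert_many` only reports the last inserted id, so
// the method snapshots MAX(id) before the insert and then re-selects every row whose
// id lies in the window up to the reported last id to recover the inserted models.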
/// Adds a namespace to the database
#[tracing::instrument(level = "debug", skip(db))]
pub async fn add<S: ToString + Debug>(db: DatabaseConnection, name: S) -> RepoResult<Self> {
let active_model = namespace::ActiveModel {
name: Set(name.to_string()),
..Default::default()
};
let model = active_model.insert(&db).await?;
Ok(Self::new(db, model))
}
/// The ID of the namespace
pub fn id(&self) -> i64 {
self.model.id
}
/// The name of the namespace
pub fn name(&self) -> &String {
&self.model.name
}
}

@ -1,432 +0,0 @@
use crate::content_descriptor::ContentDescriptor;
use crate::file::filter::FilterProperty;
use crate::file::File;
use crate::file_metadata::FileMetadata;
use crate::namespace::Namespace;
use crate::tag::Tag;
use crate::thumbnail::Thumbnail;
use chrono::{Local, NaiveDateTime};
use mediarepo_core::content_descriptor::{encode_content_descriptor, is_v1_content_descriptor};
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_core::fs::thumbnail_store::{Dimensions, ThumbnailStore};
use mediarepo_core::itertools::Itertools;
use mediarepo_core::thumbnailer::ThumbnailSize;
use mediarepo_core::utils::parse_namespace_and_tag;
use mediarepo_database::get_database;
use mediarepo_database::queries::analysis::{get_all_counts, Counts};
use sea_orm::DatabaseConnection;
use std::collections::{HashMap, HashSet};
use std::fmt::Debug;
use std::io::Cursor;
use std::iter::FromIterator;
use std::path::PathBuf;
use std::str::FromStr;
use tokio::fs::OpenOptions;
use tokio::io::AsyncReadExt;
#[derive(Clone)]
pub struct Repo {
db: DatabaseConnection,
main_storage: FileHashStore,
thumbnail_storage: ThumbnailStore,
}
impl Repo {
pub(crate) fn new(
db: DatabaseConnection,
file_store_path: PathBuf,
thumb_store_path: PathBuf,
) -> Self {
Self {
db,
main_storage: FileHashStore::new(file_store_path),
thumbnail_storage: ThumbnailStore::new(thumb_store_path),
}
}
/// Connects to the database with the given uri
#[tracing::instrument(level = "debug")]
pub async fn connect<S: AsRef<str> + Debug>(
uri: S,
file_store_path: PathBuf,
thumb_store_path: PathBuf,
) -> RepoResult<Self> {
let db = get_database(uri).await?;
Ok(Self::new(db, file_store_path, thumb_store_path))
}
/// Returns the database of the repo for raw sql queries
pub fn db(&self) -> &DatabaseConnection {
&self.db
}
/// Returns a file by its content descriptor
#[tracing::instrument(level = "debug", skip(self))]
pub async fn file_by_cd(&self, cd: &[u8]) -> RepoResult<Option<File>> {
File::by_cd(self.db.clone(), cd).await
}
/// Returns a file by id
#[tracing::instrument(level = "debug", skip(self))]
pub async fn file_by_id(&self, id: i64) -> RepoResult<Option<File>> {
File::by_id(self.db.clone(), id).await
}
/// Returns a list of all stored files
#[tracing::instrument(level = "debug", skip(self))]
pub async fn files(&self) -> RepoResult<Vec<File>> {
File::all(self.db.clone()).await
}
/// Finds all files matching the given filter properties
#[tracing::instrument(level = "debug", skip(self))]
pub async fn find_files_by_filters(
&self,
filters: Vec<Vec<FilterProperty>>,
) -> RepoResult<Vec<File>> {
File::find_by_filters(self.db.clone(), filters).await
}
/// Returns all file metadata entries for the given file ids
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_file_metadata_for_ids(&self, ids: Vec<i64>) -> RepoResult<Vec<FileMetadata>> {
FileMetadata::all_by_ids(self.db.clone(), ids).await
}
/// Adds a file from bytes to the database
#[tracing::instrument(level = "debug", skip(self, content))]
pub async fn add_file(
&self,
mime_type: Option<String>,
content: Vec<u8>,
creation_time: NaiveDateTime,
change_time: NaiveDateTime,
) -> RepoResult<File> {
let file_size = content.len();
let reader = Cursor::new(content);
let cd_binary = self.main_storage.add_file(reader, None).await?;
let cd = ContentDescriptor::add(self.db.clone(), cd_binary).await?;
let mime_type = mime_type
.and_then(|m| mime::Mime::from_str(&m).ok())
.unwrap_or_else(|| mime::APPLICATION_OCTET_STREAM)
.to_string();
let file = File::add(self.db.clone(), cd.id(), mime_type).await?;
FileMetadata::add(
self.db.clone(),
file.id(),
file_size as i64,
creation_time,
change_time,
)
.await?;
Ok(file)
}
/// Adds a file to the database by its readable path in the file system
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_file_by_path(&self, path: PathBuf) -> RepoResult<File> {
let mime_type = mime_guess::from_path(&path).first().map(|m| m.to_string());
let mut os_file = OpenOptions::new().read(true).open(&path).await?;
let mut buf = Vec::new();
os_file.read_to_end(&mut buf).await?;
self.add_file(
mime_type,
buf,
Local::now().naive_local(),
Local::now().naive_local(),
)
.await
}
/// Deletes a file from the database and disk
#[tracing::instrument(level = "debug", skip(self, file))]
pub async fn delete_file(&self, file: File) -> RepoResult<()> {
let cd = file.cd().to_owned();
let cd_string = file.encoded_cd();
file.delete().await?;
self.main_storage.delete_file(&cd).await?;
self.thumbnail_storage.delete_parent(&cd_string).await?;
Ok(())
}
/// Returns all thumbnails of a file
pub async fn get_file_thumbnails(&self, file_cd: &[u8]) -> RepoResult<Vec<Thumbnail>> {
let file_cd = encode_content_descriptor(file_cd);
let thumbnails = self
.thumbnail_storage
.get_thumbnails(&file_cd)
.await?
.into_iter()
.map(|(size, path)| Thumbnail {
file_hash: file_cd.to_owned(),
path,
size,
mime_type: mime::IMAGE_PNG.to_string(),
})
.collect_vec();
Ok(thumbnails)
}
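/// Reads the full contents of a file from the main storage into memory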
pub async fn get_file_bytes(&self, file: &File) -> RepoResult<Vec<u8>> {
let mut buf = Vec::new();
let mut reader = file.get_reader(&self.main_storage).await?;
reader.read_to_end(&mut buf).await?;
Ok(buf)
}
/// Creates thumbnails for a file (currently only the medium size is generated)
#[tracing::instrument(level = "debug", skip(self, file))]
pub async fn create_thumbnails_for_file(&self, file: &File) -> RepoResult<Vec<Thumbnail>> {
let size = ThumbnailSize::Medium;
let (height, width) = size.dimensions();
let thumbs = file.create_thumbnail(&self.main_storage, [size]).await?;
let mut created_thumbs = Vec::with_capacity(1);
for thumb in thumbs {
let entry = self
.store_single_thumbnail(file.encoded_cd(), height, width, thumb)
.await?;
created_thumbs.push(entry);
}
Ok(created_thumbs)
}
#[tracing::instrument(level = "debug", skip(self, file))]
pub async fn create_file_thumbnail(
&self,
file: &File,
size: ThumbnailSize,
) -> RepoResult<Thumbnail> {
let (height, width) = size.dimensions();
let thumb = file
.create_thumbnail(&self.main_storage, [size])
.await?
.pop()
.ok_or_else(|| RepoError::from("Failed to create thumbnail"))?;
let thumbnail = self
.store_single_thumbnail(file.encoded_cd(), height, width, thumb)
.await?;
Ok(thumbnail)
}
/// Stores a single thumbnail
async fn store_single_thumbnail(
&self,
file_hash: String,
height: u32,
width: u32,
thumb: mediarepo_core::thumbnailer::Thumbnail,
) -> RepoResult<Thumbnail> {
let mut buf = Vec::new();
thumb.write_png(&mut buf)?;
let size = Dimensions { height, width };
let path = self
.thumbnail_storage
.add_thumbnail(&file_hash, size.clone(), &buf)
.await?;
let thumbnail = Thumbnail {
file_hash,
path,
size,
mime_type: mime::IMAGE_PNG.to_string(),
};
Ok(thumbnail)
}
/// Returns all tags stored in the database
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tags(&self) -> RepoResult<Vec<Tag>> {
Tag::all(self.db.clone()).await
}
/// Returns all namespaces stored in the database
#[tracing::instrument(level = "debug", skip(self))]
pub async fn namespaces(&self) -> RepoResult<Vec<Namespace>> {
Namespace::all(self.db.clone()).await
}
/// Converts a list of tag names to tag ids
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tag_names_to_ids(&self, tags: Vec<String>) -> RepoResult<HashMap<String, i64>> {
let parsed_tags = tags
.iter()
.map(|tag| parse_namespace_and_tag(tag.clone()))
.unique()
.collect();
let db_tags = self.tags_by_names(parsed_tags).await?;
let tag_map: HashMap<String, i64> =
HashMap::from_iter(db_tags.into_iter().map(|t| (t.normalized_name(), t.id())));
Ok(tag_map)
}
/// Finds all tags by name
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tags_by_names(&self, tags: Vec<(Option<String>, String)>) -> RepoResult<Vec<Tag>> {
Tag::all_by_name(self.db.clone(), tags).await
}
/// Finds all tags that are assigned to the given list of content descriptors
#[tracing::instrument(level = "debug", skip_all)]
pub async fn find_tags_for_file_identifiers(&self, cds: Vec<Vec<u8>>) -> RepoResult<Vec<Tag>> {
Tag::for_cd_list(self.db.clone(), cds).await
}
/// Adds all tags that are not yet in the database and returns them together with the ones that already exist
#[tracing::instrument(level = "debug", skip_all)]
pub async fn add_all_tags(&self, tags: Vec<(Option<String>, String)>) -> RepoResult<Vec<Tag>> {
let mut tags_to_add = tags.into_iter().unique().collect_vec();
let mut namespaces_to_add = tags_to_add
.iter()
.filter_map(|(namespace, _)| namespace.clone())
.unique()
.collect_vec();
let mut existing_namespaces =
Namespace::all_by_name(self.db.clone(), namespaces_to_add.clone()).await?;
{
let existing_namespaces_set = existing_namespaces
.iter()
.map(|n| n.name().clone())
.collect::<HashSet<String>>();
namespaces_to_add.retain(|namespace| !existing_namespaces_set.contains(namespace));
}
existing_namespaces
.append(&mut Namespace::add_all(self.db.clone(), namespaces_to_add).await?);
let mut existing_tags = self.tags_by_names(tags_to_add.clone()).await?;
{
let existing_tags_set = existing_tags
.iter()
.map(|t| (t.namespace().map(|n| n.name().clone()), t.name().clone()))
.collect::<HashSet<(Option<String>, String)>>();
tags_to_add.retain(|t| !existing_tags_set.contains(t));
}
let namespace_map = existing_namespaces
.into_iter()
.map(|namespace| (namespace.name().clone(), namespace.id()))
.collect::<HashMap<String, i64>>();
let tags_to_add = tags_to_add
.into_iter()
.map(|(nsp, name)| (nsp.and_then(|n| namespace_map.get(&n)).map(|i| *i), name))
.collect_vec();
existing_tags.append(&mut Tag::add_all(self.db.clone(), tags_to_add).await?);
Ok(existing_tags)
}
/// Adds or finds a tag
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_or_find_tag<S: ToString + Debug>(&self, tag: S) -> RepoResult<Tag> {
let (namespace, name) = parse_namespace_and_tag(tag.to_string());
if let Some(namespace) = namespace {
self.add_or_find_namespaced_tag(name, namespace).await
} else {
self.add_or_find_unnamespaced_tag(name).await
}
}
/// Adds or finds an unnamespaced tag
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_or_find_unnamespaced_tag(&self, name: String) -> RepoResult<Tag> {
if let Some(tag) = Tag::by_name(self.db.clone(), &name, None).await? {
Ok(tag)
} else {
self.add_unnamespaced_tag(name).await
}
}
/// Adds an unnamespaced tag
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_unnamespaced_tag(&self, name: String) -> RepoResult<Tag> {
Tag::add(self.db.clone(), name, None).await
}
/// Adds or finds a namespaced tag
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_or_find_namespaced_tag(
&self,
name: String,
namespace: String,
) -> RepoResult<Tag> {
if let Some(tag) = Tag::by_name(self.db.clone(), &name, Some(namespace.clone())).await? {
Ok(tag)
} else {
self.add_namespaced_tag(name, namespace).await
}
}
/// Adds a namespaced tag
#[tracing::instrument(level = "debug", skip(self))]
pub async fn add_namespaced_tag(&self, name: String, namespace: String) -> RepoResult<Tag> {
let namespace =
if let Some(namespace) = Namespace::by_name(self.db.clone(), &namespace).await? {
namespace
} else {
Namespace::add(self.db.clone(), namespace).await?
};
Tag::add(self.db.clone(), name, Some(namespace.id())).await
}
/// Returns the size of the main storage
#[inline]
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_main_store_size(&self) -> RepoResult<u64> {
self.main_storage.get_size().await
}
/// Returns the size of the thumbnail storage
#[inline]
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_thumb_store_size(&self) -> RepoResult<u64> {
self.thumbnail_storage.get_size().await
}
/// Returns all entity counts
#[inline]
#[tracing::instrument(level = "debug", skip(self))]
pub async fn get_counts(&self) -> RepoResult<Counts> {
get_all_counts(&self.db).await
}
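/// Migrates all v1 content descriptors (including their stored files and thumbnails) to the v2 format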
pub async fn migrate(&self) -> RepoResult<()> {
let cds = ContentDescriptor::all(self.db.clone()).await?;
tracing::info!("Converting content descriptors to v2 format...");
let mut converted_count = 0;
for mut cd in cds {
if is_v1_content_descriptor(cd.descriptor()) {
let src_cd = cd.descriptor().to_owned();
cd.convert_v1_to_v2().await?;
let dst_cd = cd.descriptor().to_owned();
self.main_storage.rename_file(&src_cd, &dst_cd).await?;
self.thumbnail_storage
.rename_parent(
encode_content_descriptor(&src_cd),
encode_content_descriptor(&dst_cd),
)
.await?;
converted_count += 1;
}
}
tracing::info!("Converted {} descriptors", converted_count);
Ok(())
}
}
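// Hedged end-to-end sketch for the repo facade above (the database URI and the
// storage paths are illustrative placeholders):
async fn import_one(path: PathBuf) -> RepoResult<()> {
    let repo = Repo::connect(
        "sqlite://repo.db",
        PathBuf::from("files"),
        PathBuf::from("thumbnails"),
    )
    .await?;
    let file = repo.add_file_by_path(path).await?;
    repo.create_thumbnails_for_file(&file).await?;
    Ok(())
}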

@ -1,226 +0,0 @@
use std::fmt::Debug;
use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::content_descriptor_tag;
use mediarepo_database::entities::namespace;
use mediarepo_database::entities::tag;
use sea_orm::prelude::*;
use sea_orm::query::ConnectionTrait;
use sea_orm::sea_query::Expr;
use sea_orm::{Condition, DatabaseBackend, DatabaseConnection, JoinType, Set, Statement};
use sea_orm::{InsertResult, QuerySelect};
use crate::namespace::Namespace;
#[derive(Clone)]
pub struct Tag {
db: DatabaseConnection,
model: tag::Model,
namespace: Option<namespace::Model>,
}
impl Tag {
#[tracing::instrument(level = "trace")]
pub(crate) fn new(
db: DatabaseConnection,
model: tag::Model,
namespace: Option<namespace::Model>,
) -> Self {
Self {
db,
model,
namespace,
}
}
/// Returns all tags stored in the database
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<Self>> {
let tags: Vec<Self> = tag::Entity::find()
.left_join(namespace::Entity)
.select_also(namespace::Entity)
.all(&db)
.await?
.into_iter()
.map(|(tag, namespace)| Self::new(db.clone(), tag, namespace))
.collect();
Ok(tags)
}
/// Returns the tag by id
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
let tag = tag::Entity::find_by_id(id)
.find_also_related(namespace::Entity)
.one(&db)
.await?
.map(|(model, namespace)| Self::new(db, model, namespace));
Ok(tag)
}
/// Returns one tag by name and namespace
#[tracing::instrument(level = "debug", skip(db))]
pub async fn by_name<S1: ToString + Debug>(
db: DatabaseConnection,
name: S1,
namespace: Option<String>,
) -> RepoResult<Option<Self>> {
let mut entries = Self::all_by_name(db, vec![(namespace, name.to_string())]).await?;
Ok(entries.pop())
}
/// Retrieves tags by (namespace, name) pairs; a trailing `*` in the name matches as a prefix
#[tracing::instrument(level = "debug", skip(db))]
pub async fn all_by_name(
db: DatabaseConnection,
namespaces_with_names: Vec<(Option<String>, String)>,
) -> RepoResult<Vec<Self>> {
if namespaces_with_names.is_empty() {
return Ok(vec![]);
}
let mut or_condition = Condition::any();
for (namespace, name) in namespaces_with_names {
let mut all_condition = Condition::all();
if !name.ends_with('*') {
all_condition = all_condition.add(tag::Column::Name.eq(name))
} else if name.len() > 1 {
all_condition = all_condition
.add(tag::Column::Name.like(&*format!("{}%", name.trim_end_matches("*"))))
} else if namespace.is_none() {
continue; // would result in an empty condition otherwise
}
all_condition = if let Some(namespace) = namespace {
all_condition.add(namespace::Column::Name.eq(namespace))
} else {
all_condition.add(Expr::tbl(tag::Entity, tag::Column::NamespaceId).is_null())
};
or_condition = or_condition.add(all_condition);
}
let tags: Vec<Self> = tag::Entity::find()
.find_also_related(namespace::Entity)
.filter(or_condition)
.group_by(tag::Column::Id)
.all(&db)
.await?
.into_iter()
.map(|(t, n)| Self::new(db.clone(), t, n))
.collect();
Ok(tags)
}
/// Returns all tags that are assigned to any of the passed content descriptors
#[tracing::instrument(level = "debug", skip_all)]
pub async fn for_cd_list(db: DatabaseConnection, cds: Vec<Vec<u8>>) -> RepoResult<Vec<Self>> {
let tags: Vec<Self> = tag::Entity::find()
.find_also_related(namespace::Entity)
.join(
JoinType::LeftJoin,
content_descriptor_tag::Relation::Tag.def().rev(),
)
.join(
JoinType::InnerJoin,
content_descriptor_tag::Relation::ContentDescriptorId.def(),
)
.filter(content_descriptor::Column::Descriptor.is_in(cds))
.group_by(tag::Column::Id)
.all(&db)
.await?
.into_iter()
.map(|(t, n)| Self::new(db.clone(), t, n))
.collect();
Ok(tags)
}
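/// Adds all given (namespace id, name) pairs as new tags in one transaction and returns the created tags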
pub async fn add_all(
db: DatabaseConnection,
namespaces_with_names: Vec<(Option<i64>, String)>,
) -> RepoResult<Vec<Self>> {
if namespaces_with_names.is_empty() {
return Ok(vec![]);
}
let models: Vec<tag::ActiveModel> = namespaces_with_names
.into_iter()
.map(|(namespace_id, name)| tag::ActiveModel {
name: Set(name),
namespace_id: Set(namespace_id),
..Default::default()
})
.collect();
let txn = db.begin().await?;
let last_id: i64 = txn
.query_one(Statement::from_string(
DatabaseBackend::Sqlite,
r#"SELECT MAX(id) as "max_id" FROM tags"#.to_owned(),
))
.await?
.and_then(|res| res.try_get("", "max_id").ok())
.unwrap_or(-1);
let result: InsertResult<tag::ActiveModel> =
tag::Entity::insert_many(models).exec(&txn).await?;
let tags: Vec<Self> = tag::Entity::find()
.find_also_related(namespace::Entity)
.filter(tag::Column::Id.between(last_id, result.last_insert_id + 1))
.all(&txn)
.await?
.into_iter()
.map(|(t, n)| Self::new(db.clone(), t, n))
.collect();
txn.commit().await?;
Ok(tags)
}
/// Adds a new tag to the database
#[tracing::instrument(level = "debug", skip(db))]
pub async fn add<S: ToString + Debug>(
db: DatabaseConnection,
name: S,
namespace_id: Option<i64>,
) -> RepoResult<Self> {
let active_model = tag::ActiveModel {
name: Set(name.to_string()),
namespace_id: Set(namespace_id),
..Default::default()
};
let model: tag::Model = active_model.insert(&db).await?;
let namespace = model.find_related(namespace::Entity).one(&db).await?;
Ok(Self::new(db, model, namespace))
}
/// The ID of the tag
pub fn id(&self) -> i64 {
self.model.id
}
/// The name of the tag
pub fn name(&self) -> &String {
&self.model.name
}
/// The namespace of the tag
pub fn namespace(&self) -> Option<Namespace> {
self.namespace
.clone()
.map(|n| Namespace::new(self.db.clone(), n))
}
/// Returns the normalized name of the tag (namespace:tag)
pub fn normalized_name(&self) -> String {
if let Some(namespace) = &self.namespace {
format!("{}:{}", namespace.name, self.model.name)
} else {
self.model.name.to_owned()
}
}
}
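// Hedged lookup sketch: `all_by_name` treats a trailing `*` as a prefix wildcard,
// so this finds every tag in the `character` namespace whose name starts with "ru"
// (assumes a connected `db`; the namespace and prefix are illustrative):
async fn find_prefixed(db: DatabaseConnection) -> RepoResult<Vec<Tag>> {
    Tag::all_by_name(db, vec![(Some(String::from("character")), String::from("ru*"))]).await
}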

@ -1,30 +0,0 @@
use mediarepo_core::error::RepoResult;
use mediarepo_core::fs::thumbnail_store::Dimensions;
use std::path::PathBuf;
use tokio::fs::{self, File, OpenOptions};
use tokio::io::BufReader;
#[derive(Debug)]
pub struct Thumbnail {
pub file_hash: String,
pub path: PathBuf,
pub size: Dimensions,
pub mime_type: String,
}
impl Thumbnail {
/// Returns the reader of the thumbnail file
#[tracing::instrument(level = "debug")]
pub async fn get_reader(&self) -> RepoResult<BufReader<File>> {
let file = OpenOptions::new().read(true).open(&self.path).await?;
Ok(BufReader::new(file))
}
/// Deletes the thumbnail
#[tracing::instrument(level = "debug")]
pub async fn delete(self) -> RepoResult<()> {
fs::remove_file(&self.path).await?;
Ok(())
}
}
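// Hedged sketch: buffering a thumbnail into memory through its reader (illustrative):
async fn thumbnail_bytes(thumb: &Thumbnail) -> RepoResult<Vec<u8>> {
    use tokio::io::AsyncReadExt;
    let mut buf = Vec::new();
    thumb.get_reader().await?.read_to_end(&mut buf).await?;
    Ok(buf)
}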

@ -19,8 +19,8 @@ path = "../mediarepo-core"
[dependencies.mediarepo-database] [dependencies.mediarepo-database]
path = "../mediarepo-database" path = "../mediarepo-database"
[dependencies.mediarepo-model] [dependencies.mediarepo-logic]
path = "../mediarepo-model" path = "../mediarepo-logic"
[dependencies.tokio] [dependencies.tokio]
version = "^1.15.0" version = "^1.15.0"

@ -2,36 +2,34 @@ use mediarepo_core::mediarepo_api::types::files::{
FileBasicDataResponse, FileMetadataResponse, FileStatus, ThumbnailMetadataResponse, FileBasicDataResponse, FileMetadataResponse, FileStatus, ThumbnailMetadataResponse,
}; };
use mediarepo_core::mediarepo_api::types::tags::{NamespaceResponse, TagResponse}; use mediarepo_core::mediarepo_api::types::tags::{NamespaceResponse, TagResponse};
use mediarepo_model::file::{File, FileStatus as FileStatusModel}; use mediarepo_logic::dto::{
use mediarepo_model::file_metadata::FileMetadata; FileDto, FileMetadataDto, FileStatus as FileStatusModel, NamespaceDto, TagDto, ThumbnailDto,
use mediarepo_model::namespace::Namespace; };
use mediarepo_model::tag::Tag;
use mediarepo_model::thumbnail::Thumbnail;
pub trait FromModel<M> { pub trait FromModel<M> {
fn from_model(model: M) -> Self; fn from_model(model: M) -> Self;
} }
impl FromModel<FileMetadata> for FileMetadataResponse { impl FromModel<FileMetadataDto> for FileMetadataResponse {
fn from_model(metadata: FileMetadata) -> Self { fn from_model(model: FileMetadataDto) -> Self {
Self { Self {
file_id: metadata.file_id(), file_id: model.file_id(),
name: metadata.name().to_owned(), name: model.name().cloned(),
comment: metadata.comment().to_owned(), comment: model.comment().cloned(),
creation_time: metadata.creation_time().to_owned(), creation_time: model.creation_time().to_owned(),
change_time: metadata.change_time().to_owned(), change_time: model.change_time().to_owned(),
import_time: metadata.import_time().to_owned(), import_time: model.import_time().to_owned(),
} }
} }
} }
impl FromModel<File> for FileBasicDataResponse { impl FromModel<FileDto> for FileBasicDataResponse {
fn from_model(file: File) -> Self { fn from_model(model: FileDto) -> Self {
FileBasicDataResponse { FileBasicDataResponse {
id: file.id(), id: model.id(),
status: FileStatus::from_model(file.status()), status: FileStatus::from_model(model.status()),
cd: file.encoded_cd(), cd: model.encoded_cd(),
mime_type: file.mime_type().to_owned(), mime_type: model.mime_type().to_owned(),
} }
} }
} }
@ -46,8 +44,8 @@ impl FromModel<FileStatusModel> for FileStatus {
} }
} }
impl FromModel<Tag> for TagResponse { impl FromModel<TagDto> for TagResponse {
fn from_model(model: Tag) -> Self { fn from_model(model: TagDto) -> Self {
Self { Self {
id: model.id(), id: model.id(),
namespace: model.namespace().map(|n| n.name().to_owned()), namespace: model.namespace().map(|n| n.name().to_owned()),
@ -56,19 +54,19 @@ impl FromModel<Tag> for TagResponse {
} }
} }
impl FromModel<Thumbnail> for ThumbnailMetadataResponse { impl FromModel<ThumbnailDto> for ThumbnailMetadataResponse {
fn from_model(model: Thumbnail) -> Self { fn from_model(model: ThumbnailDto) -> Self {
Self { Self {
file_hash: model.file_hash, file_hash: model.parent_cd().to_owned(),
height: model.size.height, height: model.size().height,
width: model.size.width, width: model.size().width,
mime_type: model.mime_type.to_owned(), mime_type: model.mime_type().to_owned(),
} }
} }
} }
impl FromModel<Namespace> for NamespaceResponse { impl FromModel<NamespaceDto> for NamespaceResponse {
fn from_model(model: Namespace) -> Self { fn from_model(model: NamespaceDto) -> Self {
Self { Self {
id: model.id(), id: model.id(),
name: model.name().to_owned(), name: model.name().to_owned(),

@ -1,16 +1,18 @@
use std::net::SocketAddr;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::net::TcpListener;
use tokio::task::JoinHandle;
use mediarepo_core::bromine::prelude::*; use mediarepo_core::bromine::prelude::*;
use mediarepo_core::error::{RepoError, RepoResult}; use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::mediarepo_api::types::misc::InfoResponse; use mediarepo_core::mediarepo_api::types::misc::InfoResponse;
use mediarepo_core::settings::{PortSetting, Settings}; use mediarepo_core::settings::{PortSetting, Settings};
use mediarepo_core::tokio_graceful_shutdown::SubsystemHandle; use mediarepo_core::tokio_graceful_shutdown::SubsystemHandle;
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey, SizeMetadataKey, SubsystemKey}; use mediarepo_core::type_keys::{RepoPathKey, SettingsKey, SizeMetadataKey, SubsystemKey};
use mediarepo_model::repo::Repo; use mediarepo_logic::dao::repo::Repo;
use mediarepo_model::type_keys::RepoKey; use mediarepo_logic::type_keys::RepoKey;
use std::net::SocketAddr;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::net::TcpListener;
use tokio::task::JoinHandle;
mod from_model; mod from_model;
mod namespaces; mod namespaces;

@ -1,11 +1,8 @@
mod searching; use tokio::io::AsyncReadExt;
mod sorting;
use crate::from_model::FromModel;
use crate::namespaces::files::searching::find_files_for_filters;
use crate::namespaces::files::sorting::sort_files_by_properties;
use crate::utils::{cd_by_identifier, file_by_identifier, get_repo_from_context};
use mediarepo_core::bromine::prelude::*; use mediarepo_core::bromine::prelude::*;
use mediarepo_core::content_descriptor::{create_content_descriptor, encode_content_descriptor};
use mediarepo_core::error::RepoError;
use mediarepo_core::fs::thumbnail_store::Dimensions; use mediarepo_core::fs::thumbnail_store::Dimensions;
use mediarepo_core::itertools::Itertools; use mediarepo_core::itertools::Itertools;
use mediarepo_core::mediarepo_api::types::files::{ use mediarepo_core::mediarepo_api::types::files::{
@ -17,7 +14,16 @@ use mediarepo_core::mediarepo_api::types::filtering::FindFilesRequest;
use mediarepo_core::mediarepo_api::types::identifier::FileIdentifier; use mediarepo_core::mediarepo_api::types::identifier::FileIdentifier;
use mediarepo_core::thumbnailer::ThumbnailSize; use mediarepo_core::thumbnailer::ThumbnailSize;
use mediarepo_core::utils::parse_namespace_and_tag; use mediarepo_core::utils::parse_namespace_and_tag;
use tokio::io::AsyncReadExt; use mediarepo_logic::dao::DaoProvider;
use mediarepo_logic::dto::{AddFileDto, AddTagDto, UpdateFileDto, UpdateFileMetadataDto};
use crate::from_model::FromModel;
use crate::namespaces::files::searching::find_files_for_filters;
use crate::namespaces::files::sorting::sort_files_by_properties;
use crate::utils::{cd_by_identifier, file_by_identifier, get_repo_from_context};
mod searching;
mod sorting;
pub struct FilesNamespace; pub struct FilesNamespace;
@ -50,7 +56,7 @@ impl FilesNamespace {
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
async fn all_files(ctx: &Context, _event: Event) -> IPCResult<()> { async fn all_files(ctx: &Context, _event: Event) -> IPCResult<()> {
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let files = repo.files().await?; let files = repo.file().all().await?;
let responses: Vec<FileBasicDataResponse> = files let responses: Vec<FileBasicDataResponse> = files
.into_iter() .into_iter()
@ -80,7 +86,17 @@ impl FilesNamespace {
let id = event.payload::<FileIdentifier>()?; let id = event.payload::<FileIdentifier>()?;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let file = file_by_identifier(id, &repo).await?; let file = file_by_identifier(id, &repo).await?;
let metadata = file.metadata().await?; let file_id = file.id();
let metadata = if let Some(metadata) = file.into_metadata() {
metadata
} else {
repo.file()
.metadata(file_id)
.await?
.ok_or_else(|| RepoError::from("file metadata not found"))?
};
ctx.emit_to( ctx.emit_to(
Self::name(), Self::name(),
"get_file_metadata", "get_file_metadata",
@ -135,22 +151,38 @@ impl FilesNamespace {
.into_inner(); .into_inner();
let AddFileRequestHeader { metadata, tags } = request; let AddFileRequestHeader { metadata, tags } = request;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let bytes = bytes.into_inner();
let cd = create_content_descriptor(&bytes);
let file = repo let file = if let Some(file) = repo.file().by_cd(cd).await? {
.add_file( tracing::debug!("Inserted file already exists");
metadata.mime_type, file
bytes.into_inner(), } else {
metadata.creation_time, let add_dto = AddFileDto {
metadata.change_time, content: bytes,
) mime_type: metadata
.await?; .mime_type
file.metadata().await?.set_name(metadata.name).await?; .unwrap_or(String::from("application/octet-stream")),
creation_time: metadata.creation_time,
change_time: metadata.change_time,
name: Some(metadata.name),
};
repo.file().add(add_dto).await?
};
let tags = repo let tags = repo
.add_all_tags(tags.into_iter().map(parse_namespace_and_tag).collect()) .tag()
.add_all(
tags.into_iter()
.map(parse_namespace_and_tag)
.map(AddTagDto::from_tuple)
.collect(),
)
.await?; .await?;
let tag_ids: Vec<i64> = tags.into_iter().map(|t| t.id()).unique().collect(); let tag_ids: Vec<i64> = tags.into_iter().map(|t| t.id()).unique().collect();
file.add_tags(tag_ids).await?; repo.tag()
.upsert_mappings(vec![file.cd_id()], tag_ids)
.await?;
ctx.emit_to( ctx.emit_to(
Self::name(), Self::name(),
@ -167,7 +199,14 @@ impl FilesNamespace {
let request = event.payload::<UpdateFileStatusRequest>()?; let request = event.payload::<UpdateFileStatusRequest>()?;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let mut file = file_by_identifier(request.file_id, &repo).await?; let mut file = file_by_identifier(request.file_id, &repo).await?;
file.set_status(request.status.into()).await?; file = repo
.file()
.update(UpdateFileDto {
id: file.id(),
status: Some(request.status.into()),
..Default::default()
})
.await?;
ctx.emit_to( ctx.emit_to(
Self::name(), Self::name(),
"update_file_status", "update_file_status",
@ -184,7 +223,7 @@ impl FilesNamespace {
let request = event.payload::<ReadFileRequest>()?; let request = event.payload::<ReadFileRequest>()?;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let file = file_by_identifier(request.id, &repo).await?; let file = file_by_identifier(request.id, &repo).await?;
let bytes = repo.get_file_bytes(&file).await?; let bytes = repo.file().get_bytes(file.cd()).await?;
ctx.emit_to(Self::name(), "read_file", BytePayload::new(bytes)) ctx.emit_to(Self::name(), "read_file", BytePayload::new(bytes))
.await?; .await?;
@ -198,7 +237,7 @@ impl FilesNamespace {
let id = event.payload::<FileIdentifier>()?; let id = event.payload::<FileIdentifier>()?;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let file = file_by_identifier(id, &repo).await?; let file = file_by_identifier(id, &repo).await?;
repo.delete_file(file).await?; repo.file().delete(file).await?;
ctx.emit_to(Self::name(), "delete_file", ()).await?; ctx.emit_to(Self::name(), "delete_file", ()).await?;
@ -211,12 +250,18 @@ impl FilesNamespace {
let request = event.payload::<GetFileThumbnailsRequest>()?; let request = event.payload::<GetFileThumbnailsRequest>()?;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let file_cd = cd_by_identifier(request.id.clone(), &repo).await?; let file_cd = cd_by_identifier(request.id.clone(), &repo).await?;
let mut thumbnails = repo.get_file_thumbnails(&file_cd).await?; let mut thumbnails = repo
.file()
.thumbnails(encode_content_descriptor(&file_cd))
.await?;
if thumbnails.is_empty() { if thumbnails.is_empty() {
tracing::debug!("No thumbnails for file found. Creating thumbnails..."); tracing::debug!("No thumbnails for file found. Creating thumbnails...");
let file = file_by_identifier(request.id, &repo).await?; let file = file_by_identifier(request.id, &repo).await?;
thumbnails = repo.create_thumbnails_for_file(&file).await?; thumbnails = repo
.file()
.create_thumbnails(file, vec![ThumbnailSize::Medium])
.await?;
tracing::debug!("Thumbnails for file created."); tracing::debug!("Thumbnails for file created.");
} }
@ -236,17 +281,20 @@ impl FilesNamespace {
let request = event.payload::<GetFileThumbnailOfSizeRequest>()?; let request = event.payload::<GetFileThumbnailOfSizeRequest>()?;
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let file_cd = cd_by_identifier(request.id.clone(), &repo).await?; let file_cd = cd_by_identifier(request.id.clone(), &repo).await?;
let thumbnails = repo.get_file_thumbnails(&file_cd).await?;
let min_size = request.min_size; let min_size = request.min_size;
let max_size = request.max_size; let max_size = request.max_size;
let thumbnails = repo
.file()
.thumbnails(encode_content_descriptor(&file_cd))
.await?;
let found_thumbnail = thumbnails.into_iter().find(|thumb| { let found_thumbnail = thumbnails.into_iter().find(|thumb| {
let Dimensions { height, width } = thumb.size; let Dimensions { height, width } = thumb.size();
height >= min_size.0 *height >= min_size.0
&& height <= max_size.0 && *height <= max_size.0
&& width >= min_size.1 && *width >= min_size.1
&& width <= max_size.1 && *width <= max_size.1
}); });
let thumbnail = if let Some(thumbnail) = found_thumbnail { let thumbnail = if let Some(thumbnail) = found_thumbnail {
@ -255,10 +303,14 @@ impl FilesNamespace {
let file = file_by_identifier(request.id, &repo).await?; let file = file_by_identifier(request.id, &repo).await?;
let middle_size = ((max_size.0 + min_size.0) / 2, (max_size.1 + min_size.1) / 2); let middle_size = ((max_size.0 + min_size.0) / 2, (max_size.1 + min_size.1) / 2);
let thumbnail = repo let thumbnail = repo
.create_file_thumbnail(&file, ThumbnailSize::Custom(middle_size)) .file()
.create_thumbnails(file, vec![ThumbnailSize::Custom(middle_size)])
.await?; .await?;
thumbnail thumbnail
.into_iter()
.next()
.ok_or_else(|| RepoError::from("thumbnail could not be created"))?
}; };
let mut buf = Vec::new(); let mut buf = Vec::new();
thumbnail.get_reader().await?.read_to_end(&mut buf).await?; thumbnail.get_reader().await?.read_to_end(&mut buf).await?;
@ -280,8 +332,15 @@ impl FilesNamespace {
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let request = event.payload::<UpdateFileNameRequest>()?; let request = event.payload::<UpdateFileNameRequest>()?;
let file = file_by_identifier(request.file_id, &repo).await?; let file = file_by_identifier(request.file_id, &repo).await?;
let mut metadata = file.metadata().await?;
metadata.set_name(request.name).await?; let metadata = repo
.file()
.update_metadata(UpdateFileMetadataDto {
file_id: file.id(),
name: Some(Some(request.name)),
..Default::default()
})
.await?;
ctx.emit_to( ctx.emit_to(
Self::name(), Self::name(),
@ -299,7 +358,7 @@ impl FilesNamespace {
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let id = event.payload::<FileIdentifier>()?; let id = event.payload::<FileIdentifier>()?;
let file = file_by_identifier(id, &repo).await?; let file = file_by_identifier(id, &repo).await?;
let thumbnails = repo.get_file_thumbnails(file.cd()).await?; let thumbnails = repo.file().thumbnails(file.encoded_cd()).await?;
for thumb in thumbnails { for thumb in thumbnails {
thumb.delete().await?; thumb.delete().await?;

@ -1,25 +1,27 @@
use std::collections::HashMap;
use mediarepo_core::content_descriptor::decode_content_descriptor; use mediarepo_core::content_descriptor::decode_content_descriptor;
use mediarepo_core::error::RepoResult; use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::files::FileStatus as ApiFileStatus; use mediarepo_core::mediarepo_api::types::files::FileStatus as ApiFileStatus;
use mediarepo_core::mediarepo_api::types::filtering::{ use mediarepo_core::mediarepo_api::types::filtering::{
FilterExpression, FilterQuery, PropertyQuery, TagQuery, ValueComparator, FilterExpression, FilterQuery, PropertyQuery, TagQuery, ValueComparator,
}; };
use mediarepo_model::file::filter::NegatableComparator::{Is, IsNot}; use mediarepo_logic::dao::file::find::NegatableComparator::{Is, IsNot};
use mediarepo_model::file::filter::{FilterFileProperty, FilterProperty, OrderingComparator}; use mediarepo_logic::dao::file::find::{FilterFileProperty, FilterProperty, OrderingComparator};
use mediarepo_model::file::{File, FileStatus}; use mediarepo_logic::dao::repo::Repo;
use mediarepo_model::repo::Repo; use mediarepo_logic::dao::DaoProvider;
use std::collections::HashMap; use mediarepo_logic::dto::{FileDto, FileStatus};
#[tracing::instrument(level = "debug", skip(repo))] #[tracing::instrument(level = "debug", skip(repo))]
pub async fn find_files_for_filters( pub async fn find_files_for_filters(
repo: &Repo, repo: &Repo,
expressions: Vec<FilterExpression>, expressions: Vec<FilterExpression>,
) -> RepoResult<Vec<File>> { ) -> RepoResult<Vec<FileDto>> {
let tag_names = get_tag_names_from_expressions(&expressions); let tag_names = get_tag_names_from_expressions(&expressions);
let tag_id_map = repo.tag_names_to_ids(tag_names).await?; let tag_id_map = repo.tag().normalized_tags_to_ids(tag_names).await?;
let filters = build_filters_from_expressions(expressions, &tag_id_map); let filters = build_filters_from_expressions(expressions, &tag_id_map);
repo.find_files_by_filters(filters).await repo.file().find(filters).await
} }
#[tracing::instrument(level = "debug")] #[tracing::instrument(level = "debug")]

@ -1,17 +1,20 @@
use std::cmp::Ordering;
use std::collections::HashMap;
use std::iter::FromIterator;
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
use compare::Compare; use compare::Compare;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use mediarepo_core::error::RepoResult; use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::filtering::{SortDirection, SortKey}; use mediarepo_core::mediarepo_api::types::filtering::{SortDirection, SortKey};
use mediarepo_database::queries::tags::{ use mediarepo_database::queries::tags::{
get_cids_with_namespaced_tags, get_content_descriptors_with_tag_count, get_cids_with_namespaced_tags, get_content_descriptors_with_tag_count,
}; };
use mediarepo_model::file::File; use mediarepo_logic::dao::DaoProvider;
use mediarepo_model::file_metadata::FileMetadata; use mediarepo_logic::dao::repo::Repo;
use mediarepo_model::repo::Repo; use mediarepo_logic::dto::{FileDto, FileMetadataDto};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::cmp::Ordering;
use std::collections::HashMap;
use std::iter::FromIterator;
pub struct FileSortContext { pub struct FileSortContext {
name: Option<String>, name: Option<String>,
@ -28,7 +31,7 @@ pub struct FileSortContext {
pub async fn sort_files_by_properties( pub async fn sort_files_by_properties(
repo: &Repo, repo: &Repo,
sort_expression: Vec<SortKey>, sort_expression: Vec<SortKey>,
files: &mut Vec<File>, files: &mut Vec<FileDto>,
) -> RepoResult<()> { ) -> RepoResult<()> {
let contexts = build_sort_context(repo, files).await?; let contexts = build_sort_context(repo, files).await?;
@ -45,7 +48,7 @@ pub async fn sort_files_by_properties(
async fn build_sort_context( async fn build_sort_context(
repo: &Repo, repo: &Repo,
files: &Vec<File>, files: &Vec<FileDto>,
) -> RepoResult<HashMap<i64, FileSortContext>> { ) -> RepoResult<HashMap<i64, FileSortContext>> {
let hash_ids: Vec<i64> = files.par_iter().map(|f| f.cd_id()).collect(); let hash_ids: Vec<i64> = files.par_iter().map(|f| f.cd_id()).collect();
let file_ids: Vec<i64> = files.par_iter().map(|f| f.id()).collect(); let file_ids: Vec<i64> = files.par_iter().map(|f| f.id()).collect();
@ -54,9 +57,9 @@ async fn build_sort_context(
get_cids_with_namespaced_tags(repo.db(), hash_ids.clone()).await?; get_cids_with_namespaced_tags(repo.db(), hash_ids.clone()).await?;
let mut cid_tag_counts = get_content_descriptors_with_tag_count(repo.db(), hash_ids).await?; let mut cid_tag_counts = get_content_descriptors_with_tag_count(repo.db(), hash_ids).await?;
let files_metadata = repo.get_file_metadata_for_ids(file_ids).await?; let files_metadata = repo.file().all_metadata(file_ids).await?;
let mut file_metadata_map: HashMap<i64, FileMetadata> = let mut file_metadata_map: HashMap<i64, FileMetadataDto> =
HashMap::from_iter(files_metadata.into_iter().map(|m| (m.file_id(), m))); HashMap::from_iter(files_metadata.into_iter().map(|m| (m.file_id(), m)));
let mut contexts = HashMap::new(); let mut contexts = HashMap::new();
@ -64,7 +67,7 @@ async fn build_sort_context(
for file in files { for file in files {
if let Some(metadata) = file_metadata_map.remove(&file.id()) { if let Some(metadata) = file_metadata_map.remove(&file.id()) {
let context = FileSortContext { let context = FileSortContext {
name: metadata.name().to_owned(), name: metadata.name().cloned(),
size: metadata.size() as u64, size: metadata.size() as u64,
mime_type: file.mime_type().to_owned(), mime_type: file.mime_type().to_owned(),
namespaces: cid_nsp namespaces: cid_nsp

@ -1,9 +1,11 @@
use crate::utils::{calculate_size, get_repo_from_context};
use mediarepo_core::bromine::prelude::*; use mediarepo_core::bromine::prelude::*;
use mediarepo_core::error::RepoResult; use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::jobs::{JobType, RunJobRequest}; use mediarepo_core::mediarepo_api::types::jobs::{JobType, RunJobRequest};
use mediarepo_core::mediarepo_api::types::repo::SizeType; use mediarepo_core::mediarepo_api::types::repo::SizeType;
use mediarepo_core::type_keys::SizeMetadataKey; use mediarepo_core::type_keys::SizeMetadataKey;
use mediarepo_logic::dao::DaoProvider;
use crate::utils::{calculate_size, get_repo_from_context};
pub struct JobsNamespace; pub struct JobsNamespace;
@ -23,12 +25,13 @@ impl JobsNamespace {
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
pub async fn run_job(ctx: &Context, event: Event) -> IPCResult<()> { pub async fn run_job(ctx: &Context, event: Event) -> IPCResult<()> {
let run_request = event.payload::<RunJobRequest>()?; let run_request = event.payload::<RunJobRequest>()?;
let repo = get_repo_from_context(ctx).await; let job_dao = get_repo_from_context(ctx).await.job();
match run_request.job_type { match run_request.job_type {
JobType::MigrateContentDescriptors => repo.migrate().await?, JobType::MigrateContentDescriptors => job_dao.migrate_content_descriptors().await?,
JobType::CalculateSizes => calculate_all_sizes(ctx).await?, JobType::CalculateSizes => calculate_all_sizes(ctx).await?,
JobType::CheckIntegrity => {} JobType::CheckIntegrity => job_dao.check_integrity().await?,
JobType::Vacuum => job_dao.vacuum().await?,
} }
ctx.emit_to(Self::name(), "run_job", ()).await?; ctx.emit_to(Self::name(), "run_job", ()).await?;

@ -1,12 +1,20 @@
use crate::from_model::FromModel; use rayon::iter::{IntoParallelIterator, ParallelIterator};
use crate::utils::{file_by_identifier, get_repo_from_context}; use std::collections::HashMap;
use mediarepo_core::bromine::prelude::*; use mediarepo_core::bromine::prelude::*;
use mediarepo_core::content_descriptor::decode_content_descriptor; use mediarepo_core::content_descriptor::{decode_content_descriptor, encode_content_descriptor};
use mediarepo_core::mediarepo_api::types::files::{GetFileTagsRequest, GetFilesTagsRequest}; use mediarepo_core::mediarepo_api::types::files::{
GetFileTagMapRequest, GetFileTagsRequest, GetFilesTagsRequest,
};
use mediarepo_core::mediarepo_api::types::tags::{ use mediarepo_core::mediarepo_api::types::tags::{
ChangeFileTagsRequest, NamespaceResponse, TagResponse, ChangeFileTagsRequest, NamespaceResponse, TagResponse,
}; };
use rayon::iter::{IntoParallelIterator, ParallelIterator}; use mediarepo_core::utils::parse_namespace_and_tag;
use mediarepo_logic::dao::DaoProvider;
use mediarepo_logic::dto::AddTagDto;
use crate::from_model::FromModel;
use crate::utils::{file_by_identifier, get_repo_from_context};
pub struct TagsNamespace; pub struct TagsNamespace;
@ -21,6 +29,7 @@ impl NamespaceProvider for TagsNamespace {
"all_namespaces" => Self::all_namespaces, "all_namespaces" => Self::all_namespaces,
"tags_for_file" => Self::tags_for_file, "tags_for_file" => Self::tags_for_file,
"tags_for_files" => Self::tags_for_files, "tags_for_files" => Self::tags_for_files,
"file_tag_map" => Self::tag_cd_map_for_files,
"create_tags" => Self::create_tags, "create_tags" => Self::create_tags,
"change_file_tags" => Self::change_file_tags "change_file_tags" => Self::change_file_tags
); );
@ -33,7 +42,8 @@ impl TagsNamespace {
async fn all_tags(ctx: &Context, _event: Event) -> IPCResult<()> { async fn all_tags(ctx: &Context, _event: Event) -> IPCResult<()> {
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let tags: Vec<TagResponse> = repo let tags: Vec<TagResponse> = repo
.tags() .tag()
.all()
.await? .await?
.into_iter() .into_iter()
.map(TagResponse::from_model) .map(TagResponse::from_model)
@ -48,7 +58,8 @@ impl TagsNamespace {
async fn all_namespaces(ctx: &Context, _event: Event) -> IPCResult<()> { async fn all_namespaces(ctx: &Context, _event: Event) -> IPCResult<()> {
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let namespaces: Vec<NamespaceResponse> = repo let namespaces: Vec<NamespaceResponse> = repo
.namespaces() .tag()
.all_namespaces()
.await? .await?
.into_iter() .into_iter()
.map(NamespaceResponse::from_model) .map(NamespaceResponse::from_model)
@ -65,7 +76,7 @@ impl TagsNamespace {
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let request = event.payload::<GetFileTagsRequest>()?; let request = event.payload::<GetFileTagsRequest>()?;
let file = file_by_identifier(request.id, &repo).await?; let file = file_by_identifier(request.id, &repo).await?;
let tags = file.tags().await?; let tags = repo.tag().tags_for_cd(file.cd_id()).await?;
let responses: Vec<TagResponse> = tags.into_iter().map(TagResponse::from_model).collect(); let responses: Vec<TagResponse> = tags.into_iter().map(TagResponse::from_model).collect();
ctx.emit_to(Self::name(), "tags_for_file", responses) ctx.emit_to(Self::name(), "tags_for_file", responses)
@ -80,7 +91,8 @@ impl TagsNamespace {
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let request = event.payload::<GetFilesTagsRequest>()?; let request = event.payload::<GetFilesTagsRequest>()?;
let tag_responses: Vec<TagResponse> = repo let tag_responses: Vec<TagResponse> = repo
.find_tags_for_file_identifiers( .tag()
.all_for_cds(
request request
.cds .cds
.into_par_iter() .into_par_iter()
@ -97,17 +109,53 @@ impl TagsNamespace {
Ok(()) Ok(())
} }
/// Creates all tags given as input or returns the existing tag /// Returns a map of content descriptors to assigned tags
#[tracing::instrument(skip_all)]
async fn tag_cd_map_for_files(ctx: &Context, event: Event) -> IPCResult<()> {
let request = event.payload::<GetFileTagMapRequest>()?;
let repo = get_repo_from_context(ctx).await;
let cds = request
.cds
.into_iter()
.filter_map(|c| decode_content_descriptor(c).ok())
.collect();
let mappings = repo
.tag()
.all_for_cds_map(cds)
.await?
.into_iter()
.map(|(cd, tags)| (encode_content_descriptor(&cd), tags))
.map(|(cd, tags)| {
(
cd,
tags.into_iter()
.map(TagResponse::from_model)
.collect::<Vec<TagResponse>>(),
)
})
.collect::<HashMap<String, Vec<TagResponse>>>();
ctx.emit_to(Self::name(), "file_tag_map", mappings).await?;
Ok(())
}
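// The emitted map is keyed by the re-encoded content descriptor string, so clients
// can match each entry against the identifiers they sent in `GetFileTagMapRequest`.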
/// Creates all tags given as input or returns the existing tags
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
async fn create_tags(ctx: &Context, event: Event) -> IPCResult<()> { async fn create_tags(ctx: &Context, event: Event) -> IPCResult<()> {
let repo = get_repo_from_context(ctx).await; let repo = get_repo_from_context(ctx).await;
let tags = event.payload::<Vec<String>>()?; let tags = event.payload::<Vec<String>>()?;
let mut created_tags = Vec::new(); let created_tags = repo
.tag()
.add_all(
tags.into_iter()
.map(parse_namespace_and_tag)
.map(AddTagDto::from_tuple)
.collect(),
)
.await?;
for tag in tags {
let created_tag = repo.add_or_find_tag(tag).await?;
created_tags.push(created_tag);
}
let responses: Vec<TagResponse> = created_tags let responses: Vec<TagResponse> = created_tags
.into_iter() .into_iter()
.map(TagResponse::from_model) .map(TagResponse::from_model)
@ -126,14 +174,19 @@ impl TagsNamespace {
let file = file_by_identifier(request.file_id, &repo).await?; let file = file_by_identifier(request.file_id, &repo).await?;
if !request.added_tags.is_empty() { if !request.added_tags.is_empty() {
file.add_tags(request.added_tags).await?; repo.tag()
.upsert_mappings(vec![file.cd_id()], request.added_tags)
.await?;
} }
if !request.removed_tags.is_empty() { if !request.removed_tags.is_empty() {
file.remove_tags(request.removed_tags).await?; repo.tag()
.remove_mappings(vec![file.cd_id()], request.removed_tags)
.await?;
} }
let responses: Vec<TagResponse> = file let responses: Vec<TagResponse> = repo
.tags() .tag()
.tags_for_cd(file.cd_id())
.await? .await?
.into_iter() .into_iter()
.map(TagResponse::from_model) .map(TagResponse::from_model)

@ -1,3 +1,7 @@
use std::sync::Arc;
use tokio::fs;
use mediarepo_core::bromine::ipc::context::Context; use mediarepo_core::bromine::ipc::context::Context;
use mediarepo_core::content_descriptor::decode_content_descriptor; use mediarepo_core::content_descriptor::decode_content_descriptor;
use mediarepo_core::error::{RepoError, RepoResult}; use mediarepo_core::error::{RepoError, RepoResult};
@ -5,11 +9,10 @@ use mediarepo_core::mediarepo_api::types::identifier::FileIdentifier;
use mediarepo_core::mediarepo_api::types::repo::SizeType; use mediarepo_core::mediarepo_api::types::repo::SizeType;
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey}; use mediarepo_core::type_keys::{RepoPathKey, SettingsKey};
use mediarepo_core::utils::get_folder_size; use mediarepo_core::utils::get_folder_size;
use mediarepo_model::file::File; use mediarepo_logic::dao::DaoProvider;
use mediarepo_model::repo::Repo; use mediarepo_logic::dao::repo::Repo;
use mediarepo_model::type_keys::RepoKey; use mediarepo_logic::dto::FileDto;
use std::sync::Arc; use mediarepo_logic::type_keys::RepoKey;
use tokio::fs;
pub async fn get_repo_from_context(ctx: &Context) -> Arc<Repo> { pub async fn get_repo_from_context(ctx: &Context) -> Arc<Repo> {
let data = ctx.data.read().await; let data = ctx.data.read().await;
@ -17,10 +20,10 @@ pub async fn get_repo_from_context(ctx: &Context) -> Arc<Repo> {
Arc::clone(repo) Arc::clone(repo)
} }
pub async fn file_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoResult<File> { pub async fn file_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoResult<FileDto> {
let file = match identifier { let file = match identifier {
FileIdentifier::ID(id) => repo.file_by_id(id).await, FileIdentifier::ID(id) => repo.file().by_id(id).await,
FileIdentifier::CD(cd) => repo.file_by_cd(&decode_content_descriptor(cd)?).await, FileIdentifier::CD(cd) => repo.file().by_cd(decode_content_descriptor(cd)?).await,
}?; }?;
file.ok_or_else(|| RepoError::from("File not found")) file.ok_or_else(|| RepoError::from("File not found"))
} }
@ -29,7 +32,8 @@ pub async fn cd_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoRe
match identifier { match identifier {
FileIdentifier::ID(id) => { FileIdentifier::ID(id) => {
let file = repo let file = repo
.file_by_id(id) .file()
.by_id(id)
.await? .await?
.ok_or_else(|| "Thumbnail not found")?; .ok_or_else(|| "Thumbnail not found")?;
Ok(file.cd().to_owned()) Ok(file.cd().to_owned())

@ -1,21 +1,22 @@
use console_subscriber::ConsoleLayer;
use rolling_file::RollingConditionBasic;
use std::fs; use std::fs;
use std::path::PathBuf; use std::path::PathBuf;
use mediarepo_core::settings::LoggingSettings; use console_subscriber::ConsoleLayer;
use rolling_file::RollingConditionBasic;
use tracing::Level; use tracing::Level;
use tracing_appender::non_blocking::{NonBlocking, WorkerGuard}; use tracing_appender::non_blocking::{NonBlocking, WorkerGuard};
use tracing_flame::FlameLayer; use tracing_flame::FlameLayer;
use tracing_log::LogTracer; use tracing_log::LogTracer;
use tracing_subscriber::filter::{self, Targets};
use tracing_subscriber::fmt::format::FmtSpan;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::{ use tracing_subscriber::{
fmt::{self}, fmt::{self},
Layer, Registry, Layer, Registry,
}; };
use tracing_subscriber::filter::{self, Targets};
use tracing_subscriber::fmt::format::FmtSpan;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use mediarepo_core::settings::LoggingSettings;
#[allow(dyn_drop)] #[allow(dyn_drop)]
pub type DropGuard = Box<dyn Drop>; pub type DropGuard = Box<dyn Drop>;

@ -1,7 +1,10 @@
use std::env;
use std::path::PathBuf; use std::path::PathBuf;
use std::time::Duration;
use structopt::StructOpt; use structopt::StructOpt;
use tokio::fs; use tokio::fs;
use tokio::io::AsyncWriteExt;
use tokio::runtime; use tokio::runtime;
use tokio::runtime::Runtime; use tokio::runtime::Runtime;
@ -9,11 +12,8 @@ use mediarepo_core::error::RepoResult;
use mediarepo_core::fs::drop_file::DropFile; use mediarepo_core::fs::drop_file::DropFile;
use mediarepo_core::settings::{PathSettings, Settings}; use mediarepo_core::settings::{PathSettings, Settings};
use mediarepo_core::tokio_graceful_shutdown::{SubsystemHandle, Toplevel}; use mediarepo_core::tokio_graceful_shutdown::{SubsystemHandle, Toplevel};
use mediarepo_model::repo::Repo; use mediarepo_logic::dao::repo::Repo;
use mediarepo_socket::start_tcp_server; use mediarepo_socket::start_tcp_server;
use std::env;
use std::time::Duration;
use tokio::io::AsyncWriteExt;
use crate::utils::{create_paths_for_repo, get_repo, load_settings}; use crate::utils::{create_paths_for_repo, get_repo, load_settings};

@ -1,10 +1,12 @@
use mediarepo_core::error::RepoResult;
use mediarepo_core::settings::v1::SettingsV1;
use mediarepo_core::settings::{PathSettings, Settings};
use mediarepo_model::repo::Repo;
use std::path::PathBuf; use std::path::PathBuf;
use tokio::fs; use tokio::fs;
use mediarepo_core::error::RepoResult;
use mediarepo_core::settings::{PathSettings, Settings};
use mediarepo_core::settings::v1::SettingsV1;
use mediarepo_logic::dao::repo::Repo;
/// Loads the settings from a toml path /// Loads the settings from a toml path
pub fn load_settings(root_path: &PathBuf) -> RepoResult<Settings> { pub fn load_settings(root_path: &PathBuf) -> RepoResult<Settings> {
let contents = std::fs::read_to_string(root_path.join("repo.toml"))?; let contents = std::fs::read_to_string(root_path.join("repo.toml"))?;

@ -1,6 +1,6 @@
{ {
"name": "mediarepo-ui", "name": "mediarepo-ui",
"version": "0.13.1", "version": "0.13.2",
"scripts": { "scripts": {
"ng": "ng", "ng": "ng",
"start": "ng serve", "start": "ng serve",

@ -34,13 +34,13 @@ dependencies = [
[[package]] [[package]]
name = "anyhow" name = "anyhow"
version = "1.0.52" version = "1.0.53"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84450d0b4a8bd1ba4144ce8ce718fbc5d071358b1e5384bace6536b3d1f2d5b3" checksum = "94a45b455c14666b85fc40a019e8ab9eb75e3a124e05494f5397122bc9eb06e0"
[[package]] [[package]]
name = "app" name = "app"
version = "0.13.1" version = "0.13.2"
dependencies = [ dependencies = [
"mediarepo-api", "mediarepo-api",
"serde", "serde",
@ -71,8 +71,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "061a7acccaa286c011ddc30970520b98fa40e00c9d644633fb26b5fc63a265e3" checksum = "061a7acccaa286c011ddc30970520b98fa40e00c9d644633fb26b5fc63a265e3"
dependencies = [ dependencies = [
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -140,9 +140,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]] [[package]]
name = "blake3" name = "blake3"
version = "1.3.0" version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "882e99e4a0cb2ae6cb6e442102e8e6b7131718d94110e64c3e6a34ea9b106f37" checksum = "a08e53fc5a564bb15bfe6fae56bd71522205f1f91893f9c0116edad6496c183f"
dependencies = [ dependencies = [
"arrayref", "arrayref",
"arrayvec", "arrayvec",
@ -366,7 +366,7 @@ checksum = "7606b05842fea68ddcc89e8053b8860ebcb2a0ba8d6abfe3a148e5d5a8d3f0c1"
dependencies = [ dependencies = [
"com_macros_support", "com_macros_support",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -376,8 +376,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97e9a6d20f4ac8830e309a455d7e9416e65c6af5a97c88c55fbb4c2012e107da" checksum = "97e9a6d20f4ac8830e309a455d7e9416e65c6af5a97c88c55fbb4c2012e107da"
dependencies = [ dependencies = [
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -476,9 +476,9 @@ dependencies = [
[[package]] [[package]]
name = "crc32fast" name = "crc32fast"
version = "1.3.0" version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "738c290dfaea84fc1ca15ad9c168d083b05a714e1efddd8edaab678dc28d2836" checksum = "a2209c310e29876f7f0b2721e7e26b84aff178aa3da5d091f9bfbf47669e60e3"
dependencies = [ dependencies = [
"cfg-if 1.0.0", "cfg-if 1.0.0",
] ]
@ -548,9 +548,9 @@ dependencies = [
"matches", "matches",
"phf 0.8.0", "phf 0.8.0",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"smallvec", "smallvec",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -559,8 +559,8 @@ version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfae75de57f2b2e85e8768c3ea840fd159c8f33e2b6522c7835b7abac81be16e" checksum = "dfae75de57f2b2e85e8768c3ea840fd159c8f33e2b6522c7835b7abac81be16e"
dependencies = [ dependencies = [
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -588,9 +588,9 @@ dependencies = [
"fnv", "fnv",
"ident_case", "ident_case",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"strsim", "strsim",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -600,8 +600,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b5a2f4ac4969822c62224815d069952656cadc7084fdca9751e6d959189b72" checksum = "d9b5a2f4ac4969822c62224815d069952656cadc7084fdca9751e6d959189b72"
dependencies = [ dependencies = [
"darling_core", "darling_core",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -632,9 +632,9 @@ checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
dependencies = [ dependencies = [
"convert_case", "convert_case",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"rustc_version 0.4.0", "rustc_version 0.4.0",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -725,9 +725,9 @@ checksum = "4ef6b89e5b37196644d8796de5268852ff179b44e96276cf4290264843743bb7"
[[package]] [[package]]
name = "fastrand" name = "fastrand"
version = "1.6.0" version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "779d043b6a0b90cc4c0ed7ee380a6504394cee7efd7db050e3774eee387324b2" checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf"
dependencies = [ dependencies = [
"instant", "instant",
] ]
@ -877,8 +877,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbd947adfffb0efc70599b3ddcf7b5597bb5fa9e245eb99f62b3a5f7bb8bd3c" checksum = "6dbd947adfffb0efc70599b3ddcf7b5597bb5fa9e245eb99f62b3a5f7bb8bd3c"
dependencies = [ dependencies = [
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -1096,8 +1096,8 @@ dependencies = [
"proc-macro-crate 1.1.0", "proc-macro-crate 1.1.0",
"proc-macro-error", "proc-macro-error",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -1207,8 +1207,8 @@ dependencies = [
"proc-macro-crate 1.1.0", "proc-macro-crate 1.1.0",
"proc-macro-error", "proc-macro-error",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -1239,8 +1239,8 @@ dependencies = [
"mac", "mac",
"markup5ever", "markup5ever",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -1389,9 +1389,9 @@ dependencies = [
[[package]] [[package]]
name = "js-sys" name = "js-sys"
version = "0.3.55" version = "0.3.56"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cc9ffccd38c451a86bf13657df244e9c3f37493cce8e5e21e940963777acc84" checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04"
dependencies = [ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
@ -1416,15 +1416,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.112" version = "0.2.116"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125" checksum = "565dbd88872dbe4cc8a46e527f26483c1d1f7afa6b884a3bd6cd893d4f98da74"
[[package]] [[package]]
name = "lock_api" name = "lock_api"
version = "0.4.5" version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712a4d093c9976e24e7dbca41db895dabcbac38eb5f4045393d17a95bdfb1109" checksum = "88943dd7ef4a2e5a4bfa2753aaab3013e34ce2533d1996fb18ef591e315e2b3b"
dependencies = [ dependencies = [
"scopeguard", "scopeguard",
] ]
@ -1499,7 +1499,7 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
[[package]] [[package]]
name = "mediarepo-api" name = "mediarepo-api"
version = "0.27.0" version = "0.28.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"bromine", "bromine",
@ -1646,8 +1646,8 @@ dependencies = [
"darling", "darling",
"proc-macro-crate 0.1.5", "proc-macro-crate 0.1.5",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -1734,8 +1734,8 @@ checksum = "0d992b768490d7fe0d8586d9b5745f6c49f557da6d81dc982b1d167ad4edbb21"
dependencies = [ dependencies = [
"proc-macro-crate 1.1.0", "proc-macro-crate 1.1.0",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -1975,8 +1975,8 @@ dependencies = [
"phf_shared 0.8.0", "phf_shared 0.8.0",
"proc-macro-hack", "proc-macro-hack",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -1989,8 +1989,8 @@ dependencies = [
"phf_shared 0.10.0", "phf_shared 0.10.0",
"proc-macro-hack", "proc-macro-hack",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -2092,8 +2092,8 @@ checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [ dependencies = [
"proc-macro-error-attr", "proc-macro-error-attr",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
"version_check", "version_check",
] ]
@ -2104,7 +2104,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [ dependencies = [
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"version_check", "version_check",
] ]
@ -2143,9 +2143,9 @@ dependencies = [
[[package]] [[package]]
name = "quote" name = "quote"
version = "1.0.14" version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47aa80447ce4daf1717500037052af176af5d38cc3e571d9ec1c7353fc10c87d" checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145"
dependencies = [ dependencies = [
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
] ]
@ -2425,9 +2425,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]] [[package]]
name = "security-framework" name = "security-framework"
version = "2.4.2" version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "525bc1abfda2e1998d152c45cf13e696f76d0a4972310b22fac1658b05df7c87" checksum = "3fed7948b6c68acbb6e20c334f55ad635dc0f75506963de4464289fbd3b051ac"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"core-foundation 0.9.2", "core-foundation 0.9.2",
@ -2438,9 +2438,9 @@ dependencies = [
[[package]] [[package]]
name = "security-framework-sys" name = "security-framework-sys"
version = "2.4.2" version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9dd14d83160b528b7bfd66439110573efcfbe281b17fc2ca9f39f550d619c7e" checksum = "a57321bf8bc2362081b2599912d2961fe899c0efadf1b4b2f8d48b3e253bb96c"
dependencies = [ dependencies = [
"core-foundation-sys 0.8.3", "core-foundation-sys 0.8.3",
"libc", "libc",
@ -2492,29 +2492,29 @@ dependencies = [
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.133" version = "1.0.136"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97565067517b60e2d1ea8b268e59ce036de907ac523ad83a0475da04e818989a" checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789"
dependencies = [ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.133" version = "1.0.136"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed201699328568d8d08208fdd080e3ff594e6c422e438b6705905da01005d537" checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
dependencies = [ dependencies = [
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
name = "serde_json" name = "serde_json"
version = "1.0.74" version = "1.0.78"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee2bb9cd061c5865d345bb02ca49fcef1391741b672b54a0bf7b679badec3142" checksum = "d23c1ba4cf0efd44be32017709280b32d1cea5c3f1275c3b6d9e8bc54f758085"
dependencies = [ dependencies = [
"itoa 1.0.1", "itoa 1.0.1",
"ryu", "ryu",
@ -2550,8 +2550,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98d0516900518c29efa217c298fa1f4e6c6ffc85ae29fd7f4ee48f176e1a9ed5" checksum = "98d0516900518c29efa217c298fa1f4e6c6ffc85ae29fd7f4ee48f176e1a9ed5"
dependencies = [ dependencies = [
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -2606,9 +2606,9 @@ dependencies = [
[[package]] [[package]]
name = "siphasher" name = "siphasher"
version = "0.3.7" version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "533494a8f9b724d33625ab53c6c4800f7cc445895924a8ef649222dcb76e938b" checksum = "a86232ab60fa71287d7f2ddae4a7073f6b7aac33631c3015abb556f08c6d0a3e"
[[package]] [[package]]
name = "slab" name = "slab"
@ -2675,7 +2675,7 @@ dependencies = [
"phf_generator 0.8.0", "phf_generator 0.8.0",
"phf_shared 0.8.0", "phf_shared 0.8.0",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
] ]
[[package]] [[package]]
@ -2704,8 +2704,8 @@ checksum = "87c85aa3f8ea653bfd3ddf25f7ee357ee4d204731f6aa9ad04002306f6e2774c"
dependencies = [ dependencies = [
"heck", "heck",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -2716,8 +2716,8 @@ checksum = "d06aaeeee809dbc59eb4556183dd927df67db1540de5be8d3ec0b6636358a5ec"
dependencies = [ dependencies = [
"heck", "heck",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -2739,12 +2739,12 @@ dependencies = [
[[package]] [[package]]
name = "syn" name = "syn"
version = "1.0.85" version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a684ac3dcd8913827e18cd09a68384ee66c1de24157e3c556c9ab16d85695fb7" checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b"
dependencies = [ dependencies = [
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"unicode-xid 0.2.2", "unicode-xid 0.2.2",
] ]
@ -2884,7 +2884,7 @@ checksum = "3c9c9a9bea25b9d6f5845b8662e18447e17218f99860cab37e39e2b57a9fcd49"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"serde_json", "serde_json",
"tauri-utils", "tauri-utils",
"winres", "winres",
@ -2899,7 +2899,7 @@ dependencies = [
"blake3", "blake3",
"kuchiki", "kuchiki",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"regex", "regex",
"serde", "serde",
"serde_json", "serde_json",
@ -2916,8 +2916,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bddf9f5868402323f35ef94fa6ab1d5d10b29aea9de598d829723aa1db5693b4" checksum = "bddf9f5868402323f35ef94fa6ab1d5d10b29aea9de598d829723aa1db5693b4"
dependencies = [ dependencies = [
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
"tauri-codegen", "tauri-codegen",
] ]
@ -2966,7 +2966,7 @@ dependencies = [
"kuchiki", "kuchiki",
"phf 0.10.1", "phf 0.10.1",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"serde", "serde",
"serde_json", "serde_json",
"thiserror", "thiserror",
@ -3021,15 +3021,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b"
dependencies = [ dependencies = [
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
name = "thread_local" name = "thread_local"
version = "1.1.3" version = "1.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8018d24e04c95ac8790716a5987d0fec4f8b27249ffa0f7d33f1369bdfb88cbd" checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
dependencies = [ dependencies = [
"once_cell", "once_cell",
] ]
@ -3062,9 +3062,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
[[package]] [[package]]
name = "tokio" name = "tokio"
version = "1.15.0" version = "1.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbbf1c778ec206785635ce8ad57fe52b3009ae9e0c9f574a728f3049d3e55838" checksum = "0c27a64b625de6d309e8c57716ba93021dccf1b3b5c97edd6d3dd2d2135afc0a"
dependencies = [ dependencies = [
"bytes", "bytes",
"libc", "libc",
@ -3105,8 +3105,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4f480b8f81512e825f337ad51e94c1eb5d3bbdf2b363dcd01e2b19a9ffe3f8e" checksum = "f4f480b8f81512e825f337ad51e94c1eb5d3bbdf2b363dcd01e2b19a9ffe3f8e"
dependencies = [ dependencies = [
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
] ]
[[package]] [[package]]
@ -3131,9 +3131,9 @@ dependencies = [
[[package]] [[package]]
name = "tracing-subscriber" name = "tracing-subscriber"
version = "0.3.5" version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d81bfa81424cc98cb034b837c985b7a290f592e5b4322f353f94a0ab0f9f594" checksum = "5312f325fe3588e277415f5a6cca1f4ccad0f248c4cd5a4bd33032d7286abc22"
dependencies = [ dependencies = [
"ansi_term", "ansi_term",
"lazy_static", "lazy_static",
@ -3290,9 +3290,9 @@ checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
[[package]] [[package]]
name = "wasm-bindgen" name = "wasm-bindgen"
version = "0.2.78" version = "0.2.79"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "632f73e236b219150ea279196e54e610f5dbafa5d61786303d4da54f84e47fce" checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06"
dependencies = [ dependencies = [
"cfg-if 1.0.0", "cfg-if 1.0.0",
"wasm-bindgen-macro", "wasm-bindgen-macro",
@ -3300,24 +3300,24 @@ dependencies = [
[[package]] [[package]]
name = "wasm-bindgen-backend" name = "wasm-bindgen-backend"
version = "0.2.78" version = "0.2.79"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a317bf8f9fba2476b4b2c85ef4c4af8ff39c3c7f0cdfeed4f82c34a880aa837b" checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca"
dependencies = [ dependencies = [
"bumpalo", "bumpalo",
"lazy_static", "lazy_static",
"log", "log",
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
[[package]] [[package]]
name = "wasm-bindgen-futures" name = "wasm-bindgen-futures"
version = "0.4.28" version = "0.4.29"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e8d7523cb1f2a4c96c1317ca690031b714a51cc14e05f712446691f413f5d39" checksum = "2eb6ec270a31b1d3c7e266b999739109abce8b6c87e4b31fcfcd788b65267395"
dependencies = [ dependencies = [
"cfg-if 1.0.0", "cfg-if 1.0.0",
"js-sys", "js-sys",
@ -3327,38 +3327,38 @@ dependencies = [
[[package]] [[package]]
name = "wasm-bindgen-macro" name = "wasm-bindgen-macro"
version = "0.2.78" version = "0.2.79"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d56146e7c495528bf6587663bea13a8eb588d39b36b679d83972e1a2dbbdacf9" checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01"
dependencies = [ dependencies = [
"quote 1.0.14", "quote 1.0.15",
"wasm-bindgen-macro-support", "wasm-bindgen-macro-support",
] ]
[[package]] [[package]]
name = "wasm-bindgen-macro-support" name = "wasm-bindgen-macro-support"
version = "0.2.78" version = "0.2.79"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7803e0eea25835f8abdc585cd3021b3deb11543c6fe226dcd30b228857c5c5ab" checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc"
dependencies = [ dependencies = [
"proc-macro2 1.0.36", "proc-macro2 1.0.36",
"quote 1.0.14", "quote 1.0.15",
"syn 1.0.85", "syn 1.0.86",
"wasm-bindgen-backend", "wasm-bindgen-backend",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
[[package]] [[package]]
name = "wasm-bindgen-shared" name = "wasm-bindgen-shared"
version = "0.2.78" version = "0.2.79"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0237232789cf037d5480773fe568aac745bfe2afbc11a863e97901780a6b47cc" checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2"
[[package]] [[package]]
name = "web-sys" name = "web-sys"
version = "0.3.55" version = "0.3.56"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38eb105f1c59d9eaa6b5cdc92b859d85b926e82cb2e0945cd0c9259faa6fe9fb" checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb"
dependencies = [ dependencies = [
"js-sys", "js-sys",
"wasm-bindgen", "wasm-bindgen",

@ -1,7 +1,7 @@
[package] [package]
name = "app" name = "app"
version = "0.13.1" version = "0.13.2"
description = "A Tauri App" description = "The UI for the mediarepo media management tool"
authors = ["you"] authors = ["you"]
license = "" license = ""
repository = "" repository = ""

@ -1,7 +1,7 @@
{ {
"package": { "package": {
"productName": "mediarepo-ui", "productName": "mediarepo-ui",
"version": "0.13.1" "version": "0.13.2"
}, },
"build": { "build": {
"distDir": "../dist/mediarepo-ui", "distDir": "../dist/mediarepo-ui",

@ -13,6 +13,7 @@ import {
DeleteThumbnailsRequest, DeleteThumbnailsRequest,
FindFilesRequest, FindFilesRequest,
GetFileMetadataRequest, GetFileMetadataRequest,
GetFileTagMapRequest,
GetSizeRequest, GetSizeRequest,
GetTagsForFilesRequest, GetTagsForFilesRequest,
InitRepositoryRequest, InitRepositoryRequest,
@ -28,7 +29,7 @@ import {
UpdateFileStatusRequest UpdateFileStatusRequest
} from "./api-types/requests"; } from "./api-types/requests";
import {RepositoryData, RepositoryMetadata, SizeMetadata} from "./api-types/repo"; import {RepositoryData, RepositoryMetadata, SizeMetadata} from "./api-types/repo";
import {NamespaceData, TagData} from "./api-types/tags"; import {CdTagMappings, NamespaceData, TagData} from "./api-types/tags";
import {ShortCache} from "./ShortCache"; import {ShortCache} from "./ShortCache";
export class MediarepoApi { export class MediarepoApi {
@ -146,6 +147,15 @@ export class MediarepoApi {
); );
} }
public static async getFileTagMap(request: GetFileTagMapRequest): Promise<CdTagMappings> {
return ShortCache.cached(
request,
() => this.invokePlugin(ApiFunction.GetFileTagMap, request),
1000,
"getFileTagMap"
);
}
public static async createTags(request: CreateTagsRequest): Promise<TagData[]> { public static async createTags(request: CreateTagsRequest): Promise<TagData[]> {
return this.invokePlugin(ApiFunction.CreateTags, request); return this.invokePlugin(ApiFunction.CreateTags, request);
} }
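Note on the wrapper above: getFileTagMap goes through ShortCache.cached, so identical requests within the 1000 ms window resolve from the cache instead of re-invoking the plugin. A minimal usage sketch, assuming the import path below and treating the content descriptors as the encoded strings used elsewhere in this API (nothing beyond the call itself is taken from the diff):

    import {MediarepoApi} from "./MediarepoApi";

    async function logTagCounts(cds: string[]): Promise<void> {
        // One round trip (or a cache hit) for any number of files; the
        // result maps each content descriptor to its assigned tags.
        const mappings = await MediarepoApi.getFileTagMap({ cds });
        for (const cd of cds) {
            console.info(`${cd}: ${(mappings[cd] ?? []).length} tags`);
        }
    }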

@ -29,6 +29,7 @@ export enum ApiFunction {
GetAllTags = "get_all_tags", GetAllTags = "get_all_tags",
GetAllNamespace = "get_all_namespaces", GetAllNamespace = "get_all_namespaces",
GetTagsForFiles = "get_tags_for_files", GetTagsForFiles = "get_tags_for_files",
GetFileTagMap = "get_file_tag_map",
CreateTags = "create_tags", CreateTags = "create_tags",
ChangeFileTags = "change_file_tags", ChangeFileTags = "change_file_tags",
// import // import
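Each ApiFunction value doubles as the name of the backend command it invokes, which is why the new member is spelled exactly get_file_tag_map. A hedged sketch of what the dispatch presumably looks like; the plugin name "mediarepo" and the helper's shape are assumptions, not taken from this diff (Tauri v1 routes plugin commands as "plugin:<name>|<command>"):

    import {invoke} from "@tauri-apps/api/tauri";

    // Hypothetical helper mirroring what invokePlugin likely does.
    function invokePlugin<T>(fn: ApiFunction, args?: Record<string, unknown>): Promise<T> {
        return invoke<T>(`plugin:mediarepo|${fn}`, args);
    }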

@ -1,3 +1,4 @@
export type JobType = "MigrateContentDescriptors" export type JobType = "MigrateContentDescriptors"
| "CalculateSizes" | "CalculateSizes"
| "CheckIntegrity"; | "CheckIntegrity"
| "Vacuum";

@ -73,6 +73,10 @@ export type GetTagsForFilesRequest = {
cds: string[] cds: string[]
}; };
export type GetFileTagMapRequest = {
cds: string[]
};
export type CreateTagsRequest = { export type CreateTagsRequest = {
tags: string[] tags: string[]
}; };

@ -8,3 +8,7 @@ export type NamespaceData = {
id: number, id: number,
name: string, name: string,
}; };
export type CdTagMappings = {
[key: string]: TagData[],
};
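CdTagMappings is the client-side shape of the file_tag_map payload: content descriptors as keys, tag lists as values. A sketch of flattening it into display strings; TagData exposing name and an optional namespace is an assumption based on how tags are rendered elsewhere, not something this diff shows:

    function tagNamesByCd(mappings: CdTagMappings): Map<string, string[]> {
        const result = new Map<string, string[]>();
        for (const [cd, tags] of Object.entries(mappings)) {
            // Assumed TagData fields; adjust to the real model if needed.
            result.set(cd, tags.map(t => t.namespace ? `${t.namespace}:${t.name}` : t.name));
        }
        return result;
    }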

@ -1,3 +1,5 @@
@import "src/colors";
::ng-deep .mat-button-wrapper > ng-icon { ::ng-deep .mat-button-wrapper > ng-icon {
font-size: 26px; font-size: 26px;
} }
@ -5,3 +7,12 @@
::ng-deep ng-icon { ::ng-deep ng-icon {
font-size: 24px; font-size: 24px;
} }
::ng-deep .app-warn {
background-color: $warn-chill;
}
::ng-deep .app-error {
background-color: $warn;
color: $text;
}

@ -1,7 +1,9 @@
import {Component, OnInit} from "@angular/core"; import {Component, OnInit} from "@angular/core";
import {RepositoryService} from "./services/repository/repository.service"; import {RepositoryService} from "./services/repository/repository.service";
import {MatSnackBar} from "@angular/material/snack-bar"; import {MatSnackBar} from "@angular/material/snack-bar";
import {ErrorBrokerService} from "./services/error-broker/error-broker.service"; import {LoggingService} from "./services/logging/logging.service";
import {LogEntry, LogLevel} from "./services/logging/LogEntry";
import {environment} from "../environments/environment";
@Component({ @Component({
selector: "app-root", selector: "app-root",
@ -13,21 +15,39 @@ export class AppComponent implements OnInit {
constructor( constructor(
private snackBar: MatSnackBar, private snackBar: MatSnackBar,
private errorBroker: ErrorBrokerService, private logger: LoggingService,
private repoService: RepositoryService, private repoService: RepositoryService,
) { ) {
} }
async ngOnInit() { async ngOnInit() {
this.errorBroker.errorCb = (err: { message: string }) => this.showError( this.logger.logs.subscribe(entry => {
err); this.logEntry(entry);
this.errorBroker.infoCb = (info: string) => this.showInfo(info); switch (entry.getLevel()) {
case LogLevel.Info:
this.showInfo(entry.getMessage());
break;
case LogLevel.Warn:
this.showWarning(entry.getMessage());
break;
case LogLevel.Error:
this.showError(entry.getMessage());
break;
}
});
await this.repoService.loadRepositories(); await this.repoService.loadRepositories();
} }
private showError(err: { message: string }) { private showError(err: string) {
this.snackBar.open(err.message, undefined, { this.snackBar.open(err, undefined, {
panelClass: "warn", panelClass: "app-error",
duration: 2000,
});
}
private showWarning(err: string) {
this.snackBar.open(err, undefined, {
panelClass: "app-warn",
duration: 2000, duration: 2000,
}); });
} }
@ -38,4 +58,26 @@ export class AppComponent implements OnInit {
duration: 2000, duration: 2000,
}); });
} }
private logEntry(entry: LogEntry) {
if (!environment.production) {
switch (entry.getLevel()) {
case LogLevel.Trace:
console.trace(entry.getMessage());
break;
case LogLevel.Debug:
console.debug(entry.getMessage());
break;
case LogLevel.Info:
console.info(entry.getMessage());
break;
case LogLevel.Warn:
console.warn(entry.getMessage());
break;
}
}
if (entry.getLevel() == LogLevel.Error) {
console.error(entry.getMessage(), entry.getError());
}
}
} }
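The component above only consumes LoggingService; the service itself is not part of this diff. A minimal sketch of what it plausibly looks like, given that logs is subscribable and that callers use level-named methods such as error and trace (everything past those observations, including the LogEntry constructor, is an assumption):

    import {Injectable} from "@angular/core";
    import {Subject} from "rxjs";
    import {LogEntry, LogLevel} from "./LogEntry";

    @Injectable({providedIn: "root"})
    export class LoggingService {
        // A plain Subject forwards entries to whoever is subscribed at
        // emit time, which is all the snackbar pipeline needs.
        public logs = new Subject<LogEntry>();

        public trace(message: string): void { this.log(LogLevel.Trace, message); }
        public debug(message: string): void { this.log(LogLevel.Debug, message); }
        public info(message: string): void { this.log(LogLevel.Info, message); }
        public warn(message: string): void { this.log(LogLevel.Warn, message); }

        public error(error: Error): void {
            // Assumed LogEntry constructor signature.
            this.logs.next(new LogEntry(error.message, LogLevel.Error, error));
        }

        private log(level: LogLevel, message: string): void {
            this.logs.next(new LogEntry(message, level));
        }
    }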

@ -30,6 +30,7 @@ export class FilesTabComponent implements OnInit {
} else { } else {
this.state.selectedCD.next(undefined); this.state.selectedCD.next(undefined);
} }
console.debug(this.selectedFiles);
} }
public getStateSelectedFile(): File | undefined { public getStateSelectedFile(): File | undefined {

@ -6,7 +6,7 @@ import {DownloadDaemonDialogComponent} from "./download-daemon-dialog/download-d
import { import {
AddRepositoryDialogComponent AddRepositoryDialogComponent
} from "../../shared/repository/repository/add-repository-dialog/add-repository-dialog.component"; } from "../../shared/repository/repository/add-repository-dialog/add-repository-dialog.component";
import {ErrorBrokerService} from "../../../services/error-broker/error-broker.service"; import {LoggingService} from "../../../services/logging/logging.service";
import {BehaviorSubject} from "rxjs"; import {BehaviorSubject} from "rxjs";
import {BusyDialogComponent} from "../../shared/app-common/busy-dialog/busy-dialog.component"; import {BusyDialogComponent} from "../../shared/app-common/busy-dialog/busy-dialog.component";
import {JobService} from "../../../services/job/job.service"; import {JobService} from "../../../services/job/job.service";
@ -24,7 +24,7 @@ export class RepositoriesTabComponent implements OnInit, AfterViewInit {
public selectedRepository?: Repository; public selectedRepository?: Repository;
constructor( constructor(
private errorBroker: ErrorBrokerService, private logger: LoggingService,
private repoService: RepositoryService, private repoService: RepositoryService,
private jobService: JobService, private jobService: JobService,
private stateService: StateService, private stateService: StateService,
@ -60,8 +60,8 @@ export class RepositoriesTabComponent implements OnInit, AfterViewInit {
}); });
} }
await this.selectRepository(repository, dialogContext); await this.selectRepository(repository, dialogContext);
} catch (err) { } catch (err: any) {
this.errorBroker.showError(err); this.logger.error(err);
} }
} }
@ -75,7 +75,7 @@ export class RepositoriesTabComponent implements OnInit, AfterViewInit {
await this.repoService.loadRepositories(); await this.repoService.loadRepositories();
dialogContext.dialog.close(true); dialogContext.dialog.close(true);
} catch (err: any) { } catch (err: any) {
this.errorBroker.showError(err); this.logger.error(err);
dialogContext.message.next( dialogContext.message.next(
"Failed to open repository: " + err.toString()); "Failed to open repository: " + err.toString());
await this.forceCloseRepository(); await this.forceCloseRepository();
@ -111,13 +111,15 @@ export class RepositoriesTabComponent implements OnInit, AfterViewInit {
} }
private async runRepositoryStartupTasks(dialogContext: BusyDialogContext): Promise<void> { private async runRepositoryStartupTasks(dialogContext: BusyDialogContext): Promise<void> {
dialogContext.message.next("Checking integrity...");
await this.jobService.runJob("CheckIntegrity");
dialogContext.message.next("Running a vacuum on the database...");
await this.jobService.runJob("Vacuum");
dialogContext.message.next( dialogContext.message.next(
"Migrating content descriptors to new format..."); "Migrating content descriptors to new format...");
await this.jobService.runJob("MigrateContentDescriptors"); await this.jobService.runJob("MigrateContentDescriptors");
dialogContext.message.next("Calculating repository sizes..."); dialogContext.message.next("Calculating repository sizes...");
await this.jobService.runJob("CalculateSizes"); await this.jobService.runJob("CalculateSizes");
dialogContext.message.next("Checking integrity...");
await this.jobService.runJob("CheckIntegrity");
dialogContext.message.next("Finished repository startup"); dialogContext.message.next("Finished repository startup");
} }

@ -9,7 +9,7 @@ import {BehaviorSubject} from "rxjs";
import {BusyDialogComponent} from "../../app-common/busy-dialog/busy-dialog.component"; import {BusyDialogComponent} from "../../app-common/busy-dialog/busy-dialog.component";
import {ConfirmDialogComponent, ConfirmDialogData} from "../../app-common/confirm-dialog/confirm-dialog.component"; import {ConfirmDialogComponent, ConfirmDialogData} from "../../app-common/confirm-dialog/confirm-dialog.component";
import {MatDialog, MatDialogConfig, MatDialogRef} from "@angular/material/dialog"; import {MatDialog, MatDialogConfig, MatDialogRef} from "@angular/material/dialog";
import {ErrorBrokerService} from "../../../../services/error-broker/error-broker.service"; import {LoggingService} from "../../../../services/logging/logging.service";
type ProgressDialogContext = { type ProgressDialogContext = {
dialog: MatDialogRef<BusyDialogComponent>, dialog: MatDialogRef<BusyDialogComponent>,
@ -22,7 +22,7 @@ type ProgressDialogContext = {
template: "<h1>Do not use</h1>", template: "<h1>Do not use</h1>",
}) })
export class FileActionBaseComponent { export class FileActionBaseComponent {
constructor(private dialog: MatDialog, private errorBroker: ErrorBrokerService, private fileService: FileService) { constructor(private dialog: MatDialog, private errorBroker: LoggingService, private fileService: FileService) {
} }
public async copyFileContentDescriptor(file: File): Promise<void> { public async copyFileContentDescriptor(file: File): Promise<void> {

@ -23,13 +23,18 @@ export class BusyDialogComponent {
public progress = 0; public progress = 0;
public mode: ProgressBarMode = "indeterminate"; public mode: ProgressBarMode = "indeterminate";
constructor(public dialogRef: MatDialogRef<BusyDialogComponent>, @Inject(MAT_DIALOG_DATA) data: BusyDialogData) { constructor(
public dialogRef: MatDialogRef<BusyDialogComponent>,
@Inject(MAT_DIALOG_DATA) data: BusyDialogData
) {
this.title = data.title; this.title = data.title;
if (data.message) { if (data.message) {
data.message.subscribe(m => this.message = m); data.message.subscribe(m => this.message = m);
} }
if (data.progress) { if (data.progress) {
data.progress.subscribe(p => this.progress = p); data.progress.subscribe(p => {
this.progress = Math.floor(p * 100);
});
this.mode = "determinate"; this.mode = "determinate";
} }
this.allowCancel = data.allowCancel ?? false; this.allowCancel = data.allowCancel ?? false;
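With the change above, the dialog owns the percent conversion: producers publish raw fractions in [0, 1] and the component floors them to whole percent for the determinate progress bar. A sketch of the producing side under that contract:

    import {BehaviorSubject} from "rxjs";

    async function importAll(paths: string[], progress: BehaviorSubject<number>): Promise<void> {
        for (let i = 0; i < paths.length; i++) {
            // ... import paths[i] here ...
            progress.next((i + 1) / paths.length); // emit a fraction, not a percent
        }
    }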

@ -12,7 +12,7 @@ import {SafeResourceUrl} from "@angular/platform-browser";
import {File} from "../../../../../api/models/File"; import {File} from "../../../../../api/models/File";
import {FileService} from "../../../../services/file/file.service"; import {FileService} from "../../../../services/file/file.service";
import {FileHelper} from "../../../../services/file/file.helper"; import {FileHelper} from "../../../../services/file/file.helper";
import {ErrorBrokerService} from "../../../../services/error-broker/error-broker.service"; import {LoggingService} from "../../../../services/logging/logging.service";
import {BusyIndicatorComponent} from "../../app-common/busy-indicator/busy-indicator.component"; import {BusyIndicatorComponent} from "../../app-common/busy-indicator/busy-indicator.component";
type ContentType = "image" | "video" | "audio" | "other"; type ContentType = "image" | "video" | "audio" | "other";
@ -33,7 +33,7 @@ export class ContentViewerComponent implements AfterViewInit, OnChanges, OnDestr
@ViewChild(BusyIndicatorComponent) busyIndicator!: BusyIndicatorComponent; @ViewChild(BusyIndicatorComponent) busyIndicator!: BusyIndicatorComponent;
constructor( constructor(
private errorBroker: ErrorBrokerService, private errorBroker: LoggingService,
private fileService: FileService private fileService: FileService
) { ) {
} }
@ -70,8 +70,8 @@ export class ContentViewerComponent implements AfterViewInit, OnChanges, OnDestr
if (path) { if (path) {
try { try {
await this.fileService.saveFile(this.file, path); await this.fileService.saveFile(this.file, path);
} catch (err) { } catch (err: any) {
this.errorBroker.showError(err); this.errorBroker.error(err);
} }
} }
} }

@ -2,19 +2,11 @@ import {Component, EventEmitter, OnChanges, Output, SimpleChanges, ViewChild} fr
import {File} from "../../../../../api/models/File"; import {File} from "../../../../../api/models/File";
import {ContextMenuComponent} from "../../app-common/context-menu/context-menu.component"; import {ContextMenuComponent} from "../../app-common/context-menu/context-menu.component";
import {FileService} from "../../../../services/file/file.service"; import {FileService} from "../../../../services/file/file.service";
import {ErrorBrokerService} from "../../../../services/error-broker/error-broker.service"; import {LoggingService} from "../../../../services/logging/logging.service";
import {MatDialog, MatDialogRef} from "@angular/material/dialog"; import {MatDialog} from "@angular/material/dialog";
import {BusyDialogComponent} from "../../app-common/busy-dialog/busy-dialog.component";
import {BehaviorSubject} from "rxjs";
import {FileActionBaseComponent} from "../../app-base/file-action-base/file-action-base.component"; import {FileActionBaseComponent} from "../../app-base/file-action-base/file-action-base.component";
import {FileStatus} from "../../../../../api/api-types/files"; import {FileStatus} from "../../../../../api/api-types/files";
type ProgressDialogContext = {
dialog: MatDialogRef<BusyDialogComponent>,
progress: BehaviorSubject<number>,
message: BehaviorSubject<string>,
};
@Component({ @Component({
selector: "app-file-context-menu", selector: "app-file-context-menu",
templateUrl: "./file-context-menu.component.html", templateUrl: "./file-context-menu.component.html",
@ -34,7 +26,7 @@ export class FileContextMenuComponent extends FileActionBaseComponent implements
@Output() fileDeleted = new EventEmitter<File[]>(); @Output() fileDeleted = new EventEmitter<File[]>();
@Output() fileStatusChange = new EventEmitter<File[]>(); @Output() fileStatusChange = new EventEmitter<File[]>();
constructor(fileService: FileService, errorBroker: ErrorBrokerService, dialog: MatDialog) { constructor(fileService: FileService, errorBroker: LoggingService, dialog: MatDialog) {
super(dialog, errorBroker, fileService); super(dialog, errorBroker, fileService);
} }

@ -5,7 +5,7 @@
class="file-gallery-inner"> class="file-gallery-inner">
<cdk-virtual-scroll-viewport #virtualScrollGrid class="file-scroll" itemSize="260" maxBufferPx="2000" <cdk-virtual-scroll-viewport #virtualScrollGrid class="file-scroll" itemSize="260" maxBufferPx="2000"
minBufferPx="500"> minBufferPx="500">
<div *cdkVirtualFor="let rowEntry of partitionedGridEntries; trackByFileRowId"> <div *cdkVirtualFor="let rowEntry of partitionedGridEntries; trackBy: trackByFileRowId">
<div class="file-row"> <div class="file-row">
<app-file-card <app-file-card
(clickEvent)="setSelectedFile($event.entry)" (clickEvent)="setSelectedFile($event.entry)"
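The trackBy fix in this hunk is easy to miss: in the *cdkVirtualFor microsyntax the bare token trackByFileRowId never binds the trackBy input, so no tracking function was registered, while the trackBy: trackByFileRowId key form does bind it and lets the viewport reuse rows across updates. The component method presumably looks something like this (the row type and its id field are assumptions):

    public trackByFileRowId(index: number, row: { id: number }): number {
        // A stable identity lets the CDK reuse DOM nodes instead of
        // recreating them whenever the array reference changes.
        return row.id;
    }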
@ -18,7 +18,9 @@
</cdk-virtual-scroll-viewport> </cdk-virtual-scroll-viewport>
</div> </div>
<app-file-context-menu #fileContextMenu (fileDeleted)="this.fileDeleted.emit($event)" (fileStatusChange)="this.onFileStatusChange()"> <app-file-context-menu #fileContextMenu
(fileDeleted)="this.fileDeleted.emit($event)"
(fileStatusChange)="this.onFileStatusChange()">
<button (click)="this.fileOpen.emit(fileContextMenu.files[0])" <button (click)="this.fileOpen.emit(fileContextMenu.files[0])"
*ngIf="fileContextMenu.files.length === 1" *ngIf="fileContextMenu.files.length === 1"
content-before="" content-before=""

@ -19,6 +19,7 @@ import {FileService} from "../../../../../services/file/file.service";
import {Selectable} from "../../../../../models/Selectable"; import {Selectable} from "../../../../../models/Selectable";
import {Key} from "w3c-keys"; import {Key} from "w3c-keys";
import {BehaviorSubject} from "rxjs"; import {BehaviorSubject} from "rxjs";
import {LoggingService} from "../../../../../services/logging/logging.service";
@Component({ @Component({
selector: "app-file-grid", selector: "app-file-grid",
@ -48,6 +49,7 @@ export class FileGridComponent implements OnChanges, OnInit, AfterViewInit {
private gridEntries: Selectable<File>[] = []; private gridEntries: Selectable<File>[] = [];
constructor( constructor(
private logger: LoggingService,
private tabService: TabService, private tabService: TabService,
private fileService: FileService, private fileService: FileService,
) { ) {
@ -88,12 +90,14 @@ export class FileGridComponent implements OnChanges, OnInit, AfterViewInit {
this.handleShiftSelect(clickedEntry); this.handleShiftSelect(clickedEntry);
} else { } else {
clickedEntry.selected.next(!clickedEntry.selected.value); clickedEntry.selected.next(!clickedEntry.selected.value);
if (!clickedEntry.selected) { if (!clickedEntry.selected.value) {
this.logger.trace("File wasn't selected");
const index = this.selectedEntries.indexOf(clickedEntry); const index = this.selectedEntries.indexOf(clickedEntry);
if (index > -1) { if (index > -1) {
this.selectedEntries.splice(index, 1); this.selectedEntries.splice(index, 1);
} }
} else { } else {
this.logger.trace("File was selected");
this.selectedEntries.push(clickedEntry); this.selectedEntries.push(clickedEntry);
} }
} }
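The guard change in this click handler fixes a classic RxJS pitfall: clickedEntry.selected is a BehaviorSubject, and an object is always truthy, so the old !clickedEntry.selected branch could never run; the current state lives in .value. Reduced to its essentials:

    import {BehaviorSubject} from "rxjs";

    const selected = new BehaviorSubject<boolean>(false);
    selected.next(!selected.value); // toggle; current value is now true

    if (!selected) {
        // Never reached: the subject itself is always truthy.
    }
    if (!selected.value) {
        // Correct: inspects the current boolean state.
    }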
@ -258,11 +262,13 @@ export class FileGridComponent implements OnChanges, OnInit, AfterViewInit {
private selectAll() { private selectAll() {
this.selectedEntries = this.gridEntries; this.selectedEntries = this.gridEntries;
this.gridEntries.forEach(g => g.select()); this.gridEntries.forEach(g => g.select());
this.fileSelect.emit(this.selectedEntries.map(e => e.data));
} }
private selectNone() { private selectNone() {
this.selectedEntries = []; this.selectedEntries = [];
this.gridEntries.forEach(g => g.unselect()); this.gridEntries.forEach(g => g.unselect());
this.fileSelect.emit([]);
} }
private handleArrowSelect(direction: "up" | "down" | "left" | "right") { private handleArrowSelect(direction: "up" | "down" | "left" | "right") {

@ -4,7 +4,7 @@ import {FileGalleryComponent} from "./file-gallery/file-gallery.component";
import {FileGridComponent} from "./file-grid/file-grid.component"; import {FileGridComponent} from "./file-grid/file-grid.component";
import {FileActionBaseComponent} from "../../app-base/file-action-base/file-action-base.component"; import {FileActionBaseComponent} from "../../app-base/file-action-base/file-action-base.component";
import {MatDialog} from "@angular/material/dialog"; import {MatDialog} from "@angular/material/dialog";
import {ErrorBrokerService} from "../../../../services/error-broker/error-broker.service"; import {LoggingService} from "../../../../services/logging/logging.service";
import {FileService} from "../../../../services/file/file.service"; import {FileService} from "../../../../services/file/file.service";
import {TabState} from "../../../../models/TabState"; import {TabState} from "../../../../models/TabState";
@ -29,7 +29,7 @@ export class FileMultiviewComponent extends FileActionBaseComponent implements A
public selectedFiles: File[] = []; public selectedFiles: File[] = [];
@Input() public preselectedFile: File | undefined; @Input() public preselectedFile: File | undefined;
constructor(dialog: MatDialog, errorBroker: ErrorBrokerService, fileService: FileService) { constructor(dialog: MatDialog, errorBroker: LoggingService, fileService: FileService) {
super(dialog, errorBroker, fileService); super(dialog, errorBroker, fileService);
} }

@ -113,7 +113,7 @@ export class FilterInputComponent implements OnChanges {
private validateFilters(filters: FilterQuery[]): boolean { private validateFilters(filters: FilterQuery[]): boolean {
for (const filter of filters) { for (const filter of filters) {
if ("Tag" in filter && !this.tagsForAutocomplete.includes(filter["Tag"].tag)) { if ("Tag" in filter && !filter.Tag.tag.endsWith("*") && !this.tagsForAutocomplete.includes(filter.Tag.tag)) {
console.debug("tags don't include", filter); console.debug("tags don't include", filter);
return false; return false;
} }
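With the wildcard change above, a tag query ending in * skips the autocomplete membership check instead of being rejected outright. The rule, restated as a standalone predicate (no new behavior):

    function isValidTagFilter(tag: string, knownTags: string[]): boolean {
        // Wildcard queries such as "character:*" cannot appear in the
        // known-tag list, so they are accepted as-is.
        return tag.endsWith("*") || knownTags.includes(tag);
    }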

@ -1,14 +1,8 @@
import {Component, Inject, ViewChild} from "@angular/core"; import {Component, Inject, ViewChild} from "@angular/core";
import {MAT_DIALOG_DATA, MatDialogRef} from "@angular/material/dialog"; import {MAT_DIALOG_DATA, MatDialogRef} from "@angular/material/dialog";
import { import {RepositoryFormComponent} from "../repository-form/repository-form.component";
RepositoryFormComponent import {RepositoryService} from "../../../../../services/repository/repository.service";
} from "../repository-form/repository-form.component"; import {LoggingService} from "../../../../../services/logging/logging.service";
import {
RepositoryService
} from "../../../../../services/repository/repository.service";
import {
ErrorBrokerService
} from "../../../../../services/error-broker/error-broker.service";
@Component({ @Component({
selector: "app-add-repository-dialog", selector: "app-add-repository-dialog",
@ -21,9 +15,10 @@ export class AddRepositoryDialogComponent {
constructor( constructor(
public repoService: RepositoryService, public repoService: RepositoryService,
public errorBroker: ErrorBrokerService, public errorBroker: LoggingService,
public dialogRef: MatDialogRef<AddRepositoryDialogComponent>, public dialogRef: MatDialogRef<AddRepositoryDialogComponent>,
@Inject(MAT_DIALOG_DATA) data: any) { @Inject(MAT_DIALOG_DATA) data: any
) {
} }
public async checkLocalRepoExists() { public async checkLocalRepoExists() {
@ -35,22 +30,23 @@ export class AddRepositoryDialogComponent {
const path = this.repositoryForm.formGroup.value.path; const path = this.repositoryForm.formGroup.value.path;
try { try {
await this.repoService.initRepository(path); await this.repoService.initRepository(path);
} catch (err) { } catch (err: any) {
this.errorBroker.showError(err); this.errorBroker.error(err);
} }
await this.checkLocalRepoExists(); await this.checkLocalRepoExists();
} }
public async addRepository() { public async addRepository() {
let {name, repositoryType, path, address} = this.repositoryForm.formGroup.value; let { name, repositoryType, path, address } = this.repositoryForm.formGroup.value;
path = repositoryType === "local" ? path : undefined; path = repositoryType === "local" ? path : undefined;
address = repositoryType === "remote" ? address : undefined; address = repositoryType === "remote" ? address : undefined;
try { try {
await this.repoService.addRepository(name, path, address, await this.repoService.addRepository(name, path, address,
repositoryType === "local"); repositoryType === "local"
);
this.dialogRef.close(); this.dialogRef.close();
} catch (err) { } catch (err: any) {
this.errorBroker.showError(err); this.errorBroker.error(err);
} }
} }

@ -1,13 +1,7 @@
import {Component, Inject, ViewChild} from "@angular/core"; import {Component, Inject, ViewChild} from "@angular/core";
import { import {RepositoryFormComponent} from "../repository-form/repository-form.component";
RepositoryFormComponent import {RepositoryService} from "../../../../../services/repository/repository.service";
} from "../repository-form/repository-form.component"; import {LoggingService} from "../../../../../services/logging/logging.service";
import {
RepositoryService
} from "../../../../../services/repository/repository.service";
import {
ErrorBrokerService
} from "../../../../../services/error-broker/error-broker.service";
import {MAT_DIALOG_DATA, MatDialogRef} from "@angular/material/dialog"; import {MAT_DIALOG_DATA, MatDialogRef} from "@angular/material/dialog";
import {Repository} from "../../../../../../api/models/Repository"; import {Repository} from "../../../../../../api/models/Repository";
@ -25,9 +19,10 @@ export class EditRepositoryDialogComponent {
constructor( constructor(
public repoService: RepositoryService, public repoService: RepositoryService,
public errorBroker: ErrorBrokerService, public errorBroker: LoggingService,
public dialogRef: MatDialogRef<EditRepositoryDialogComponent>, public dialogRef: MatDialogRef<EditRepositoryDialogComponent>,
@Inject(MAT_DIALOG_DATA) data: any) { @Inject(MAT_DIALOG_DATA) data: any
) {
this.selectedRepository = data.repository; this.selectedRepository = data.repository;
this.originalName = this.selectedRepository.name; this.originalName = this.selectedRepository.name;
} }
@ -41,14 +36,14 @@ export class EditRepositoryDialogComponent {
const path = this.repositoryForm.formGroup.value.path; const path = this.repositoryForm.formGroup.value.path;
try { try {
await this.repoService.initRepository(path); await this.repoService.initRepository(path);
} catch (err) { } catch (err: any) {
this.errorBroker.showError(err); this.errorBroker.error(err);
} }
await this.checkLocalRepoExists(); await this.checkLocalRepoExists();
} }
public async addRepository() { public async addRepository() {
let {name, repositoryType, path, address} = this.repositoryForm.formGroup.value; let { name, repositoryType, path, address } = this.repositoryForm.formGroup.value;
path = repositoryType === "local" ? path : undefined; path = repositoryType === "local" ? path : undefined;
address = repositoryType === "remote" ? address : undefined; address = repositoryType === "remote" ? address : undefined;
@ -61,12 +56,13 @@ export class EditRepositoryDialogComponent {
await this.repoService.removeRepository(this.originalName); await this.repoService.removeRepository(this.originalName);
} }
await this.repoService.addRepository(name, path, address, await this.repoService.addRepository(name, path, address,
repositoryType === "local"); repositoryType === "local"
this.selectedRepository.update({name, local: repositoryType === "local", path, address}); );
this.selectedRepository.update({ name, local: repositoryType === "local", path, address });
this.dialogRef.close(); this.dialogRef.close();
} catch (err) { } catch (err: any) {
this.errorBroker.showError(err); this.errorBroker.error(err);
} }
} }
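The recurring catch (err) to catch (err: any) edits in these dialogs line up with stricter TypeScript settings (useUnknownInCatchVariables, part of strict since 4.4), under which a catch variable defaults to unknown and cannot be passed onward without an annotation or a narrowing check. A narrowing alternative to the any annotation:

    async function failingCall(): Promise<void> {
        throw new Error("example failure"); // hypothetical stand-in
    }

    async function run(): Promise<void> {
        try {
            await failingCall();
        } catch (err) {
            // Narrow instead of annotating with `any`:
            const message = err instanceof Error ? err.message : String(err);
            console.error(message);
        }
    }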

@@ -1,6 +1,6 @@
 import {Component, EventEmitter, Output} from "@angular/core";
 import {ImportService} from "../../../../../services/import/import.service";
-import {ErrorBrokerService} from "../../../../../services/error-broker/error-broker.service";
+import {LoggingService} from "../../../../../services/logging/logging.service";
 import {AddFileOptions} from "../../../../../models/AddFileOptions";
 import {File} from "../../../../../../api/models/File";
 import {DialogFilter} from "@tauri-apps/api/dialog";
@@ -36,7 +36,7 @@ export class FilesystemImportComponent {
     public importing = false;
     public importingProgress = 0;
-    constructor(private errorBroker: ErrorBrokerService, private importService: ImportService) {
+    constructor(private errorBroker: LoggingService, private importService: ImportService) {
     }
     public async setSelectedPaths(paths: string[]) {
@@ -44,9 +44,9 @@ export class FilesystemImportComponent {
         try {
             this.files = await this.importService.resolvePathsToFiles(paths);
             this.fileCount = this.files.length;
-        } catch (err) {
+        } catch (err: any) {
             console.log(err);
-            this.errorBroker.showError(err);
+            this.errorBroker.error(err);
         }
         this.resolving = false;
     }
@@ -64,9 +64,9 @@ export class FilesystemImportComponent {
                 this.importOptions
             );
             this.fileImported.emit(resultFile);
-        } catch (err) {
+        } catch (err: any) {
             console.log(err);
-            this.errorBroker.showError(err);
+            this.errorBroker.error(err);
         }
         count++;
         this.importingProgress = (count / this.fileCount) * 100;

@@ -12,7 +12,7 @@ import {
 import {SortKey} from "../../../../models/SortKey";
 import {MatDialog} from "@angular/material/dialog";
 import {SortDialogComponent} from "./sort-dialog/sort-dialog.component";
-import {ErrorBrokerService} from "../../../../services/error-broker/error-broker.service";
+import {LoggingService} from "../../../../services/logging/logging.service";
 import {FilterDialogComponent} from "./filter-dialog/filter-dialog.component";
 import {Tag} from "../../../../../api/models/Tag";
 import {clipboard} from "@tauri-apps/api";
@@ -57,7 +57,7 @@ export class FileSearchComponent implements AfterViewChecked, OnInit {
     private needsScroll = false;
     constructor(
-        private errorBroker: ErrorBrokerService,
+        private logger: LoggingService,
         public dialog: MatDialog
     ) {
         this.assignDisplayedFilters();
@@ -85,8 +85,8 @@ export class FileSearchComponent implements AfterViewChecked, OnInit {
         this.searchStartEvent.emit();
         try {
             await this.state.findFiles();
-        } catch (err) {
-            this.errorBroker.showError(err);
+        } catch (err: any) {
+            this.logger.error(err);
         }
         this.searchEndEvent.emit();
     }

@@ -13,7 +13,7 @@ import {File} from "../../../../../api/models/File";
 import {Tag} from "../../../../../api/models/Tag";
 import {CdkVirtualScrollViewport} from "@angular/cdk/scrolling";
 import {TagService} from "../../../../services/tag/tag.service";
-import {ErrorBrokerService} from "../../../../services/error-broker/error-broker.service";
+import {LoggingService} from "../../../../services/logging/logging.service";
 import {BusyIndicatorComponent} from "../../app-common/busy-indicator/busy-indicator.component";
 @Component({
@@ -36,7 +36,7 @@ export class TagEditComponent implements AfterViewInit, OnChanges {
     private fileTags: { [key: number]: Tag[] } = {};
     constructor(
-        private errorBroker: ErrorBrokerService,
+        private logger: LoggingService,
         private tagService: TagService,
     ) {
     }
@@ -148,16 +148,12 @@ export class TagEditComponent implements AfterViewInit, OnChanges {
     private async loadFileTags() {
         await this.wrapAsyncOperation(async () => {
-            const promises = [];
-            const loadFn = async (file: File) => {
-                this.fileTags[file.id] = await this.tagService.getTagsForFiles(
-                    [file.cd]);
-            };
+            console.log("loading tags");
+            const mappings = await this.tagService.getFileTagMappings(this.files.map(f => f.cd));
             for (const file of this.files) {
-                promises.push(loadFn(file));
+                this.fileTags[file.id] = mappings[file.cd];
             }
-            await Promise.all(promises);
             this.mapFileTagsToTagList();
         });
     }
@@ -176,11 +172,11 @@ export class TagEditComponent implements AfterViewInit, OnChanges {
     }
     private async wrapAsyncOperation<T>(cb: () => Promise<T>): Promise<T | undefined> {
-        if (!this.busyIndicator) {
+        if (!this.busyIndicator?.wrapAsyncOperation) {
             try {
                 return cb();
             } catch (err: any) {
-                this.errorBroker.showError(err);
+                this.logger.error(err);
                 return undefined;
             }
         } else {

@@ -1,53 +0,0 @@
-import {Injectable} from "@angular/core";
-import {listen} from "@tauri-apps/api/event";
-@Injectable({
-    providedIn: "root"
-})
-export class ErrorBrokerService {
-    errorCb: Function | undefined;
-    infoCb: Function | undefined;
-    constructor() {
-        this.registerListener().catch(err => console.error(err));
-    }
-    async registerListener() {
-        const _unlisten = await listen("error", event => {
-            const payload: any = event.payload;
-            if (payload.message) {
-                this.showError(payload);
-            } else {
-                this.showError(payload.toString());
-            }
-        });
-    }
-    async try<T>(fn: () => Promise<T>): Promise<T | undefined> {
-        try {
-            return await fn();
-        } catch (err) {
-            this.showError(err);
-            return;
-        }
-    }
-    showInfo(info: string) {
-        console.log(info);
-        if (this.infoCb) {
-            this.infoCb(info);
-        }
-    }
-    showError(error: { message: string } | any) {
-        console.error(error);
-        if (this.errorCb) {
-            if (!error.message) {
-                this.errorCb({ message: error });
-            } else {
-                this.errorCb({ ...error });
-            }
-        }
-    }
-}

@@ -0,0 +1,24 @@
+export enum LogLevel {
+    Trace,
+    Debug,
+    Info,
+    Warn,
+    Error,
+}
+export class LogEntry {
+    constructor(private message: string, private level: LogLevel, private error?: Error) {
+    }
+    public getMessage(): string {
+        return this.message;
+    }
+    public getLevel(): LogLevel {
+        return this.level;
+    }
+    public getError(): Error | undefined {
+        return this.error;
+    }
+}
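
For orientation: `LogLevel` is a numeric enum, so entries can be filtered by severity with an ordinary comparison. A hypothetical usage sketch, not part of the commit:

const entry = new LogEntry("connection failed", LogLevel.Error, new Error("timeout"));
// Warn and Error are the two highest values, so this passes for those levels only.
if (entry.getLevel() >= LogLevel.Warn) {
    console.error(entry.getMessage(), entry.getError());
}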

@@ -1,13 +1,13 @@
 import {TestBed} from "@angular/core/testing";
-import {ErrorBrokerService} from "./error-broker.service";
+import {LoggingService} from "./logging.service";
 describe("ErrorBrokerService", () => {
-    let service: ErrorBrokerService;
+    let service: LoggingService;
     beforeEach(() => {
         TestBed.configureTestingModule({});
-        service = TestBed.inject(ErrorBrokerService);
+        service = TestBed.inject(LoggingService);
     });
     it("should be created", () => {

@@ -0,0 +1,60 @@
+import {Injectable} from "@angular/core";
+import {listen} from "@tauri-apps/api/event";
+import {BehaviorSubject} from "rxjs";
+import {LogEntry, LogLevel} from "./LogEntry";
+@Injectable({
+    providedIn: "root"
+})
+export class LoggingService {
+    logs = new BehaviorSubject<LogEntry>(new LogEntry("Log initialized", LogLevel.Trace));
+    constructor() {
+        this.registerListener().catch(err => console.error(err));
+    }
+    async registerListener() {
+        const _unlisten = await listen("error", event => {
+            const payload: any = event.payload;
+            if (payload.message) {
+                this.error(payload);
+            } else {
+                this.error(payload.toString());
+            }
+        });
+    }
+    async try<T>(fn: () => Promise<T>): Promise<T | undefined> {
+        try {
+            return await fn();
+        } catch (err: any) {
+            this.error(err);
+            return;
+        }
+    }
+    trace(message: string) {
+        this.log(LogLevel.Trace, message);
+    }
+    debug(message: string) {
+        this.log(LogLevel.Debug, message);
+    }
+    info(message: string) {
+        this.log(LogLevel.Info, message);
+    }
+    warn(message: string) {
+        this.log(LogLevel.Warn, message);
+    }
+    error(error: Error, message?: string) {
+        this.log(LogLevel.Error, message ?? error.message ?? error.toString(), error);
+    }
+    public log(level: LogLevel, message: string, error?: Error) {
+        this.logs.next(new LogEntry(message, level, error));
+    }
+}
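
To illustrate how a consumer might use the new service: it can subscribe to the `logs` subject and surface entries above a chosen severity. This is a hypothetical sketch, not part of the commit; the component name, selector, and import paths are assumptions.

import {Component, OnDestroy} from "@angular/core";
import {Subscription} from "rxjs";
import {LoggingService} from "./logging.service"; // assumed path
import {LogLevel} from "./LogEntry";              // assumed path

@Component({selector: "app-log-toast", template: ""})
export class LogToastComponent implements OnDestroy {
    private subscription: Subscription;

    constructor(logger: LoggingService) {
        // logs is a BehaviorSubject, so a new subscriber immediately receives
        // the most recent entry, then every entry logged afterwards.
        this.subscription = logger.logs.subscribe(entry => {
            // Surface only warnings and errors; Trace/Debug/Info stay in the stream.
            if (entry.getLevel() >= LogLevel.Warn) {
                console.warn(entry.getMessage(), entry.getError());
            }
        });
    }

    ngOnDestroy(): void {
        this.subscription.unsubscribe();
    }
}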

@@ -3,7 +3,7 @@ import {Repository} from "../../../api/models/Repository";
 import {BehaviorSubject} from "rxjs";
 import {listen} from "@tauri-apps/api/event";
 import {Info} from "../../models/Info";
-import {ErrorBrokerService} from "../error-broker/error-broker.service";
+import {LoggingService} from "../logging/logging.service";
 import {RepositoryMetadata} from "../../models/RepositoryMetadata";
 import {MediarepoApi} from "../../../api/Api";
 import {mapMany, mapNew, mapOptional,} from "../../../api/models/adaptors";
@@ -17,7 +17,7 @@ export class RepositoryService {
     public selectedRepository = new BehaviorSubject<Repository | undefined>(
         undefined);
-    constructor(private errorBroker: ErrorBrokerService) {
+    constructor(private errorBroker: LoggingService) {
         this.registerListener().catch(err => console.error(err));
     }
@@ -25,7 +25,7 @@ export class RepositoryService {
     async registerListener() {
         await listen("info", (event: { payload: Info }) => {
             const message = `Connected to ${event.payload.name}, Version: ${event.payload.version}`;
-            this.errorBroker.showInfo(message);
+            this.errorBroker.info(message);
         });
     }
@@ -68,7 +68,7 @@ export class RepositoryService {
                 console.warn(err);
             }
         }
-        await MediarepoApi.selectRepository({name: repo.name});
+        await MediarepoApi.selectRepository({ name: repo.name });
     }
     /**
@@ -98,7 +98,7 @@
      * @returns {Promise<void>}
      */
     public async addRepository(name: string, path: string | undefined, address: string | undefined, local: boolean) {
-        let repos = await MediarepoApi.addRepository({name, path, address, local}).then(mapMany(mapNew(Repository)));
+        let repos = await MediarepoApi.addRepository({ name, path, address, local }).then(mapMany(mapNew(Repository)));
         this.repositories.next(repos);
     }
@@ -108,7 +108,7 @@
      * @returns {Promise<boolean>}
      */
     public async checkDaemonRunning(address: string): Promise<boolean> {
-        return MediarepoApi.checkDaemonRunning({address});
+        return MediarepoApi.checkDaemonRunning({ address });
     }
     /**
@@ -117,7 +117,7 @@
      * @returns {Promise<boolean>}
      */
     public async checkLocalRepositoryExists(path: string): Promise<boolean> {
-        return await MediarepoApi.checkLocalRepositoryExists({path});
+        return await MediarepoApi.checkLocalRepositoryExists({ path });
     }
     /**
@@ -126,7 +126,7 @@
      * @returns {Promise<void>}
      */
     public async removeRepository(name: string): Promise<void> {
-        await MediarepoApi.removeRepository({name});
+        await MediarepoApi.removeRepository({ name });
         await this.loadRepositories();
     }
@@ -136,7 +136,7 @@
      * @returns {Promise<void>}
      */
     public async deleteRepository(name: string): Promise<void> {
-        await MediarepoApi.deleteRepository({name});
+        await MediarepoApi.deleteRepository({ name });
         await this.removeRepository(name);
     }
@@ -146,7 +146,7 @@
      * @returns {Promise<void>}
      */
     public async startDaemon(repoPath: string): Promise<void> {
-        return MediarepoApi.startDaemon({repoPath});
+        return MediarepoApi.startDaemon({ repoPath });
     }
     /**
@@ -155,7 +155,7 @@
      * @returns {Promise<void>}
      */
     public async initRepository(repoPath: string): Promise<void> {
-        return MediarepoApi.initRepository({repoPath});
+        return MediarepoApi.initRepository({ repoPath });
     }
     /**
@@ -172,7 +172,7 @@
      * @param sizeType
      */
     public async getSize(sizeType: SizeType): Promise<SizeMetadata> {
-        return MediarepoApi.getSize({sizeType});
+        return MediarepoApi.getSize({ sizeType });
     }
     async loadSelectedRepository() {

@@ -29,16 +29,31 @@ export class TagService {
     public async getTagsForFiles(cds: string[]): Promise<Tag[]> {
         let tags: Tag[] = [];
         if (cds.length > 0) {
-            tags = await MediarepoApi.getTagsForFiles({cds}).then(mapMany(mapNew(Tag)));
+            tags = await MediarepoApi.getTagsForFiles({ cds }).then(mapMany(mapNew(Tag)));
         }
         return tags;
     }
+    public async getFileTagMappings(cds: string[]): Promise<{ [key: string]: Tag[] }> {
+        if (cds.length > 0) {
+            return await MediarepoApi.getFileTagMap({ cds }).then((cdMappings) => {
+                let mappings: { [key: string]: Tag[] } = {};
+                console.log("TAG MAPPINGS", cdMappings);
+                for (const key in cdMappings) {
+                    mappings[key] = cdMappings[key].map(mapNew(Tag));
+                }
+                return mappings;
+            });
+        } else {
+            return {};
+        }
+    }
     public async createTags(tags: string[]): Promise<Tag[]> {
-        return MediarepoApi.createTags({tags}).then(mapMany(mapNew(Tag)));
+        return MediarepoApi.createTags({ tags }).then(mapMany(mapNew(Tag)));
     }
     public async changeFileTags(fileId: number, addedTags: number[], removedTags: number[]): Promise<Tag[]> {
-        return MediarepoApi.changeFileTags({id: fileId, addedTags, removedTags}).then(mapMany(mapNew(Tag)));
+        return MediarepoApi.changeFileTags({ id: fileId, addedTags, removedTags }).then(mapMany(mapNew(Tag)));
     }
 }
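
The new `getFileTagMappings` replaces one `getTagsForFiles` round trip per file with a single batched request keyed by content descriptor, which is what `TagEditComponent.loadFileTags` above now relies on. A hypothetical caller (the function name and import paths are assumptions, not part of the commit) might look like:

import {Tag} from "../../../api/models/Tag"; // assumed path
import {TagService} from "./tag.service";    // assumed path

// Resolve tags for many files in a single IPC round trip.
async function loadTagsByFileId(tagService: TagService, files: { id: number; cd: string }[]): Promise<{ [id: number]: Tag[] }> {
    const mappings = await tagService.getFileTagMappings(files.map(f => f.cd));
    const tagsByFileId: { [id: number]: Tag[] } = {};
    for (const file of files) {
        // A file without tags may be absent from the map, so default to an empty list.
        tagsByFileId[file.id] = mappings[file.cd] ?? [];
    }
    return tagsByFileId;
}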

Some files were not shown because too many files have changed in this diff.