Merge pull request #25 from Trivernis/develop

Develop
Julius Riegel 1 year ago committed by GitHub
commit 3a25a8b812

@@ -36,7 +36,15 @@ jobs:
- name: Check daemon
working-directory: mediarepo-daemon
-run: cargo check --no-default-features
+run: cargo check
+- name: Lint api
+working-directory: mediarepo-api
+run: cargo clippy -- -D warnings
+- name: Lint daemon
+working-directory: mediarepo-daemon
+run: cargo clippy -- -D warnings
- name: Install UI dependencies
working-directory: mediarepo-ui
@@ -47,4 +55,4 @@ jobs:
- name: Lint ui frontend
working-directory: mediarepo-ui
-run: yarn lint
+run: yarn lint

@@ -1,6 +1,6 @@
[package]
name = "mediarepo-api"
-version = "0.32.0"
+version = "0.32.1"
edition = "2018"
license = "gpl-3"

@@ -34,4 +34,10 @@ impl JobApi {
Ok(())
}
+/// Checks if a particular job is already running
+#[tracing::instrument(level = "debug", skip(self))]
+pub async fn is_job_running(&self, job_type: JobType) -> ApiResult<bool> {
+self.emit_and_get("is_job_running", job_type, None).await
+}
}
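
Note: the method above is a plain request/response round trip over `emit_and_get`. A minimal caller-side sketch of how such an endpoint might be polled — the types here are simplified stand-ins so the sketch compiles on its own, not the crate's real `JobApi`/`JobType`/`ApiResult`:

```rust
use std::time::Duration;

// Stand-ins for the crate's types (assumptions, not the real API surface).
#[derive(Clone, Copy)]
enum JobType { Vacuum }
type ApiResult<T> = Result<T, Box<dyn std::error::Error>>;

struct JobApi;
impl JobApi {
    async fn is_job_running(&self, _job_type: JobType) -> ApiResult<bool> {
        Ok(false) // the real impl round-trips "is_job_running" over bromine
    }
}

// Poll until the given job has finished.
async fn wait_for_job(api: &JobApi, job_type: JobType) -> ApiResult<()> {
    while api.is_job_running(job_type).await? {
        tokio::time::sleep(Duration::from_millis(500)).await;
    }
    Ok(())
}

#[tokio::main]
async fn main() -> ApiResult<()> {
    wait_for_job(&JobApi, JobType::Vacuum).await
}
```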

@@ -9,3 +9,11 @@ pub async fn run_job(api_state: ApiAccess<'_>, job_type: JobType, sync: bool) ->
Ok(())
}
+#[tauri::command]
+pub async fn is_job_running(api_state: ApiAccess<'_>, job_type: JobType) -> PluginResult<bool> {
+let api = api_state.api().await?;
+let running = api.job.is_job_running(job_type).await?;
+Ok(running)
+}

@@ -75,7 +75,8 @@ impl<R: Runtime> MediarepoPlugin<R> {
get_file_tag_map,
all_sorting_presets,
add_sorting_preset,
-delete_sorting_preset
+delete_sorting_preset,
+is_job_running
]),
}
}

@@ -8,6 +8,15 @@ version = "0.11.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"
+[[package]]
+name = "addr2line"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
+dependencies = [
+"gimli",
+]
[[package]]
name = "adler"
version = "1.0.2"
@@ -40,6 +49,15 @@ dependencies = [
"version_check",
]
+[[package]]
+name = "aho-corasick"
+version = "0.7.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+dependencies = [
+"memchr",
+]
[[package]]
name = "aliasable"
version = "0.1.3"
@@ -185,6 +203,21 @@ dependencies = [
"mime",
]
+[[package]]
+name = "backtrace"
+version = "0.3.65"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11a17d453482a265fd5f8479f2a3f405566e6ca627837aaddb85af8b1ab8ef61"
+dependencies = [
+"addr2line",
+"cc",
+"cfg-if 1.0.0",
+"libc",
+"miniz_oxide",
+"object",
+"rustc-demangle",
+]
[[package]]
name = "bae"
version = "0.1.7"
@@ -1015,6 +1048,12 @@ dependencies = [
"weezl",
]
+[[package]]
+name = "gimli"
+version = "0.26.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4"
[[package]]
name = "glob"
version = "0.3.0"
@@ -1150,6 +1189,21 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
+[[package]]
+name = "human-panic"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39f357a500abcbd7c5f967c1d45c8838585b36743823b9d43488f24850534e36"
+dependencies = [
+"backtrace",
+"os_type",
+"serde",
+"serde_derive",
+"termcolor",
+"toml",
+"uuid",
+]
[[package]]
name = "humantime"
version = "2.1.0"
@@ -1408,7 +1462,7 @@ checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb"
[[package]]
name = "mediarepo-api"
-version = "0.32.0"
+version = "0.32.1"
dependencies = [
"bromine",
"chrono",
@@ -1451,6 +1505,7 @@ version = "1.0.3"
dependencies = [
"console-subscriber",
"glob",
+"human-panic",
"log",
"mediarepo-core",
"mediarepo-logic",
@@ -1785,6 +1840,15 @@ dependencies = [
"libc",
]
+[[package]]
+name = "object"
+version = "0.28.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "40bec70ba014595f99f7aa110b84331ffe1ee9aece7fe6f387cc7e3ecda4d456"
+dependencies = [
+"memchr",
+]
[[package]]
name = "once_cell"
version = "1.10.0"
@@ -1900,6 +1964,15 @@ dependencies = [
"hashbrown 0.12.0",
]
+[[package]]
+name = "os_type"
+version = "2.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3df761f6470298359f84fcfb60d86db02acc22c251c37265c07a3d1057d2389"
+dependencies = [
+"regex",
+]
[[package]]
name = "ouroboros"
version = "0.15.0"
@@ -2293,6 +2366,8 @@ version = "1.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
dependencies = [
+"aho-corasick",
+"memchr",
"regex-syntax",
]
@@ -2361,6 +2436,12 @@ dependencies = [
"serde",
]
+[[package]]
+name = "rustc-demangle"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
[[package]]
name = "rustc_version"
version = "0.2.3"
@@ -2958,6 +3039,15 @@ dependencies = [
"winapi",
]
+[[package]]
+name = "termcolor"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
+dependencies = [
+"winapi-util",
+]
[[package]]
name = "textwrap"
version = "0.11.0"
@@ -3689,6 +3779,15 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+"winapi",
+]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"

@@ -4,7 +4,7 @@ default-members = ["mediarepo-core", "mediarepo-database", "mediarepo-logic", "m
[package]
name = "mediarepo-daemon"
-version = "1.0.3"
+version = "1.0.4"
edition = "2018"
license = "gpl-3"
repository = "https://github.com/Trivernis/mediarepo-daemon"
@@ -30,6 +30,7 @@ log = "0.4.16"
opentelemetry = { version = "0.17.0", features = ["rt-tokio"] }
opentelemetry-jaeger = { version = "0.16.0", features = ["rt-tokio"] }
tracing-opentelemetry = "0.17.2"
+human-panic = "1.0.3"
[dependencies.mediarepo-core]
path = "./mediarepo-core"

@@ -71,7 +71,7 @@ impl ThumbnailStore {
let name = file_name.to_string_lossy();
let (height, width) = name
-.split_once("-")
+.split_once('-')
.and_then(|(height, width)| {
Some((height.parse::<u32>().ok()?, width.parse::<u32>().ok()?))
})

@@ -34,6 +34,7 @@ pub enum LogLevel {
Trace,
}
+#[allow(clippy::from_over_into)]
impl Into<Option<Level>> for LogLevel {
fn into(self) -> Option<Level> {
match self {
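
Note: `clippy::from_over_into` normally asks for the reverse impl, since a `From` impl yields `Into` for free through std's blanket `impl<T, U: From<T>> Into<U> for T`. What that would look like, as a self-contained sketch — the full variant list is an assumption, as only `Trace` is visible in this hunk; the diff instead keeps the existing `Into` impl and silences the lint:

```rust
use tracing::Level;

// Assumed shape of the enum; the hunk above only shows `Trace`.
#[derive(Clone, Copy, Debug)]
enum LogLevel {
    Off,
    Error,
    Warn,
    Info,
    Debug,
    Trace,
}

impl From<LogLevel> for Option<Level> {
    fn from(level: LogLevel) -> Self {
        match level {
            LogLevel::Off => None,
            LogLevel::Error => Some(Level::ERROR),
            LogLevel::Warn => Some(Level::WARN),
            LogLevel::Info => Some(Level::INFO),
            LogLevel::Debug => Some(Level::DEBUG),
            LogLevel::Trace => Some(Level::TRACE),
        }
    }
}
```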

@@ -1,5 +1,5 @@
use std::fs;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
use config::{Config, FileFormat};
use serde::{Deserialize, Serialize};
@@ -24,7 +24,7 @@ pub struct Settings {
}
impl Settings {
-pub fn read(root: &PathBuf) -> RepoResult<Self> {
+pub fn read(root: &Path) -> RepoResult<Self> {
let settings = Config::builder()
.add_source(config::File::from_str(
&*Settings::default().to_toml_string()?,
@@ -44,7 +44,7 @@ impl Settings {
settings_main.server.tcp.enabled = true;
settings_main.server.tcp.port = PortSetting::Range(settings_v1.port_range);
settings_main.server.tcp.listen_address = settings_v1.listen_address;
-settings_main.paths.thumbnail_directory = settings_v1.thumbnail_store.into();
+settings_main.paths.thumbnail_directory = settings_v1.thumbnail_store;
settings_main.paths.database_directory = PathBuf::from(settings_v1.database_path)
.parent()
.map(|p| p.to_string_lossy().to_string())
@@ -69,7 +69,7 @@ impl Settings {
Ok(string)
}
-pub fn save(&self, root: &PathBuf) -> RepoResult<()> {
+pub fn save(&self, root: &Path) -> RepoResult<()> {
let string = toml::to_string_pretty(&self)?;
fs::write(root.join("repo.toml"), string.into_bytes())?;

@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
@@ -21,27 +21,27 @@ impl Default for PathSettings {
impl PathSettings {
#[inline]
-pub fn database_dir(&self, root: &PathBuf) -> PathBuf {
+pub fn database_dir(&self, root: &Path) -> PathBuf {
root.join(&self.database_directory)
}
#[inline]
-pub fn files_dir(&self, root: &PathBuf) -> PathBuf {
+pub fn files_dir(&self, root: &Path) -> PathBuf {
root.join(&self.files_directory)
}
#[inline]
-pub fn thumbs_dir(&self, root: &PathBuf) -> PathBuf {
+pub fn thumbs_dir(&self, root: &Path) -> PathBuf {
root.join(&self.thumbnail_directory)
}
#[inline]
-pub fn db_file_path(&self, root: &PathBuf) -> PathBuf {
+pub fn db_file_path(&self, root: &Path) -> PathBuf {
self.database_dir(root).join("repo.db")
}
#[inline]
-pub fn frontend_state_file_path(&self, root: &PathBuf) -> PathBuf {
+pub fn frontend_state_file_path(&self, root: &Path) -> PathBuf {
self.database_dir(root).join("frontend-state.json")
}
}
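
Note: the `&PathBuf` → `&Path` signature changes here and throughout this PR follow clippy's `ptr_arg` lint. A self-contained illustration (hypothetical function name) of why the borrowed form is strictly more flexible:

```rust
use std::path::{Path, PathBuf};

// Accepting &Path lets callers pass &PathBuf, &Path, or Path::new(..)
// alike via deref coercion, without forcing an owned PathBuf to exist.
fn db_file_path(root: &Path) -> PathBuf {
    root.join("db").join("repo.db")
}

fn main() {
    let owned: PathBuf = PathBuf::from("/tmp/repo");
    println!("{:?}", db_file_path(&owned)); // &PathBuf coerces to &Path
    println!("{:?}", db_file_path(Path::new("/tmp/other"))); // no allocation needed
}
```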

@@ -7,9 +7,15 @@ use tracing_subscriber::Layer;
pub struct DynLayerList<S>(Vec<Box<dyn Layer<S> + Send + Sync + 'static>>);
+impl<S> Default for DynLayerList<S> {
+fn default() -> Self {
+Self(Vec::new())
+}
+}
impl<S> DynLayerList<S> {
pub fn new() -> Self {
-Self(Vec::new())
+Self::default()
}
pub fn iter(&self) -> Iter<'_, Box<dyn Layer<S> + Send + Sync>> {
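
Note: this satisfies clippy's `new_without_default`. A manual impl is the right call here rather than a derive, because `#[derive(Default)]` on a generic struct adds an `S: Default` bound that the inner `Vec` never needs. A self-contained sketch of the distinction (`Wrapper` is a stand-in, not the real type):

```rust
// Stand-in with the same shape as DynLayerList: a newtype over a Vec.
struct Wrapper<S>(Vec<S>);

// Manual impl: no `S: Default` bound, unlike #[derive(Default)].
impl<S> Default for Wrapper<S> {
    fn default() -> Self {
        Self(Vec::new()) // an empty Vec requires nothing of S
    }
}

fn main() {
    struct NoDefault; // deliberately has no Default impl
    let w: Wrapper<NoDefault> = Wrapper::default(); // still compiles
    assert!(w.0.is_empty());
}
```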

@@ -1,7 +0,0 @@
-use async_trait::async_trait;
-#[async_trait]
-pub trait AsyncTryFrom<T> {
-type Error;
-fn async_try_from(other: T) -> Result<Self, Self::Error>;
-}

@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
use futures::future;
use tokio::fs::{self, OpenOptions};
@@ -16,7 +16,7 @@ pub fn parse_namespace_and_tag(norm_tag: String) -> (Option<String>, String) {
}
/// Parses all tags from a file
-pub async fn parse_tags_file(path: PathBuf) -> RepoResult<Vec<(Option<String>, String)>> {
+pub async fn parse_tags_file(path: &Path) -> RepoResult<Vec<(Option<String>, String)>> {
let file = OpenOptions::new().read(true).open(path).await?;
let mut lines = BufReader::new(file).lines();
let mut tags = Vec::new();
@@ -47,7 +47,7 @@ pub async fn get_folder_size(path: PathBuf) -> RepoResult<u64> {
}
}
}
-let futures = all_files.into_iter().map(|f| read_file_size(f));
+let futures = all_files.into_iter().map(read_file_size);
let results = future::join_all(futures).await;
let size = results.into_iter().filter_map(|r| r.ok()).sum();

@@ -30,7 +30,7 @@ pub async fn get_all_counts(db: &DatabaseConnection) -> RepoResult<Counts> {
))
.one(db)
.await?
-.ok_or(RepoError::from("could not retrieve metadata from database"))?;
+.ok_or_else(|| RepoError::from("could not retrieve metadata from database"))?;
Ok(counts)
}

@@ -55,7 +55,7 @@ fn vec_to_query_list<D: Display>(input: Vec<D>) -> String {
let mut entries = input
.into_iter()
.fold(String::new(), |acc, val| format!("{}{},", acc, val));
-if entries.len() > 0 {
+if !entries.is_empty() {
entries.remove(entries.len() - 1);
}

@@ -93,7 +93,7 @@ impl FileDao {
.all(&self.ctx.db)
.await?
.into_iter()
-.map(|m| FileMetadataDto::new(m))
+.map(FileMetadataDto::new)
.collect();
Ok(metadata)

@@ -22,8 +22,8 @@ impl FileDao {
let trx = self.ctx.db.begin().await?;
let model = file::ActiveModel {
id: Set(update_dto.id),
-cd_id: update_dto.cd_id.map(|v| Set(v)).unwrap_or(NotSet),
-mime_type: update_dto.mime_type.map(|v| Set(v)).unwrap_or(NotSet),
+cd_id: update_dto.cd_id.map(Set).unwrap_or(NotSet),
+mime_type: update_dto.mime_type.map(Set).unwrap_or(NotSet),
status: update_dto.status.map(|v| Set(v as i32)).unwrap_or(NotSet),
};
let file_model = model.update(&trx).await?;
@@ -62,8 +62,8 @@ impl FileDao {
sizes: I,
) -> RepoResult<Vec<ThumbnailDto>> {
let bytes = self.get_bytes(file.cd()).await?;
-let mime_type = mime::Mime::from_str(file.mime_type())
-.unwrap_or_else(|_| mime::APPLICATION_OCTET_STREAM);
+let mime_type =
+mime::Mime::from_str(file.mime_type()).unwrap_or(mime::APPLICATION_OCTET_STREAM);
let thumbnails =
thumbnailer::create_thumbnails(Cursor::new(bytes), mime_type.clone(), sizes)?;
let mut dtos = Vec::new();

@@ -40,7 +40,7 @@ impl JobDao {
}
}
-fn build_state_filters(states: &Vec<UpsertJobStateDto>) -> Condition {
+fn build_state_filters(states: &[UpsertJobStateDto]) -> Condition {
states
.iter()
.map(|s| Condition::all().add(job_state::Column::JobType.eq(s.job_type)))

@@ -122,7 +122,7 @@ async fn add_keys(
async fn find_sort_keys(
trx: &DatabaseTransaction,
-keys: &Vec<AddSortKeyDto>,
+keys: &[AddSortKeyDto],
) -> RepoResult<Vec<SortKeyDto>> {
if keys.is_empty() {
return Ok(vec![]);

@@ -77,14 +77,12 @@ fn create_cd_tag_map(
)>,
tag_id_map: HashMap<i64, TagDto>,
) -> HashMap<Vec<u8>, Vec<TagDto>> {
-let cd_tag_map = tag_cd_entries
+tag_cd_entries
.into_iter()
.filter_map(|(t, cd)| Some((cd?, tag_id_map.get(&t.tag_id)?.clone())))
.sorted_by_key(|(cd, _)| cd.id)
.group_by(|(cd, _)| cd.descriptor.to_owned())
.into_iter()
.map(|(key, group)| (key, group.map(|(_, t)| t).collect::<Vec<TagDto>>()))
-.collect();
-cd_tag_map
+.collect::<HashMap<Vec<u8>, Vec<TagDto>>>()
}

@@ -45,11 +45,12 @@ fn name_query_to_condition(query: TagByNameQuery) -> Option<Condition> {
let TagByNameQuery { namespace, name } = query;
let mut condition = Condition::all();
+#[allow(clippy::question_mark)]
if !name.ends_with('*') {
condition = condition.add(tag::Column::Name.eq(name))
} else if name.len() > 1 {
condition =
-condition.add(tag::Column::Name.like(&*format!("{}%", name.trim_end_matches("*"))))
+condition.add(tag::Column::Name.like(&*format!("{}%", name.trim_end_matches('*'))))
} else if namespace.is_none() {
return None;
}

@@ -58,12 +58,12 @@ impl TagDao {
async fn get_existing_mappings(
trx: &DatabaseTransaction,
-cd_ids: &Vec<i64>,
-tag_ids: &Vec<i64>,
+cd_ids: &[i64],
+tag_ids: &[i64],
) -> RepoResult<Vec<(i64, i64)>> {
let existing_mappings: Vec<(i64, i64)> = content_descriptor_tag::Entity::find()
-.filter(content_descriptor_tag::Column::CdId.is_in(cd_ids.clone()))
-.filter(content_descriptor_tag::Column::TagId.is_in(tag_ids.clone()))
+.filter(content_descriptor_tag::Column::CdId.is_in(cd_ids.to_vec()))
+.filter(content_descriptor_tag::Column::TagId.is_in(tag_ids.to_vec()))
.all(trx)
.await?
.into_iter()

@@ -75,7 +75,7 @@ pub struct AddFileDto {
pub name: Option<String>,
}
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
pub struct UpdateFileDto {
pub id: i64,
pub cd_id: Option<i64>,
@@ -83,17 +83,6 @@ pub struct UpdateFileDto {
pub status: Option<FileStatus>,
}
-impl Default for UpdateFileDto {
-fn default() -> Self {
-Self {
-id: 0,
-cd_id: None,
-mime_type: None,
-status: None,
-}
-}
-}
#[derive(Copy, Clone, Debug)]
pub enum FileStatus {
Imported = 10,
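
Note: the removed hand-written impl was exactly what `#[derive(Default)]` generates, since `i64` defaults to `0` and every `Option<T>` to `None` regardless of `T`. A self-contained check of that equivalence (`status` uses `Option<i32>` as a stand-in for `Option<FileStatus>`):

```rust
#[derive(Debug, Default, PartialEq)]
struct UpdateFileDto {
    id: i64,
    cd_id: Option<i64>,
    mime_type: Option<String>,
    status: Option<i32>, // stand-in; Option<T> is Default for any T
}

fn main() {
    assert_eq!(
        UpdateFileDto::default(),
        UpdateFileDto { id: 0, cd_id: None, mime_type: None, status: None }
    );
}
```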

@@ -84,7 +84,7 @@ impl KeyType {
}
pub fn to_number(&self) -> i32 {
-self.clone() as i32
+*self as i32
}
}

@@ -30,8 +30,9 @@ pub fn start_tcp_server(
return Err(RepoError::PortUnavailable);
}
}
-PortSetting::Range((l, r)) => port_check::free_local_port_in_range(*l, *r)
-.ok_or_else(|| RepoError::PortUnavailable)?,
+PortSetting::Range((l, r)) => {
+port_check::free_local_port_in_range(*l, *r).ok_or(RepoError::PortUnavailable)?
+}
};
let ip = settings.server.tcp.listen_address.to_owned();
let address = SocketAddr::new(ip, port);

@@ -151,7 +151,7 @@ impl FilesNamespace {
content: bytes,
mime_type: metadata
.mime_type
-.unwrap_or(String::from("application/octet-stream")),
+.unwrap_or_else(|| String::from("application/octet-stream")),
creation_time: metadata.creation_time,
change_time: metadata.change_time,
name: Some(metadata.name),

@@ -69,10 +69,10 @@ fn build_filters_from_expressions(
}
}
};
-if filters.len() > 0 {
-Some(filters)
-} else {
+if filters.is_empty() {
None
+} else {
+Some(filters)
}
})
.collect()
@@ -92,7 +92,7 @@ fn map_tag_query_to_filter(
query: TagQuery,
tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
-if query.tag.ends_with("*") {
+if query.tag.ends_with('*') {
map_wildcard_tag_to_filter(query, tag_id_map)
} else {
map_tag_to_filter(query, tag_id_map)
@@ -103,7 +103,7 @@ fn map_wildcard_tag_to_filter(
query: TagQuery,
tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
-let filter_tag = query.tag.trim_end_matches("*");
+let filter_tag = query.tag.trim_end_matches('*');
let relevant_ids = tag_id_map
.iter()
.filter_map(|(name, id)| {
@@ -115,15 +115,15 @@
})
.collect::<Vec<i64>>();
-if relevant_ids.len() > 0 {
+if relevant_ids.is_empty() {
+None
+} else {
let comparator = if query.negate {
IsNot(relevant_ids)
} else {
Is(relevant_ids)
};
Some(FilterProperty::TagWildcardIds(comparator))
-} else {
-None
}
}

@@ -71,7 +71,7 @@ async fn build_sort_context(
mime_type: file.mime_type().to_owned(),
namespaces: cid_nsp
.remove(&file.cd_id())
-.unwrap_or(HashMap::with_capacity(0)),
+.unwrap_or_else(|| HashMap::with_capacity(0)),
tag_count: cid_tag_counts.remove(&file.cd_id()).unwrap_or(0),
import_time: metadata.import_time().to_owned(),
create_time: metadata.import_time().to_owned(),
@@ -176,11 +176,8 @@ fn adjust_for_dir(ordering: Ordering, direction: &SortDirection) -> Ordering {
}
}
-fn compare_tag_lists(list_a: &Vec<String>, list_b: &Vec<String>) -> Ordering {
-let first_diff = list_a
-.into_iter()
-.zip(list_b.into_iter())
-.find(|(a, b)| *a != *b);
+fn compare_tag_lists(list_a: &[String], list_b: &[String]) -> Ordering {
+let first_diff = list_a.iter().zip(list_b.iter()).find(|(a, b)| *a != *b);
if let Some(diff) = first_diff {
if let (Some(num_a), Some(num_b)) = (diff.0.parse::<f32>().ok(), diff.1.parse::<f32>().ok())
{

@@ -20,8 +20,9 @@ impl NamespaceProvider for JobsNamespace {
fn register(handler: &mut EventHandler) {
events!(handler,
-"run_job" => Self::run_job
-)
+"run_job" => Self::run_job,
+"is_job_running" => Self::is_job_running
+);
}
}
@@ -59,6 +60,26 @@ impl JobsNamespace {
Ok(Response::empty())
}
+#[tracing::instrument(skip_all)]
+pub async fn is_job_running(ctx: &Context, event: Event) -> IPCResult<Response> {
+let job_type = event.payload::<JobType>()?;
+let dispatcher = get_job_dispatcher_from_context(ctx).await;
+let running = match job_type {
+JobType::MigrateContentDescriptors => {
+is_job_running::<MigrateCDsJob>(&dispatcher).await
+}
+JobType::CalculateSizes => is_job_running::<CalculateSizesJob>(&dispatcher).await,
+JobType::GenerateThumbnails => {
+is_job_running::<GenerateMissingThumbsJob>(&dispatcher).await
+}
+JobType::CheckIntegrity => is_job_running::<CheckIntegrityJob>(&dispatcher).await,
+JobType::Vacuum => is_job_running::<VacuumJob>(&dispatcher).await,
+};
+Response::payload(ctx, running)
+}
}
async fn dispatch_job<J: 'static + Job>(
@@ -107,3 +128,12 @@ async fn calculate_all_sizes(ctx: &Context) -> RepoResult<()> {
Ok(())
}
+async fn is_job_running<T: 'static + Job>(dispatcher: &JobDispatcher) -> bool {
+if let Some(handle) = dispatcher.get_handle::<T>().await {
+let state = handle.state().await;
+state == JobState::Running
+} else {
+false
+}
+}
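
Note: the helper treats a missing handle as "not running", so a job type that was never dispatched reports `false` rather than erroring. A minimal, self-contained model of that lookup — every name here is a stand-in, not the daemon's real types:

```rust
use std::any::{Any, TypeId};
use std::collections::HashMap;

#[derive(PartialEq)]
enum JobState { Running, Finished }

struct Handle { state: JobState }

#[derive(Default)]
struct Dispatcher { handles: HashMap<TypeId, Handle> }

impl Dispatcher {
    fn get_handle<T: Any>(&self) -> Option<&Handle> {
        self.handles.get(&TypeId::of::<T>())
    }

    // Mirrors the daemon's logic: no handle means the job never ran.
    fn is_job_running<T: Any>(&self) -> bool {
        self.get_handle::<T>()
            .map(|h| h.state == JobState::Running)
            .unwrap_or(false)
    }
}

struct VacuumJob;

fn main() {
    let mut d = Dispatcher::default();
    assert!(!d.is_job_running::<VacuumJob>());
    d.handles.insert(TypeId::of::<VacuumJob>(), Handle { state: JobState::Running });
    assert!(d.is_job_running::<VacuumJob>());
}
```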

@@ -95,7 +95,7 @@ async fn get_frontend_state_path(ctx: &Context) -> IPCResult<PathBuf> {
let data = ctx.data.read().await;
let settings = data.get::<SettingsKey>().unwrap();
let repo_path = data.get::<RepoPathKey>().unwrap();
-let state_path = settings.paths.frontend_state_file_path(&repo_path);
+let state_path = settings.paths.frontend_state_file_path(repo_path);
Ok(state_path)
}

@@ -33,11 +33,7 @@ pub async fn file_by_identifier(identifier: FileIdentifier, repo: &Repo) -> Repo
pub async fn cd_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoResult<Vec<u8>> {
match identifier {
FileIdentifier::ID(id) => {
-let file = repo
-.file()
-.by_id(id)
-.await?
-.ok_or_else(|| "Thumbnail not found")?;
+let file = repo.file().by_id(id).await?.ok_or("Thumbnail not found")?;
Ok(file.cd().to_owned())
}
FileIdentifier::CD(cd) => decode_content_descriptor(cd),

@@ -6,7 +6,7 @@ use mediarepo_core::mediarepo_api::types::repo::SizeType;
use mediarepo_core::settings::Settings;
use mediarepo_core::utils::get_folder_size;
use mediarepo_logic::dao::repo::Repo;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
use std::sync::Arc;
use tokio::fs;
use tokio::sync::broadcast::{self, Sender};
@@ -72,11 +72,11 @@ impl Job for CalculateSizesJob {
async fn calculate_size(
size_type: &SizeType,
repo: &Repo,
-repo_path: &PathBuf,
+repo_path: &Path,
settings: &Settings,
) -> RepoResult<u64> {
let size = match &size_type {
-SizeType::Total => get_folder_size(repo_path.clone()).await?,
+SizeType::Total => get_folder_size(repo_path.to_path_buf()).await?,
SizeType::FileFolder => repo.get_main_store_size().await?,
SizeType::ThumbFolder => repo.get_thumb_store_size().await?,
SizeType::DatabaseFile => {

@@ -1,5 +1,5 @@
use crate::job_dispatcher::JobDispatcher;
-use crate::jobs::{CheckIntegrityJob, MigrateCDsJob, VacuumJob};
+use crate::jobs::{CheckIntegrityJob, MigrateCDsJob};
use mediarepo_core::error::RepoError;
use mediarepo_core::tokio_graceful_shutdown::Toplevel;
use mediarepo_logic::dao::repo::Repo;
@@ -19,9 +19,6 @@ pub async fn start(top_level: Toplevel, repo: Repo) -> (Toplevel, JobDispatcher)
let dispatcher = JobDispatcher::new(subsystem, repo);
tx.send(dispatcher.clone())
.map_err(|_| RepoError::from("failed to send dispatcher"))?;
-dispatcher
-.dispatch_periodically(VacuumJob::default(), Duration::from_secs(60 * 30))
-.await;
dispatcher
.dispatch_periodically(
CheckIntegrityJob::default(),

@@ -1,5 +1,5 @@
use std::fs;
-use std::path::PathBuf;
+use std::path::Path;
use console_subscriber::ConsoleLayer;
use opentelemetry::sdk::Resource;
@@ -24,7 +24,7 @@ use mediarepo_core::tracing_layer_list::DynLayerList;
#[allow(dyn_drop)]
pub type DropGuard = Box<dyn Drop>;
-pub fn init_tracing(repo_path: &PathBuf, log_cfg: &LoggingSettings) -> Vec<DropGuard> {
+pub fn init_tracing(repo_path: &Path, log_cfg: &LoggingSettings) -> Vec<DropGuard> {
LogTracer::init().expect("failed to subscribe to log entries");
let log_path = repo_path.join("logs");
let mut guards = Vec::new();
@@ -97,11 +97,11 @@ fn add_telemetry_layer(log_cfg: &LoggingSettings, layer_list: &mut DynLayerList<
fn add_app_log_layer(
log_cfg: &LoggingSettings,
-log_path: &PathBuf,
+log_path: &Path,
guards: &mut Vec<DropGuard>,
layer_list: &mut DynLayerList<Registry>,
) {
-let (app_log_writer, guard) = get_application_log_writer(&log_path);
+let (app_log_writer, guard) = get_application_log_writer(log_path);
guards.push(Box::new(guard) as DropGuard);
let app_log_layer = fmt::layer()
@@ -115,11 +115,11 @@ fn add_app_log_layer(
fn add_bromine_layer(
log_cfg: &LoggingSettings,
-log_path: &PathBuf,
+log_path: &Path,
guards: &mut Vec<DropGuard>,
layer_list: &mut DynLayerList<Registry>,
) {
-let (bromine_writer, guard) = get_bromine_log_writer(&log_path);
+let (bromine_writer, guard) = get_bromine_log_writer(log_path);
guards.push(Box::new(guard) as DropGuard);
let bromine_layer = fmt::layer()
@@ -133,11 +133,11 @@ fn add_bromine_layer(
fn add_sql_layer(
log_cfg: &LoggingSettings,
-log_path: &PathBuf,
+log_path: &Path,
guards: &mut Vec<DropGuard>,
layer_list: &mut DynLayerList<Registry>,
) {
-let (sql_writer, guard) = get_sql_log_writer(&log_path);
+let (sql_writer, guard) = get_sql_log_writer(log_path);
guards.push(Box::new(guard) as DropGuard);
let sql_layer = fmt::layer()
@@ -161,18 +161,18 @@ fn add_stdout_layer(guards: &mut Vec<DropGuard>, layer_list: &mut DynLayerList<R
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
.with_filter(
std::env::var("RUST_LOG")
-.unwrap_or(String::from("info,sqlx=warn"))
+.unwrap_or_else(|_| String::from("info,sqlx=warn"))
.parse::<filter::Targets>()
-.unwrap_or(
+.unwrap_or_else(|_| {
filter::Targets::new()
.with_default(Level::INFO)
-.with_target("sqlx", Level::WARN),
-),
+.with_target("sqlx", Level::WARN)
+}),
);
layer_list.add(stdout_layer);
}
-fn get_sql_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard) {
+fn get_sql_log_writer(log_path: &Path) -> (NonBlocking, WorkerGuard) {
tracing_appender::non_blocking(
rolling_file::BasicRollingFileAppender::new(
log_path.join("sql.log"),
@@ -183,7 +183,7 @@ fn get_sql_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard) {
)
}
-fn get_bromine_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard) {
+fn get_bromine_log_writer(log_path: &Path) -> (NonBlocking, WorkerGuard) {
tracing_appender::non_blocking(
rolling_file::BasicRollingFileAppender::new(
log_path.join("bromine.log"),
@@ -194,7 +194,7 @@ fn get_bromine_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard) {
)
}
-fn get_application_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard) {
+fn get_application_log_writer(log_path: &Path) -> (NonBlocking, WorkerGuard) {
tracing_appender::non_blocking(
rolling_file::BasicRollingFileAppender::new(
log_path.join("repo.log"),

@@ -1,6 +1,6 @@
use std::env;
use std::iter::FromIterator;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::Duration;
@@ -55,6 +55,7 @@ enum SubCommand {
#[tokio::main]
async fn main() -> RepoResult<()> {
+human_panic::setup_panic!();
let mut opt: Opt = Opt::from_args();
opt.repo = env::current_dir().unwrap().join(opt.repo);
@@ -242,7 +243,7 @@ async fn init(opt: Opt, force: bool) -> RepoResult<()> {
Ok(())
}
-async fn clean_old_connection_files(root: &PathBuf) -> RepoResult<()> {
+async fn clean_old_connection_files(root: &Path) -> RepoResult<()> {
let paths = ["repo.tcp", "repo.sock"];
for path in paths {

@@ -1,14 +1,14 @@
-use std::path::PathBuf;
+use std::path::Path;
use tokio::fs;
use mediarepo_core::error::RepoResult;
-use mediarepo_core::settings::{PathSettings, Settings};
use mediarepo_core::settings::v1::SettingsV1;
+use mediarepo_core::settings::{PathSettings, Settings};
use mediarepo_logic::dao::repo::Repo;
/// Loads the settings from a toml path
-pub fn load_settings(root_path: &PathBuf) -> RepoResult<Settings> {
+pub fn load_settings(root_path: &Path) -> RepoResult<Settings> {
let contents = std::fs::read_to_string(root_path.join("repo.toml"))?;
if let Ok(settings_v1) = SettingsV1::from_toml_string(&contents) {
@@ -21,7 +21,7 @@ pub fn load_settings(root_path: &PathBuf) -> RepoResult<Settings> {
}
}
-pub async fn get_repo(root_path: &PathBuf, path_settings: &PathSettings) -> RepoResult<Repo> {
+pub async fn get_repo(root_path: &Path, path_settings: &PathSettings) -> RepoResult<Repo> {
Repo::connect(
format!(
"sqlite://{}",
@@ -33,7 +33,7 @@ pub async fn get_repo(root_path: &PathBuf, path_settings: &PathSettings) -> Repo
.await
}
-pub async fn create_paths_for_repo(root: &PathBuf, settings: &PathSettings) -> RepoResult<()> {
+pub async fn create_paths_for_repo(root: &Path, settings: &PathSettings) -> RepoResult<()> {
if !root.exists() {
fs::create_dir_all(&root).await?;
}

@@ -1,6 +1,6 @@
{
"name": "mediarepo-ui",
-"version": "1.0.3",
+"version": "1.0.4",
"scripts": {
"ng": "ng",
"start": "ng serve",

@@ -64,7 +64,7 @@ checksum = "4361135be9122e0870de935d7c439aef945b9f9ddd4199a553b5270b49c82a27"
[[package]]
name = "app"
-version = "1.0.2"
+version = "1.0.4"
dependencies = [
"mediarepo-api",
"serde",
@@ -1591,7 +1591,7 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
[[package]]
name = "mediarepo-api"
-version = "0.32.0"
+version = "0.32.1"
dependencies = [
"async-trait",
"bromine",

@@ -1,6 +1,6 @@
[package]
name = "app"
-version = "1.0.3"
+version = "1.0.4"
description = "The UI for the mediarepo media management tool"
authors = ["you"]
license = ""

@@ -1,7 +1,7 @@
{
"package": {
"productName": "mediarepo-ui",
-"version": "1.0.3"
+"version": "1.0.4"
},
"build": {
"distDir": "../dist/mediarepo-ui",

@@ -19,6 +19,7 @@ import {
GetSizeRequest,
GetTagsForFilesRequest,
InitRepositoryRequest,
+IsJobRunningRequest,
ReadFileRequest,
RemoveRepositoryRequest,
ResolvePathsToFilesRequest,
@@ -187,6 +188,10 @@ export class MediarepoApi {
return this.invokePlugin(ApiFunction.RunJob, request);
}
+public static async isJobRunning(request: IsJobRunningRequest): Promise<boolean> {
+return this.invokePlugin(ApiFunction.IsJobRunning, request);
+}
public static async getAllSortingPresets(): Promise<SortingPresetData[]> {
return ShortCache.cached("sorting-presets", () => this.invokePlugin(ApiFunction.GetAllSortingPresets), 1000);
}

@@ -40,6 +40,7 @@ export enum ApiFunction {
SetFrontendState = "set_frontend_state",
// jobs
RunJob = "run_job",
+IsJobRunning = "is_job_running",
// presets
GetAllSortingPresets = "all_sorting_presets",
AddSortingPreset = "add_sorting_preset",

@@ -117,3 +117,7 @@ export type AddSortingPresetRequest = {
export type DeleteSortingPresetRequest = {
id: number
};
+export type IsJobRunningRequest = {
+jobType: JobType,
+}

@@ -17,7 +17,7 @@ import {MatSelectModule} from "@angular/material/select";
import {MatCheckboxModule} from "@angular/material/checkbox";
import {MatDividerModule} from "@angular/material/divider";
import {NgIconsModule} from "@ng-icons/core";
-import {MatPlus} from "@ng-icons/material-icons/baseline";
+import {MatMoreVert, MatPlus} from "@ng-icons/material-icons/baseline";
import {MatMenuModule} from "@angular/material/menu";
import {InputModule} from "../shared/input/input.module";
import {SidebarModule} from "../shared/sidebar/sidebar.module";
@@ -34,7 +34,7 @@ import {TagModule} from "../shared/tag/tag.module";
import {
DownloadDaemonDialogComponent
} from "./repositories-tab/download-daemon-dialog/download-daemon-dialog.component";
-import {RepositoryModule} from "../shared/repository/repository/repository.module";
+import {RepositoryModule} from "../shared/repository/repository.module";
import {MatToolbarModule} from "@angular/material/toolbar";
import {
RepositoryDetailsViewComponent
@@ -72,7 +72,7 @@ import {AboutDialogComponent} from "./repositories-tab/repository-overview/about
MatProgressBarModule,
MatCheckboxModule,
ScrollingModule,
-NgIconsModule.withIcons({ MatPlus }),
+NgIconsModule.withIcons({ MatPlus, MatMoreVert }),
FlexModule,
MatButtonModule,
MatMenuModule,

@@ -6,7 +6,7 @@ import {ConfirmDialogComponent} from "../../../shared/app-common/confirm-dialog/
import {BusyIndicatorComponent} from "../../../shared/app-common/busy-indicator/busy-indicator.component";
import {
EditRepositoryDialogComponent
-} from "../../../shared/repository/repository/edit-repository-dialog/edit-repository-dialog.component";
+} from "../../../shared/repository/edit-repository-dialog/edit-repository-dialog.component";
@Component({
selector: "app-repository-card",

@@ -47,13 +47,16 @@
{{this.databaseFileSize | async}}
</app-metadata-entry>
</div>
-<div class="repository-charts">
-<app-chart *ngIf="this.chartData"
-[datasets]="this.chartData"
-[labels]="this.chartLabels"
-chartType="doughnut"
-class="size-chart"
-title="Sizes"></app-chart>
-</div>
</div>
+<div class="repository-charts" fxFlex="50%">
+<app-chart *ngIf="this.chartData"
+[datasets]="this.chartData"
+[labels]="this.chartLabels"
+chartType="doughnut"