Add jobs to be run when opening the repository

Signed-off-by: trivernis <trivernis@protonmail.com>
pull/4/head
trivernis 2 years ago
parent ff844f3156
commit 796eb56a62

@ -1136,8 +1136,8 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
[[package]]
name = "mediarepo-api"
version = "0.24.1"
source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=91d8182548bfdb19f2de9afd8c29d5c8ebd48993#91d8182548bfdb19f2de9afd8c29d5c8ebd48993"
version = "0.24.2"
source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=7b210251f0986e3be060bcfd69cfddcec4e45466#7b210251f0986e3be060bcfd69cfddcec4e45466"
dependencies = [
"bromine",
"chrono",

@ -39,7 +39,7 @@ features = ["fs", "io-util", "io-std"]
[dependencies.mediarepo-api]
git = "https://github.com/Trivernis/mediarepo-api.git"
rev = "91d8182548bfdb19f2de9afd8c29d5c8ebd48993"
rev = "7b210251f0986e3be060bcfd69cfddcec4e45466"
features = ["bromine"]
[features]

@ -46,6 +46,7 @@ impl FileHashStore {
descriptor: &[u8],
) -> RepoResult<(Option<String>, BufReader<File>)> {
let file_path = self.descriptor_to_file_path(descriptor);
tracing::debug!("Opening file {:?}", file_path);
let extension = file_path
.extension()
.and_then(|s| s.to_str())

@ -1,4 +1,6 @@
use crate::settings::Settings;
use mediarepo_api::types::repo::SizeType;
use std::collections::HashMap;
use std::path::PathBuf;
use typemap_rev::TypeMapKey;
@ -13,3 +15,9 @@ pub struct RepoPathKey;
// Type-map key holding the repository's root path (inserted once at startup).
impl TypeMapKey for RepoPathKey {
type Value = PathBuf;
}
// Type-map key for the cached size metrics, keyed by SizeType.
// Populated by the size-calculation job and read by the repo namespace;
// presumably values are byte counts — TODO confirm against calculate_size.
pub struct SizeMetadataKey;
impl TypeMapKey for SizeMetadataKey {
type Value = HashMap<SizeType, u64>;
}

@ -2,7 +2,7 @@ use mediarepo_core::bromine::prelude::*;
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::mediarepo_api::types::misc::InfoResponse;
use mediarepo_core::settings::Settings;
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey};
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey, SizeMetadataKey};
use mediarepo_model::repo::Repo;
use mediarepo_model::type_keys::RepoKey;
use std::net::{IpAddr, SocketAddr};
@ -35,6 +35,7 @@ pub fn start_tcp_server(
.insert::<RepoKey>(Arc::new(repo))
.insert::<SettingsKey>(settings)
.insert::<RepoPathKey>(repo_path)
.insert::<SizeMetadataKey>(Default::default())
.build_server()
.await
.expect("Failed to start tcp server")
@ -64,6 +65,7 @@ pub fn create_unix_socket(
.insert::<RepoKey>(Arc::new(repo))
.insert::<SettingsKey>(settings)
.insert::<RepoPathKey>(repo_path)
.insert::<SizeMetadataKey>(Default::default())
.build_server()
.await
.expect("Failed to create unix domain socket");

@ -0,0 +1,55 @@
use crate::utils::{calculate_size, get_repo_from_context};
use mediarepo_core::bromine::prelude::*;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::jobs::{JobType, RunJobRequest};
use mediarepo_core::mediarepo_api::types::repo::SizeType;
use mediarepo_core::type_keys::SizeMetadataKey;
// IPC namespace exposing maintenance jobs under the "jobs" channel.
pub struct JobsNamespace;
impl NamespaceProvider for JobsNamespace {
fn name() -> &'static str {
"jobs"
}
// Registers the event handlers for this namespace; currently only "run_job".
fn register(handler: &mut EventHandler) {
events!(handler,
"run_job" => Self::run_job
)
}
}
impl JobsNamespace {
// Handler for the "run_job" event: decodes the requested job type from the
// payload, runs the job to completion, then acknowledges by emitting
// "run_job" back to the caller with an empty payload.
#[tracing::instrument(skip_all)]
pub async fn run_job(ctx: &Context, event: Event) -> IPCResult<()> {
let run_request = event.payload::<RunJobRequest>()?;
let repo = get_repo_from_context(ctx).await;
match run_request.job_type {
JobType::MigrateContentDescriptors => repo.migrate().await?,
JobType::CalculateSizes => calculate_all_sizes(ctx).await?,
// NOTE(review): integrity check is not implemented yet, but the ack
// below is still emitted as if the job succeeded — confirm intended.
JobType::CheckIntegrity => {}
}
ctx.emit_to(Self::name(), "run_job", ()).await?;
Ok(())
}
}
/// Recomputes every tracked size metric and stores each result in the
/// shared `SizeMetadataKey` cache on the context.
///
/// Errors from an individual size calculation abort the whole run and are
/// propagated to the caller.
async fn calculate_all_sizes(ctx: &Context) -> RepoResult<()> {
    let all_types = vec![
        SizeType::Total,
        SizeType::FileFolder,
        SizeType::ThumbFolder,
        SizeType::DatabaseFile,
    ];

    for size_type in all_types.into_iter() {
        let size = calculate_size(&size_type, ctx).await?;
        // The write lock is taken only after the (potentially slow) size
        // calculation has finished, and is released at the end of the call
        // chain, so it is held as briefly as possible.
        ctx.data
            .write()
            .await
            .get_mut::<SizeMetadataKey>()
            .unwrap()
            .insert(size_type, size);
    }

    Ok(())
}

@ -2,6 +2,7 @@ use mediarepo_core::bromine::prelude::AsyncStreamProtocolListener;
use mediarepo_core::bromine::{namespace, namespace::Namespace, IPCBuilder};
pub mod files;
pub mod jobs;
pub mod repo;
pub mod tags;
@ -10,4 +11,5 @@ pub fn build_namespaces<L: AsyncStreamProtocolListener>(builder: IPCBuilder<L>)
.add_namespace(namespace!(files::FilesNamespace))
.add_namespace(namespace!(tags::TagsNamespace))
.add_namespace(namespace!(repo::RepoNamespace))
.add_namespace(namespace!(jobs::JobsNamespace))
}

@ -6,10 +6,9 @@ use mediarepo_core::bromine::prelude::*;
use mediarepo_core::mediarepo_api::types::repo::{
FrontendState, RepositoryMetadata, SizeMetadata, SizeType,
};
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey};
use mediarepo_core::utils::get_folder_size;
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey, SizeMetadataKey};
use crate::utils::get_repo_from_context;
use crate::utils::{calculate_size, get_repo_from_context};
pub struct RepoNamespace;
@ -53,29 +52,21 @@ impl RepoNamespace {
#[tracing::instrument(skip_all)]
async fn get_size_metadata(ctx: &Context, event: Event) -> IPCResult<()> {
let size_type = event.payload::<SizeType>()?;
let repo = get_repo_from_context(ctx).await;
let (repo_path, settings) = {
let data = ctx.data.read().await;
(
data.get::<RepoPathKey>().unwrap().clone(),
data.get::<SettingsKey>().unwrap().clone(),
)
};
let size = match &size_type {
SizeType::Total => get_folder_size(repo_path).await?,
SizeType::FileFolder => repo.get_main_store_size().await?,
SizeType::ThumbFolder => repo.get_thumb_store_size().await?,
SizeType::DatabaseFile => {
let db_path = repo_path.join(settings.database_path);
let database_metadata = fs::metadata(db_path).await?;
database_metadata.len()
}
let data = ctx.data.read().await;
let size_cache = data.get::<SizeMetadataKey>().unwrap();
let size = if let Some(size) = size_cache.get(&size_type) {
*size
} else {
calculate_size(&size_type, ctx).await?
};
let response = SizeMetadata { size, size_type };
tracing::debug!("size response = {:?}", response);
ctx.emit_to(Self::name(), "size_metadata", response).await?;
ctx.emit_to(
Self::name(),
"size_metadata",
SizeMetadata { size, size_type },
)
.await?;
Ok(())
}

@ -2,10 +2,14 @@ use mediarepo_core::bromine::ipc::context::Context;
use mediarepo_core::content_descriptor::decode_content_descriptor;
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::mediarepo_api::types::identifier::FileIdentifier;
use mediarepo_core::mediarepo_api::types::repo::SizeType;
use mediarepo_core::type_keys::{RepoPathKey, SettingsKey};
use mediarepo_core::utils::get_folder_size;
use mediarepo_model::file::File;
use mediarepo_model::repo::Repo;
use mediarepo_model::type_keys::RepoKey;
use std::sync::Arc;
use tokio::fs;
pub async fn get_repo_from_context(ctx: &Context) -> Arc<Repo> {
let data = ctx.data.read().await;
@ -33,3 +37,27 @@ pub async fn cd_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoRe
FileIdentifier::CD(cd) => decode_content_descriptor(cd),
}
}
/// Measures the current value of a single size metric.
///
/// Resolves the repo, repo path and settings from the context's type map,
/// then either walks the relevant folder, asks the repo model for a store
/// size, or stats the database file, depending on `size_type`.
pub async fn calculate_size(size_type: &SizeType, ctx: &Context) -> RepoResult<u64> {
    let repo = get_repo_from_context(ctx).await;

    // Clone path and settings out of the type map inside a scope so the
    // read lock is dropped before any filesystem access below.
    let (repo_path, settings) = {
        let type_map = ctx.data.read().await;
        (
            type_map.get::<RepoPathKey>().unwrap().clone(),
            type_map.get::<SettingsKey>().unwrap().clone(),
        )
    };

    let bytes = match size_type {
        SizeType::Total => get_folder_size(repo_path).await?,
        SizeType::FileFolder => repo.get_main_store_size().await?,
        SizeType::ThumbFolder => repo.get_thumb_store_size().await?,
        SizeType::DatabaseFile => {
            let db_path = repo_path.join(settings.database_path);
            fs::metadata(db_path).await?.len()
        }
    };

    Ok(bytes)
}

@ -116,7 +116,6 @@ async fn init_repo(opt: &Opt) -> RepoResult<(Settings, Repo)> {
repo.set_main_storage(&settings.default_file_store).await?;
repo.set_thumbnail_storage(opt.repo.join(&settings.thumbnail_store))
.await?;
repo.migrate().await?;
Ok((settings, repo))
}

Loading…
Cancel
Save