Implement API to retrieve repository metadata

Signed-off-by: Trivernis <trivernis@protonmail.com>
pull/4/head
Trivernis 3 years ago
parent bddce4d60a
commit 77fcd2e4f5

@@ -1136,8 +1136,8 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
 [[package]]
 name = "mediarepo-api"
-version = "0.17.0"
-source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=72e4d902aff409feab8db10546b81a452ec6f75f#72e4d902aff409feab8db10546b81a452ec6f75f"
+version = "0.18.0"
+source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=cb4510c57fb25bd39f2e994e928359599e7cbfaa#cb4510c57fb25bd39f2e994e928359599e7cbfaa"
 dependencies = [
  "bromine",
  "chrono",

@@ -1165,6 +1165,7 @@ dependencies = [
  "thumbnailer",
  "tokio",
  "toml",
+ "tracing",
  "typemap_rev",
 ]

@@ -17,6 +17,7 @@ typemap_rev = "^0.1.5"
 futures = "^0.3.19"
 itertools = "^0.10.3"
 glob = "^0.3.0"
+tracing = "0.1.29"

 [dependencies.thumbnailer]
 version = "^0.2.4"

@@ -37,7 +38,7 @@ features = ["fs", "io-util", "io-std"]
 [dependencies.mediarepo-api]
 git = "https://github.com/Trivernis/mediarepo-api.git"
-rev = "72e4d902aff409feab8db10546b81a452ec6f75f"
+rev = "cb4510c57fb25bd39f2e994e928359599e7cbfaa"
 features = ["bromine"]

 [features]

@@ -1,4 +1,5 @@
 use crate::error::RepoResult;
+use crate::utils::get_folder_size;
 use multibase::Base;
 use multihash::{Code, MultihashDigest};
 use std::path::PathBuf;

@@ -62,6 +63,12 @@ impl FileHashStore {
         Ok((extension, reader))
     }

+    /// Scans the size of the folder
+    #[inline]
+    pub async fn get_size(&self) -> RepoResult<u64> {
+        get_folder_size(self.path.to_owned()).await
+    }
+
     fn hash_to_file_path(&self, hash: &str) -> PathBuf {
         let mut path = self.hash_to_folder_path(hash);
         path.push(hash);

@@ -1,3 +1,5 @@
+use crate::error::RepoResult;
+use crate::utils::get_folder_size;
 use std::io::Result;
 use std::path::PathBuf;
 use tokio::fs;

@@ -74,4 +76,10 @@ impl ThumbnailStore {
         Ok(entries)
     }

+    /// Returns the size of the folder
+    #[tracing::instrument(level = "debug")]
+    pub async fn get_size(&self) -> RepoResult<u64> {
+        get_folder_size(self.path.to_owned()).await
+    }
 }

@ -1,6 +1,7 @@
use crate::error::RepoResult; use crate::error::RepoResult;
use futures::future;
use std::path::PathBuf; use std::path::PathBuf;
use tokio::fs::OpenOptions; use tokio::fs::{self, OpenOptions};
use tokio::io::{AsyncBufReadExt, BufReader}; use tokio::io::{AsyncBufReadExt, BufReader};
/// Parses a normalized tag into its two components of namespace and tag /// Parses a normalized tag into its two components of namespace and tag
@ -23,3 +24,40 @@ pub async fn parse_tags_file(path: PathBuf) -> RepoResult<Vec<(Option<String>, S
Ok(tags) Ok(tags)
} }
/// Iteratively scans the size of a folder
#[tracing::instrument(level = "debug")]
pub async fn get_folder_size(path: PathBuf) -> RepoResult<u64> {
let mut unchecked_dirs = vec![path];
let mut all_files = Vec::new();
while !unchecked_dirs.is_empty() {
let dir = unchecked_dirs.remove(0);
let mut read_dir = fs::read_dir(dir).await?;
while let Some(entry) = read_dir.next_entry().await? {
let file_type = entry.file_type().await?;
if file_type.is_file() {
all_files.push(entry.path());
} else if file_type.is_dir() {
unchecked_dirs.push(entry.path())
}
}
}
let futures = all_files.into_iter().map(|f| read_file_size(f));
let results = future::join_all(futures).await;
let size = results
.into_iter()
.filter_map(|r| r.ok())
.fold(0u64, |acc, val| acc + val);
Ok(size)
}
async fn read_file_size(path: PathBuf) -> RepoResult<u64> {
let metadata = fs::metadata(path).await?;
Ok(metadata.len())
}
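
For reference, a minimal sketch of how the new get_folder_size helper could be exercised from a small binary; the "./storage" path and the tokio main wrapper are illustrative assumptions, not part of this commit:

// Hypothetical usage sketch: not part of this commit.
use std::path::PathBuf;

use mediarepo_core::error::RepoResult;
use mediarepo_core::utils::get_folder_size;

#[tokio::main]
async fn main() -> RepoResult<()> {
    // "./storage" is a placeholder; point this at any existing folder.
    let size = get_folder_size(PathBuf::from("./storage")).await?;
    println!("folder size: {} bytes", size);

    Ok(())
}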

@@ -0,0 +1,36 @@
+use sea_orm::DbBackend;
+use sea_orm::FromQueryResult;
+use sea_orm::{DatabaseConnection, Statement};
+
+use mediarepo_core::error::{RepoError, RepoResult};
+
+#[derive(Debug, FromQueryResult)]
+pub struct Counts {
+    pub file_count: i64,
+    pub hash_count: i64,
+    pub tag_count: i64,
+    pub namespace_count: i64,
+    pub source_count: i64,
+    pub mapping_count: i64,
+}
+
+pub async fn get_all_counts(db: &DatabaseConnection) -> RepoResult<Counts> {
+    let counts = Counts::find_by_statement(Statement::from_sql_and_values(
+        DbBackend::Sqlite,
+        r#"
+        SELECT *
+        FROM (SELECT COUNT(*) AS file_count FROM files),
+             (SELECT COUNT(*) AS hash_count FROM hashes),
+             (SELECT COUNT(*) AS tag_count FROM tags),
+             (SELECT COUNT(*) AS namespace_count FROM namespaces),
+             (SELECT COUNT(*) AS source_count FROM sources),
+             (SELECT COUNT(*) AS mapping_count FROM hash_tag_mappings)
+        "#,
+        vec![],
+    ))
+    .one(db)
+    .await?
+    .ok_or(RepoError::from("could not retrieve metadata from database"))?;
+
+    Ok(counts)
+}
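
A minimal sketch of calling the new counts query; the sqlite connection string is a placeholder for illustration and is not taken from this commit:

// Hypothetical usage sketch: not part of this commit.
use mediarepo_core::error::RepoResult;
use mediarepo_database::queries::analysis::get_all_counts;
use sea_orm::Database;

async fn print_counts() -> RepoResult<()> {
    // "sqlite://repo.db" is a placeholder URL for an existing repository database.
    let db = Database::connect("sqlite://repo.db").await?;
    let counts = get_all_counts(&db).await?;

    println!(
        "files: {}, tags: {}, mappings: {}",
        counts.file_count, counts.tag_count, counts.mapping_count
    );

    Ok(())
}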

@@ -1 +1,2 @@
+pub mod analysis;
 pub mod tags;

@@ -11,6 +11,7 @@ use mediarepo_core::itertools::Itertools;
 use mediarepo_core::thumbnailer::ThumbnailSize;
 use mediarepo_core::utils::parse_namespace_and_tag;
 use mediarepo_database::get_database;
+use mediarepo_database::queries::analysis::{get_all_counts, Counts};
 use sea_orm::DatabaseConnection;
 use std::collections::{HashMap, HashSet};
 use std::fmt::Debug;

@@ -388,6 +389,29 @@ impl Repo {
         Tag::add(self.db.clone(), name, Some(namespace.id())).await
     }

+    /// Returns the size of the main storage
+    #[inline]
+    #[tracing::instrument(level = "debug", skip(self))]
+    pub async fn get_main_store_size(&self) -> RepoResult<u64> {
+        let main_storage = self.get_main_storage()?;
+        main_storage.get_size().await
+    }
+
+    /// Returns the size of the thumbnail storage
+    #[inline]
+    #[tracing::instrument(level = "debug", skip(self))]
+    pub async fn get_thumb_store_size(&self) -> RepoResult<u64> {
+        let thumb_storage = self.get_thumbnail_storage()?;
+        thumb_storage.get_size().await
+    }
+
+    /// Returns all entity counts
+    #[inline]
+    #[tracing::instrument(level = "debug", skip(self))]
+    pub async fn get_counts(&self) -> RepoResult<Counts> {
+        get_all_counts(&self.db).await
+    }
+
     #[tracing::instrument(level = "trace", skip(self))]
     fn get_main_storage(&self) -> RepoResult<&Storage> {
         if let Some(storage) = &self.main_storage {
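
The three accessors above are thin wrappers; a hypothetical sketch of combining them follows. The mediarepo_model::repo::Repo import path is an assumption, since this diff does not show the crate name:

// Hypothetical sketch: not part of this commit.
use mediarepo_core::error::RepoResult;
use mediarepo_model::repo::Repo; // crate path assumed

async fn log_repo_stats(repo: &Repo) -> RepoResult<()> {
    let counts = repo.get_counts().await?;
    let main_size = repo.get_main_store_size().await?;
    let thumb_size = repo.get_thumb_store_size().await?;

    tracing::debug!(
        "files: {}, main store: {} B, thumbnails: {} B",
        counts.file_count,
        main_size,
        thumb_size
    );

    Ok(())
}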

@@ -181,6 +181,13 @@ impl Storage {
         Ok(reader)
     }

+    /// Returns the size of the storage
+    #[inline]
+    #[tracing::instrument(level = "debug", skip(self))]
+    pub async fn get_size(&self) -> RepoResult<u64> {
+        self.store.get_size().await
+    }
+
     /// Returns the active model with only the ID filled so saves always perform an update
     fn get_active_model(&self) -> ActiveStorage {
         ActiveStorage {

@@ -1,9 +1,14 @@
-use mediarepo_core::bromine::prelude::*;
-use mediarepo_core::mediarepo_api::types::repo::FrontendState;
-use mediarepo_core::type_keys::{RepoPathKey, SettingsKey};
 use std::path::PathBuf;
+
 use tokio::fs;
+
+use mediarepo_core::bromine::prelude::*;
+use mediarepo_core::mediarepo_api::types::repo::{FrontendState, RepositoryMetadata};
+use mediarepo_core::type_keys::{RepoPathKey, SettingsKey};
+use mediarepo_core::utils::get_folder_size;
+
+use crate::utils::get_repo_from_context;

 pub struct RepoNamespace;

 impl NamespaceProvider for RepoNamespace {

@@ -13,6 +18,7 @@ impl NamespaceProvider for RepoNamespace {
     fn register(handler: &mut EventHandler) {
         events!(handler,
+            "repository_metadata" => Self::get_metadata,
             "frontend_state" => Self::frontend_state,
             "set_frontend_state" => Self::set_frontend_state
         );

@@ -20,6 +26,46 @@ impl NamespaceProvider for RepoNamespace {
 }

 impl RepoNamespace {
+    #[tracing::instrument(skip_all)]
+    async fn get_metadata(ctx: &Context, _: Event) -> IPCResult<()> {
+        let repo = get_repo_from_context(ctx).await;
+        let counts = repo.get_counts().await?;
+        let file_size = repo.get_main_store_size().await?;
+        let thumbnail_size = repo.get_thumb_store_size().await?;
+
+        let (repo_path, settings) = {
+            let data = ctx.data.read().await;
+            (
+                data.get::<RepoPathKey>().unwrap().clone(),
+                data.get::<SettingsKey>().unwrap().clone(),
+            )
+        };
+        let db_path = repo_path.join(settings.database_path);
+        let total_size = get_folder_size(repo_path).await?;
+        let database_metadata = fs::metadata(db_path).await?;
+        let database_size = database_metadata.len();
+
+        let metadata = RepositoryMetadata {
+            version: env!("CARGO_PKG_VERSION").to_string(),
+            file_count: counts.file_count as u64,
+            tag_count: counts.tag_count as u64,
+            namespace_count: counts.namespace_count as u64,
+            mapping_count: counts.mapping_count as u64,
+            hash_count: counts.hash_count as u64,
+            total_size,
+            file_size,
+            database_size,
+            thumbnail_size,
+        };
+
+        tracing::debug!("metadata = {:?}", metadata);
+        ctx.emit_to(Self::name(), "repository_metadata", metadata)
+            .await?;
+
+        Ok(())
+    }
+
     #[tracing::instrument(skip_all)]
     async fn frontend_state(ctx: &Context, _: Event) -> IPCResult<()> {
         let path = get_frontend_state_path(ctx).await?;
