commit 3c6599e66f
@@ -0,0 +1,65 @@
use crate::error::RepoResult;
use multihash::{Code, MultihashDigest};

/// Creates a new content descriptor for the given file
pub fn create_content_descriptor(bytes: &[u8]) -> Vec<u8> {
    Code::Sha2_256.digest(bytes).to_bytes()
}

/// Encodes a content descriptor while respecting the version
pub fn encode_content_descriptor(descriptor: &[u8]) -> String {
    if is_v1_content_descriptor(descriptor) {
        encode_content_descriptor_v1(descriptor)
    } else {
        encode_content_descriptor_v2(descriptor)
    }
}

/// Encodes a v1 descriptor that is already stored encoded in the database (only interprets it as a string)
pub fn encode_content_descriptor_v1(descriptor: &[u8]) -> String {
    String::from_utf8_lossy(descriptor).to_string()
}

/// Encodes the content descriptor as lowercase base32
pub fn encode_content_descriptor_v2(descriptor: &[u8]) -> String {
    data_encoding::BASE32_DNSSEC.encode(descriptor)
}

/// Decodes a content descriptor
pub fn decode_content_descriptor<S: AsRef<str>>(descriptor: S) -> RepoResult<Vec<u8>> {
    // check for a v1 descriptor with a fixed length of 56 starting with the prefix of the base and hash
    if is_v1_content_descriptor_string(descriptor.as_ref()) {
        decode_content_descriptor_v1(descriptor)
    } else {
        decode_content_descriptor_v2(descriptor)
    }
}

/// Decodes the first version of content descriptors (multibase)
pub fn decode_content_descriptor_v1<S: AsRef<str>>(descriptor: S) -> RepoResult<Vec<u8>> {
    Ok(descriptor.as_ref().as_bytes().to_vec())
}

/// Decodes the second version of content descriptors (faster fixed base32)
pub fn decode_content_descriptor_v2<S: AsRef<str>>(descriptor: S) -> RepoResult<Vec<u8>> {
    let data = data_encoding::BASE32_DNSSEC.decode(descriptor.as_ref().as_bytes())?;

    Ok(data)
}

/// Decodes the data stored in the v1 content descriptor into the v2 format
pub fn convert_v1_descriptor_to_v2(descriptor_v1: &[u8]) -> RepoResult<Vec<u8>> {
    let (_, data) = multibase::decode(encode_content_descriptor_v1(descriptor_v1))?;

    Ok(data)
}

/// Checks if a binary descriptor is v1
pub fn is_v1_content_descriptor(descriptor: &[u8]) -> bool {
    descriptor.len() == 56 && descriptor.starts_with(b"bciq")
}

/// Checks if a descriptor string is a v1 descriptor
pub fn is_v1_content_descriptor_string<S: AsRef<str>>(descriptor: S) -> bool {
    descriptor.as_ref().len() == 56 && descriptor.as_ref().starts_with("bciq")
}
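A minimal sketch of how these helpers compose, assuming the multihash and data_encoding dependencies used above (`file_bytes` is hypothetical input):

// hash the content, then round-trip the descriptor through its string encoding
let descriptor = create_content_descriptor(&file_bytes);
let encoded = encode_content_descriptor(&descriptor); // lowercase base32 (v2)
assert_eq!(decode_content_descriptor(&encoded)?, descriptor);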
@@ -0,0 +1,35 @@
use std::io::Result;
use std::path::{Path, PathBuf};
use tokio::fs::{File, OpenOptions};

/// A file that only exists while being owned.
/// Will automatically be deleted on Drop
pub struct DropFile {
    path: PathBuf,
}

impl DropFile {
    pub async fn new<P: AsRef<Path>>(path: P) -> Result<(File, Self)> {
        let file = OpenOptions::new()
            .write(true)
            .read(true)
            .create(true)
            .open(path.as_ref())
            .await?;
        Ok((file, Self::from_path(path)))
    }

    pub fn from_path<P: AsRef<Path>>(path: P) -> Self {
        Self {
            path: path.as_ref().to_path_buf(),
        }
    }
}

impl Drop for DropFile {
    fn drop(&mut self) {
        if let Err(e) = std::fs::remove_file(&self.path) {
            tracing::error!("failed to remove drop file '{}': {}", self.path.to_string_lossy(), e);
        }
    }
}
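A usage sketch for DropFile, assuming a tokio runtime (the path is hypothetical):

// the on-disk file lives only as long as `guard`
let (mut file, guard) = DropFile::new("/tmp/import.part").await?;
// ... write to `file` ...
drop(guard); // removes /tmp/import.part via the Drop impl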
@@ -1,2 +1,3 @@
+pub mod drop_file;
 pub mod file_hash_store;
 pub mod thumbnail_store;
@@ -1,38 +0,0 @@
use crate::error::RepoResult;
use serde::{Deserialize, Serialize};
use std::net::IpAddr;

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Settings {
    pub listen_address: IpAddr,
    pub port_range: (u16, u16),
    pub database_path: String,
    pub default_file_store: String,
    pub thumbnail_store: String,
}

impl Default for Settings {
    fn default() -> Self {
        Self {
            listen_address: IpAddr::from([127, 0, 0, 1]),
            port_range: (3400, 3500),
            database_path: "./db/repo.db".to_string(),
            default_file_store: "Main".to_string(),
            thumbnail_store: "./thumbnails".to_string(),
        }
    }
}

impl Settings {
    /// Parses settings from a string
    pub fn from_toml_string(s: &str) -> RepoResult<Self> {
        let settings = toml::from_str(s)?;
        Ok(settings)
    }

    /// Converts the settings into a toml string
    pub fn to_toml_string(&self) -> RepoResult<String> {
        let string = toml::to_string(&self)?;
        Ok(string)
    }
}
@@ -0,0 +1,42 @@
use serde::{Deserialize, Serialize};
use tracing::Level;

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct LoggingSettings {
    pub level: LogLevel,
    pub trace_sql: bool,
    pub trace_api_calls: bool,
}

impl Default for LoggingSettings {
    fn default() -> Self {
        Self {
            level: LogLevel::Info,
            trace_sql: false,
            trace_api_calls: false,
        }
    }
}

#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum LogLevel {
    Off,
    Error,
    Warn,
    Info,
    Debug,
    Trace,
}

impl Into<Option<Level>> for LogLevel {
    fn into(self) -> Option<Level> {
        match self {
            LogLevel::Off => None,
            LogLevel::Error => Some(Level::ERROR),
            LogLevel::Warn => Some(Level::WARN),
            LogLevel::Info => Some(Level::INFO),
            LogLevel::Debug => Some(Level::DEBUG),
            LogLevel::Trace => Some(Level::TRACE),
        }
    }
}
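The Into impl above lets startup code map the configured level directly onto tracing's filter (a sketch; `settings` is a hypothetical LoggingSettings value):

let level: Option<tracing::Level> = settings.level.clone().into();
// None disables logging entirely (LogLevel::Off)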
@@ -0,0 +1,76 @@
mod logging;
mod paths;
mod server;
pub mod v1;

use crate::error::RepoResult;
use crate::settings::v1::SettingsV1;
use config::{Config, FileFormat};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::PathBuf;

pub use logging::*;
pub use paths::*;
pub use server::*;

#[derive(Clone, Debug, Deserialize, Serialize, Default)]
pub struct Settings {
    pub server: ServerSettings,
    pub paths: PathSettings,
    pub logging: LoggingSettings,
}

impl Settings {
    pub fn read(root: &PathBuf) -> RepoResult<Self> {
        let mut settings = Config::default();
        settings
            .merge(config::File::from_str(
                &*Settings::default().to_toml_string()?,
                FileFormat::Toml,
            ))?
            .merge(config::File::from(root.join("repo")))?
            .merge(config::Environment::with_prefix("MEDIAREPO").separator("."))?;
        tracing::debug!("Settings are: {:#?}", settings);

        Ok(settings.try_into::<Settings>()?)
    }

    /// Converts the v1 settings into the current settings format
    pub fn from_v1(settings_v1: SettingsV1) -> RepoResult<Self> {
        let mut settings_main = Settings::default();
        settings_main.server.tcp.enabled = true;
        settings_main.server.tcp.port = PortSetting::Range(settings_v1.port_range);
        settings_main.server.tcp.listen_address = settings_v1.listen_address;
        settings_main.paths.thumbnail_directory = settings_v1.thumbnail_store.into();
        settings_main.paths.database_directory = PathBuf::from(settings_v1.database_path)
            .parent()
            .map(|p| p.to_string_lossy().to_string())
            .unwrap_or_else(|| String::from("./"));

        let mut settings = Config::default();
        settings
            .merge(config::File::from_str(
                &*settings_main.to_toml_string()?,
                FileFormat::Toml,
            ))?
            .merge(config::Environment::with_prefix("MEDIAREPO"))?;
        tracing::debug!("Settings are: {:#?}", settings);

        Ok(settings.try_into::<Settings>()?)
    }

    /// Converts the settings into a toml string
    pub fn to_toml_string(&self) -> RepoResult<String> {
        let string = toml::to_string(&self)?;

        Ok(string)
    }

    pub fn save(&self, root: &PathBuf) -> RepoResult<()> {
        let string = toml::to_string_pretty(&self)?;
        fs::write(root.join("repo.toml"), string.into_bytes())?;

        Ok(())
    }
}
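For reference, the Default impls in this commit serialize to roughly the following repo.toml on a non-Windows build (a sketch derived from the defaults above, not a file shipped by the commit):

[server.tcp]
enabled = false
listen_address = "127.0.0.1"
port = [13400, 13500]

[server.unix_socket]
enabled = true

[paths]
database_directory = "db"
files_directory = "files"
thumbnail_directory = "thumbnails"

[logging]
level = "Info"
trace_sql = false
trace_api_calls = false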
@@ -0,0 +1,46 @@
use serde::{Deserialize, Serialize};
use std::path::PathBuf;

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PathSettings {
    pub(crate) database_directory: String,
    pub(crate) files_directory: String,
    pub(crate) thumbnail_directory: String,
}

impl Default for PathSettings {
    fn default() -> Self {
        Self {
            database_directory: String::from("db"),
            files_directory: String::from("files"),
            thumbnail_directory: String::from("thumbnails"),
        }
    }
}

impl PathSettings {
    #[inline]
    pub fn database_dir(&self, root: &PathBuf) -> PathBuf {
        root.join(&self.database_directory)
    }

    #[inline]
    pub fn files_dir(&self, root: &PathBuf) -> PathBuf {
        root.join(&self.files_directory)
    }

    #[inline]
    pub fn thumbs_dir(&self, root: &PathBuf) -> PathBuf {
        root.join(&self.thumbnail_directory)
    }

    #[inline]
    pub fn db_file_path(&self, root: &PathBuf) -> PathBuf {
        self.database_dir(root).join("repo.db")
    }

    #[inline]
    pub fn frontend_state_file_path(&self, root: &PathBuf) -> PathBuf {
        self.database_dir(root).join("frontend-state.json")
    }
}
@@ -0,0 +1,46 @@
use serde::{Deserialize, Serialize};
use std::net::IpAddr;

#[derive(Clone, Debug, Deserialize, Serialize, Default)]
pub struct ServerSettings {
    pub tcp: TcpServerSettings,
    #[cfg(unix)]
    pub unix_socket: UnixSocketServerSettings,
}

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct TcpServerSettings {
    pub enabled: bool,
    pub listen_address: IpAddr,
    pub port: PortSetting,
}

#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(untagged)]
pub enum PortSetting {
    Fixed(u16),
    Range((u16, u16)),
}

impl Default for TcpServerSettings {
    fn default() -> Self {
        Self {
            enabled: cfg!(windows),
            listen_address: IpAddr::from([127, 0, 0, 1]),
            port: PortSetting::Range((13400, 13500)),
        }
    }
}

#[cfg(unix)]
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct UnixSocketServerSettings {
    pub enabled: bool,
}

#[cfg(unix)]
impl Default for UnixSocketServerSettings {
    fn default() -> Self {
        Self { enabled: true }
    }
}
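Because PortSetting is #[serde(untagged)], both of these TOML forms are accepted for the port key (a sketch mirroring the two variants above):

port = 13400            # PortSetting::Fixed(13400)
port = [13400, 13500]   # PortSetting::Range((13400, 13500))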
@@ -0,0 +1,20 @@
use crate::error::RepoResult;
use serde::{Deserialize, Serialize};
use std::net::IpAddr;

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct SettingsV1 {
    pub listen_address: IpAddr,
    pub port_range: (u16, u16),
    pub database_path: String,
    pub default_file_store: String,
    pub thumbnail_store: String,
}

impl SettingsV1 {
    /// Parses settings from a string
    pub fn from_toml_string(s: &str) -> RepoResult<Self> {
        let settings = toml::from_str(s)?;
        Ok(settings)
    }
}
@@ -0,0 +1,107 @@
-- Add migration script here
PRAGMA foreign_keys = off;

-- create backup files table
ALTER TABLE files
    RENAME TO _files_old;

-- create backup hashes table
ALTER TABLE hashes
    RENAME TO _hashes_old;

-- create backup hash_tag_mappings table
ALTER TABLE hash_tag_mappings
    RENAME TO _hash_tag_mappings_old;

-- create backup hash_source_mappings table
ALTER TABLE hash_source_mappings
    RENAME TO _hash_source_mappings_old;

-- create content id table
CREATE TABLE content_descriptors
(
    id         INTEGER PRIMARY KEY AUTOINCREMENT,
    descriptor BLOB NOT NULL
);

CREATE UNIQUE INDEX content_descriptor_values ON content_descriptors (descriptor);

-- create content-id tag mappings table
CREATE TABLE cd_tag_mappings
(
    cd_id  INTEGER NOT NULL REFERENCES content_descriptors (id),
    tag_id INTEGER NOT NULL REFERENCES tags (id),
    PRIMARY KEY (cd_id, tag_id)
);

CREATE UNIQUE INDEX content_descriptor_tag_mapping_unique ON cd_tag_mappings (cd_id, tag_id);
CREATE INDEX content_descriptor_tag_mapping_tag ON cd_tag_mappings (tag_id);

-- create content-id source mappings table
CREATE TABLE cd_source_mappings
(
    cd_id     INTEGER NOT NULL REFERENCES content_descriptors (id),
    source_id INTEGER NOT NULL REFERENCES sources (id),
    PRIMARY KEY (cd_id, source_id)
);

CREATE UNIQUE INDEX content_descriptor_source_mapping_unique ON cd_source_mappings (cd_id, source_id);

-- create new files table
CREATE TABLE files
(
    id         INTEGER PRIMARY KEY AUTOINCREMENT,
    status     INTEGER NOT NULL DEFAULT 10,
    storage_id INTEGER NOT NULL REFERENCES storage_locations (id),
    cd_id      INTEGER NOT NULL REFERENCES content_descriptors (id),
    mime_type  VARCHAR(128) NOT NULL DEFAULT 'application/octet-stream'
);

CREATE INDEX files_contend_descriptor ON files (cd_id);

-- create metadata table
CREATE TABLE file_metadata
(
    file_id       INTEGER PRIMARY KEY REFERENCES files (id),
    size          INTEGER NOT NULL,
    name          VARCHAR(128),
    comment       VARCHAR(1024),
    import_time   DATETIME NOT NULL,
    creation_time DATETIME NOT NULL,
    change_time   DATETIME NOT NULL
);

CREATE UNIQUE INDEX file_metadata_file_id_unique ON file_metadata (file_id);

-- add content identifiers from hashes table
INSERT INTO content_descriptors
SELECT id, value
FROM _hashes_old;

-- add files from files table
INSERT INTO files (id, storage_id, cd_id, mime_type)
SELECT id, storage_id, hash_id AS content_id, mime_type
FROM _files_old;

-- add metadata from files table
INSERT INTO file_metadata
SELECT id AS file_id, size, name, comment, import_time, creation_time, change_time
FROM _files_old;

-- add content tag mappings
INSERT INTO cd_tag_mappings
SELECT hash_id AS content_id, tag_id
FROM _hash_tag_mappings_old;

-- add content id source mappings
INSERT INTO cd_source_mappings
SELECT hash_id AS content_id, source_id
FROM _hash_source_mappings_old;

-- drop all old tables
DROP TABLE _hash_source_mappings_old;
DROP TABLE _hash_tag_mappings_old;
DROP TABLE _files_old;
DROP TABLE _hashes_old;

PRAGMA foreign_keys = on;
@@ -0,0 +1,50 @@
-- Add migration script here
PRAGMA foreign_keys = off;

-- rename old files table
ALTER TABLE files
    RENAME TO _files_old;
-- rename metadata table (because of foreign key constraints)
ALTER TABLE file_metadata
    RENAME TO _file_metadata_old;

-- create new files table
CREATE TABLE files
(
    id        INTEGER PRIMARY KEY AUTOINCREMENT,
    status    INTEGER NOT NULL DEFAULT 10,
    cd_id     INTEGER NOT NULL REFERENCES content_descriptors (id),
    mime_type VARCHAR(128) NOT NULL DEFAULT 'application/octet-stream'
);
-- add data from files table
INSERT INTO files
SELECT id, status, cd_id, mime_type
FROM _files_old;

-- create metadata table
CREATE TABLE file_metadata
(
    file_id       INTEGER PRIMARY KEY REFERENCES files (id),
    size          INTEGER NOT NULL,
    name          VARCHAR(128),
    comment       VARCHAR(1024),
    import_time   DATETIME NOT NULL,
    creation_time DATETIME NOT NULL,
    change_time   DATETIME NOT NULL
);

-- add back the old values
INSERT INTO file_metadata
SELECT *
FROM _file_metadata_old;

-- drop old tables
DROP TABLE _file_metadata_old;
DROP TABLE _files_old;
DROP TABLE storage_locations;

-- create indices on new tables
CREATE UNIQUE INDEX file_metadata_file_id_unique ON file_metadata (file_id);
CREATE INDEX files_content_descriptor ON files (cd_id);

PRAGMA foreign_keys = on;
@@ -0,0 +1,32 @@
use chrono::NaiveDateTime;
use sea_orm::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "file_metadata")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub file_id: i64,
    pub name: Option<String>,
    pub comment: Option<String>,
    pub size: i64,
    pub import_time: NaiveDateTime,
    pub creation_time: NaiveDateTime,
    pub change_time: NaiveDateTime,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::file::Entity",
        from = "Column::FileId",
        to = "super::file::Column::Id"
    )]
    File,
}

impl Related<super::file::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::File.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
@@ -1,8 +1,8 @@
+pub mod content_descriptor;
+pub mod content_descriptor_source;
+pub mod content_descriptor_tag;
 pub mod file;
-pub mod hash;
-pub mod hash_source;
-pub mod hash_tag;
+pub mod file_metadata;
 pub mod namespace;
 pub mod source;
-pub mod storage;
 pub mod tag;
@@ -1,24 +0,0 @@
use sea_orm::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "storage_locations")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i64,
    pub name: String,
    pub path: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::file::Entity")]
    File,
}

impl Related<super::file::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::File.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
@@ -1,395 +0,0 @@
use std::fmt::Debug;
use std::io::Cursor;
use std::str::FromStr;

use chrono::{Local, NaiveDateTime};
use sea_orm::prelude::*;
use sea_orm::sea_query::{Expr, Query};
use sea_orm::{Condition, DatabaseConnection, Set};
use sea_orm::{JoinType, QuerySelect};
use tokio::io::{AsyncReadExt, BufReader};

use mediarepo_core::error::RepoResult;
use mediarepo_core::thumbnailer::{self, Thumbnail as ThumbnailerThumb, ThumbnailSize};
use mediarepo_database::entities::file;
use mediarepo_database::entities::hash;
use mediarepo_database::entities::hash_tag;
use mediarepo_database::entities::namespace;
use mediarepo_database::entities::tag;

use crate::file_type::FileType;
use crate::storage::Storage;
use crate::tag::Tag;

#[derive(Clone)]
pub struct File {
    db: DatabaseConnection,
    model: file::Model,
    hash: hash::Model,
}

impl File {
    #[tracing::instrument(level = "trace")]
    pub(crate) fn new(db: DatabaseConnection, model: file::Model, hash: hash::Model) -> Self {
        Self { db, model, hash }
    }

    /// Returns a list of all known stored files
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<File>> {
        let files: Vec<(file::Model, Option<hash::Model>)> = file::Entity::find()
            .find_also_related(hash::Entity)
            .all(&db)
            .await?;
        let files = files
            .into_iter()
            .filter_map(|(f, h)| {
                let h = h?;
                Some(Self::new(db.clone(), f, h))
            })
            .collect();

        Ok(files)
    }

    /// Fetches the file by id
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
        if let Some((model, Some(hash))) = file::Entity::find_by_id(id)
            .find_also_related(hash::Entity)
            .one(&db)
            .await?
        {
            let file = File::new(db, model, hash);
            Ok(Some(file))
        } else {
            Ok(None)
        }
    }

    /// Finds the file by hash
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn by_hash<S: AsRef<str> + Debug>(
        db: DatabaseConnection,
        hash: S,
    ) -> RepoResult<Option<Self>> {
        if let Some((hash, Some(model))) = hash::Entity::find()
            .filter(hash::Column::Value.eq(hash.as_ref()))
            .find_also_related(file::Entity)
            .one(&db)
            .await?
        {
            let file = File::new(db, model, hash);
            Ok(Some(file))
        } else {
            Ok(None)
        }
    }

    /// Finds files by tags
    #[tracing::instrument(level = "debug", skip(db))]
    pub(crate) async fn find_by_tags(
        db: DatabaseConnection,
        tag_ids: Vec<Vec<(i64, bool)>>,
    ) -> RepoResult<Vec<Self>> {
        let main_condition = build_find_filter_conditions(tag_ids);

        let results: Vec<(hash::Model, Option<file::Model>)> = hash::Entity::find()
            .find_also_related(file::Entity)
            .filter(main_condition)
            .group_by(file::Column::Id)
            .all(&db)
            .await?;
        let files: Vec<Self> = results
            .into_iter()
            .filter_map(|(hash, file)| Some(Self::new(db.clone(), file?, hash)))
            .collect();

        Ok(files)
    }

    /// Adds a file with its hash to the database
    #[tracing::instrument(level = "debug", skip(db))]
    pub(crate) async fn add(
        db: DatabaseConnection,
        storage_id: i64,
        hash_id: i64,
        file_type: FileType,
        mime_type: Option<String>,
        creation_time: NaiveDateTime,
        change_time: NaiveDateTime,
    ) -> RepoResult<Self> {
        let file = file::ActiveModel {
            hash_id: Set(hash_id),
            file_type: Set(file_type as u32),
            mime_type: Set(mime_type),
            storage_id: Set(storage_id),
            import_time: Set(Local::now().naive_local()),
            creation_time: Set(creation_time),
            change_time: Set(change_time),
            ..Default::default()
        };
        let file: file::ActiveModel = file.insert(&db).await?.into();
        let file = Self::by_id(db, file.id.unwrap())
            .await?
            .expect("Inserted file does not exist");

        Ok(file)
    }

    /// Returns the unique identifier of the file
    pub fn id(&self) -> i64 {
        self.model.id
    }

    /// Returns the hash of the file (content identifier)
    pub fn hash(&self) -> &String {
        &self.hash.value
    }

    /// Returns the hash id of the file
    pub fn hash_id(&self) -> i64 {
        self.hash.id
    }

    /// Returns the type of the file
    pub fn file_type(&self) -> FileType {
        match self.model.file_type {
            1 => FileType::Image,
            2 => FileType::Video,
            3 => FileType::Audio,
            _ => FileType::Unknown,
        }
    }

    /// Returns the optional mime type of the file
    pub fn mime_type(&self) -> &Option<String> {
        &self.model.mime_type
    }

    /// Returns the optional name of the file
    pub fn name(&self) -> &Option<String> {
        &self.model.name
    }

    /// Returns the comment of the file
    pub fn comment(&self) -> &Option<String> {
        &self.model.comment
    }

    /// Returns the import time of the file
    pub fn import_time(&self) -> &NaiveDateTime {
        &self.model.import_time
    }

    /// Returns the datetime when the file was created
    pub fn creation_time(&self) -> &NaiveDateTime {
        &self.model.creation_time
    }

    /// Returns the last time the file was changed
    pub fn change_time(&self) -> &NaiveDateTime {
        &self.model.change_time
    }

    /// Returns the storage where the file is stored
    pub async fn storage(&self) -> RepoResult<Storage> {
        let storage = Storage::by_id(self.db.clone(), self.model.storage_id)
            .await?
            .expect("The FK storage_id doesn't exist?!");

        Ok(storage)
    }

    /// Returns the list of tags of the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn tags(&self) -> RepoResult<Vec<Tag>> {
        let tags: Vec<(tag::Model, Option<namespace::Model>)> = tag::Entity::find()
            .find_also_related(namespace::Entity)
            .join(JoinType::LeftJoin, hash_tag::Relation::Tag.def().rev())
            .join(JoinType::InnerJoin, hash_tag::Relation::Hash.def())
            .filter(hash::Column::Id.eq(self.hash.id))
            .all(&self.db)
            .await?;
        let tags = tags
            .into_iter()
            .map(|(tag, namespace)| Tag::new(self.db.clone(), tag, namespace))
            .collect();

        Ok(tags)
    }

    /// Changes the name of the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn set_name<S: ToString + Debug>(&mut self, name: S) -> RepoResult<()> {
        let mut active_file = self.get_active_model();
        active_file.name = Set(Some(name.to_string()));
        let active_file = active_file.update(&self.db).await?;
        self.model.name = active_file.name;

        Ok(())
    }

    /// Changes the comment of the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn set_comment<S: ToString + Debug>(&mut self, comment: S) -> RepoResult<()> {
        let mut active_file = self.get_active_model();
        active_file.comment = Set(Some(comment.to_string()));
        let active_file = active_file.update(&self.db).await?;
        self.model.comment = active_file.comment;

        Ok(())
    }

    /// Changes the type of the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn set_file_type(&mut self, file_type: FileType) -> RepoResult<()> {
        let mut active_file = self.get_active_model();
        active_file.file_type = Set(file_type as u32);
        let active_file = active_file.update(&self.db).await?;
        self.model.file_type = active_file.file_type;

        Ok(())
    }

    /// Adds a single tag to the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn add_tag(&mut self, tag_id: i64) -> RepoResult<()> {
        let hash_id = self.hash.id;
        let active_model = hash_tag::ActiveModel {
            hash_id: Set(hash_id),
            tag_id: Set(tag_id),
        };
        active_model.insert(&self.db).await?;
        Ok(())
    }

    /// Adds multiple tags to the file at once
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn add_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
        if tag_ids.is_empty() {
            return Ok(());
        }
        let hash_id = self.hash.id;
        let models: Vec<hash_tag::ActiveModel> = tag_ids
            .into_iter()
            .map(|tag_id| hash_tag::ActiveModel {
                hash_id: Set(hash_id),
                tag_id: Set(tag_id),
            })
            .collect();
        hash_tag::Entity::insert_many(models).exec(&self.db).await?;

        Ok(())
    }

    /// Removes multiple tags from the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn remove_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
        let hash_id = self.hash.id;
        hash_tag::Entity::delete_many()
            .filter(hash_tag::Column::HashId.eq(hash_id))
            .filter(hash_tag::Column::TagId.is_in(tag_ids))
            .exec(&self.db)
            .await?;

        Ok(())
    }

    /// Returns the reader for the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn get_reader(&self) -> RepoResult<BufReader<tokio::fs::File>> {
        let storage = self.storage().await?;

        storage.get_file_reader(&self.hash.value).await
    }

    /// Retrieves the size of the file from its content
    #[tracing::instrument(level = "trace", skip(self))]
    pub async fn get_size(&self) -> RepoResult<u64> {
        if let Some(size) = self.model.size {
            Ok(size as u64)
        } else {
            let mut reader = self.get_reader().await?;
            let size = {
                let mut buf = Vec::new();
                reader.read_to_end(&mut buf).await
            }?;
            let mut model = self.get_active_model();
            model.size = Set(Some(size as i64));
            model.update(&self.db).await?;

            Ok(size as u64)
        }
    }

    /// Creates a thumbnail for the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn create_thumbnail<I: IntoIterator<Item = ThumbnailSize> + Debug>(
        &self,
        sizes: I,
    ) -> RepoResult<Vec<ThumbnailerThumb>> {
        let mut buf = Vec::new();
        self.get_reader().await?.read_to_end(&mut buf).await?;
        let mime_type = self
            .model
            .mime_type
            .clone()
            .map(|mime_type| mime::Mime::from_str(&mime_type).unwrap())
            .unwrap_or(mime::IMAGE_STAR);
        let thumbs = thumbnailer::create_thumbnails(Cursor::new(buf), mime_type, sizes)?;

        Ok(thumbs)
    }

    /// Returns the active model of the file with only the id set
    fn get_active_model(&self) -> file::ActiveModel {
        file::ActiveModel {
            id: Set(self.id()),
            ..Default::default()
        }
    }
}

fn build_find_filter_conditions(tag_ids: Vec<Vec<(i64, bool)>>) -> Condition {
    let mut main_condition = Condition::all();

    for mut expression in tag_ids {
        if expression.len() == 1 {
            let (tag_id, negated) = expression.pop().unwrap();
            main_condition = add_single_filter_expression(main_condition, tag_id, negated)
        } else if !expression.is_empty() {
            let mut sub_condition = Condition::any();

            for (tag, negated) in expression {
                sub_condition = add_single_filter_expression(sub_condition, tag, negated);
            }
            main_condition = main_condition.add(sub_condition);
        }
    }
    main_condition
}

fn add_single_filter_expression(condition: Condition, tag_id: i64, negated: bool) -> Condition {
    if negated {
        condition.add(
            hash::Column::Id.not_in_subquery(
                Query::select()
                    .expr(Expr::col(hash_tag::Column::HashId))
                    .from(hash_tag::Entity)
                    .cond_where(hash_tag::Column::TagId.eq(tag_id))
                    .to_owned(),
            ),
        )
    } else {
        condition.add(
            hash::Column::Id.in_subquery(
                Query::select()
                    .expr(Expr::col(hash_tag::Column::HashId))
                    .from(hash_tag::Entity)
                    .cond_where(hash_tag::Column::TagId.eq(tag_id))
                    .to_owned(),
            ),
        )
    }
}
@@ -0,0 +1,223 @@
use chrono::NaiveDateTime;
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::content_descriptor_tag;
use mediarepo_database::entities::file;
use mediarepo_database::entities::file_metadata;
use sea_orm::sea_query::{Alias, Expr, Query, SimpleExpr};
use sea_orm::ColumnTrait;
use sea_orm::Condition;

macro_rules! apply_ordering_comparator {
    ($column:expr, $filter:expr) => {
        match $filter {
            OrderingComparator::Less(value) => $column.lt(value),
            OrderingComparator::Equal(value) => $column.eq(value),
            OrderingComparator::Greater(value) => $column.gt(value),
            OrderingComparator::Between((min_value, max_value)) => {
                $column.between(min_value, max_value)
            }
        }
    };
}

#[derive(Clone, Debug)]
pub enum FilterProperty {
    TagId(NegatableComparator<i64>),
    TagWildcardIds(NegatableComparator<Vec<i64>>),
    ContentDescriptor(NegatableComparator<Vec<u8>>),
    TagCount(OrderingComparator<i64>),
    FileProperty(FilterFileProperty),
}

#[derive(Clone, Debug)]
pub enum FilterFileProperty {
    Id(NegatableComparator<i64>),
    Status(NegatableComparator<i64>),
    FileSize(OrderingComparator<i64>),
    ImportedTime(OrderingComparator<NaiveDateTime>),
    ChangedTime(OrderingComparator<NaiveDateTime>),
    CreatedTime(OrderingComparator<NaiveDateTime>),
}

#[derive(Clone, Debug)]
pub enum OrderingComparator<T> {
    Less(T),
    Equal(T),
    Greater(T),
    Between((T, T)),
}

#[derive(Clone, Debug)]
pub enum NegatableComparator<T> {
    Is(T),
    IsNot(T),
}

#[tracing::instrument(level = "debug")]
pub fn build_find_filter_conditions(filters: Vec<Vec<FilterProperty>>) -> Condition {
    filters
        .into_iter()
        .fold(Condition::all(), |all_cond, mut expression| {
            if expression.len() == 1 {
                let property = expression.pop().unwrap();

                all_cond.add(build_single_filter(property))
            } else if !expression.is_empty() {
                let sub_condition = expression
                    .into_iter()
                    .fold(Condition::any(), |cond, prop| {
                        cond.add(build_single_filter(prop))
                    });

                all_cond.add(sub_condition)
            } else {
                all_cond
            }
        })
}

#[inline]
fn build_single_filter(property: FilterProperty) -> SimpleExpr {
    match property {
        FilterProperty::TagId(tag_filter) => build_tag_id_filter(tag_filter),
        FilterProperty::TagWildcardIds(wildcard_filter) => {
            build_tag_wildcard_ids_filter(wildcard_filter)
        }
        FilterProperty::ContentDescriptor(cd_filter) => build_content_descriptor_filter(cd_filter),
        FilterProperty::TagCount(count_filter) => build_tag_count_filter(count_filter),
        FilterProperty::FileProperty(property_filter) => {
            build_file_property_filter(property_filter)
        }
    }
}

fn build_tag_id_filter(filter: NegatableComparator<i64>) -> SimpleExpr {
    match filter {
        NegatableComparator::Is(tag_id) => content_descriptor::Column::Id.in_subquery(
            Query::select()
                .expr(Expr::col(content_descriptor_tag::Column::CdId))
                .from(content_descriptor_tag::Entity)
                .cond_where(content_descriptor_tag::Column::TagId.eq(tag_id))
                .to_owned(),
        ),
        NegatableComparator::IsNot(tag_id) => content_descriptor::Column::Id.not_in_subquery(
            Query::select()
                .expr(Expr::col(content_descriptor_tag::Column::CdId))
                .from(content_descriptor_tag::Entity)
                .cond_where(content_descriptor_tag::Column::TagId.eq(tag_id))
                .to_owned(),
        ),
    }
}

fn build_tag_wildcard_ids_filter(filter: NegatableComparator<Vec<i64>>) -> SimpleExpr {
    match filter {
        NegatableComparator::Is(tag_ids) => content_descriptor::Column::Id.in_subquery(
            Query::select()
                .expr(Expr::col(content_descriptor_tag::Column::CdId))
                .from(content_descriptor_tag::Entity)
                .cond_where(content_descriptor_tag::Column::TagId.is_in(tag_ids))
                .to_owned(),
        ),
        NegatableComparator::IsNot(tag_ids) => content_descriptor::Column::Id.not_in_subquery(
            Query::select()
                .expr(Expr::col(content_descriptor_tag::Column::CdId))
                .from(content_descriptor_tag::Entity)
                .cond_where(content_descriptor_tag::Column::TagId.is_in(tag_ids))
                .to_owned(),
        ),
    }
}

fn build_content_descriptor_filter(filter: NegatableComparator<Vec<u8>>) -> SimpleExpr {
    match filter {
        NegatableComparator::Is(cd) => content_descriptor::Column::Descriptor.eq(cd),
        NegatableComparator::IsNot(cd) => content_descriptor::Column::Descriptor.ne(cd),
    }
}

fn build_tag_count_filter(filter: OrderingComparator<i64>) -> SimpleExpr {
    let count_column = Alias::new("count");
    let cd_id_column = Alias::new("cd_id");

    let count_subquery = Query::select()
        .expr_as(
            Expr::col(content_descriptor_tag::Column::CdId),
            cd_id_column.clone(),
        )
        .expr_as(
            content_descriptor_tag::Column::TagId.count(),
            count_column.clone(),
        )
        .from(content_descriptor_tag::Entity)
        .group_by_col(cd_id_column.clone())
        .to_owned();

    let count_expression = apply_ordering_comparator!(Expr::col(count_column), filter);

    content_descriptor::Column::Id.in_subquery(
        Query::select()
            .expr(Expr::col(cd_id_column))
            .from_subquery(count_subquery, Alias::new("tag_counts"))
            .cond_where(count_expression)
            .to_owned(),
    )
}

#[inline]
fn build_file_property_filter(property: FilterFileProperty) -> SimpleExpr {
    match property {
        FilterFileProperty::Id(id_filter) => build_file_id_filter(id_filter),
        FilterFileProperty::Status(status_filter) => build_file_status_filter(status_filter),
        FilterFileProperty::FileSize(size_filter) => {
            build_file_metadata_filter(build_file_size_filter(size_filter))
        }
        FilterFileProperty::ImportedTime(time_filter) => {
            build_file_metadata_filter(build_file_import_time_filter(time_filter))
        }
        FilterFileProperty::ChangedTime(time_filter) => {
            build_file_metadata_filter(build_file_changed_time_filter(time_filter))
        }
        FilterFileProperty::CreatedTime(time_filter) => {
            build_file_metadata_filter(build_file_created_time_filter(time_filter))
        }
    }
}

fn build_file_id_filter(filter: NegatableComparator<i64>) -> SimpleExpr {
    match filter {
        NegatableComparator::Is(id) => file::Column::Id.eq(id),
        NegatableComparator::IsNot(id) => file::Column::Id.ne(id),
    }
}

fn build_file_status_filter(filter: NegatableComparator<i64>) -> SimpleExpr {
    match filter {
        NegatableComparator::Is(status) => file::Column::Status.eq(status),
        NegatableComparator::IsNot(status) => file::Column::Status.ne(status),
    }
}

fn build_file_metadata_filter(property_condition: SimpleExpr) -> SimpleExpr {
    file::Column::Id.in_subquery(
        Query::select()
            .expr(Expr::col(file_metadata::Column::FileId))
            .from(file_metadata::Entity)
            .cond_where(property_condition)
            .to_owned(),
    )
}

fn build_file_size_filter(filter: OrderingComparator<i64>) -> SimpleExpr {
    apply_ordering_comparator!(file_metadata::Column::Size, filter)
}

fn build_file_import_time_filter(filter: OrderingComparator<NaiveDateTime>) -> SimpleExpr {
    apply_ordering_comparator!(file_metadata::Column::ImportTime, filter)
}

fn build_file_changed_time_filter(filter: OrderingComparator<NaiveDateTime>) -> SimpleExpr {
    apply_ordering_comparator!(file_metadata::Column::ChangeTime, filter)
}

fn build_file_created_time_filter(filter: OrderingComparator<NaiveDateTime>) -> SimpleExpr {
    apply_ordering_comparator!(file_metadata::Column::CreationTime, filter)
}
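A sketch of how a caller composes these filters: the outer Vec is AND-ed and each inner Vec is OR-ed, mirroring build_find_filter_conditions above (the tag ids and size are hypothetical):

use crate::file::filter::{
    build_find_filter_conditions, FilterFileProperty, FilterProperty, NegatableComparator,
    OrderingComparator,
};

// (tag 1 OR tag 2) AND NOT tag 3 AND size > 1024
let filters = vec![
    vec![
        FilterProperty::TagId(NegatableComparator::Is(1)),
        FilterProperty::TagId(NegatableComparator::Is(2)),
    ],
    vec![FilterProperty::TagId(NegatableComparator::IsNot(3))],
    vec![FilterProperty::FileProperty(FilterFileProperty::FileSize(
        OrderingComparator::Greater(1024),
    ))],
];
let condition = build_find_filter_conditions(filters);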
@ -0,0 +1,334 @@
|
|||||||
|
pub mod filter;
|
||||||
|
|
||||||
|
use std::fmt::Debug;
|
||||||
|
use std::io::Cursor;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use mediarepo_core::content_descriptor::encode_content_descriptor;
|
||||||
|
use sea_orm::prelude::*;
|
||||||
|
use sea_orm::{ConnectionTrait, DatabaseConnection, Set};
|
||||||
|
use sea_orm::{JoinType, QuerySelect};
|
||||||
|
use tokio::io::{AsyncReadExt, BufReader};
|
||||||
|
|
||||||
|
use crate::file::filter::FilterProperty;
|
||||||
|
use crate::file_metadata::FileMetadata;
|
||||||
|
use mediarepo_core::error::{RepoError, RepoResult};
|
||||||
|
use mediarepo_core::fs::file_hash_store::FileHashStore;
|
||||||
|
use mediarepo_core::mediarepo_api::types::files::FileStatus as ApiFileStatus;
|
||||||
|
use mediarepo_core::thumbnailer::{self, Thumbnail as ThumbnailerThumb, ThumbnailSize};
|
||||||
|
use mediarepo_database::entities::content_descriptor;
|
||||||
|
use mediarepo_database::entities::content_descriptor_tag;
|
||||||
|
use mediarepo_database::entities::file;
|
||||||
|
use mediarepo_database::entities::file_metadata;
|
||||||
|
use mediarepo_database::entities::namespace;
|
||||||
|
use mediarepo_database::entities::tag;
|
||||||
|
|
||||||
|
use crate::tag::Tag;
|
||||||
|
|
||||||
|
pub enum FileStatus {
|
||||||
|
Imported = 10,
|
||||||
|
Archived = 20,
|
||||||
|
Deleted = 30,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<ApiFileStatus> for FileStatus {
|
||||||
|
fn from(s: ApiFileStatus) -> Self {
|
||||||
|
match s {
|
||||||
|
ApiFileStatus::Imported => Self::Imported,
|
||||||
|
ApiFileStatus::Archived => Self::Archived,
|
||||||
|
ApiFileStatus::Deleted => Self::Deleted,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct File {
|
||||||
|
db: DatabaseConnection,
|
||||||
|
model: file::Model,
|
||||||
|
content_descriptor: content_descriptor::Model,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl File {
|
||||||
|
#[tracing::instrument(level = "trace")]
|
||||||
|
pub(crate) fn new(
|
||||||
|
db: DatabaseConnection,
|
||||||
|
model: file::Model,
|
||||||
|
hash: content_descriptor::Model,
|
||||||
|
) -> Self {
|
||||||
|
Self {
|
||||||
|
db,
|
||||||
|
model,
|
||||||
|
content_descriptor: hash,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a list of all known stored files
|
||||||
|
#[tracing::instrument(level = "debug", skip(db))]
|
||||||
|
pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<File>> {
|
||||||
|
let files: Vec<(file::Model, Option<content_descriptor::Model>)> = file::Entity::find()
|
||||||
|
.find_also_related(content_descriptor::Entity)
|
||||||
|
.all(&db)
|
||||||
|
.await?;
|
||||||
|
let files = files
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|(f, h)| {
|
||||||
|
let h = h?;
|
||||||
|
Some(Self::new(db.clone(), f, h))
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok(files)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Fetches the file by id
|
||||||
|
#[tracing::instrument(level = "debug", skip(db))]
|
||||||
|
pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
|
||||||
|
if let Some((model, Some(hash))) = file::Entity::find_by_id(id)
|
||||||
|
.find_also_related(content_descriptor::Entity)
|
||||||
|
.one(&db)
|
||||||
|
.await?
|
||||||
|
{
|
||||||
|
let file = File::new(db, model, hash);
|
||||||
|
Ok(Some(file))
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Finds the file by hash
|
||||||
|
#[tracing::instrument(level = "debug", skip(db))]
|
||||||
|
pub async fn by_cd(db: DatabaseConnection, cd: &[u8]) -> RepoResult<Option<Self>> {
|
||||||
|
if let Some((hash, Some(model))) = content_descriptor::Entity::find()
|
||||||
|
.filter(content_descriptor::Column::Descriptor.eq(cd))
|
||||||
|
.find_also_related(file::Entity)
|
||||||
|
.one(&db)
|
||||||
|
.await?
|
||||||
|
{
|
||||||
|
let file = File::new(db, model, hash);
|
||||||
|
Ok(Some(file))
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Finds the file by tags
|
||||||
|
#[tracing::instrument(level = "debug", skip(db))]
|
||||||
|
pub(crate) async fn find_by_filters(
|
||||||
|
db: DatabaseConnection,
|
||||||
|
filters: Vec<Vec<FilterProperty>>,
|
||||||
|
) -> RepoResult<Vec<Self>> {
|
||||||
|
let main_condition = filter::build_find_filter_conditions(filters);
|
||||||
|
|
||||||
|
let results: Vec<(content_descriptor::Model, Option<file::Model>)> =
|
||||||
|
content_descriptor::Entity::find()
|
||||||
|
.find_also_related(file::Entity)
|
||||||
|
.filter(main_condition)
|
||||||
|
.group_by(file::Column::Id)
|
||||||
|
.all(&db)
|
||||||
|
.await?;
|
||||||
|
let files: Vec<Self> = results
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|(hash, tag)| Some(Self::new(db.clone(), tag?, hash)))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok(files)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Adds a file with its hash to the database
|
||||||
|
#[tracing::instrument(level = "debug", skip(db))]
|
||||||
|
pub(crate) async fn add(
|
||||||
|
db: DatabaseConnection,
|
||||||
|
cd_id: i64,
|
||||||
|
mime_type: String,
|
||||||
|
) -> RepoResult<Self> {
|
||||||
|
let file = file::ActiveModel {
|
||||||
|
cd_id: Set(cd_id),
|
||||||
|
mime_type: Set(mime_type),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
let file: file::ActiveModel = file.insert(&db).await?.into();
|
||||||
|
let file = Self::by_id(db, file.id.unwrap())
|
||||||
|
.await?
|
||||||
|
.expect("Inserted file does not exist");
|
||||||
|
|
||||||
|
Ok(file)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the unique identifier of the file
|
||||||
|
pub fn id(&self) -> i64 {
|
||||||
|
self.model.id
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the hash of the file (content identifier)
|
||||||
|
pub fn cd(&self) -> &[u8] {
|
||||||
|
&self.content_descriptor.descriptor
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the encoded content descriptor
|
||||||
|
pub fn encoded_cd(&self) -> String {
|
||||||
|
encode_content_descriptor(self.cd())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the id of the civ (content identifier value) of the file
|
||||||
|
pub fn cd_id(&self) -> i64 {
|
||||||
|
self.content_descriptor.id
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the mime type of the file
|
||||||
|
pub fn mime_type(&self) -> &String {
|
||||||
|
&self.model.mime_type
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the status of the file
|
||||||
|
pub fn status(&self) -> FileStatus {
|
||||||
|
match self.model.status {
|
||||||
|
10 => FileStatus::Imported,
|
||||||
|
20 => FileStatus::Archived,
|
||||||
|
30 => FileStatus::Deleted,
|
||||||
|
_ => FileStatus::Imported,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn set_status(&mut self, status: FileStatus) -> RepoResult<()> {
|
||||||
|
let active_model = file::ActiveModel {
|
||||||
|
id: Set(self.model.id),
|
||||||
|
status: Set(status as i32),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
self.model = active_model.update(&self.db).await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the metadata associated with this file
|
||||||
|
/// A file MUST always have metadata associated
|
||||||
|
pub async fn metadata(&self) -> RepoResult<FileMetadata> {
|
||||||
|
FileMetadata::by_id(self.db.clone(), self.model.id)
|
||||||
|
.await
|
||||||
|
.and_then(|f| f.ok_or_else(|| RepoError::from("missing file metadata")))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the list of tags of the file
|
||||||
|
#[tracing::instrument(level = "debug", skip(self))]
|
||||||
|
pub async fn tags(&self) -> RepoResult<Vec<Tag>> {
|
||||||
|
let tags: Vec<(tag::Model, Option<namespace::Model>)> = tag::Entity::find()
|
||||||
|
.find_also_related(namespace::Entity)
|
||||||
|
.join(
|
||||||
|
JoinType::LeftJoin,
|
||||||
|
content_descriptor_tag::Relation::Tag.def().rev(),
|
||||||
|
)
|
||||||
|
.join(
|
||||||
|
JoinType::InnerJoin,
|
||||||
|
content_descriptor_tag::Relation::ContentDescriptorId.def(),
|
||||||
|
)
|
||||||
|
.filter(content_descriptor::Column::Id.eq(self.content_descriptor.id))
|
||||||
|
.all(&self.db)
|
||||||
|
.await?;
|
||||||
|
let tags = tags
|
||||||
|
.into_iter()
|
||||||
|
.map(|(tag, namespace)| Tag::new(self.db.clone(), tag, namespace))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok(tags)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Adds a single tag to the file
|
||||||
|
#[tracing::instrument(level = "debug", skip(self))]
|
||||||
|
pub async fn add_tag(&mut self, tag_id: i64) -> RepoResult<()> {
|
||||||
|
let cd_id = self.content_descriptor.id;
|
||||||
|
let active_model = content_descriptor_tag::ActiveModel {
|
||||||
|
cd_id: Set(cd_id),
|
||||||
|
tag_id: Set(tag_id),
|
||||||
|
};
|
||||||
|
active_model.insert(&self.db).await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Adds multiple tags to the file at once
|
||||||
|
#[tracing::instrument(level = "debug", skip(self))]
|
||||||
|
pub async fn add_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
|
||||||
|
if tag_ids.is_empty() {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
let cd_id = self.content_descriptor.id;
|
||||||
|
let models: Vec<content_descriptor_tag::ActiveModel> = tag_ids
|
||||||
|
.into_iter()
|
||||||
|
.map(|tag_id| content_descriptor_tag::ActiveModel {
|
||||||
|
cd_id: Set(cd_id),
|
||||||
|
tag_id: Set(tag_id),
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
content_descriptor_tag::Entity::insert_many(models)
|
||||||
|
.exec(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Removes multiple tags from the file
|
||||||
|
#[tracing::instrument(level = "debug", skip(self))]
|
||||||
|
pub async fn remove_tags(&self, tag_ids: Vec<i64>) -> RepoResult<()> {
|
||||||
|
let hash_id = self.content_descriptor.id;
|
||||||
|
content_descriptor_tag::Entity::delete_many()
|
||||||
|
.filter(content_descriptor_tag::Column::CdId.eq(hash_id))
|
||||||
|
.filter(content_descriptor_tag::Column::TagId.is_in(tag_ids))
|
||||||
|
.exec(&self.db)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
    /// Returns the reader for the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn get_reader(
        &self,
        storage: &FileHashStore,
    ) -> RepoResult<BufReader<tokio::fs::File>> {
        storage
            .get_file(&self.content_descriptor.descriptor)
            .await
            .map(|(_, f)| f)
    }

    /// Creates a thumbnail for the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn create_thumbnail<I: IntoIterator<Item = ThumbnailSize> + Debug>(
        &self,
        storage: &FileHashStore,
        sizes: I,
    ) -> RepoResult<Vec<ThumbnailerThumb>> {
        let mut buf = Vec::new();
        self.get_reader(storage)
            .await?
            .read_to_end(&mut buf)
            .await?;
        let mime_type = self.model.mime_type.clone();
        let mime_type =
            mime::Mime::from_str(&mime_type).unwrap_or_else(|_| mime::APPLICATION_OCTET_STREAM);
        let thumbs = thumbnailer::create_thumbnails(Cursor::new(buf), mime_type, sizes)?;

        Ok(thumbs)
    }
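
A hedged sketch of driving create_thumbnail, assuming a FileHashStore handle is at hand; the ThumbnailSize variant name is an assumption, not taken from this commit:

    // Hypothetical call site: reads the whole file, guesses the mime type
    // from the database record and renders one thumbnail size.
    let thumbs = file
        .create_thumbnail(&storage, vec![ThumbnailSize::Medium]) // variant name assumed
        .await?;
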
    /// Deletes the file as well as the content descriptor, tag mappings and metadata about the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn delete(self) -> RepoResult<()> {
        let trx = self.db.begin().await?;
        file_metadata::Entity::delete_many()
            .filter(file_metadata::Column::FileId.eq(self.model.id))
            .exec(&trx)
            .await?;
        self.model.delete(&trx).await?;
        content_descriptor_tag::Entity::delete_many()
            .filter(content_descriptor_tag::Column::CdId.eq(self.content_descriptor.id))
            .exec(&trx)
            .await?;
        content_descriptor::Entity::delete_many()
            .filter(content_descriptor::Column::Id.eq(self.content_descriptor.id))
            .exec(&trx)
            .await?;
        trx.commit().await?;

        Ok(())
    }
}
@ -0,0 +1,124 @@
use std::fmt::Debug;

use chrono::{Local, NaiveDateTime};
use sea_orm::prelude::*;
use sea_orm::{DatabaseConnection, Set};

use mediarepo_core::error::RepoResult;
use mediarepo_database::entities::file_metadata;

#[derive(Clone)]
pub struct FileMetadata {
    db: DatabaseConnection,
    model: file_metadata::Model,
}

impl FileMetadata {
    #[tracing::instrument(level = "trace")]
    pub(crate) fn new(db: DatabaseConnection, model: file_metadata::Model) -> Self {
        Self { db, model }
    }

    /// Fetches the metadata for the file with the given id
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
        let file_metadata = file_metadata::Entity::find_by_id(id)
            .one(&db)
            .await?
            .map(|m| FileMetadata::new(db, m));

        Ok(file_metadata)
    }

    /// Fetches metadata for all given file ids
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn all_by_ids(db: DatabaseConnection, ids: Vec<i64>) -> RepoResult<Vec<Self>> {
        let file_metadata = file_metadata::Entity::find()
            .filter(file_metadata::Column::FileId.is_in(ids))
            .all(&db)
            .await?
            .into_iter()
            .map(|m| FileMetadata::new(db.clone(), m))
            .collect();

        Ok(file_metadata)
    }

    /// Adds a metadata entry for the file to the database
    #[tracing::instrument(level = "debug", skip(db))]
    pub(crate) async fn add(
        db: DatabaseConnection,
        file_id: i64,
        size: i64,
        creation_time: NaiveDateTime,
        change_time: NaiveDateTime,
    ) -> RepoResult<Self> {
        let file = file_metadata::ActiveModel {
            file_id: Set(file_id),
            size: Set(size),
            import_time: Set(Local::now().naive_local()),
            creation_time: Set(creation_time),
            change_time: Set(change_time),
            ..Default::default()
        };
        let model = file.insert(&db).await?;

        Ok(Self::new(db, model))
    }

    pub fn file_id(&self) -> i64 {
        self.model.file_id
    }

    pub fn size(&self) -> i64 {
        self.model.size
    }

    pub fn name(&self) -> &Option<String> {
        &self.model.name
    }

    pub fn comment(&self) -> &Option<String> {
        &self.model.comment
    }

    pub fn import_time(&self) -> &NaiveDateTime {
        &self.model.import_time
    }

    pub fn creation_time(&self) -> &NaiveDateTime {
        &self.model.creation_time
    }

    pub fn change_time(&self) -> &NaiveDateTime {
        &self.model.change_time
    }

    /// Changes the name of the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn set_name<S: ToString + Debug>(&mut self, name: S) -> RepoResult<()> {
        let mut active_model = self.get_active_model();
        active_model.name = Set(Some(name.to_string()));
        self.model = active_model.update(&self.db).await?;

        Ok(())
    }

    /// Changes the comment of the file
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn set_comment<S: ToString + Debug>(&mut self, comment: S) -> RepoResult<()> {
        let mut active_file = self.get_active_model();
        active_file.comment = Set(Some(comment.to_string()));
        self.model = active_file.update(&self.db).await?;

        Ok(())
    }

    /// Returns an active model of the file metadata with only the file id set
    fn get_active_model(&self) -> file_metadata::ActiveModel {
        file_metadata::ActiveModel {
            file_id: Set(self.file_id()),
            ..Default::default()
        }
    }
}
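
The setters above go through get_active_model(), which carries only the file id, so each save is a targeted UPDATE of the changed column rather than a full row write. A short usage sketch; the calling context is hypothetical:

    // `metadata` is assumed to have been fetched via FileMetadata::by_id.
    async fn annotate(mut metadata: FileMetadata) -> RepoResult<()> {
        metadata.set_name("vacation.jpg").await?;             // updates only the name column
        metadata.set_comment("imported from camera").await?;  // updates only the comment column
        Ok(())
    }
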
@ -1,22 +0,0 @@
use mime::Mime;
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Serialize, Deserialize, PartialOrd, PartialEq)]
pub enum FileType {
    Other = -1,
    Unknown = 0,
    Image = 1,
    Video = 2,
    Audio = 3,
}

impl From<Mime> for FileType {
    fn from(mime_type: Mime) -> Self {
        match mime_type.type_() {
            mime::IMAGE => Self::Image,
            mime::VIDEO => Self::Video,
            mime::AUDIO => Self::Audio,
            _ => Self::Other,
        }
    }
}
@ -1,16 +0,0 @@
pub mod tag_handle;

use async_trait::async_trait;
use mediarepo_core::error::RepoResult;
use sea_orm::DatabaseConnection;

#[async_trait]
pub trait EntityHandle {
    type Model;

    /// Returns the ID that is stored in the handle
    fn id(&self) -> i64;

    /// Returns the model associated with the handle
    async fn model(&self, db: DatabaseConnection) -> RepoResult<Self::Model>;
}
@ -1,24 +0,0 @@
use crate::handles::EntityHandle;
use crate::tag::Tag;
use async_trait::async_trait;
use mediarepo_core::error::{RepoDatabaseError, RepoResult};
use sea_orm::DatabaseConnection;

pub struct TagHandle(pub(crate) i64);

#[async_trait]
impl EntityHandle for TagHandle {
    type Model = Tag;

    fn id(&self) -> i64 {
        self.0
    }

    async fn model(&self, db: DatabaseConnection) -> RepoResult<Self::Model> {
        let tag = Tag::by_id(db, self.0)
            .await?
            .ok_or_else(|| RepoDatabaseError::InvalidHandle(self.id()))?;

        Ok(tag)
    }
}
@ -1,10 +1,8 @@
+pub mod content_descriptor;
 pub mod file;
+pub mod file_metadata;
-pub mod file_type;
-pub mod handles;
-pub mod hash;
 pub mod namespace;
 pub mod repo;
-pub mod storage;
 pub mod tag;
 pub mod thumbnail;
 pub mod type_keys;
@ -1,198 +0,0 @@
use crate::hash::Hash;
use mediarepo_core::error::RepoResult;
use mediarepo_core::fs::file_hash_store::FileHashStore;
use mediarepo_database::entities::storage;
use mediarepo_database::entities::storage::ActiveModel as ActiveStorage;
use mediarepo_database::entities::storage::Model as StorageModel;
use sea_orm::prelude::*;
use sea_orm::{DatabaseConnection, NotSet, Set};
use std::fmt::Debug;
use std::path::PathBuf;
use tokio::fs;
use tokio::io::{AsyncRead, BufReader};

#[derive(Clone)]
pub struct Storage {
    db: DatabaseConnection,
    model: StorageModel,
    store: FileHashStore,
}

impl Storage {
    #[tracing::instrument(level = "trace")]
    fn new(db: DatabaseConnection, model: StorageModel) -> Self {
        let path = PathBuf::from(&model.path);
        Self {
            store: FileHashStore::new(path),
            db,
            model,
        }
    }

    /// Returns all available storages
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn all(db: DatabaseConnection) -> RepoResult<Vec<Self>> {
        let storages: Vec<storage::Model> = storage::Entity::find().all(&db).await?;
        let storages = storages
            .into_iter()
            .map(|s| Self::new(db.clone(), s))
            .collect();

        Ok(storages)
    }

    /// Returns the storage by id
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn by_id(db: DatabaseConnection, id: i64) -> RepoResult<Option<Self>> {
        if let Some(model) = storage::Entity::find_by_id(id).one(&db).await? {
            let storage = Self::new(db, model);
            Ok(Some(storage))
        } else {
            Ok(None)
        }
    }

    /// Returns the storage by name
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn by_name<S: AsRef<str> + Debug>(
        db: DatabaseConnection,
        name: S,
    ) -> RepoResult<Option<Self>> {
        if let Some(model) = storage::Entity::find()
            .filter(storage::Column::Name.eq(name.as_ref()))
            .one(&db)
            .await?
        {
            let storage = Self::new(db, model);
            Ok(Some(storage))
        } else {
            Ok(None)
        }
    }

    /// Returns the storage by path
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn by_path<S: ToString + Debug>(
        db: DatabaseConnection,
        path: S,
    ) -> RepoResult<Option<Self>> {
        if let Some(model) = storage::Entity::find()
            .filter(storage::Column::Path.eq(path.to_string()))
            .one(&db)
            .await?
        {
            let storage = Self::new(db, model);
            Ok(Some(storage))
        } else {
            Ok(None)
        }
    }

    /// Creates a new active storage and also creates the associated directory
    /// if it doesn't exist yet.
    #[tracing::instrument(level = "debug", skip(db))]
    pub async fn create<S1: ToString + Debug, S2: ToString + Debug>(
        db: DatabaseConnection,
        name: S1,
        path: S2,
    ) -> RepoResult<Self> {
        let path = path.to_string();
        let name = name.to_string();
        let path_buf = PathBuf::from(&path);

        if !path_buf.exists() {
            fs::create_dir(path_buf).await?;
        }
        let storage = ActiveStorage {
            id: NotSet,
            name: Set(name),
            path: Set(path),
            ..Default::default()
        };
        let model = storage.insert(&db).await?;

        Ok(Self::new(db, model))
    }

    /// Returns the unique identifier of this storage
    pub fn id(&self) -> i64 {
        self.model.id
    }

    /// Returns the name of the storage
    pub fn name(&self) -> &String {
        &self.model.name
    }

    /// Returns the path of the storage
    pub fn path(&self) -> &String {
        &self.model.path
    }

    /// Sets a new name for the storage
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn set_name<S: ToString + Debug>(&self, name: S) -> RepoResult<()> {
        let mut active_storage: ActiveStorage = self.get_active_model();
        active_storage.name = Set(name.to_string());
        active_storage.update(&self.db).await?;

        Ok(())
    }

    /// Sets a new path for the storage. This only updates the database record;
    /// if the physical storage already exists on disk it has to be migrated first.
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn set_path<S: ToString + Debug>(&mut self, path: S) -> RepoResult<()> {
        let mut active_storage: ActiveStorage = self.get_active_model();
        active_storage.path = Set(path.to_string());
        let storage = active_storage.update(&self.db).await?;

        self.model = storage;

        Ok(())
    }

    /// Checks if the storage exists on the hard drive
    pub fn exists(&self) -> bool {
        let path = PathBuf::from(&self.path());

        path.exists()
    }

    /// Stores the content read from the reader and returns its hash
    #[tracing::instrument(level = "debug", skip(self, reader))]
    pub async fn store_entry<R: AsyncRead + Unpin>(&self, reader: R) -> RepoResult<Hash> {
        let hash = self.store.add_file(reader, None).await?;
        if let Some(hash) = Hash::by_value(self.db.clone(), &hash).await? {
            Ok(hash)
        } else {
            Hash::add(self.db.clone(), hash).await
        }
    }

    /// Returns the buf reader to the given hash
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn get_file_reader<S: ToString + Debug>(
        &self,
        hash: S,
    ) -> RepoResult<BufReader<tokio::fs::File>> {
        let (_ext, reader) = self.store.get_file(hash.to_string()).await?;

        Ok(reader)
    }

    /// Returns the size of the storage
    #[inline]
    #[tracing::instrument(level = "debug", skip(self))]
    pub async fn get_size(&self) -> RepoResult<u64> {
        self.store.get_size().await
    }

    /// Returns the active model with only the ID filled so saves always perform an update
    fn get_active_model(&self) -> ActiveStorage {
        ActiveStorage {
            id: Set(self.model.id),
            ..Default::default()
        }
    }
}
@ -0,0 +1,185 @@
use mediarepo_core::content_descriptor::decode_content_descriptor;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::files::FileStatus as ApiFileStatus;
use mediarepo_core::mediarepo_api::types::filtering::{
    FilterExpression, FilterQuery, PropertyQuery, TagQuery, ValueComparator,
};
use mediarepo_model::file::filter::NegatableComparator::{Is, IsNot};
use mediarepo_model::file::filter::{FilterFileProperty, FilterProperty, OrderingComparator};
use mediarepo_model::file::{File, FileStatus};
use mediarepo_model::repo::Repo;
use std::collections::HashMap;

#[tracing::instrument(level = "debug", skip(repo))]
pub async fn find_files_for_filters(
    repo: &Repo,
    expressions: Vec<FilterExpression>,
) -> RepoResult<Vec<File>> {
    let tag_names = get_tag_names_from_expressions(&expressions);
    let tag_id_map = repo.tag_names_to_ids(tag_names).await?;
    let filters = build_filters_from_expressions(expressions, &tag_id_map);

    repo.find_files_by_filters(filters).await
}

#[tracing::instrument(level = "debug")]
fn get_tag_names_from_expressions(expressions: &Vec<FilterExpression>) -> Vec<String> {
    expressions
        .iter()
        .flat_map(|f| match f {
            FilterExpression::OrExpression(queries) => queries
                .iter()
                .filter_map(|q| match q {
                    FilterQuery::Tag(tag) => Some(tag.tag.to_owned()),
                    _ => None,
                })
                .collect::<Vec<String>>(),
            FilterExpression::Query(q) => match q {
                FilterQuery::Tag(tag) => {
                    vec![tag.tag.to_owned()]
                }
                FilterQuery::Property(_) => {
                    vec![]
                }
            },
        })
        .collect::<Vec<String>>()
}

#[tracing::instrument(level = "debug")]
fn build_filters_from_expressions(
    expressions: Vec<FilterExpression>,
    tag_id_map: &HashMap<String, i64>,
) -> Vec<Vec<FilterProperty>> {
    expressions
        .into_iter()
        .filter_map(|e| {
            let filters = match e {
                FilterExpression::OrExpression(queries) => queries
                    .into_iter()
                    .filter_map(|q| map_query_to_filter(q, tag_id_map))
                    .collect(),
                FilterExpression::Query(q) => {
                    if let Some(filter) = map_query_to_filter(q, tag_id_map) {
                        vec![filter]
                    } else {
                        vec![]
                    }
                }
            };
            if !filters.is_empty() {
                Some(filters)
            } else {
                None
            }
        })
        .collect()
}

fn map_query_to_filter(
    query: FilterQuery,
    tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
    match query {
        FilterQuery::Tag(tag_query) => map_tag_query_to_filter(tag_query, tag_id_map),
        FilterQuery::Property(property) => map_property_query_to_filter(property),
    }
}

fn map_tag_query_to_filter(
    query: TagQuery,
    tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
    if query.tag.ends_with('*') {
        map_wildcard_tag_to_filter(query, tag_id_map)
    } else {
        map_tag_to_filter(query, tag_id_map)
    }
}

fn map_wildcard_tag_to_filter(
    query: TagQuery,
    tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
    let filter_tag = query.tag.trim_end_matches('*');
    let relevant_ids = tag_id_map
        .iter()
        .filter_map(|(name, id)| {
            if name.starts_with(filter_tag) {
                Some(*id)
            } else {
                None
            }
        })
        .collect::<Vec<i64>>();

    if !relevant_ids.is_empty() {
        let comparator = if query.negate {
            IsNot(relevant_ids)
        } else {
            Is(relevant_ids)
        };
        Some(FilterProperty::TagWildcardIds(comparator))
    } else {
        None
    }
}

fn map_tag_to_filter(query: TagQuery, tag_id_map: &HashMap<String, i64>) -> Option<FilterProperty> {
    tag_id_map.get(&query.tag).map(|id| {
        let comparator = if query.negate { IsNot(*id) } else { Is(*id) };
        FilterProperty::TagId(comparator)
    })
}

fn map_property_query_to_filter(query: PropertyQuery) -> Option<FilterProperty> {
    match query {
        PropertyQuery::Status(s) => Some(FilterProperty::FileProperty(FilterFileProperty::Status(
            Is(file_status_to_number(s)),
        ))),
        PropertyQuery::FileSize(s) => Some(FilterProperty::FileProperty(
            FilterFileProperty::FileSize(val_comparator_to_order(s, |v| v as i64)),
        )),
        PropertyQuery::ImportedTime(t) => Some(FilterProperty::FileProperty(
            FilterFileProperty::ImportedTime(val_comparator_to_order(t, |t| t)),
        )),
        PropertyQuery::ChangedTime(t) => Some(FilterProperty::FileProperty(
            FilterFileProperty::ChangedTime(val_comparator_to_order(t, |t| t)),
        )),
        PropertyQuery::CreatedTime(t) => Some(FilterProperty::FileProperty(
            FilterFileProperty::CreatedTime(val_comparator_to_order(t, |t| t)),
        )),
        PropertyQuery::TagCount(c) => {
            Some(FilterProperty::TagCount(val_comparator_to_order(c, |v| {
                v as i64
            })))
        }
        PropertyQuery::Cd(cd) => decode_content_descriptor(cd)
            .ok()
            .map(|cd| FilterProperty::ContentDescriptor(Is(cd))),
        PropertyQuery::Id(id) => Some(FilterProperty::FileProperty(FilterFileProperty::Id(Is(id)))),
    }
}

fn file_status_to_number(status: ApiFileStatus) -> i64 {
    match status {
        ApiFileStatus::Imported => FileStatus::Imported as i64,
        ApiFileStatus::Archived => FileStatus::Archived as i64,
        ApiFileStatus::Deleted => FileStatus::Deleted as i64,
    }
}

#[inline]
fn val_comparator_to_order<T1, T2, F: Fn(T1) -> T2>(
    comp: ValueComparator<T1>,
    conv_fn: F,
) -> OrderingComparator<T2> {
    match comp {
        ValueComparator::Less(v) => OrderingComparator::Less(conv_fn(v)),
        ValueComparator::Equal(v) => OrderingComparator::Equal(conv_fn(v)),
        ValueComparator::Greater(v) => OrderingComparator::Greater(conv_fn(v)),
        ValueComparator::Between((v1, v2)) => {
            OrderingComparator::Between((conv_fn(v1), conv_fn(v2)))
        }
    }
}
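
To illustrate the mapping above: a trailing `*` expands a tag query into every known tag id with that prefix (TagWildcardIds), while a plain tag resolves to a single TagId comparator. A hedged caller sketch, assuming a `repo: Repo` is available and that TagQuery is constructed from the `tag` and `negate` fields used above:

    let expressions = vec![
        // plain tag: must resolve through the tag_id_map
        FilterExpression::Query(FilterQuery::Tag(TagQuery {
            tag: String::from("landscape"),
            negate: false,
        })),
        // wildcard tag: matches every tag starting with "artist:"
        FilterExpression::Query(FilterQuery::Tag(TagQuery {
            tag: String::from("artist:*"),
            negate: false,
        })),
    ];
    let files = find_files_for_filters(&repo, expressions).await?;
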
@ -0,0 +1,193 @@
use chrono::NaiveDateTime;
use compare::Compare;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::filtering::{SortDirection, SortKey};
use mediarepo_database::queries::tags::{
    get_cids_with_namespaced_tags, get_content_descriptors_with_tag_count,
};
use mediarepo_model::file::File;
use mediarepo_model::file_metadata::FileMetadata;
use mediarepo_model::repo::Repo;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::cmp::Ordering;
use std::collections::HashMap;
use std::iter::FromIterator;

pub struct FileSortContext {
    name: Option<String>,
    size: u64,
    mime_type: String,
    namespaces: HashMap<String, Vec<String>>,
    tag_count: u32,
    import_time: NaiveDateTime,
    create_time: NaiveDateTime,
    change_time: NaiveDateTime,
}

#[tracing::instrument(level = "debug", skip(repo, files))]
pub async fn sort_files_by_properties(
    repo: &Repo,
    sort_expression: Vec<SortKey>,
    files: &mut Vec<File>,
) -> RepoResult<()> {
    let contexts = build_sort_context(repo, files).await?;

    files.sort_by(|a, b| {
        compare_files(
            contexts.get(&a.id()).unwrap(),
            contexts.get(&b.id()).unwrap(),
            &sort_expression,
        )
    });

    Ok(())
}

async fn build_sort_context(
    repo: &Repo,
    files: &Vec<File>,
) -> RepoResult<HashMap<i64, FileSortContext>> {
    let hash_ids: Vec<i64> = files.par_iter().map(|f| f.cd_id()).collect();
    let file_ids: Vec<i64> = files.par_iter().map(|f| f.id()).collect();

    let mut cid_nsp: HashMap<i64, HashMap<String, Vec<String>>> =
        get_cids_with_namespaced_tags(repo.db(), hash_ids.clone()).await?;
    let mut cid_tag_counts = get_content_descriptors_with_tag_count(repo.db(), hash_ids).await?;

    let files_metadata = repo.get_file_metadata_for_ids(file_ids).await?;

    let mut file_metadata_map: HashMap<i64, FileMetadata> =
        HashMap::from_iter(files_metadata.into_iter().map(|m| (m.file_id(), m)));

    let mut contexts = HashMap::new();

    for file in files {
        if let Some(metadata) = file_metadata_map.remove(&file.id()) {
            let context = FileSortContext {
                name: metadata.name().to_owned(),
                size: metadata.size() as u64,
                mime_type: file.mime_type().to_owned(),
                namespaces: cid_nsp
                    .remove(&file.cd_id())
                    .unwrap_or_else(|| HashMap::with_capacity(0)),
                tag_count: cid_tag_counts.remove(&file.cd_id()).unwrap_or(0),
                import_time: metadata.import_time().to_owned(),
                create_time: metadata.creation_time().to_owned(),
                change_time: metadata.change_time().to_owned(),
            };
            contexts.insert(file.id(), context);
        }
    }

    Ok(contexts)
}

#[tracing::instrument(level = "trace", skip_all)]
fn compare_files(
    ctx_a: &FileSortContext,
    ctx_b: &FileSortContext,
    expression: &Vec<SortKey>,
) -> Ordering {
    let cmp_date = compare::natural();
    let cmp_u64 = compare::natural();
    let cmp_u32 = compare::natural();

    for sort_key in expression {
        let ordering = match sort_key {
            SortKey::Namespace(namespace) => {
                let list_a = ctx_a.namespaces.get(&namespace.name);
                let list_b = ctx_b.namespaces.get(&namespace.name);

                let cmp_result = if let (Some(list_a), Some(list_b)) = (list_a, list_b) {
                    compare_tag_lists(list_a, list_b)
                } else if list_a.is_some() {
                    Ordering::Greater
                } else if list_b.is_some() {
                    Ordering::Less
                } else {
                    Ordering::Equal
                };
                adjust_for_dir(cmp_result, &namespace.direction)
            }
            SortKey::FileName(direction) => {
                adjust_for_dir(compare_opts(&ctx_a.name, &ctx_b.name), direction)
            }
            SortKey::FileSize(direction) => {
                adjust_for_dir(cmp_u64.compare(&ctx_a.size, &ctx_b.size), direction)
            }
            SortKey::FileImportedTime(direction) => adjust_for_dir(
                cmp_date.compare(&ctx_a.import_time, &ctx_b.import_time),
                direction,
            ),
            SortKey::FileCreatedTime(direction) => adjust_for_dir(
                cmp_date.compare(&ctx_a.create_time, &ctx_b.create_time),
                direction,
            ),
            SortKey::FileChangeTime(direction) => adjust_for_dir(
                cmp_date.compare(&ctx_a.change_time, &ctx_b.change_time),
                direction,
            ),
            SortKey::FileType(direction) => {
                adjust_for_dir(ctx_a.mime_type.cmp(&ctx_b.mime_type), direction)
            }
            SortKey::NumTags(direction) => adjust_for_dir(
                cmp_u32.compare(&ctx_a.tag_count, &ctx_b.tag_count),
                direction,
            ),
        };
        if !ordering.is_eq() {
            return ordering;
        }
    }

    Ordering::Equal
}

fn compare_opts<T: Ord + Sized>(opt_a: &Option<T>, opt_b: &Option<T>) -> Ordering {
    let cmp = compare::natural();
    if let (Some(a), Some(b)) = (opt_a, opt_b) {
        cmp.compare(a, b)
    } else if opt_a.is_some() {
        Ordering::Greater
    } else if opt_b.is_some() {
        Ordering::Less
    } else {
        Ordering::Equal
    }
}

fn compare_f32(a: f32, b: f32) -> Ordering {
    if a > b {
        Ordering::Greater
    } else if b > a {
        Ordering::Less
    } else {
        Ordering::Equal
    }
}

fn adjust_for_dir(ordering: Ordering, direction: &SortDirection) -> Ordering {
    if *direction == SortDirection::Descending {
        ordering.reverse()
    } else {
        ordering
    }
}

fn compare_tag_lists(list_a: &Vec<String>, list_b: &Vec<String>) -> Ordering {
    let first_diff = list_a
        .iter()
        .zip(list_b.iter())
        .find(|(a, b)| *a != *b);
    if let Some(diff) = first_diff {
        if let (Some(num_a), Some(num_b)) = (diff.0.parse::<f32>().ok(), diff.1.parse::<f32>().ok())
        {
            compare_f32(num_a, num_b)
        } else {
            let cmp = compare::natural();
            cmp.compare(diff.0, diff.1)
        }
    } else {
        Ordering::Equal
    }
}
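
compare_files applies the sort keys in order and returns at the first key that distinguishes the two files, so later keys act as tie-breakers. A hedged sketch of a caller, assuming an ascending SortDirection variant exists alongside the Descending one matched above:

    let keys = vec![
        SortKey::FileSize(SortDirection::Descending),  // largest files first
        SortKey::FileName(SortDirection::Ascending),   // variant name assumed; tie-break by name
    ];
    sort_files_by_properties(&repo, keys, &mut files).await?;
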
@ -0,0 +1,55 @@
use crate::utils::{calculate_size, get_repo_from_context};
use mediarepo_core::bromine::prelude::*;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::jobs::{JobType, RunJobRequest};
use mediarepo_core::mediarepo_api::types::repo::SizeType;
use mediarepo_core::type_keys::SizeMetadataKey;

pub struct JobsNamespace;

impl NamespaceProvider for JobsNamespace {
    fn name() -> &'static str {
        "jobs"
    }

    fn register(handler: &mut EventHandler) {
        events!(handler,
            "run_job" => Self::run_job
        )
    }
}

impl JobsNamespace {
    #[tracing::instrument(skip_all)]
    pub async fn run_job(ctx: &Context, event: Event) -> IPCResult<()> {
        let run_request = event.payload::<RunJobRequest>()?;
        let repo = get_repo_from_context(ctx).await;

        match run_request.job_type {
            JobType::MigrateContentDescriptors => repo.migrate().await?,
            JobType::CalculateSizes => calculate_all_sizes(ctx).await?,
            JobType::CheckIntegrity => {}
        }

        ctx.emit_to(Self::name(), "run_job", ()).await?;

        Ok(())
    }
}

async fn calculate_all_sizes(ctx: &Context) -> RepoResult<()> {
    let size_types = vec![
        SizeType::Total,
        SizeType::FileFolder,
        SizeType::ThumbFolder,
        SizeType::DatabaseFile,
    ];
    for size_type in size_types {
        let size = calculate_size(&size_type, ctx).await?;
        let mut data = ctx.data.write().await;
        let size_map = data.get_mut::<SizeMetadataKey>().unwrap();
        size_map.insert(size_type, size);
    }

    Ok(())
}
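
The handler dispatches on RunJobRequest::job_type and emits an empty "run_job" event back to the "jobs" namespace once the job has run. A hedged client-side sketch, assuming RunJobRequest can be constructed from just the job_type field used above:

    // Hypothetical trigger: ask the daemon to recompute its size metrics.
    ctx.emit_to("jobs", "run_job", RunJobRequest { job_type: JobType::CalculateSizes })
        .await?;
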
@ -1,5 +0,0 @@
pub static SETTINGS_PATH: &str = "./repo.toml";
pub static DEFAULT_STORAGE_NAME: &str = "Main";
pub static DEFAULT_STORAGE_PATH: &str = "files";
pub static THUMBNAIL_STORAGE_NAME: &str = "Thumbnails";
pub static THUMBNAIL_STORAGE_PATH: &str = "./thumbnails";