Implement filtering based on file properties

Signed-off-by: trivernis <trivernis@protonmail.com>
Ref: pull/4/head
Author: trivernis, 2 years ago
Parent: 810f9986af
Commit: 9e0c72bd66

@@ -1183,7 +1183,7 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
[[package]]
name = "mediarepo-api"
version = "0.26.0"
source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=845874bafc253b6ed670594dbcf6d754709ac1e4#845874bafc253b6ed670594dbcf6d754709ac1e4"
source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=06d51ed147c4f65361f8351c30e3417ffd457f0c#06d51ed147c4f65361f8351c30e3417ffd457f0c"
dependencies = [
"bromine",
"chrono",

@@ -44,7 +44,7 @@ features = ["toml"]
[dependencies.mediarepo-api]
git = "https://github.com/Trivernis/mediarepo-api.git"
rev = "845874bafc253b6ed670594dbcf6d754709ac1e4"
rev = "06d51ed147c4f65361f8351c30e3417ffd457f0c"
features = ["bromine"]
[features]

@@ -0,0 +1,220 @@
use chrono::NaiveDateTime;
use mediarepo_database::entities::content_descriptor;
use mediarepo_database::entities::content_descriptor_tag;
use mediarepo_database::entities::file;
use mediarepo_database::entities::file_metadata;
use sea_orm::sea_query::{Alias, Expr, IntoColumnRef, Query, SimpleExpr};
use sea_orm::ColumnTrait;
use sea_orm::Condition;
macro_rules! apply_ordering_comparator {
($column:expr, $filter:expr) => {
match $filter {
OrderingComparator::Less(value) => $column.lt(value),
OrderingComparator::Equal(value) => $column.eq(value),
OrderingComparator::Greater(value) => $column.gt(value),
OrderingComparator::Between((min_value, max_value)) => {
$column.between(min_value, max_value)
}
}
};
}
#[derive(Clone, Debug)]
pub enum FilterProperty {
TagId(NegatableComparator<i64>),
TagWildcardIds(NegatableComparator<Vec<i64>>),
ContentDescriptor(NegatableComparator<Vec<u8>>),
TagCount(OrderingComparator<i64>),
FileProperty(FilterFileProperty),
}
#[derive(Clone, Debug)]
pub enum FilterFileProperty {
Id(NegatableComparator<i64>),
Status(NegatableComparator<i64>),
FileSize(OrderingComparator<i64>),
ImportedTime(OrderingComparator<NaiveDateTime>),
ChangedTime(OrderingComparator<NaiveDateTime>),
CreatedTime(OrderingComparator<NaiveDateTime>),
}
#[derive(Clone, Debug)]
pub enum OrderingComparator<T> {
Less(T),
Equal(T),
Greater(T),
Between((T, T)),
}
#[derive(Clone, Debug)]
pub enum NegatableComparator<T> {
Is(T),
IsNot(T),
}
pub fn build_find_filter_conditions(filters: Vec<Vec<FilterProperty>>) -> Condition {
filters
.into_iter()
.fold(Condition::all(), |all_cond, mut expression| {
if expression.len() == 1 {
let property = expression.pop().unwrap();
all_cond.add(build_single_filter(property))
} else if !expression.is_empty() {
let sub_condition = expression.into_iter().fold(Condition::any(), |cond, prop| {
cond.add(build_single_filter(prop))
});
all_cond.add(sub_condition)
} else {
all_cond
}
})
}
#[inline]
fn build_single_filter(property: FilterProperty) -> SimpleExpr {
match property {
FilterProperty::TagId(tag_filter) => build_tag_id_filter(tag_filter),
FilterProperty::TagWildcardIds(wildcard_filter) => {
build_tag_wildcard_ids_filter(wildcard_filter)
}
FilterProperty::ContentDescriptor(cd_filter) => build_content_descriptor_filter(cd_filter),
FilterProperty::TagCount(count_filter) => build_tag_count_filter(count_filter),
FilterProperty::FileProperty(property_filter) => {
build_file_property_filter(property_filter)
}
}
}
fn build_tag_id_filter(filter: NegatableComparator<i64>) -> SimpleExpr {
match filter {
NegatableComparator::Is(tag_id) => content_descriptor::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.eq(tag_id))
.to_owned(),
),
NegatableComparator::IsNot(tag_id) => content_descriptor::Column::Id.not_in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.eq(tag_id))
.to_owned(),
),
}
}
fn build_tag_wildcard_ids_filter(filter: NegatableComparator<Vec<i64>>) -> SimpleExpr {
match filter {
NegatableComparator::Is(tag_ids) => content_descriptor::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.is_in(tag_ids))
.to_owned(),
),
NegatableComparator::IsNot(tag_ids) => content_descriptor::Column::Id.not_in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.is_in(tag_ids))
.to_owned(),
),
}
}
fn build_content_descriptor_filter(filter: NegatableComparator<Vec<u8>>) -> SimpleExpr {
match filter {
NegatableComparator::Is(cd) => content_descriptor::Column::Descriptor.eq(cd),
NegatableComparator::IsNot(cd) => content_descriptor::Column::Descriptor.ne(cd),
}
}
fn build_tag_count_filter(filter: OrderingComparator<i64>) -> SimpleExpr {
let count_subquery = Query::select()
.expr(content_descriptor_tag::Column::TagId.count())
.from(content_descriptor_tag::Entity)
.group_by_col(content_descriptor_tag::Column::CdId)
.to_owned();
let count_column = Alias::new("count").into_column_ref();
let count_expression = match filter {
OrderingComparator::Less(count) => Expr::col(count_column).lt(count),
OrderingComparator::Equal(count) => Expr::col(count_column).eq(count),
OrderingComparator::Greater(count) => Expr::col(count_column).gt(count),
OrderingComparator::Between((min_count, max_count)) => {
Expr::col(count_column).between(min_count, max_count)
}
};
content_descriptor::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from_subquery(count_subquery, Alias::new("tag_counts"))
.cond_where(count_expression)
.to_owned(),
)
}
#[inline]
fn build_file_property_filter(property: FilterFileProperty) -> SimpleExpr {
match property {
FilterFileProperty::Id(id_filter) => build_file_id_filter(id_filter),
FilterFileProperty::Status(status_filter) => build_file_status_filter(status_filter),
FilterFileProperty::FileSize(size_filter) => {
build_file_metadata_filter(build_file_size_filter(size_filter))
}
FilterFileProperty::ImportedTime(time_filter) => {
build_file_metadata_filter(build_file_import_time_filter(time_filter))
}
FilterFileProperty::ChangedTime(time_filter) => {
build_file_metadata_filter(build_file_changed_time_filter(time_filter))
}
FilterFileProperty::CreatedTime(time_filter) => {
build_file_metadata_filter(build_file_created_time_filter(time_filter))
}
}
}
fn build_file_id_filter(filter: NegatableComparator<i64>) -> SimpleExpr {
match filter {
NegatableComparator::Is(id) => file::Column::Id.eq(id),
NegatableComparator::IsNot(id) => file::Column::Id.ne(id),
}
}
fn build_file_status_filter(filter: NegatableComparator<i64>) -> SimpleExpr {
match filter {
NegatableComparator::Is(status) => file::Column::Status.eq(status),
NegatableComparator::IsNot(status) => file::Column::Status.ne(status),
}
}
fn build_file_metadata_filter(property_condition: SimpleExpr) -> SimpleExpr {
file::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(file_metadata::Column::FileId))
.from(file_metadata::Entity)
.cond_where(property_condition)
.to_owned(),
)
}
fn build_file_size_filter(filter: OrderingComparator<i64>) -> SimpleExpr {
apply_ordering_comparator!(file_metadata::Column::Size, filter)
}
fn build_file_import_time_filter(filter: OrderingComparator<NaiveDateTime>) -> SimpleExpr {
apply_ordering_comparator!(file_metadata::Column::ImportTime, filter)
}
fn build_file_changed_time_filter(filter: OrderingComparator<NaiveDateTime>) -> SimpleExpr {
apply_ordering_comparator!(file_metadata::Column::ChangeTime, filter)
}
fn build_file_created_time_filter(filter: OrderingComparator<NaiveDateTime>) -> SimpleExpr {
apply_ordering_comparator!(file_metadata::Column::CreationTime, filter)
}

@@ -1,14 +1,16 @@
pub mod filter;
use std::fmt::Debug;
use std::io::Cursor;
use std::str::FromStr;
use mediarepo_core::content_descriptor::encode_content_descriptor;
use sea_orm::prelude::*;
use sea_orm::sea_query::{Expr, Query};
use sea_orm::{Condition, DatabaseConnection, Set};
use sea_orm::{DatabaseConnection, Set};
use sea_orm::{JoinType, QuerySelect};
use tokio::io::{AsyncReadExt, BufReader};
use crate::file::filter::FilterProperty;
use crate::file_metadata::FileMetadata;
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::fs::file_hash_store::FileHashStore;
@@ -99,11 +101,11 @@ impl File {
/// Finds the file by tags
#[tracing::instrument(level = "debug", skip(db))]
pub(crate) async fn find_by_tags(
pub(crate) async fn find_by_filters(
db: DatabaseConnection,
tag_ids: Vec<Vec<(i64, bool)>>,
filters: Vec<Vec<FilterProperty>>,
) -> RepoResult<Vec<Self>> {
let main_condition = build_find_filter_conditions(tag_ids);
let main_condition = filter::build_find_filter_conditions(filters);
let results: Vec<(content_descriptor::Model, Option<file::Model>)> =
content_descriptor::Entity::find()
@@ -285,46 +287,3 @@ impl File {
Ok(thumbs)
}
}
fn build_find_filter_conditions(tag_ids: Vec<Vec<(i64, bool)>>) -> Condition {
let mut main_condition = Condition::all();
for mut expression in tag_ids {
if expression.len() == 1 {
let (tag_id, negated) = expression.pop().unwrap();
main_condition = add_single_filter_expression(main_condition, tag_id, negated)
} else if !expression.is_empty() {
let mut sub_condition = Condition::any();
for (tag, negated) in expression {
sub_condition = add_single_filter_expression(sub_condition, tag, negated);
}
main_condition = main_condition.add(sub_condition);
}
}
main_condition
}
fn add_single_filter_expression(condition: Condition, tag_id: i64, negated: bool) -> Condition {
if negated {
condition.add(
content_descriptor::Column::Id.not_in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.eq(tag_id))
.to_owned(),
),
)
} else {
condition.add(
content_descriptor::Column::Id.in_subquery(
Query::select()
.expr(Expr::col(content_descriptor_tag::Column::CdId))
.from(content_descriptor_tag::Entity)
.cond_where(content_descriptor_tag::Column::TagId.eq(tag_id))
.to_owned(),
),
)
}
}

@@ -1,4 +1,5 @@
use crate::content_descriptor::ContentDescriptor;
use crate::file::filter::FilterProperty;
use crate::file::File;
use crate::file_metadata::FileMetadata;
use crate::namespace::Namespace;
@@ -80,23 +81,11 @@ impl Repo {
/// Finds all files by a list of tags
#[tracing::instrument(level = "debug", skip(self))]
pub async fn find_files_by_tags(
pub async fn find_files_by_filters(
&self,
tags: Vec<Vec<(String, bool)>>,
filters: Vec<Vec<FilterProperty>>,
) -> RepoResult<Vec<File>> {
let parsed_tags = tags
.iter()
.flat_map(|e| e.into_iter().map(|t| parse_namespace_and_tag(t.0.clone())))
.unique()
.collect();
let db_tags = self.tags_by_names(parsed_tags).await?;
let tag_map: HashMap<String, i64> =
HashMap::from_iter(db_tags.into_iter().map(|t| (t.normalized_name(), t.id())));
let tag_ids = process_filters_with_tag_ids(tags, tag_map);
File::find_by_tags(self.db.clone(), tag_ids).await
File::find_by_filters(self.db.clone(), filters).await
}
/// Returns all file metadata entries for the given file ids
@@ -257,6 +246,22 @@ impl Repo {
Namespace::all(self.db.clone()).await
}
/// Converts a list of tag names to tag ids
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tag_names_to_ids(&self, tags: Vec<String>) -> RepoResult<HashMap<String, i64>> {
let parsed_tags = tags
.iter()
.map(|tag| parse_namespace_and_tag(tag.clone()))
.unique()
.collect();
let db_tags = self.tags_by_names(parsed_tags).await?;
let tag_map: HashMap<String, i64> =
HashMap::from_iter(db_tags.into_iter().map(|t| (t.normalized_name(), t.id())));
Ok(tag_map)
}
/// Finds all tags by name
#[tracing::instrument(level = "debug", skip(self))]
pub async fn tags_by_names(&self, tags: Vec<(Option<String>, String)>) -> RepoResult<Vec<Tag>> {
@@ -413,49 +418,3 @@ impl Repo {
Ok(())
}
}
fn process_filters_with_tag_ids(
filters: Vec<Vec<(String, bool)>>,
tag_ids: HashMap<String, i64>,
) -> Vec<Vec<(i64, bool)>> {
let mut id_filters = Vec::new();
for expression in filters {
let mut id_sub_filters = Vec::new();
let mut negated_wildcard_filters = Vec::new();
for (tag, negate) in expression {
if tag.ends_with("*") {
let tag_prefix = tag.trim_end_matches('*');
let mut found_tag_ids = tag_ids
.iter()
.filter(|(k, _)| k.starts_with(tag_prefix))
.map(|(_, id)| (*id, negate))
.collect::<Vec<(i64, bool)>>();
if negate {
negated_wildcard_filters.push(found_tag_ids)
} else {
id_sub_filters.append(&mut found_tag_ids);
}
} else {
if let Some(id) = tag_ids.get(&tag) {
id_sub_filters.push((*id, negate));
}
}
}
if !negated_wildcard_filters.is_empty() {
for wildcard_filter in negated_wildcard_filters {
for query in wildcard_filter {
let mut sub_filters = id_sub_filters.clone();
sub_filters.push(query);
id_filters.push(sub_filters)
}
}
} else if !id_sub_filters.is_empty() {
id_filters.push(id_sub_filters);
}
}
id_filters
}
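With the tag-to-id preprocessing removed from the Repo query path, resolving tag names is now an explicit, separate step via tag_names_to_ids. A small hypothetical helper (not part of the commit) showing the two calls together:

use mediarepo_core::error::RepoResult;
use mediarepo_model::file::filter::{FilterProperty, NegatableComparator::Is};
use mediarepo_model::file::File;
use mediarepo_model::repo::Repo;

// Hypothetical helper: find all files carrying a single tag.
async fn files_with_tag(repo: &Repo, tag: String) -> RepoResult<Vec<File>> {
    // tag_names_to_ids keys its map by normalized tag names, so `tag` is assumed
    // to already be in its normalized "namespace:name" or plain-name form.
    let tag_id_map = repo.tag_names_to_ids(vec![tag.clone()]).await?;
    let tag_id = match tag_id_map.get(&tag) {
        Some(id) => *id,
        None => return Ok(Vec::new()), // unknown tag: nothing can match
    };
    let filters = vec![vec![FilterProperty::TagId(Is(tag_id))]];
    repo.find_files_by_filters(filters).await
}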

@@ -2,8 +2,9 @@ mod searching;
mod sorting;
use crate::from_model::FromModel;
use crate::namespaces::files::searching::find_files_for_filters;
use crate::namespaces::files::sorting::sort_files_by_properties;
use crate::utils::{cd_by_identifier, file_by_identifier, get_repo_from_context};
use chrono::NaiveDateTime;
use mediarepo_core::bromine::prelude::*;
use mediarepo_core::fs::thumbnail_store::Dimensions;
use mediarepo_core::itertools::Itertools;
@@ -12,30 +13,13 @@ use mediarepo_core::mediarepo_api::types::files::{
GetFileThumbnailOfSizeRequest, GetFileThumbnailsRequest, ReadFileRequest,
ThumbnailMetadataResponse, UpdateFileNameRequest,
};
use mediarepo_core::mediarepo_api::types::filtering::{FilterExpression, FindFilesRequest};
use mediarepo_core::mediarepo_api::types::filtering::FindFilesRequest;
use mediarepo_core::mediarepo_api::types::identifier::FileIdentifier;
use mediarepo_core::thumbnailer::ThumbnailSize;
use mediarepo_core::utils::parse_namespace_and_tag;
use mediarepo_database::queries::tags::{
get_cids_with_namespaced_tags, get_content_descriptors_with_tag_count,
};
use mediarepo_model::file_metadata::FileMetadata;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::collections::HashMap;
use std::iter::FromIterator;
use tokio::io::AsyncReadExt;
pub struct FilesNamespace;
pub struct FileSortContext {
name: Option<String>,
size: u64,
mime_type: String,
namespaces: HashMap<String, Vec<String>>,
tag_count: u32,
import_time: NaiveDateTime,
create_time: NaiveDateTime,
change_time: NaiveDateTime,
}
impl NamespaceProvider for FilesNamespace {
fn name() -> &'static str {
@@ -130,61 +114,8 @@
let req = event.payload::<FindFilesRequest>()?;
let repo = get_repo_from_context(ctx).await;
let tags = req
.filters
.into_iter()
.map(|e| match e {
FilterExpression::OrExpression(tags) => {
tags.into_iter().map(|t| (t.tag, t.negate)).collect_vec()
}
FilterExpression::Query(tag) => {
vec![(tag.tag, tag.negate)]
}
})
.collect();
let mut files = repo.find_files_by_tags(tags).await?;
let hash_ids: Vec<i64> = files.par_iter().map(|f| f.cd_id()).collect();
let file_ids: Vec<i64> = files.par_iter().map(|f| f.id()).collect();
let mut cid_nsp: HashMap<i64, HashMap<String, Vec<String>>> =
get_cids_with_namespaced_tags(repo.db(), hash_ids.clone()).await?;
let mut cid_tag_counts =
get_content_descriptors_with_tag_count(repo.db(), hash_ids).await?;
let files_metadata = repo.get_file_metadata_for_ids(file_ids).await?;
let mut file_metadata_map: HashMap<i64, FileMetadata> =
HashMap::from_iter(files_metadata.into_iter().map(|m| (m.file_id(), m)));
let mut contexts = HashMap::new();
for file in &files {
if let Some(metadata) = file_metadata_map.remove(&file.id()) {
let context = FileSortContext {
name: metadata.name().to_owned(),
size: metadata.size() as u64,
mime_type: file.mime_type().to_owned(),
namespaces: cid_nsp
.remove(&file.cd_id())
.unwrap_or(HashMap::with_capacity(0)),
tag_count: cid_tag_counts.remove(&file.cd_id()).unwrap_or(0),
import_time: metadata.import_time().to_owned(),
create_time: metadata.import_time().to_owned(),
change_time: metadata.change_time().to_owned(),
};
contexts.insert(file.id(), context);
}
}
let sort_expression = req.sort_expression;
tracing::debug!("sort_expression = {:?}", sort_expression);
files.sort_by(|a, b| {
sorting::compare_files(
contexts.get(&a.id()).unwrap(),
contexts.get(&b.id()).unwrap(),
&sort_expression,
)
});
let mut files = find_files_for_filters(&repo, req.filters).await?;
sort_files_by_properties(&repo, req.sort_expression, &mut files).await?;
let responses: Vec<FileBasicDataResponse> = files
.into_iter()

@@ -0,0 +1,182 @@
use mediarepo_core::content_descriptor::decode_content_descriptor;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::files::FileStatus as ApiFileStatus;
use mediarepo_core::mediarepo_api::types::filtering::{
FilterExpression, FilterQuery, PropertyQuery, TagQuery, ValueComparator,
};
use mediarepo_model::file::filter::NegatableComparator::{Is, IsNot};
use mediarepo_model::file::filter::{FilterFileProperty, FilterProperty, OrderingComparator};
use mediarepo_model::file::{File, FileStatus};
use mediarepo_model::repo::Repo;
use std::collections::HashMap;
pub async fn find_files_for_filters(
repo: &Repo,
expressions: Vec<FilterExpression>,
) -> RepoResult<Vec<File>> {
let tag_names = get_tag_names_from_expressions(&expressions);
let tag_id_map = repo.tag_names_to_ids(tag_names).await?;
let filters = build_filters_from_expressions(expressions, &tag_id_map);
repo.find_files_by_filters(filters).await
}
fn get_tag_names_from_expressions(expressions: &Vec<FilterExpression>) -> Vec<String> {
expressions
.iter()
.flat_map(|f| match f {
FilterExpression::OrExpression(queries) => queries
.iter()
.filter_map(|q| match q {
FilterQuery::Tag(tag) => Some(tag.tag.to_owned()),
_ => None,
})
.collect::<Vec<String>>(),
FilterExpression::Query(q) => match q {
FilterQuery::Tag(tag) => {
vec![tag.tag.to_owned()]
}
FilterQuery::Property(_) => {
vec![]
}
},
})
.collect::<Vec<String>>()
}
fn build_filters_from_expressions(
expressions: Vec<FilterExpression>,
tag_id_map: &HashMap<String, i64>,
) -> Vec<Vec<FilterProperty>> {
expressions
.into_iter()
.filter_map(|e| {
let filters = match e {
FilterExpression::OrExpression(queries) => queries
.into_iter()
.filter_map(|q| map_query_to_filter(q, tag_id_map))
.collect(),
FilterExpression::Query(q) => {
if let Some(filter) = map_query_to_filter(q, tag_id_map) {
vec![filter]
} else {
vec![]
}
}
};
if filters.len() > 0 {
Some(filters)
} else {
None
}
})
.collect()
}
fn map_query_to_filter(
query: FilterQuery,
tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
match query {
FilterQuery::Tag(tag_query) => map_tag_query_to_filter(tag_query, tag_id_map),
FilterQuery::Property(property) => map_property_query_to_filter(property),
}
}
fn map_tag_query_to_filter(
query: TagQuery,
tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
if query.tag.ends_with("*") {
map_wildcard_tag_to_filter(query, tag_id_map)
} else {
map_tag_to_filter(query, tag_id_map)
}
}
fn map_wildcard_tag_to_filter(
query: TagQuery,
tag_id_map: &HashMap<String, i64>,
) -> Option<FilterProperty> {
let filter_tag = query.tag.trim_end_matches("*");
let relevant_ids = tag_id_map
.iter()
.filter_map(|(name, id)| {
if name.starts_with(filter_tag) {
Some(*id)
} else {
None
}
})
.collect::<Vec<i64>>();
if relevant_ids.len() > 0 {
let comparator = if query.negate {
IsNot(relevant_ids)
} else {
Is(relevant_ids)
};
Some(FilterProperty::TagWildcardIds(comparator))
} else {
None
}
}
fn map_tag_to_filter(query: TagQuery, tag_id_map: &HashMap<String, i64>) -> Option<FilterProperty> {
tag_id_map.get(&query.tag).map(|id| {
let comparator = if query.negate { IsNot(*id) } else { Is(*id) };
FilterProperty::TagId(comparator)
})
}
fn map_property_query_to_filter(query: PropertyQuery) -> Option<FilterProperty> {
match query {
PropertyQuery::Status(s) => Some(FilterProperty::FileProperty(FilterFileProperty::Status(
Is(file_status_to_number(s)),
))),
PropertyQuery::FileSize(s) => Some(FilterProperty::FileProperty(
FilterFileProperty::FileSize(val_comparator_to_order(s, |v| v as i64)),
)),
PropertyQuery::ImportedTime(t) => Some(FilterProperty::FileProperty(
FilterFileProperty::ImportedTime(val_comparator_to_order(t, |t| t)),
)),
PropertyQuery::ChangedTime(t) => Some(FilterProperty::FileProperty(
FilterFileProperty::ChangedTime(val_comparator_to_order(t, |t| t)),
)),
PropertyQuery::CreatedTime(t) => Some(FilterProperty::FileProperty(
FilterFileProperty::CreatedTime(val_comparator_to_order(t, |t| t)),
)),
PropertyQuery::TagCount(c) => {
Some(FilterProperty::TagCount(val_comparator_to_order(c, |v| {
v as i64
})))
}
PropertyQuery::Cd(cd) => decode_content_descriptor(cd)
.ok()
.map(|cd| FilterProperty::ContentDescriptor(Is(cd))),
PropertyQuery::Id(id) => Some(FilterProperty::FileProperty(FilterFileProperty::Id(Is(id)))),
}
}
fn file_status_to_number(status: ApiFileStatus) -> i64 {
match status {
ApiFileStatus::Imported => FileStatus::Imported as i64,
ApiFileStatus::Archived => FileStatus::Archived as i64,
ApiFileStatus::Deleted => FileStatus::Deleted as i64,
}
}
#[inline]
fn val_comparator_to_order<T1, T2, F: Fn(T1) -> T2>(
comp: ValueComparator<T1>,
conv_fn: F,
) -> OrderingComparator<T2> {
match comp {
ValueComparator::Less(v) => OrderingComparator::Less(conv_fn(v)),
ValueComparator::Equal(v) => OrderingComparator::Equal(conv_fn(v)),
ValueComparator::Greater(v) => OrderingComparator::Greater(conv_fn(v)),
ValueComparator::Between((v1, v2)) => {
OrderingComparator::Between((conv_fn(v1), conv_fn(v2)))
}
}
}
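For orientation, a hedged sketch of a daemon-side caller of find_files_for_filters; the import path matches the one added in mod.rs above, while the struct literal for TagQuery and the variant shapes of FilterQuery, PropertyQuery, and ValueComparator are inferred from their use in this file and may differ in detail:

use crate::namespaces::files::searching::find_files_for_filters;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::filtering::{
    FilterExpression, FilterQuery, PropertyQuery, TagQuery, ValueComparator,
};
use mediarepo_model::file::File;
use mediarepo_model::repo::Repo;

// Hypothetical query: "tagged `landscape` AND more than 5 tags overall".
async fn example(repo: &Repo) -> RepoResult<Vec<File>> {
    let expressions = vec![
        FilterExpression::Query(FilterQuery::Tag(TagQuery {
            tag: String::from("landscape"),
            negate: false, // assumes TagQuery carries only `tag` and `negate`
        })),
        FilterExpression::Query(FilterQuery::Property(PropertyQuery::TagCount(
            ValueComparator::Greater(5),
        ))),
    ];
    find_files_for_filters(repo, expressions).await
}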

@@ -1,10 +1,88 @@
use crate::namespaces::files::FileSortContext;
use chrono::NaiveDateTime;
use compare::Compare;
use mediarepo_core::error::RepoResult;
use mediarepo_core::mediarepo_api::types::filtering::{SortDirection, SortKey};
use mediarepo_database::queries::tags::{
get_cids_with_namespaced_tags, get_content_descriptors_with_tag_count,
};
use mediarepo_model::file::File;
use mediarepo_model::file_metadata::FileMetadata;
use mediarepo_model::repo::Repo;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::cmp::Ordering;
use std::collections::HashMap;
use std::iter::FromIterator;
pub struct FileSortContext {
name: Option<String>,
size: u64,
mime_type: String,
namespaces: HashMap<String, Vec<String>>,
tag_count: u32,
import_time: NaiveDateTime,
create_time: NaiveDateTime,
change_time: NaiveDateTime,
}
pub async fn sort_files_by_properties(
repo: &Repo,
sort_expression: Vec<SortKey>,
files: &mut Vec<File>,
) -> RepoResult<()> {
let contexts = build_sort_context(repo, files).await?;
files.sort_by(|a, b| {
compare_files(
contexts.get(&a.id()).unwrap(),
contexts.get(&b.id()).unwrap(),
&sort_expression,
)
});
Ok(())
}
async fn build_sort_context(
repo: &Repo,
files: &Vec<File>,
) -> RepoResult<HashMap<i64, FileSortContext>> {
let hash_ids: Vec<i64> = files.par_iter().map(|f| f.cd_id()).collect();
let file_ids: Vec<i64> = files.par_iter().map(|f| f.id()).collect();
let mut cid_nsp: HashMap<i64, HashMap<String, Vec<String>>> =
get_cids_with_namespaced_tags(repo.db(), hash_ids.clone()).await?;
let mut cid_tag_counts = get_content_descriptors_with_tag_count(repo.db(), hash_ids).await?;
let files_metadata = repo.get_file_metadata_for_ids(file_ids).await?;
let mut file_metadata_map: HashMap<i64, FileMetadata> =
HashMap::from_iter(files_metadata.into_iter().map(|m| (m.file_id(), m)));
let mut contexts = HashMap::new();
for file in files {
if let Some(metadata) = file_metadata_map.remove(&file.id()) {
let context = FileSortContext {
name: metadata.name().to_owned(),
size: metadata.size() as u64,
mime_type: file.mime_type().to_owned(),
namespaces: cid_nsp
.remove(&file.cd_id())
.unwrap_or(HashMap::with_capacity(0)),
tag_count: cid_tag_counts.remove(&file.cd_id()).unwrap_or(0),
import_time: metadata.import_time().to_owned(),
create_time: metadata.import_time().to_owned(),
change_time: metadata.change_time().to_owned(),
};
contexts.insert(file.id(), context);
}
}
Ok(contexts)
}
#[tracing::instrument(level = "trace", skip_all)]
pub fn compare_files(
fn compare_files(
ctx_a: &FileSortContext,
ctx_b: &FileSortContext,
expression: &Vec<SortKey>,
