Update API version

Signed-off-by: trivernis <trivernis@protonmail.com>
pull/4/head
trivernis 3 years ago
parent d704cc91fa
commit 066914f50e

@ -848,7 +848,7 @@ dependencies = [
[[package]]
name = "mediarepo-api"
version = "0.1.0"
source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=476b9d152457f78c73f6f6a36c2421cbce9c9194#476b9d152457f78c73f6f6a36c2421cbce9c9194"
source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=822abb32a1aa35f209f01a6bafc3106f56d11ddc#822abb32a1aa35f209f01a6bafc3106f56d11ddc"
dependencies = [
"chrono",
"serde",
@ -883,6 +883,7 @@ dependencies = [
"mediarepo-core",
"sea-orm",
"sqlx",
"tracing",
]
[[package]]
@ -909,6 +910,7 @@ dependencies = [
"compare",
"mediarepo-api",
"mediarepo-core",
"mediarepo-database",
"mediarepo-model",
"serde",
"tokio",

@ -764,6 +764,7 @@ dependencies = [
"mediarepo-core",
"sea-orm",
"sqlx",
"tracing",
]
[[package]]

@ -7,6 +7,7 @@ edition = "2018"
[dependencies]
chrono = "0.4.19"
tracing = "0.1.29"
[dependencies.mediarepo-core]
path = "../mediarepo-core"

@ -1,43 +1,53 @@
use crate::entities::hash;
use crate::entities::hash_tag;
use crate::entities::namespace;
use crate::entities::tag;
use sea_orm::prelude::*;
use sea_orm::sea_query::Query;
use sea_orm::{DatabaseConnection, JoinType};
use mediarepo_core::error::RepoResult;
use sea_orm::DbBackend;
use sea_orm::FromQueryResult;
use sea_orm::{DatabaseConnection, Statement};
use std::collections::HashMap;
/*
use std::iter::FromIterator;
#[derive(Debug, FromQueryResult)]
struct HashNamespaceTags {
hash_id: i64,
namespace: String,
tag: String,
}
#[tracing::instrument(level = "debug", skip_all)]
pub async fn get_hashes_with_namespaced_tags(
db: DatabaseConnection,
db: &DatabaseConnection,
hash_ids: Vec<i64>,
) -> HashMap<i64, HashMap<String, String>> {
Query::select()
.expr(hash_tag::Column::HashId)
.expr(tag::Column::Name)
.expr(namespace::Column::Name)
.from(tag::Entity)
.join(
JoinType::LeftJoin,
hash_tag::Entity,
hash_tag::Column::TagId.eq(tag::Column::Id),
)
.join(
JoinType::InnerJoin,
namespace::Entity,
tag::Column::NamespaceId.eq(namespace::Column::Id),
)
.build(&db)
.await?;
let tags: Vec<(tag::Model, Option<namespace::Model>)> = tag::Entity::find()
.find_also_related(namespace::Entity)
.join(JoinType::LeftJoin, hash_tag::Relation::Tag.def().rev())
.join(JoinType::InnerJoin, hash_tag::Relation::Hash.def())
.filter(hash::Column::Id.eq(self.hash.id))
.all(&self.db)
) -> RepoResult<HashMap<i64, HashMap<String, String>>> {
let hash_namespace_tags: Vec<HashNamespaceTags> =
HashNamespaceTags::find_by_statement(Statement::from_sql_and_values(
DbBackend::Sqlite,
format!(
r#"SELECT htm.hash_id, n.name as namespace, t.name as tag
FROM hash_tag_mappings htm
INNER JOIN tags t on htm.tag_id = t.id
JOIN namespaces n on t.namespace_id = n.id
WHERE t.namespace_id IS NOT NULL
AND htm.hash_id IN ({});"#,
hash_ids
.into_iter()
.fold(String::new(), |acc, val| format!("{}{},", acc, val))
.trim_end_matches(",")
)
.as_str(),
vec![],
))
.all(&db)
.await?;
let tags = tags
.into_iter()
.map(|(tag, namespace)| Tag::new(self.db.clone(), tag, namespace))
.collect();
let mut hash_namespaces: HashMap<i64, HashMap<String, String>> = HashMap::new();
for hnt in hash_namespace_tags {
if let Some(entry) = hash_namespaces.get_mut(&hnt.hash_id) {
entry.insert(hnt.namespace, hnt.tag);
} else {
hash_namespaces.insert(
hnt.hash_id,
HashMap::from_iter(vec![(hnt.namespace, hnt.tag)].into_iter()),
);
}
}
Ok(hash_namespaces)
}
*/

@ -764,6 +764,7 @@ dependencies = [
"mediarepo-core",
"sea-orm",
"sqlx",
"tracing",
]
[[package]]

@ -169,6 +169,11 @@ impl File {
&self.hash.value
}
/// Returns the hash id of the file, i.e. the primary key of the associated
/// hash row. Useful for raw SQL queries that key on hash ids.
pub fn hash_id(&self) -> i64 {
self.hash.id
}
/// Returns the type of the file
pub fn file_type(&self) -> FileType {
match self.model.file_type {

@ -40,6 +40,11 @@ impl Repo {
Ok(Self::new(db))
}
/// Returns a shared reference to the repo's database connection,
/// intended for running raw SQL queries against the repository database.
pub fn db(&self) -> &DatabaseConnection {
&self.db
}
/// Returns all available storages
#[tracing::instrument(level = "debug", skip(self))]
pub async fn storages(&self) -> RepoResult<Vec<Storage>> {

@ -770,7 +770,7 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
[[package]]
name = "mediarepo-api"
version = "0.1.0"
source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=476b9d152457f78c73f6f6a36c2421cbce9c9194#476b9d152457f78c73f6f6a36c2421cbce9c9194"
source = "git+https://github.com/Trivernis/mediarepo-api.git?rev=822abb32a1aa35f209f01a6bafc3106f56d11ddc#822abb32a1aa35f209f01a6bafc3106f56d11ddc"
dependencies = [
"chrono",
"serde",
@ -805,6 +805,7 @@ dependencies = [
"mediarepo-core",
"sea-orm",
"sqlx",
"tracing",
]
[[package]]
@ -831,6 +832,7 @@ dependencies = [
"compare",
"mediarepo-api",
"mediarepo-core",
"mediarepo-database",
"mediarepo-model",
"serde",
"tokio",

@ -13,6 +13,9 @@ compare = "0.1.0"
[dependencies.mediarepo-core]
path = "../mediarepo-core"
[dependencies.mediarepo-database]
path = "../mediarepo-database"
[dependencies.mediarepo-model]
path = "../mediarepo-model"
@ -30,4 +33,4 @@ features = ["tokio-executor"]
[dependencies.mediarepo-api]
git = "https://github.com/Trivernis/mediarepo-api.git"
rev = "476b9d152457f78c73f6f6a36c2421cbce9c9194"
rev = "822abb32a1aa35f209f01a6bafc3106f56d11ddc"

@ -5,13 +5,12 @@ use mediarepo_api::types::files::{
AddFileRequest, FileMetadataResponse, FindFilesByTagsRequest, GetFileThumbnailsRequest,
ReadFileRequest, SortDirection, SortKey, ThumbnailMetadataResponse,
};
use mediarepo_core::error::{RepoError, RepoResult};
use mediarepo_core::futures::future;
use mediarepo_core::error::RepoError;
use mediarepo_core::rmp_ipc::prelude::*;
use mediarepo_database::queries::tags::get_hashes_with_namespaced_tags;
use mediarepo_model::file::File;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::iter::FromIterator;
use std::path::PathBuf;
use tokio::io::AsyncReadExt;
@ -60,35 +59,21 @@ impl FilesNamespace {
let repo = get_repo_from_context(ctx).await;
let tags = req.tags.into_iter().map(|t| (t.name, t.negate)).collect();
let mut files = repo.find_files_by_tags(tags).await?;
let hash_ids = files.iter().map(|f| f.hash_id()).collect();
let files_nsp: HashMap<String, HashMap<String, String>> = HashMap::from_iter(
future::join_all(files.iter().map(|f| {
let file = f.clone();
async move {
let result: RepoResult<(String, HashMap<String, String>)> =
Ok((f.hash().clone(), get_namespaces_for_file(&file).await?));
result
}
}))
.await
.into_iter()
.filter_map(|r| match r {
Ok(value) => Some(value),
Err(e) => {
tracing::error!("{:?}", e);
None
}
}),
);
let hash_nsp: HashMap<i64, HashMap<String, String>> =
get_hashes_with_namespaced_tags(repo.db(), hash_ids).await?;
let sort_expression = req.sort_expression;
tracing::debug!("sort_expression = {:?}", sort_expression);
let empty_map = HashMap::with_capacity(0);
files.sort_by(|a, b| {
compare_files(
a,
files_nsp.get(a.hash()).unwrap(),
hash_nsp.get(&a.hash_id()).unwrap_or(&empty_map),
b,
files_nsp.get(b.hash()).unwrap(),
hash_nsp.get(&b.hash_id()).unwrap_or(&empty_map),
&sort_expression,
)
});
@ -205,8 +190,8 @@ fn compare_files(
for sort_key in expression {
let ordering = match sort_key {
SortKey::Namespace(namespace) => {
let tag_a = nsp_a.get(&namespace.tag);
let tag_b = nsp_b.get(&namespace.tag);
let tag_a = nsp_a.get(&namespace.name);
let tag_b = nsp_b.get(&namespace.name);
if let (Some(a), Some(b)) = (
tag_a.and_then(|a| a.parse::<f32>().ok()),
@ -252,17 +237,6 @@ fn compare_files(
Ordering::Equal
}
/// Collects the namespaced tags of a single file into a map of
/// namespace name -> tag name.
///
/// Tags without a namespace are dropped: `tag.namespace()?` evaluates to
/// `None` inside the closure, which `filter_map` discards. If a file has
/// several tags in the same namespace, later entries overwrite earlier ones.
async fn get_namespaces_for_file(file: &File) -> RepoResult<HashMap<String, String>> {
let tags = file.tags().await?;
let namespaces: HashMap<String, String> =
HashMap::from_iter(tags.into_iter().filter_map(|tag| {
let namespace = tag.namespace()?;
Some((namespace.name().clone(), tag.name().clone()))
}));
Ok(namespaces)
}
fn compare_opts<T: Ord + Sized>(opt_a: Option<T>, opt_b: Option<T>) -> Ordering {
let cmp = compare::natural();
if let (Some(a), Some(b)) = (&opt_a, &opt_b) {

Loading…
Cancel
Save