diff --git a/Cargo.toml b/Cargo.toml
index bfc3cb1..a4f09d6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "hydrus-api"
-version = "0.3.5"
+version = "0.4.0"
 authors = ["trivernis "]
 edition = "2018"
 license = "Apache-2.0"
@@ -15,6 +15,7 @@ serde = {version = "^1.0", features = ["derive"]}
 reqwest = {version = "0.11.4", features = ["json"]}
 log = "0.4.14"
 mime = "0.3.16"
+chrono = "0.4.19"
 
 [dev-dependencies]
 env_logger = "0.8.4"
diff --git a/README.md b/README.md
index 3a88b9d..4977b61 100644
--- a/README.md
+++ b/README.md
@@ -23,6 +23,9 @@ use hydrus_api::wrapper::tag::Tag;
 use hydrus_api::wrapper::service::ServiceName;
 use hydrus_api::wrapper::hydrus_file::FileStatus;
 use hydrus_api::wrapper::page::PageIdentifier;
+use hydrus_api::wrapper::builders::tag_builder::{
+    SystemTagBuilder, Comparator
+};
 
 #[tokio::main]
 async fn main() {
@@ -30,7 +33,11 @@ async fn main() {
     let hydrus_url = env::var("HYDRUS_URL").unwrap();
     let access_key = env::var("HYDRUS_ACCESS_KEY").unwrap();
     let hydrus = Hydrus::new(Client::new(hydrus_url, access_key));
-    let files = hydrus.search(FileSearchLocation::Archive,vec![Tag::from("character:megumin")]).await.unwrap();
+    let files = hydrus.search(vec![
+        Tag::from("character:megumin"),
+        SystemTagBuilder::new().archive().build(),
+        SystemTagBuilder::new().number_of_tags(Comparator::Greater, 12).build(),
+    ]).await.unwrap();
 
     for mut file in files {
         file.add_tags(ServiceName::my_tags(), vec![Tag::from("ark mage")]).await.unwrap();
diff --git a/src/api_core/client.rs b/src/api_core/client.rs
index 2b56966..abe4391 100644
--- a/src/api_core/client.rs
+++ b/src/api_core/client.rs
@@ -17,11 +17,11 @@ use crate::api_core::managing_cookies_and_http_headers::{
     SetUserAgentRequest,
 };
 use crate::api_core::managing_pages::{
-    FocusPage, FocusPageRequest, GetPageInfo, GetPageInfoResponse, GetPages, GetPagesResponse,
+    AddFiles, AddFilesRequest, FocusPage, FocusPageRequest, GetPageInfo, GetPageInfoResponse,
+    GetPages, GetPagesResponse,
 };
 use crate::api_core::searching_and_fetching_files::{
-    FileMetadata, FileMetadataResponse, FileSearchLocation, GetFile, SearchFiles,
-    SearchFilesResponse,
+    FileMetadata, FileMetadataResponse, GetFile, SearchFiles, SearchFilesResponse,
 };
 use crate::api_core::Endpoint;
 use crate::error::{Error, Result};
@@ -222,17 +222,12 @@ impl Client {
     }
 
     /// Searches for files in the inbox, the archive or both
-    pub async fn search_files(
-        &self,
-        tags: Vec<String>,
-        location: FileSearchLocation,
-    ) -> Result<SearchFilesResponse> {
-        log::trace!("Searching for files in {:?} with tags {:?}", location, tags);
-        self.get_and_parse::<SearchFiles, [(&str, String)]>(&[
-            ("tags", string_list_to_json_array(tags)),
-            ("system_inbox", location.is_inbox().to_string()),
-            ("system_archive", location.is_archive().to_string()),
-        ])
+    pub async fn search_files(&self, tags: Vec<String>) -> Result<SearchFilesResponse> {
+        log::trace!("Searching for files with tags {:?}", tags);
+        self.get_and_parse::<SearchFiles, [(&str, String)]>(&[(
+            "tags",
+            string_list_to_json_array(tags),
+        )])
         .await
     }
 
@@ -367,6 +362,30 @@ impl Client {
         Ok(())
     }
 
+    /// Adds files to a page
+    pub async fn add_files_to_page<S: ToString>(
+        &self,
+        page_key: S,
+        file_ids: Vec<u64>,
+        hashes: Vec<String>,
+    ) -> Result<()> {
+        let page_key = page_key.to_string();
+        log::trace!(
+            "Adding files with ids {:?} or hashes {:?} to page {}",
+            file_ids,
+            hashes,
+            page_key
+        );
+        self.post::<AddFiles>(AddFilesRequest {
+            page_key,
+            file_ids,
+            hashes,
+        })
+        .await?;
+
+        Ok(())
+    }
+
     /// Returns all cookies for the given domain
     pub async fn get_cookies<S: AsRef<str>>(&self, domain: S) -> Result<GetCookiesResponse> {
         log::trace!("Getting cookies");
diff --git a/src/api_core/managing_pages.rs b/src/api_core/managing_pages.rs
index 49786e0..f42556d 100644
--- a/src/api_core/managing_pages.rs
+++ b/src/api_core/managing_pages.rs
@@ -49,3 +49,23 @@ impl Endpoint for FocusPage {
         String::from("manage_pages/focus_page")
     }
 }
+
+#[derive(Clone, Debug, Serialize)]
+pub struct AddFilesRequest {
+    pub page_key: String,
+    #[serde(skip_serializing_if = "Vec::is_empty")]
+    pub file_ids: Vec<u64>,
+    #[serde(skip_serializing_if = "Vec::is_empty")]
+    pub hashes: Vec<String>,
+}
+
+pub struct AddFiles;
+
+impl Endpoint for AddFiles {
+    type Request = AddFilesRequest;
+    type Response = ();
+
+    fn path() -> String {
+        String::from("manage_pages/add_files")
+    }
+}
diff --git a/src/api_core/searching_and_fetching_files.rs b/src/api_core/searching_and_fetching_files.rs
index 0beb1bd..9b47857 100644
--- a/src/api_core/searching_and_fetching_files.rs
+++ b/src/api_core/searching_and_fetching_files.rs
@@ -6,30 +6,6 @@ pub struct SearchFilesResponse {
     pub file_ids: Vec<u64>,
 }
 
-#[derive(Clone, Debug)]
-pub enum FileSearchLocation {
-    Inbox,
-    Archive,
-}
-
-impl FileSearchLocation {
-    pub fn is_inbox(&self) -> bool {
-        if let &Self::Inbox = &self {
-            true
-        } else {
-            false
-        }
-    }
-
-    pub fn is_archive(&self) -> bool {
-        if let &Self::Archive = &self {
-            true
-        } else {
-            false
-        }
-    }
-}
-
 pub struct SearchFiles;
 
 impl Endpoint for SearchFiles {
diff --git a/src/lib.rs b/src/lib.rs
index 05af9c0..cbf98e5 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -8,18 +8,22 @@
 //! ```
 //! # use hydrus_api::{Hydrus, Client};
 //! use std::env;
-//! use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation;
 //! use hydrus_api::wrapper::tag::Tag;
 //! use hydrus_api::wrapper::service::ServiceName;
 //! use hydrus_api::wrapper::hydrus_file::FileStatus;
 //! use hydrus_api::wrapper::page::PageIdentifier;
+//! use hydrus_api::wrapper::builders::tag_builder::{SystemTagBuilder, Comparator};
 //!
 //! # #[tokio::test]
 //! # async fn doctest() {
 //! let hydrus_url = env::var("HYDRUS_URL").unwrap();
 //! let access_key = env::var("HYDRUS_ACCESS_KEY").unwrap();
 //! let hydrus = Hydrus::new(Client::new(hydrus_url, access_key));
-//! let files = hydrus.search(FileSearchLocation::Archive,vec![Tag::from("character:megumin")]).await.unwrap();
+//! let files = hydrus.search(vec![
+//!     Tag::from("character:megumin"),
+//!     SystemTagBuilder::new().archive().build(),
+//!     SystemTagBuilder::new().tag_namespace_as_number("page", Comparator::Equal, 5).negate().build(),
+//! ]).await.unwrap();
 //!
 //! for mut file in files {
 //!     file.add_tags(ServiceName::my_tags(), vec![Tag::from("ark mage")]).await.unwrap();
diff --git a/src/utils.rs b/src/utils.rs
index 8263999..e615d37 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,4 +1,5 @@
 use crate::wrapper::tag::Tag;
+use chrono::{Datelike, Duration};
 
 pub fn string_list_to_json_array(l: Vec<String>) -> String {
     format!("[\"{}\"]", l.join("\",\""))
@@ -21,3 +22,37 @@ pub fn number_list_to_json_array(l: Vec<u64>) -> String {
 pub fn tag_list_to_string_list(tags: Vec<Tag>) -> Vec<String> {
     tags.into_iter().map(|t| t.to_string()).collect()
 }
+
+pub fn format_datetime<D: Datelike>(datetime: D) -> String {
+    format!(
+        "{:04}-{:02}-{:02}",
+        datetime.year(),
+        datetime.month(),
+        datetime.day()
+    )
+}
+
+pub fn format_duration(duration: Duration) -> String {
+    let mut expression = String::new();
+    let days = duration.num_days();
+    let hours = duration.num_hours() % 24;
+    let minutes = duration.num_minutes() % 60;
+    let seconds = duration.num_seconds() % 60;
+
+    if days > 0 {
+        expression.push_str(&days.to_string());
+        expression.push_str(" days ");
+    }
+    if hours > 0 {
+        expression.push_str(&hours.to_string());
+        expression.push_str(" hours ")
+    }
+    if minutes > 0 {
+        expression.push_str(&minutes.to_string());
+        expression.push_str(" minutes ");
+    }
+    expression.push_str(&seconds.to_string());
+    expression.push_str(" seconds");
+
+    expression
+}
diff --git a/src/wrapper/builders/mod.rs b/src/wrapper/builders/mod.rs
index 2402223..7ab8348 100644
--- a/src/wrapper/builders/mod.rs
+++ b/src/wrapper/builders/mod.rs
@@ -1,2 +1,3 @@
 pub mod import_builder;
 pub mod tagging_builder;
+pub mod tag_builder;
diff --git a/src/wrapper/builders/tag_builder.rs b/src/wrapper/builders/tag_builder.rs
new file mode 100644
index 0000000..35a415e
--- /dev/null
+++ b/src/wrapper/builders/tag_builder.rs
@@ -0,0 +1,479 @@
+use crate::utils::{format_datetime, format_duration};
+use crate::wrapper::service::ServiceName;
+use crate::wrapper::tag::Tag;
+use chrono::{Datelike, Duration};
+use mime::Mime;
+use std::fmt::{Display, Formatter};
+
+#[derive(Clone, Debug)]
+pub struct TagBuilder {
+    negated: bool,
+    name: String,
+    namespace: Option<String>,
+}
+
+impl TagBuilder {
+    pub fn new<S: ToString>(name: S) -> Self {
+        Self {
+            negated: false,
+            name: name.to_string(),
+            namespace: None,
+        }
+    }
+
+    /// Set a namespace for the tag
+    pub fn namespace<S: ToString>(mut self, namespace: S) -> Self {
+        self.namespace = Some(namespace.to_string());
+
+        self
+    }
+
+    /// Converts the builder into a system tag builder
+    pub fn system(self) -> SystemTagBuilder {
+        SystemTagBuilder {
+            negated: false,
+            name: self.name,
+        }
+    }
+
+    /// Negates the tag.
+    /// If it has already been negated it will be positive again
+    pub fn negate(mut self) -> Self {
+        self.negated = !self.negated;
+
+        self
+    }
+
+    pub fn build(self) -> Tag {
+        Tag {
+            negated: self.negated,
+            name: self.name,
+            namespace: self.namespace,
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+pub struct SystemTagBuilder {
+    name: String,
+    negated: bool,
+}
+
+impl SystemTagBuilder {
+    pub fn new() -> SystemTagBuilder {
+        SystemTagBuilder {
+            name: String::new(),
+            negated: false,
+        }
+    }
+
+    pub fn build(self) -> Tag {
+        Tag {
+            negated: self.negated,
+            name: self.name,
+            namespace: Some(String::from("system")),
+        }
+    }
+
+    /// Negates the tag.
+    /// If it has already been negated it will be positive again
+    pub fn negate(mut self) -> Self {
+        self.negated = !self.negated;
+
+        self
+    }
+
+    /// All files stored in the client
+    pub fn everything(self) -> Self {
+        self.change_name("everything")
+    }
+
+    /// Files stored in the inbox
+    pub fn inbox(self) -> Self {
+        self.change_name("inbox")
+    }
+
+    /// Archived files
+    pub fn archive(self) -> Self {
+        self.change_name("archive")
+    }
+
+    /// Files that have a duration (e.g. videos)
+    pub fn has_duration(self) -> Self {
+        self.change_name("has duration")
+    }
+
+    /// Files that don't have a duration
+    pub fn no_duration(self) -> Self {
+        self.change_name("no duration")
+    }
+
+    /// Files with a specific duration
+    pub fn duration(self, comparator: Comparator, value: u64, unit: DurationUnit) -> Self {
+        self.change_name(format!("duration {} {} {}", comparator, value, unit))
+    }
+
+    /// Files that have the best quality in their duplicate group
+    pub fn best_duplicate_quality(self) -> Self {
+        self.change_name("best quality of group")
+    }
+
+    /// Files that don't have the best quality in their duplicate group
+    pub fn not_best_duplicate_quality(self) -> Self {
+        self.change_name("isn't best quality of group")
+    }
+
+    /// Files with audio
+    pub fn has_audio(self) -> Self {
+        self.change_name("has audio")
+    }
+
+    /// Files without audio
+    pub fn no_audio(self) -> Self {
+        self.change_name("no audio")
+    }
+
+    /// Files with tags
+    pub fn has_tags(self) -> Self {
+        self.change_name("has tags")
+    }
+
+    /// Files without tags
+    pub fn no_tags(self) -> Self {
+        self.change_name("no tags")
+    }
+
+    /// Untagged files
+    pub fn untagged(self) -> Self {
+        self.change_name("untagged")
+    }
+
+    /// Files with a specific number of tags
+    pub fn number_of_tags(self, comparator: Comparator, value: u64) -> Self {
+        self.change_name(format!("number of tags {} {}", comparator, value))
+    }
+
+    /// Files with a specific height
+    pub fn height(self, comparator: Comparator, value: u64) -> Self {
+        self.change_name(format!("height {} {}", comparator, value))
+    }
+
+    /// Files with a specific width
+    pub fn width(self, comparator: Comparator, value: u64) -> Self {
+        self.change_name(format!("width {} {}", comparator, value))
+    }
+
+    /// Files with a specific filesize
+    pub fn filesize(self, comparator: Comparator, value: u64, unit: FileSizeUnit) -> Self {
+        self.change_name(format!("filesize {} {} {}", comparator, value, unit))
+    }
+
+    /// Files that are similar to a list of other files with a specific [hamming distance](https://en.wikipedia.org/wiki/Hamming_distance)
+    pub fn similar_to(self, hashes: Vec<String>, distance: u32) -> Self {
+        self.change_name(format!(
+            "similar to {} with distance {}",
+            hashes.join(", "),
+            distance
+        ))
+    }
+
+    /// Limit the number of returned files
+    pub fn limit(self, value: u64) -> Self {
+        self.change_name(format!("limit = {}", value))
+    }
+
+    /// Files with a specific mimetype
+    pub fn filetype(self, mimes: Vec<Mime>) -> Self {
+        self.change_name(format!(
+            "filetype = {}",
+            mimes
+                .into_iter()
+                .map(|m| m.to_string())
+                .collect::<Vec<String>>()
+                .join(", ")
+        ))
+    }
+
+    /// Files with a specific hash
+    pub fn hash(self, hashes: Vec<String>) -> Self {
+        self.change_name(format!("hash = {}", hashes.join(" ")))
+    }
+
+    /// Files that have been modified before / after / at / around a specific date and time
+    pub fn date_modified<D: Datelike>(self, comparator: Comparator, datetime: D) -> Self {
+        self.change_name(format!(
+            "modified date {} {}",
+            comparator,
+            format_datetime(datetime)
+        ))
+    }
+
+    /// Files with a specific import time
+    pub fn time_imported<D: Datelike>(self, comparator: Comparator, datetime: D) -> Self {
+        self.change_name(format!(
+            "time imported {} {}",
+            comparator,
+            format_datetime(datetime)
+        ))
+    }
+
+    /// Files that are in a file service or pending to it
+    pub fn file_service(
+        self,
+        comparator: IsComparator,
+        cur_pen: CurrentlyOrPending,
+        service: ServiceName,
+    ) -> Self {
+        self.change_name(format!(
+            "file service {} {} {}",
+            comparator, cur_pen, service
+        ))
+    }
+
+    /// Files that have a specific number of relationships
+    pub fn number_of_relationships(
+        self,
+        comparator: Comparator,
+        value: u64,
+        relationship: FileRelationshipType,
+    ) -> Self {
+        self.change_name(format!(
+            "num file relationships {} {} {}",
+            comparator, value, relationship
+        ))
+    }
+
+    /// Files with a specific aspect ratio
+    pub fn ratio(self, wte: WiderTallerEqual, value: (u64, u64)) -> Self {
+        self.change_name(format!("ratio {} {}:{}", wte, value.0, value.1))
+    }
+
+    /// Files with a specific number of pixels
+    pub fn number_of_pixels(self, comparator: Comparator, value: u64, unit: PixelUnit) -> Self {
+        self.change_name(format!("num pixels {} {} {}", comparator, value, unit))
+    }
+
+    /// Files that have been viewed a specific number of times
+    pub fn views(self, view_type: ViewType, comparator: Comparator, value: u64) -> Self {
+        self.change_name(format!("{} views {} {}", view_type, comparator, value))
+    }
+
+    /// Files that have been viewed for a specific amount of time
+    pub fn viewtime(self, view_type: ViewType, comparator: Comparator, duration: Duration) -> Self {
+        self.change_name(format!(
+            "{} viewtime {} {}",
+            view_type,
+            comparator,
+            format_duration(duration)
+        ))
+    }
+
+    /// Files that have associated urls that match a defined regex
+    pub fn has_url_matching_regex<S: Display>(self, regex: S) -> Self {
+        self.change_name(format!("has url matching regex {}", regex))
+    }
+
+    /// Files that don't have a url that matches a defined regex
+    pub fn does_not_have_url_matching_regex<S: Display>(self, regex: S) -> Self {
+        self.change_name(format!("does not have url matching regex {}", regex))
+    }
+
+    /// Files that have a url that matches a class (e.g. 'safebooru file page')
+    pub fn has_url_with_class<S: Display>(self, class: S) -> Self {
+        self.change_name(format!("has url with class {}", class))
+    }
+
+    /// Files that don't have a url that matches a class (e.g. 'safebooru file page')
+    pub fn does_not_have_url_with_class<S: Display>(self, class: S) -> Self {
+        self.change_name(format!("does not have url with class {}", class))
+    }
+
+    /// Converts a tag namespace (e.g. 'page') into a number and compares it
+    pub fn tag_namespace_as_number<S: Display>(
+        self,
+        namespace: S,
+        comparator: Comparator,
+        value: u64,
+    ) -> Self {
+        self.change_name(format!(
+            "tag as number {} {} {}",
+            namespace, comparator, value
+        ))
+    }
+
+    fn change_name<S: ToString>(mut self, value: S) -> Self {
+        self.name = value.to_string();
+
+        self
+    }
+}
+
+#[derive(Clone, Debug, PartialOrd, PartialEq)]
+pub enum Comparator {
+    /// rhs > lhs
+    Greater,
+    /// rhs < lhs
+    Less,
+    /// rhs == lhs
+    Equal,
+    /// If the rhs is in a +-15% range of the lhs
+    Approximate,
+}
+
+impl Display for Comparator {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        let symbol = match self {
+            Comparator::Greater => ">",
+            Comparator::Less => "<",
+            Comparator::Equal => "=",
+            Comparator::Approximate => "~=",
+        };
+        symbol.fmt(f)
+    }
+}
+
+#[derive(Clone, Debug, PartialOrd, PartialEq)]
+pub enum FileSizeUnit {
+    Bytes,
+    Kilobytes,
+    Megabytes,
+    Gigabytes,
+}
+
+impl Display for FileSizeUnit {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        let name = match self {
+            FileSizeUnit::Bytes => "B",
+            FileSizeUnit::Kilobytes => "KB",
+            FileSizeUnit::Megabytes => "MB",
+            FileSizeUnit::Gigabytes => "GB",
+        };
+        name.fmt(f)
+    }
+}
+
+#[derive(Clone, Debug, PartialOrd, PartialEq)]
+pub enum DurationUnit {
+    Hours,
+    Minutes,
+    Seconds,
+    Milliseconds,
+}
+
+impl Display for DurationUnit {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        let name = match self {
+            DurationUnit::Hours => "hours",
+            DurationUnit::Minutes => "minutes",
+            DurationUnit::Seconds => "seconds",
+            DurationUnit::Milliseconds => "milliseconds",
+        };
+        name.fmt(f)
+    }
+}
+
+#[derive(Clone, Debug, PartialOrd, PartialEq)]
+pub enum IsComparator {
+    Is,
+    IsNot,
+}
+
+impl Display for IsComparator {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        let name = match self {
+            IsComparator::Is => "is",
+            IsComparator::IsNot => "is not",
+        };
+        name.fmt(f)
+    }
+}
+
+#[derive(Clone, Debug, PartialOrd, PartialEq)]
+pub enum CurrentlyOrPending {
+    CurrentlyIn,
+    PendingTo,
+}
+
+impl Display for CurrentlyOrPending {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        let name = match self {
+            CurrentlyOrPending::CurrentlyIn => "currently in",
+            CurrentlyOrPending::PendingTo => "pending to",
+        };
+        name.fmt(f)
+    }
+}
+
+#[derive(Clone, Debug, PartialOrd, PartialEq)]
+pub enum WiderTallerEqual {
+    Wider,
+    Taller,
+    Equal,
+}
+
+impl Display for WiderTallerEqual {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        let name = match self {
+            WiderTallerEqual::Wider => "wider than",
+            WiderTallerEqual::Taller => "taller than",
+            WiderTallerEqual::Equal => "is",
+        };
+        name.fmt(f)
+    }
+}
+
+#[derive(Clone, Debug, PartialOrd, PartialEq)]
+pub enum PixelUnit {
+    Pixels,
+    Kilopixels,
+    Megapixels,
+}
+
+impl Display for PixelUnit {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        let name = match self {
+            PixelUnit::Pixels => "pixels",
+            PixelUnit::Kilopixels => "kilopixels",
+            PixelUnit::Megapixels => "megapixels",
+        };
+        name.fmt(f)
+    }
+}
+
+#[derive(Clone, Debug, PartialOrd, PartialEq)]
+pub enum ViewType {
+    Media,
+    Preview,
+    All,
+}
+
+impl Display for ViewType {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        let name = match self {
+            ViewType::Media => "media",
+            ViewType::Preview => "preview",
+            ViewType::All => "all",
+        };
+        name.fmt(f)
+    }
+}
+
+#[derive(Clone, Debug, PartialOrd, PartialEq)]
+pub enum FileRelationshipType {
+    Alternates,
+    FalsePositives,
+    Duplicates,
+    PotentialDuplicates,
+}
+
+impl Display for FileRelationshipType {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        let name = match self {
+            FileRelationshipType::Alternates => "alternates",
+            FileRelationshipType::FalsePositives => "false positives",
+            FileRelationshipType::Duplicates => "duplicates",
+            FileRelationshipType::PotentialDuplicates => "potential duplicates",
+        };
+        name.fmt(f)
+    }
+}
diff --git a/src/wrapper/hydrus.rs b/src/wrapper/hydrus.rs
index da49c01..39dd6f9 100644
--- a/src/wrapper/hydrus.rs
+++ b/src/wrapper/hydrus.rs
@@ -1,5 +1,4 @@
 use crate::api_core::common::FileIdentifier;
-use crate::api_core::searching_and_fetching_files::FileSearchLocation;
 use crate::error::Result;
 use crate::utils::tag_list_to_string_list;
 use crate::wrapper::address::Address;
@@ -84,14 +83,10 @@ impl Hydrus {
     }
 
     /// Searches for files that have the given tags and returns a list of hydrus files as a result
-    pub async fn search(
-        &self,
-        location: FileSearchLocation,
-        tags: Vec<Tag>,
-    ) -> Result<Vec<HydrusFile>> {
+    pub async fn search(&self, tags: Vec<Tag>) -> Result<Vec<HydrusFile>> {
         let search_result = self
             .client
-            .search_files(tag_list_to_string_list(tags), location)
+            .search_files(tag_list_to_string_list(tags))
             .await?;
         let files = search_result
             .file_ids
diff --git a/src/wrapper/page.rs b/src/wrapper/page.rs
index 1338a49..93669a2 100644
--- a/src/wrapper/page.rs
+++ b/src/wrapper/page.rs
@@ -1,4 +1,4 @@
-use crate::api_core::common::PageInformation;
+use crate::api_core::common::{FileIdentifier, PageInformation};
 use crate::error::Result;
 use crate::Client;
 
@@ -32,11 +32,36 @@ impl HydrusPage {
     pub async fn focus(&self) -> Result<()> {
         self.client.focus_page(&self.key).await
     }
-
+
     /// Returns an identifier of the page
     pub fn id(&self) -> PageIdentifier {
         PageIdentifier::key(&self.key)
     }
+
+    /// Adds files to a page
+    pub async fn add_files(&self, files: Vec<FileIdentifier>) -> Result<()> {
+        let mut hashes = Vec::new();
+        let mut ids = Vec::new();
+
+        for file in files {
+            match file {
+                FileIdentifier::ID(id) => ids.push(id),
+                FileIdentifier::Hash(hash) => hashes.push(hash),
+            }
+        }
+        // resolve file ids to hashes
+        if ids.len() > 0 && hashes.len() > 0 {
+            while let Some(id) = ids.pop() {
+                let metadata = self
+                    .client
+                    .get_file_metadata_by_identifier(FileIdentifier::ID(id))
+                    .await?;
+                hashes.push(metadata.hash);
+            }
+        }
+
+        self.client.add_files_to_page(&self.key, ids, hashes).await
+    }
 }
 
 #[derive(Clone)]
diff --git a/src/wrapper/service.rs b/src/wrapper/service.rs
index 1109616..0e0b36b 100644
--- a/src/wrapper/service.rs
+++ b/src/wrapper/service.rs
@@ -8,6 +8,7 @@ use crate::error::Error;
 use crate::Client;
 use std::collections::HashMap;
 use std::convert::TryFrom;
+use std::fmt::{Display, Formatter};
 
 #[derive(Clone, PartialOrd, PartialEq, Hash)]
 pub enum ServiceType {
@@ -87,6 +88,12 @@ impl ServiceName {
     }
 }
 
+impl Display for ServiceName {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
 #[derive(Clone)]
 pub struct Service {
     client: Client,
diff --git a/src/wrapper/tag.rs b/src/wrapper/tag.rs
index 85ab7d1..efd48dc 100644
--- a/src/wrapper/tag.rs
+++ b/src/wrapper/tag.rs
@@ -1,5 +1,6 @@
 #[derive(Clone, Debug)]
 pub struct Tag {
+    pub negated: bool,
     pub name: String,
     pub namespace: Option<String>,
 }
@@ -9,14 +10,17 @@ where
     S: AsRef<str>,
 {
     fn from(value: S) -> Self {
-        let value = value.as_ref();
+        let value = value.as_ref().trim();
+        let negated = value.strip_prefix("-").is_some();
         if let Some((namespace, tag)) = value.split_once(":") {
             Self {
+                negated,
                 namespace: Some(namespace.to_string()),
                 name: tag.to_string(),
             }
         } else {
             Self {
+                negated,
                 name: value.to_string(),
                 namespace: None,
             }
@@ -26,10 +30,11 @@ where
 
 impl ToString for Tag {
     fn to_string(&self) -> String {
+        let negation = if self.negated { "-" } else { "" };
         if let Some(namespace) = &self.namespace {
-            format!("{}:{}", namespace, self.name)
+            format!("{}{}:{}", negation, namespace, self.name)
         } else {
-            self.name.clone()
+            format!("{}{}", negation, self.name)
         }
     }
 }
diff --git a/tests/client/test_managing_pages.rs b/tests/client/test_managing_pages.rs
index c787f9b..1e651a8 100644
--- a/tests/client/test_managing_pages.rs
+++ b/tests/client/test_managing_pages.rs
@@ -24,3 +24,17 @@ async fn it_focuses_pages() {
 
     assert!(result.is_err()); // page does not exist
 }
+
+#[tokio::test]
+async fn it_adds_files_to_a_page() {
+    let client = common::get_client();
+    let result = client
+        .add_files_to_page(
+            "0c33d6599c22d5ec12a57b79d8c5a528ebdab7a8c2b462e6d76e2d0512e917fd",
+            vec![0],
+            vec![],
+        )
+        .await;
+
+    assert!(result.is_err()) // page does not exist
+}
diff --git a/tests/client/test_searching_and_fetching_files.rs b/tests/client/test_searching_and_fetching_files.rs
index 98816d7..4289443 100644
--- a/tests/client/test_searching_and_fetching_files.rs
+++ b/tests/client/test_searching_and_fetching_files.rs
@@ -1,12 +1,11 @@
 use super::super::common;
 use hydrus_api::api_core::common::FileIdentifier;
-use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation;
 
 #[tokio::test]
 async fn is_searches_files() {
     let client = common::get_client();
     client
-        .search_files(vec!["beach".to_string()], FileSearchLocation::Archive)
+        .search_files(vec!["beach".to_string()])
         .await
         .unwrap();
 }
diff --git a/tests/wrapper/mod.rs b/tests/wrapper/mod.rs
index 61e3aca..9102ce3 100644
--- a/tests/wrapper/mod.rs
+++ b/tests/wrapper/mod.rs
@@ -4,3 +4,4 @@ mod test_import;
 mod test_url;
 mod test_page;
 mod test_address;
+mod test_tags;
diff --git a/tests/wrapper/test_hydrus.rs b/tests/wrapper/test_hydrus.rs
index 605eeb1..525c203 100644
--- a/tests/wrapper/test_hydrus.rs
+++ b/tests/wrapper/test_hydrus.rs
@@ -1,6 +1,5 @@
 use super::super::common;
 use hydrus_api::api_core::adding_tags::TagAction;
-use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation;
 use hydrus_api::wrapper::service::{ServiceName, ServiceType};
 use hydrus_api::wrapper::url::UrlType;
 
@@ -37,10 +36,7 @@ async fn it_retrieves_url_information() {
 async fn it_searches() {
     let hydrus = common::get_hydrus();
     hydrus
-        .search(
-            FileSearchLocation::Archive,
-            vec!["character:megumin".into()],
-        )
+        .search(vec!["character:megumin".into()])
         .await
         .unwrap();
 }
diff --git a/tests/wrapper/test_page.rs b/tests/wrapper/test_page.rs
index 9bcf39a..666c44f 100644
--- a/tests/wrapper/test_page.rs
+++ b/tests/wrapper/test_page.rs
@@ -1,4 +1,5 @@
 use super::super::common;
+use hydrus_api::api_core::common::FileIdentifier;
 use hydrus_api::wrapper::page::HydrusPage;
 
 async fn get_page() -> HydrusPage {
@@ -30,3 +31,14 @@ async fn it_has_a_id() {
     let page = get_page().await;
     page.id();
 }
+
+#[tokio::test]
+async fn it_can_have_files_assigned() {
+    let page = get_page().await;
+    let result = page
+        .add_files(vec![FileIdentifier::hash(
+            "0000000000000000000000000000000000000000000000000000000000000000",
+        )])
+        .await;
+    assert!(result.is_err()) // root pages are not media pages
+}
diff --git a/tests/wrapper/test_tags.rs b/tests/wrapper/test_tags.rs
new file mode 100644
index 0000000..e51e3a6
--- /dev/null
+++ b/tests/wrapper/test_tags.rs
@@ -0,0 +1,413 @@
+use super::super::common;
+use chrono::{Duration, Local};
+use hydrus_api::error::Result;
+use hydrus_api::wrapper::builders::tag_builder::{
+    Comparator, CurrentlyOrPending, FileRelationshipType, FileSizeUnit, IsComparator, PixelUnit,
+    SystemTagBuilder, ViewType, WiderTallerEqual,
+};
+use hydrus_api::wrapper::service::ServiceName;
+use hydrus_api::wrapper::tag::Tag;
+
+async fn retrieve_single_tag(tag: Tag) -> Result<()> {
+    let hydrus = common::get_hydrus();
+    hydrus.search(vec![tag]).await?;
+
+    Ok(())
+}
+
+#[tokio::test]
+async fn it_returns_everything() {
+    let tag = SystemTagBuilder::new().everything().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_everything_negated() {
+    let tag = SystemTagBuilder::new().everything().negate().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_the_inbox() {
+    let tag = SystemTagBuilder::new().inbox().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_archived_files() {
+    let tag = SystemTagBuilder::new().archive().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_duration() {
+    let tag = SystemTagBuilder::new().has_duration().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_without_duration() {
+    let tag = SystemTagBuilder::new().no_duration().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_the_best_from_duplicates() {
+    let tag = SystemTagBuilder::new().best_duplicate_quality().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_worse_duplicates() {
+    let tag = SystemTagBuilder::new().not_best_duplicate_quality().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_audio() {
+    let tag = SystemTagBuilder::new().has_audio().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_without_audio() {
+    let tag = SystemTagBuilder::new().no_audio().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_tags() {
+    let tag = SystemTagBuilder::new().has_tags().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_without_tags() {
+    let tag = SystemTagBuilder::new().no_tags().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_untagged_files() {
+    let tag = SystemTagBuilder::new().untagged().build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_number_of_tags() {
+    let tag = SystemTagBuilder::new()
+        .number_of_tags(Comparator::Greater, 12)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_height() {
+    let tag = SystemTagBuilder::new()
+        .height(Comparator::Approximate, 200)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_width() {
+    let tag = SystemTagBuilder::new()
+        .width(Comparator::Equal, 200)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_size_in_gigabytes() {
+    let tag = SystemTagBuilder::new()
+        .filesize(Comparator::Less, 200, FileSizeUnit::Gigabytes)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_size_in_megabytes() {
+    let tag = SystemTagBuilder::new()
+        .filesize(Comparator::Less, 200, FileSizeUnit::Megabytes)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_size_in_kilobytes() {
+    let tag = SystemTagBuilder::new()
+        .filesize(Comparator::Less, 200, FileSizeUnit::Kilobytes)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_size_in_bytes() {
+    let tag = SystemTagBuilder::new()
+        .filesize(Comparator::Less, 200, FileSizeUnit::Bytes)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_that_are_similar_to_others() {
+    let tag = SystemTagBuilder::new()
+        .similar_to(
+            vec![String::from(
+                "0000000000000000000000000000000000000000000000000000000000000000",
+            )],
+            20,
+        )
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_limits_results() {
+    let tag = SystemTagBuilder::new().limit(50).build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_modification_date() {
+    let tag = SystemTagBuilder::new()
+        .date_modified(Comparator::Greater, Local::now())
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_import_time() {
+    let tag = SystemTagBuilder::new()
+        .time_imported(Comparator::Less, Local::now())
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_of_a_service() {
+    let tag = SystemTagBuilder::new()
+        .file_service(
+            IsComparator::Is,
+            CurrentlyOrPending::CurrentlyIn,
+            ServiceName::my_files(),
+        )
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_that_are_not_of_a_service() {
+    let tag = SystemTagBuilder::new()
+        .file_service(
+            IsComparator::IsNot,
+            CurrentlyOrPending::CurrentlyIn,
+            ServiceName::my_files(),
+        )
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_pending_to_service() {
+    let tag = SystemTagBuilder::new()
+        .file_service(
+            IsComparator::Is,
+            CurrentlyOrPending::PendingTo,
+            ServiceName::my_files(),
+        )
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_not_pending_to_service() {
+    let tag = SystemTagBuilder::new()
+        .file_service(
+            IsComparator::IsNot,
+            CurrentlyOrPending::PendingTo,
+            ServiceName::my_files(),
+        )
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_number_of_alternate_relationships() {
+    let tag = SystemTagBuilder::new()
+        .number_of_relationships(Comparator::Approximate, 3, FileRelationshipType::Alternates)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_number_of_duplicate_relationships() {
+    let tag = SystemTagBuilder::new()
+        .number_of_relationships(Comparator::Approximate, 3, FileRelationshipType::Duplicates)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_number_of_false_positive_relationships() {
+    let tag = SystemTagBuilder::new()
+        .number_of_relationships(
+            Comparator::Approximate,
+            3,
+            FileRelationshipType::FalsePositives,
+        )
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_number_of_potential_duplicate_relationships() {
+    let tag = SystemTagBuilder::new()
+        .number_of_relationships(
+            Comparator::Approximate,
+            3,
+            FileRelationshipType::PotentialDuplicates,
+        )
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_wider_than_a_specific_ratio() {
+    let tag = SystemTagBuilder::new()
+        .ratio(WiderTallerEqual::Wider, (40, 50))
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_taller_than_a_specific_ratio() {
+    let tag = SystemTagBuilder::new()
+        .ratio(WiderTallerEqual::Taller, (40, 50))
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_taller_with_specific_ratio() {
+    let tag = SystemTagBuilder::new()
+        .ratio(WiderTallerEqual::Equal, (40, 50))
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_number_of_megapixels() {
+    let tag = SystemTagBuilder::new()
+        .number_of_pixels(Comparator::Less, 50, PixelUnit::Megapixels)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_number_of_kilopixels() {
+    let tag = SystemTagBuilder::new()
+        .number_of_pixels(Comparator::Equal, 50, PixelUnit::Kilopixels)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_number_of_pixels() {
+    let tag = SystemTagBuilder::new()
+        .number_of_pixels(Comparator::Greater, 50, PixelUnit::Pixels)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_number_of_views() {
+    let tag = SystemTagBuilder::new()
+        .views(ViewType::All, Comparator::Less, 1000)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_number_of_preview_views() {
+    let tag = SystemTagBuilder::new()
+        .views(ViewType::Preview, Comparator::Equal, 1000)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_number_of_media_views() {
+    let tag = SystemTagBuilder::new()
+        .views(ViewType::Media, Comparator::Greater, 1000)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_preview_viewtime() {
+    let tag = SystemTagBuilder::new()
+        .viewtime(
+            ViewType::Preview,
+            Comparator::Greater,
+            Duration::minutes(10),
+        )
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_media_viewtime() {
+    let tag = SystemTagBuilder::new()
+        .viewtime(ViewType::Media, Comparator::Equal, Duration::minutes(10))
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_a_specific_viewtime() {
+    let tag = SystemTagBuilder::new()
+        .viewtime(ViewType::All, Comparator::Less, Duration::hours(10))
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_urls_matching_a_regex() {
+    let tag = SystemTagBuilder::new()
+        .has_url_matching_regex(".*pixiv.net.*")
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_urls_not_matching_a_regex() {
+    let tag = SystemTagBuilder::new()
+        .does_not_have_url_matching_regex(".*pixiv.net.*")
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_urls_matching_a_class() {
+    let tag = SystemTagBuilder::new()
+        .has_url_with_class("pixiv file page")
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_urls_not_matching_a_class() {
+    let tag = SystemTagBuilder::new()
+        .does_not_have_url_with_class("pixiv file page")
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
+
+#[tokio::test]
+async fn it_returns_files_with_namespace_properties() {
+    let tag = SystemTagBuilder::new()
+        .tag_namespace_as_number("page", Comparator::Approximate, 5)
+        .build();
+    retrieve_single_tag(tag).await.unwrap();
+}
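
Not part of the patch: a minimal usage sketch of the client-level API introduced above. It assumes a reachable Hydrus client with the HYDRUS_URL and HYDRUS_ACCESS_KEY environment variables set, and the page key passed to add_files_to_page is a placeholder (the same dummy key the tests use), not a real page.

use hydrus_api::wrapper::builders::tag_builder::{Comparator, SystemTagBuilder};
use hydrus_api::Client;
use std::env;

#[tokio::main]
async fn main() {
    let client = Client::new(
        env::var("HYDRUS_URL").unwrap(),
        env::var("HYDRUS_ACCESS_KEY").unwrap(),
    );

    // Tags built with SystemTagBuilder serialize to plain "system:..." strings,
    // so they can be passed straight to the low-level search endpoint.
    let result = client
        .search_files(vec![
            SystemTagBuilder::new().archive().build().to_string(),
            SystemTagBuilder::new()
                .number_of_tags(Comparator::Greater, 12)
                .build()
                .to_string(),
        ])
        .await
        .unwrap();

    // Send the first few results to an existing page by file id.
    // The page key below is a placeholder; use one taken from your own client.
    client
        .add_files_to_page(
            "0c33d6599c22d5ec12a57b79d8c5a528ebdab7a8c2b462e6d76e2d0512e917fd",
            result.file_ids.into_iter().take(10).collect(),
            vec![],
        )
        .await
        .unwrap();
}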