diff --git a/Cargo.toml b/Cargo.toml index 23cd4fc..4d0d679 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hydrus-api" -version = "0.1.0" +version = "0.3.0" authors = ["trivernis "] edition = "2018" license = "Apache-2.0" @@ -14,6 +14,7 @@ repository = "https://github.com/trivernis/hydrus-api-rs" serde = {version = "^1.0", features = ["derive"]} reqwest = {version = "0.11.4", features = ["json"]} log = "0.4.14" +mime = "0.3.16" [dev-dependencies] env_logger = "0.8.4" diff --git a/README.md b/README.md index 1f72a5b..3a88b9d 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,51 @@ -# Hydrus Rust API +

+Hydrus Rust API +

+

+ + + + + + +

+ This is a WIP Rust Wrapper for the Hydrus Client API. The official API documentation can be found [here](https://hydrusnetwork.github.io/hydrus/help/client_api.html). -## Example +## Example with Wrapper + +```rust +use std::env; +use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation; +use hydrus_api::wrapper::tag::Tag; +use hydrus_api::wrapper::service::ServiceName; +use hydrus_api::wrapper::hydrus_file::FileStatus; +use hydrus_api::wrapper::page::PageIdentifier; + +#[tokio::main] +async fn main() { + let hydrus_url = env::var("HYDRUS_URL").unwrap(); + let access_key = env::var("HYDRUS_ACCESS_KEY").unwrap(); + + let hydrus = Hydrus::new(Client::new(hydrus_url, access_key)); + let files = hydrus.search(FileSearchLocation::Archive,vec![Tag::from("character:megumin")]).await.unwrap(); + + for mut file in files { + file.add_tags(ServiceName::my_tags(), vec![Tag::from("ark mage")]).await.unwrap(); + } + + let url = hydrus.import() + .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium") + .page(PageIdentifier::name("My Import Page")) + .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin")) + .show_page(true) + .run().await.unwrap(); +} +``` + +## Example with Client ```rust use hydrus_api::Client; @@ -15,7 +57,7 @@ async fn main() { Client::new( env::var("HYDRUS_URL").unwrap(), env::var("HYDRUS_ACCESS_KEY").unwrap(), - ).unwrap(); + ); // let's first import a file let hash = client.add_file("/path/to/my/file").await.unwrap().hash; diff --git a/src/endpoints/access_management.rs b/src/api_core/access_management.rs similarity index 63% rename from src/endpoints/access_management.rs rename to src/api_core/access_management.rs index 96d759c..8c5ff11 100644 --- a/src/endpoints/access_management.rs +++ b/src/api_core/access_management.rs @@ -1,7 +1,16 @@ -use crate::endpoints::common::BasicServiceInfo; -use crate::endpoints::Endpoint; +use crate::api_core::common::BasicServiceInfo; +use 
crate::api_core::Endpoint; use std::collections::HashMap; +pub static SERVICE_TYPE_LOCAL_TAGS: &str = "local_tags"; +pub static SERVICE_TYPE_TAG_REPOSITORIES: &str = "tag_repositories"; +pub static SERVICE_TYPE_LOCAL_FILES: &str = "local_files"; +pub static SERVICE_TYPE_FILE_REPOSITORIES: &str = "file_repositories"; +pub static SERVICE_TYPE_ALL_LOCAL_FILES: &str = "all_local_files"; +pub static SERVICE_TYPE_ALL_KNOWN_FILES: &str = "all_known_files"; +pub static SERVICE_TYPE_ALL_KNOWN_TAGS: &str = "all_known_tags"; +pub static SERVICE_TYPE_TRASH: &str = "trash"; + #[derive(Debug, Clone, Deserialize)] pub struct ApiVersionResponse { pub version: u32, @@ -14,7 +23,7 @@ impl Endpoint for ApiVersion { type Request = (); type Response = ApiVersionResponse; - fn get_path() -> String { + fn path() -> String { String::from("api_version") } } @@ -30,7 +39,7 @@ impl Endpoint for SessionKey { type Request = (); type Response = SessionKeyResponse; - fn get_path() -> String { + fn path() -> String { String::from("session_key") } } @@ -47,7 +56,7 @@ impl Endpoint for VerifyAccessKey { type Request = (); type Response = VerifyAccessKeyResponse; - fn get_path() -> String { + fn path() -> String { String::from("verify_access_key") } } @@ -61,7 +70,7 @@ impl Endpoint for GetServices { type Request = (); type Response = GetServicesResponse; - fn get_path() -> String { + fn path() -> String { String::from("get_services") } } diff --git a/src/endpoints/adding_files.rs b/src/api_core/adding_files.rs similarity index 75% rename from src/endpoints/adding_files.rs rename to src/api_core/adding_files.rs index 2ff9892..fce22b4 100644 --- a/src/endpoints/adding_files.rs +++ b/src/api_core/adding_files.rs @@ -1,5 +1,11 @@ -use crate::endpoints::common::BasicHashList; -use crate::endpoints::Endpoint; +use crate::api_core::common::BasicHashList; +use crate::api_core::Endpoint; + +pub static STATUS_IMPORT_SUCCESS: u8 = 1; +pub static STATUS_IMPORT_ALREADY_EXISTS: u8 = 2; +pub static 
STATUS_IMPORT_PREVIOUSLY_DELETED: u8 = 3; +pub static STATUS_IMPORT_FAILED: u8 = 4; +pub static STATUS_IMPORT_VETOED: u8 = 5; #[derive(Debug, Clone, Serialize)] pub struct AddFileRequest { @@ -19,7 +25,7 @@ impl Endpoint for AddFile { type Request = AddFileRequest; type Response = AddFileResponse; - fn get_path() -> String { + fn path() -> String { String::from("add_files/add_file") } } @@ -32,7 +38,7 @@ impl Endpoint for DeleteFiles { type Request = DeleteFilesRequest; type Response = (); - fn get_path() -> String { + fn path() -> String { String::from("add_files/delete_files") } } @@ -44,7 +50,7 @@ impl Endpoint for UndeleteFiles { type Request = UndeleteFilesRequest; type Response = (); - fn get_path() -> String { + fn path() -> String { String::from("add_files/undelete_files") } } @@ -56,7 +62,7 @@ impl Endpoint for ArchiveFiles { type Request = ArchiveFilesRequest; type Response = (); - fn get_path() -> String { + fn path() -> String { String::from("add_files/archive_files") } } @@ -68,7 +74,7 @@ impl Endpoint for UnarchiveFiles { type Request = UndeleteFilesRequest; type Response = (); - fn get_path() -> String { + fn path() -> String { String::from("add_files/unarchive_files") } } diff --git a/src/endpoints/adding_tags.rs b/src/api_core/adding_tags.rs similarity index 96% rename from src/endpoints/adding_tags.rs rename to src/api_core/adding_tags.rs index 04c09d8..91c883f 100644 --- a/src/endpoints/adding_tags.rs +++ b/src/api_core/adding_tags.rs @@ -1,4 +1,4 @@ -use crate::endpoints::Endpoint; +use crate::api_core::Endpoint; use std::collections::HashMap; #[derive(Debug, Clone, Deserialize)] @@ -12,7 +12,7 @@ impl Endpoint for CleanTags { type Request = (); type Response = CleanTagsResponse; - fn get_path() -> String { + fn path() -> String { String::from("add_tags/clean_tags") } } @@ -30,7 +30,7 @@ impl Endpoint for AddTags { type Request = AddTagsRequest; type Response = (); - fn get_path() -> String { + fn path() -> String { 
String::from("add_tags/add_tags") } } @@ -42,6 +42,7 @@ pub struct AddTagsRequestBuilder { } /// List of actions for a given tag +#[derive(Clone, Debug, PartialOrd, PartialEq, Hash)] pub enum TagAction { /// Add to a local tag service. AddToLocalService, @@ -62,6 +63,8 @@ pub enum TagAction { RescindPetitionFromRepository, } +impl Eq for TagAction {} + impl TagAction { fn into_id(self) -> u8 { match self { diff --git a/src/endpoints/adding_urls.rs b/src/api_core/adding_urls.rs similarity index 94% rename from src/endpoints/adding_urls.rs rename to src/api_core/adding_urls.rs index 5b3068b..a92aabc 100644 --- a/src/endpoints/adding_urls.rs +++ b/src/api_core/adding_urls.rs @@ -1,4 +1,4 @@ -use crate::endpoints::Endpoint; +use crate::api_core::Endpoint; use serde::Serialize; use std::collections::HashMap; @@ -16,7 +16,7 @@ pub struct GetUrlFilesResponse { #[derive(Clone, Debug, Deserialize)] pub struct UrlFileStatus { - pub status: u32, + pub status: u8, pub hash: String, pub note: String, } @@ -27,7 +27,7 @@ impl Endpoint for GetUrlFiles { type Request = (); type Response = GetUrlFilesResponse; - fn get_path() -> String { + fn path() -> String { String::from("add_urls/get_url_files") } } @@ -47,7 +47,7 @@ impl Endpoint for GetUrlInfo { type Request = (); type Response = GetUrlInfoResponse; - fn get_path() -> String { + fn path() -> String { String::from("add_urls/get_url_info") } } @@ -72,7 +72,7 @@ pub struct AddUrlRequest { /// /// Example: /// ``` -/// use hydrus_api::endpoints::adding_urls::AddUrlRequestBuilder; +/// use hydrus_api::api_core::adding_urls::AddUrlRequestBuilder; /// /// let request = AddUrlRequestBuilder::default() /// .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium") @@ -164,7 +164,7 @@ impl Endpoint for AddUrl { type Request = AddUrlRequest; type Response = AddUrlResponse; - fn get_path() -> String { + fn path() -> String { String::from("add_urls/add_url") } } @@ -181,7 +181,7 @@ impl Endpoint for AssociateUrl { type 
Request = AssociateUrlRequest; type Response = (); - fn get_path() -> String { + fn path() -> String { String::from("add_urls/associate_url") } } diff --git a/src/client.rs b/src/api_core/client.rs similarity index 68% rename from src/client.rs rename to src/api_core/client.rs index 1b2eddb..b714320 100644 --- a/src/client.rs +++ b/src/api_core/client.rs @@ -1,22 +1,22 @@ -use crate::endpoints::access_management::{ +use crate::api_core::access_management::{ ApiVersion, ApiVersionResponse, GetServices, GetServicesResponse, SessionKey, SessionKeyResponse, VerifyAccessKey, VerifyAccessKeyResponse, }; -use crate::endpoints::adding_files::{ +use crate::api_core::adding_files::{ AddFile, AddFileRequest, AddFileResponse, ArchiveFiles, ArchiveFilesRequest, DeleteFiles, DeleteFilesRequest, UnarchiveFiles, UnarchiveFilesRequest, UndeleteFiles, UndeleteFilesRequest, }; -use crate::endpoints::adding_tags::{AddTags, AddTagsRequest, CleanTags, CleanTagsResponse}; -use crate::endpoints::adding_urls::{ +use crate::api_core::adding_tags::{AddTags, AddTagsRequest, CleanTags, CleanTagsResponse}; +use crate::api_core::adding_urls::{ AddUrl, AddUrlRequest, AddUrlResponse, AssociateUrl, AssociateUrlRequest, GetUrlFiles, GetUrlFilesResponse, GetUrlInfo, GetUrlInfoResponse, }; -use crate::endpoints::common::{FileIdentifier, FileRecord}; -use crate::endpoints::searching_and_fetching_files::{ +use crate::api_core::common::{FileIdentifier, FileMetadataInfo, FileRecord}; +use crate::api_core::searching_and_fetching_files::{ FileMetadata, FileMetadataResponse, FileSearchLocation, GetFile, SearchFiles, SearchFilesResponse, }; -use crate::endpoints::Endpoint; +use crate::api_core::Endpoint; use crate::error::{Error, Result}; use crate::utils::{number_list_to_json_array, string_list_to_json_array}; use reqwest::Response; @@ -25,6 +25,9 @@ use serde::Serialize; static ACCESS_KEY_HEADER: &str = "Hydrus-Client-API-Access-Key"; +#[derive(Clone)] +/// A low level Client for the hydrus API. 
It provides basic abstraction +/// over the REST api. pub struct Client { inner: reqwest::Client, base_url: String, @@ -33,19 +36,19 @@ pub struct Client { impl Client { /// Creates a new client to start requests against the hydrus api. - pub fn new>(url: S, access_key: S) -> Result { - Ok(Self { + pub fn new>(url: S, access_key: S) -> Self { + Self { inner: reqwest::Client::new(), access_key: access_key.as_ref().to_string(), base_url: url.as_ref().to_string(), - }) + } } /// Starts a get request to the path - async fn get(&mut self, query: &Q) -> Result { + async fn get(&self, query: &Q) -> Result { let response = self .inner - .get(format!("{}/{}", self.base_url, E::get_path())) + .get(format!("{}/{}", self.base_url, E::path())) .header(ACCESS_KEY_HEADER, &self.access_key) .query(query) .send() @@ -56,7 +59,7 @@ impl Client { /// Starts a get request to the path associated with the Endpoint Type async fn get_and_parse( - &mut self, + &self, query: &Q, ) -> Result { let response = self.get::(query).await?; @@ -65,10 +68,10 @@ impl Client { } /// Stats a post request to the path associated with the Endpoint Type - async fn post(&mut self, body: E::Request) -> Result { + async fn post(&self, body: E::Request) -> Result { let response = self .inner - .post(format!("{}/{}", self.base_url, E::get_path())) + .post(format!("{}/{}", self.base_url, E::path())) .json(&body) .header(ACCESS_KEY_HEADER, &self.access_key) .send() @@ -78,17 +81,17 @@ impl Client { } /// Stats a post request and parses the body as json - async fn post_and_parse(&mut self, body: E::Request) -> Result { + async fn post_and_parse(&self, body: E::Request) -> Result { let response = self.post::(body).await?; Self::extract_content(response).await } /// Stats a post request to the path associated with the return type - async fn post_binary(&mut self, data: Vec) -> Result { + async fn post_binary(&self, data: Vec) -> Result { let response = self .inner - .post(format!("{}/{}", self.base_url, 
E::get_path())) + .post(format!("{}/{}", self.base_url, E::path())) .body(data) .header(ACCESS_KEY_HEADER, &self.access_key) .header("Content-Type", "application/octet-stream") @@ -115,27 +118,27 @@ impl Client { } /// Returns the current API version. It's being incremented every time the API changes. - pub async fn api_version(&mut self) -> Result { + pub async fn api_version(&self) -> Result { self.get_and_parse::(&()).await } /// Creates a new session key - pub async fn session_key(&mut self) -> Result { + pub async fn session_key(&self) -> Result { self.get_and_parse::(&()).await } /// Verifies if the access key is valid and returns some information about its permissions - pub async fn verify_access_key(&mut self) -> Result { + pub async fn verify_access_key(&self) -> Result { self.get_and_parse::(&()).await } /// Returns the list of tag and file services of the client - pub async fn get_services(&mut self) -> Result { + pub async fn get_services(&self) -> Result { self.get_and_parse::(&()).await } /// Adds a file to hydrus - pub async fn add_file>(&mut self, path: S) -> Result { + pub async fn add_file>(&self, path: S) -> Result { self.post_and_parse::(AddFileRequest { path: path.as_ref().to_string(), }) @@ -143,12 +146,12 @@ impl Client { } /// Adds a file from binary data to hydrus - pub async fn add_binary_file(&mut self, data: Vec) -> Result { + pub async fn add_binary_file(&self, data: Vec) -> Result { self.post_binary::(data).await } /// Moves files with matching hashes to the trash - pub async fn delete_files(&mut self, hashes: Vec) -> Result<()> { + pub async fn delete_files(&self, hashes: Vec) -> Result<()> { self.post::(DeleteFilesRequest { hashes }) .await?; @@ -156,7 +159,7 @@ impl Client { } /// Pulls files out of the trash by hash - pub async fn undelete_files(&mut self, hashes: Vec) -> Result<()> { + pub async fn undelete_files(&self, hashes: Vec) -> Result<()> { self.post::(UndeleteFilesRequest { hashes }) .await?; @@ -164,7 +167,7 @@ impl 
Client { } /// Moves files from the inbox into the archive - pub async fn archive_files(&mut self, hashes: Vec) -> Result<()> { + pub async fn archive_files(&self, hashes: Vec) -> Result<()> { self.post::(ArchiveFilesRequest { hashes }) .await?; @@ -172,7 +175,7 @@ impl Client { } /// Moves files from the archive into the inbox - pub async fn unarchive_files(&mut self, hashes: Vec) -> Result<()> { + pub async fn unarchive_files(&self, hashes: Vec) -> Result<()> { self.post::(UnarchiveFilesRequest { hashes }) .await?; @@ -180,7 +183,7 @@ impl Client { } /// Returns the list of tags as the client would see them in a human friendly order - pub async fn clean_tags(&mut self, tags: Vec) -> Result { + pub async fn clean_tags(&self, tags: Vec) -> Result { self.get_and_parse::(&[( "tags", string_list_to_json_array(tags), @@ -189,7 +192,7 @@ impl Client { } /// Adds tags to files with the given hashes - pub async fn add_tags(&mut self, request: AddTagsRequest) -> Result<()> { + pub async fn add_tags(&self, request: AddTagsRequest) -> Result<()> { self.post::(request).await?; Ok(()) @@ -197,7 +200,7 @@ impl Client { /// Searches for files in the inbox, the archive or both pub async fn search_files( - &mut self, + &self, tags: Vec, location: FileSearchLocation, ) -> Result { @@ -211,19 +214,37 @@ impl Client { /// Returns the metadata for a given list of file_ids or hashes pub async fn get_file_metadata( - &mut self, + &self, file_ids: Vec, hashes: Vec, ) -> Result { - self.get_and_parse::(&[ - ("file_ids", number_list_to_json_array(file_ids)), - ("hashes", string_list_to_json_array(hashes)), - ]) - .await + let query = if file_ids.len() > 0 { + ("file_ids", number_list_to_json_array(file_ids)) + } else { + ("hashes", string_list_to_json_array(hashes)) + }; + self.get_and_parse::(&[query]) + .await + } + + /// Returns the metadata for a single file identifier + pub async fn get_file_metadata_by_identifier( + &self, + identifier: FileIdentifier, + ) -> Result { + let mut 
response = match identifier.clone() { + FileIdentifier::ID(id) => self.get_file_metadata(vec![id], vec![]).await?, + FileIdentifier::Hash(hash) => self.get_file_metadata(vec![], vec![hash]).await?, + }; + + response + .metadata + .pop() + .ok_or_else(|| Error::FileNotFound(identifier)) } /// Returns the bytes of a file from hydrus - pub async fn get_file(&mut self, id: FileIdentifier) -> Result { + pub async fn get_file(&self, id: FileIdentifier) -> Result { let response = match id { FileIdentifier::ID(id) => { self.get::(&[("file_id", id)]) @@ -247,24 +268,24 @@ impl Client { } /// Returns all files associated with the given url - pub async fn get_url_files>(&mut self, url: S) -> Result { + pub async fn get_url_files>(&self, url: S) -> Result { self.get_and_parse::(&[("url", url.as_ref())]) .await } /// Returns information about the given url - pub async fn get_url_info>(&mut self, url: S) -> Result { + pub async fn get_url_info>(&self, url: S) -> Result { self.get_and_parse::(&[("url", url.as_ref())]) .await } /// Adds an url to hydrus, optionally with additional tags and a destination page - pub async fn add_url(&mut self, request: AddUrlRequest) -> Result { + pub async fn add_url(&self, request: AddUrlRequest) -> Result { self.post_and_parse::(request).await } /// Associates urls with the given file hashes - pub async fn associate_urls(&mut self, urls: Vec, hashes: Vec) -> Result<()> { + pub async fn associate_urls(&self, urls: Vec, hashes: Vec) -> Result<()> { self.post::(AssociateUrlRequest { hashes, urls_to_add: urls, @@ -276,11 +297,7 @@ impl Client { } /// Disassociates urls with the given file hashes - pub async fn disassociate_urls( - &mut self, - urls: Vec, - hashes: Vec, - ) -> Result<()> { + pub async fn disassociate_urls(&self, urls: Vec, hashes: Vec) -> Result<()> { self.post::(AssociateUrlRequest { hashes, urls_to_add: vec![], diff --git a/src/endpoints/common.rs b/src/api_core/common.rs similarity index 71% rename from src/endpoints/common.rs 
rename to src/api_core/common.rs index b4e5862..6160ee6 100644 --- a/src/endpoints/common.rs +++ b/src/api_core/common.rs @@ -11,18 +11,18 @@ pub struct BasicHashList { pub hashes: Vec, } -#[derive(Clone, Default, Deserialize)] +#[derive(Clone, Debug, Default, Deserialize)] pub struct FileMetadataInfo { pub file_id: u64, pub hash: String, - pub size: u64, + pub size: Option, pub mime: String, pub ext: String, - pub width: u32, - pub height: u32, + pub width: Option, + pub height: Option, pub duration: Option, - pub has_audio: bool, - pub num_frames: Option, + pub has_audio: Option, + pub num_frames: Option, pub num_words: Option, pub is_inbox: bool, pub is_local: bool, @@ -32,11 +32,19 @@ pub struct FileMetadataInfo { pub service_names_to_statuses_to_display_tags: HashMap>>, } +#[derive(Clone, Debug)] pub enum FileIdentifier { ID(u64), Hash(String), } +impl FileIdentifier { + pub fn hash(hash: S) -> Self { + Self::Hash(hash.to_string()) + } +} + +#[derive(Clone)] pub struct FileRecord { pub bytes: Vec, pub mime_type: String, diff --git a/src/endpoints/mod.rs b/src/api_core/mod.rs similarity index 79% rename from src/endpoints/mod.rs rename to src/api_core/mod.rs index 13cda47..222f522 100644 --- a/src/endpoints/mod.rs +++ b/src/api_core/mod.rs @@ -5,12 +5,13 @@ pub mod access_management; pub mod adding_files; pub mod adding_tags; pub mod adding_urls; +pub mod client; pub mod common; pub mod searching_and_fetching_files; -pub trait Endpoint { +pub(crate) trait Endpoint { type Request: Serialize; type Response: DeserializeOwned; - fn get_path() -> String; + fn path() -> String; } diff --git a/src/endpoints/searching_and_fetching_files.rs b/src/api_core/searching_and_fetching_files.rs similarity index 70% rename from src/endpoints/searching_and_fetching_files.rs rename to src/api_core/searching_and_fetching_files.rs index aa07178..b3e20ef 100644 --- a/src/endpoints/searching_and_fetching_files.rs +++ b/src/api_core/searching_and_fetching_files.rs @@ -1,5 +1,5 @@ -use 
crate::endpoints::common::FileMetadataInfo; -use crate::endpoints::Endpoint; +use crate::api_core::common::FileMetadataInfo; +use crate::api_core::Endpoint; #[derive(Debug, Clone, Deserialize)] pub struct SearchFilesResponse { @@ -7,7 +7,6 @@ pub struct SearchFilesResponse { } pub enum FileSearchLocation { - All, Inbox, Archive, } @@ -16,14 +15,6 @@ impl FileSearchLocation { pub fn is_inbox(&self) -> bool { if let &Self::Inbox = &self { true - } else { - self.is_all() - } - } - - pub fn is_all(&self) -> bool { - if let &Self::All = &self { - true } else { false } @@ -33,7 +24,7 @@ impl FileSearchLocation { if let &Self::Archive = &self { true } else { - self.is_all() + false } } } @@ -44,14 +35,14 @@ impl Endpoint for SearchFiles { type Request = (); type Response = SearchFilesResponse; - fn get_path() -> String { + fn path() -> String { String::from("get_files/search_files") } } -#[derive(Clone, Default, Deserialize)] +#[derive(Clone, Debug, Default, Deserialize)] pub struct FileMetadataResponse { - metadata: Vec, + pub metadata: Vec, } pub struct FileMetadata; @@ -60,7 +51,7 @@ impl Endpoint for FileMetadata { type Request = (); type Response = FileMetadataResponse; - fn get_path() -> String { + fn path() -> String { String::from("get_files/file_metadata") } } @@ -71,7 +62,7 @@ impl Endpoint for GetFile { type Request = (); type Response = (); - fn get_path() -> String { + fn path() -> String { String::from("get_files/file") } } diff --git a/src/error.rs b/src/error.rs index 34e7d64..60f3ad6 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,3 +1,4 @@ +use crate::api_core::common::FileIdentifier; use std::error::Error as StdError; use std::fmt; @@ -7,6 +8,11 @@ pub type Result = std::result::Result; pub enum Error { Reqwest(reqwest::Error), Hydrus(String), + InvalidServiceType(String), + ImportVetoed(String), + ImportFailed(String), + FileNotFound(FileIdentifier), + InvalidMime(String), } impl fmt::Display for Error { @@ -14,6 +20,13 @@ impl fmt::Display for 
Error { match self { Self::Reqwest(e) => e.fmt(f), Self::Hydrus(msg) => msg.fmt(f), + Self::InvalidServiceType(service_type) => { + write!(f, "Invalid Service Type '{}'", service_type) + } + Self::ImportFailed(msg) => write!(f, "File import failed: {}", msg), + Self::ImportVetoed(msg) => write!(f, "File import vetoed: {}", msg), + Self::FileNotFound(id) => write!(f, "File {:?} not found", id), + Self::InvalidMime(mime) => write!(f, "Failed to parse invalid mime {}", mime), } } } @@ -22,7 +35,7 @@ impl StdError for Error { fn source(&self) -> Option<&(dyn StdError + 'static)> { match self { Self::Reqwest(e) => e.source(), - Self::Hydrus(_) => None, + _ => None, } } } diff --git a/src/lib.rs b/src/lib.rs index 068f010..05af9c0 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -3,10 +3,41 @@ //! token that can be retrieved in the hydrus client from the *review services* dialog. //! Different actions require different permissions, you can read about it in the [official docs](https://hydrusnetwork.github.io/hydrus/help/client_api.html). //! -//! ## Usage Example +//! ## Hydrus Usage Example +//! +//! ``` +//! # use hydrus_api::{Hydrus, Client}; +//! use std::env; +//! use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation; +//! use hydrus_api::wrapper::tag::Tag; +//! use hydrus_api::wrapper::service::ServiceName; +//! use hydrus_api::wrapper::hydrus_file::FileStatus; +//! use hydrus_api::wrapper::page::PageIdentifier; +//! +//! # #[tokio::test] +//! # async fn doctest() { +//! let hydrus_url = env::var("HYDRUS_URL").unwrap(); +//! let access_key = env::var("HYDRUS_ACCESS_KEY").unwrap(); +//! let hydrus = Hydrus::new(Client::new(hydrus_url, access_key)); +//! let files = hydrus.search(FileSearchLocation::Archive,vec![Tag::from("character:megumin")]).await.unwrap(); +//! +//! for mut file in files { +//! file.add_tags(ServiceName::my_tags(), vec![Tag::from("ark mage")]).await.unwrap(); +//! } +//! +//! let url = hydrus.import() +//! 
.url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium") +//! .page(PageIdentifier::name("My Import Page")) +//! .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin")) +//! .show_page(true) +//! .run().await.unwrap(); +//! # } +//! ``` +//! +//! ## Client Usage Example //! ``` //! use hydrus_api::Client; -//! use hydrus_api::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction}; +//! use hydrus_api::api_core::adding_tags::{AddTagsRequestBuilder, TagAction}; //! use std::env; //! # #[tokio::test] //! # async fn doctest() { @@ -14,7 +45,7 @@ //! Client::new( //! env::var("HYDRUS_URL").unwrap(), //! env::var("HYDRUS_ACCESS_KEY").unwrap(), -//! ).unwrap(); +//! ); //! // let's first import a file //! let hash = client.add_file("/path/to/my/file").await.unwrap().hash; //! @@ -34,9 +65,10 @@ #[macro_use] extern crate serde; -pub mod client; -pub mod endpoints; +pub use api_core::client::Client; +pub use wrapper::hydrus::Hydrus; + +pub mod api_core; pub mod error; pub(crate) mod utils; - -pub use client::Client; +pub mod wrapper; diff --git a/src/utils.rs b/src/utils.rs index da0e527..95f308a 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,3 +1,5 @@ +use crate::wrapper::tag::Tag; + pub fn string_list_to_json_array(l: Vec) -> String { format!("[\"{}\"]", l.join("\",\"")) } @@ -12,3 +14,8 @@ pub fn number_list_to_json_array(l: Vec) -> String { )) ) } + +/// Converts a list of tags into a list of string tags +pub fn tag_list_to_string_list(tags: Vec) -> Vec { + tags.into_iter().map(|t| t.to_string()).collect() +} diff --git a/src/wrapper/builders/import_builder.rs b/src/wrapper/builders/import_builder.rs new file mode 100644 index 0000000..f9b9e6d --- /dev/null +++ b/src/wrapper/builders/import_builder.rs @@ -0,0 +1,167 @@ +use crate::api_core::adding_files::{STATUS_IMPORT_FAILED, STATUS_IMPORT_VETOED}; +use crate::api_core::adding_urls::AddUrlRequestBuilder; +use crate::error::{Error, Result}; +use 
crate::utils::tag_list_to_string_list; +use crate::wrapper::hydrus_file::HydrusFile; +use crate::wrapper::page::PageIdentifier; +use crate::wrapper::service::ServiceName; +use crate::wrapper::tag::Tag; +use crate::wrapper::url::Url; +use crate::Client; +use std::collections::HashMap; +use std::io::Read; + +pub struct ImportBuilder { + pub(crate) client: Client, +} + +impl ImportBuilder { + pub fn file(self, file: FileImport) -> FileImportBuilder { + FileImportBuilder { + client: self.client, + file, + } + } + + pub fn url(self, url: S) -> UrlImportBuilder { + UrlImportBuilder::new(self.client.clone(), url) + } +} + +pub enum FileImport { + Path(String), + Binary(Vec), +} + +impl FileImport { + pub fn path(path: S) -> Self { + Self::Path(path.to_string()) + } + + pub fn binary(reader: &mut R) -> Self { + let mut bytes = Vec::new(); + let _ = reader.read_to_end(&mut bytes); + Self::Binary(bytes) + } +} + +pub struct FileImportBuilder { + client: Client, + file: FileImport, +} + +impl FileImportBuilder { + pub async fn run(self) -> Result { + let response = match self.file { + FileImport::Path(path) => self.client.add_file(path).await?, + FileImport::Binary(b) => self.client.add_binary_file(b).await?, + }; + + if response.status == STATUS_IMPORT_FAILED { + Err(Error::ImportFailed(response.note)) + } else if response.status == STATUS_IMPORT_VETOED { + Err(Error::ImportVetoed(response.note)) + } else { + Ok(HydrusFile::from_raw_status_and_hash( + self.client, + response.status, + response.hash, + )) + } + } +} + +pub struct UrlImportBuilder { + client: Client, + url: String, + page: Option, + show_page: bool, + filter_tags: Vec, + service_tag_mappings: HashMap>, +} + +impl UrlImportBuilder { + pub fn new(client: Client, url: S) -> Self { + Self { + client, + url: url.to_string(), + page: None, + show_page: false, + filter_tags: vec![], + service_tag_mappings: Default::default(), + } + } + + /// Sets the destination page of the import + pub fn page(mut self, page: 
PageIdentifier) -> Self { + self.page = Some(page); + + self + } + + /// If the destination page of the import should be focussed + pub fn show_page(mut self, show: bool) -> Self { + self.show_page = show; + + self + } + + /// Adds a tag that should be filtered + pub fn add_filter_tag(mut self, tag: Tag) -> Self { + self.filter_tags.push(tag); + + self + } + + /// Adds multiple tags that should be filtered + pub fn add_filter_tags(mut self, mut tags: Vec) -> Self { + self.filter_tags.append(&mut tags); + + self + } + + /// Adds an additional tag for the imported file + pub fn add_additional_tag(self, service: ServiceName, tag: Tag) -> Self { + self.add_additional_tags(service, vec![tag]) + } + + /// Adds multiple additional tags for the import + pub fn add_additional_tags(mut self, service: ServiceName, mut tags: Vec) -> Self { + if let Some(service_tags) = self.service_tag_mappings.get_mut(&service.0) { + service_tags.append(&mut tags); + } else { + self.service_tag_mappings.insert(service.0, tags); + } + + self + } + + /// Imports the URL + pub async fn run(self) -> Result { + let mut request = AddUrlRequestBuilder::default().url(&self.url); + + for (service, tags) in self.service_tag_mappings { + request = request.add_tags(service, tag_list_to_string_list(tags)); + } + request = request.add_filter_tags(tag_list_to_string_list(self.filter_tags)); + if let Some(page) = self.page { + request = match page { + PageIdentifier::Name(n) => request.destination_page_name(n), + PageIdentifier::Key(k) => request.destination_page_key(k), + }; + } + request = request.show_destination_page(self.show_page); + + let response = self.client.add_url(request.build()).await?; + let url_info = self.client.get_url_info(&self.url).await?; + + Ok(Url { + url: self.url, + client: self.client, + normalised_url: response.normalised_url, + url_type: url_info.url_type.into(), + match_name: url_info.match_name, + can_parse: url_info.can_parse, + }) + } +} diff --git 
a/src/wrapper/builders/mod.rs b/src/wrapper/builders/mod.rs new file mode 100644 index 0000000..2402223 --- /dev/null +++ b/src/wrapper/builders/mod.rs @@ -0,0 +1,2 @@ +pub mod import_builder; +pub mod tagging_builder; diff --git a/src/wrapper/builders/tagging_builder.rs b/src/wrapper/builders/tagging_builder.rs new file mode 100644 index 0000000..c80fa8b --- /dev/null +++ b/src/wrapper/builders/tagging_builder.rs @@ -0,0 +1,70 @@ +use crate::api_core::adding_tags::{AddTagsRequestBuilder, TagAction}; +use crate::error::Result; +use crate::wrapper::service::ServiceName; +use crate::wrapper::tag::Tag; +use crate::Client; +use std::collections::HashMap; + +pub struct TaggingBuilder { + client: Client, + hashes: Vec, + tag_mappings: HashMap>>, +} + +impl TaggingBuilder { + pub(crate) fn new(client: Client) -> Self { + Self { + client, + hashes: Vec::new(), + tag_mappings: Default::default(), + } + } + + /// Adds a file that should get the tags defined for this request + pub fn add_file(mut self, hash: S) -> Self { + self.hashes.push(hash.to_string()); + + self + } + + /// Adds a single tag for a given service + pub fn add_tag(self, service: ServiceName, action: TagAction, tag: Tag) -> Self { + self.add_tags(service, action, vec![tag]) + } + + /// Adds tags with actions for the given service + pub fn add_tags(mut self, service: ServiceName, action: TagAction, mut tags: Vec) -> Self { + let service_action_mappings = + if let Some(service_action_mappings) = self.tag_mappings.get_mut(&service) { + service_action_mappings + } else { + self.tag_mappings.insert(service.clone(), HashMap::new()); + self.tag_mappings.get_mut(&service).unwrap() + }; + if let Some(action_tag_mappings) = service_action_mappings.get_mut(&action) { + action_tag_mappings.append(&mut tags) + } else { + service_action_mappings.insert(action, tags); + } + + self + } + + /// Executes the request + pub async fn run(self) -> Result<()> { + let mut request = 
AddTagsRequestBuilder::default().add_hashes(self.hashes); + for (service, action_tag_mappings) in self.tag_mappings { + for (action, tags) in action_tag_mappings { + for tag in tags { + request = request.add_tag_with_action( + service.0.clone(), + tag.to_string(), + action.clone(), + ); + } + } + } + + self.client.add_tags(request.build()).await + } +} diff --git a/src/wrapper/hydrus.rs b/src/wrapper/hydrus.rs new file mode 100644 index 0000000..0cf58ab --- /dev/null +++ b/src/wrapper/hydrus.rs @@ -0,0 +1,97 @@ +use crate::api_core::common::FileIdentifier; +use crate::api_core::searching_and_fetching_files::FileSearchLocation; +use crate::error::Result; +use crate::utils::tag_list_to_string_list; +use crate::wrapper::builders::import_builder::ImportBuilder; +use crate::wrapper::builders::tagging_builder::TaggingBuilder; +use crate::wrapper::hydrus_file::HydrusFile; +use crate::wrapper::service::Services; +use crate::wrapper::tag::Tag; +use crate::wrapper::url::Url; +use crate::wrapper::version::Version; +use crate::Client; + +/// A high level wrapper for the hydrus API for easier management of files, tags +/// urls etc. 
+pub struct Hydrus { + client: Client, +} + +impl Hydrus { + /// Creates a new high level Hydrus API client + pub fn new(client: Client) -> Self { + Self { client } + } + + /// Returns the Hydrus and API Version + pub async fn version(&self) -> Result { + let response = self.client.api_version().await?; + Ok(Version { + api: response.version, + hydrus: response.hydrus_version, + }) + } + + /// Returns a list of available services + pub async fn services(&self) -> Result { + let response = self.client.get_services().await?; + + Ok(Services::from_response(self.client.clone(), response)) + } + + /// Creates an import builder to build an import request to hydrus + pub fn import(&self) -> ImportBuilder { + ImportBuilder { + client: self.client.clone(), + } + } + + /// Returns information about a given url in an object that allows + /// further operations with that url + pub async fn url>(&self, url: S) -> Result { + let info = self.client.get_url_info(&url).await?; + + Ok(Url { + client: self.client.clone(), + normalised_url: info.normalised_url, + url_type: info.url_type.into(), + match_name: info.match_name, + url: url.as_ref().to_string(), + can_parse: info.can_parse, + }) + } + + /// Returns a file by identifier to perform further operations on + pub async fn file(&self, identifier: FileIdentifier) -> Result { + let metadata = self + .client + .get_file_metadata_by_identifier(identifier) + .await?; + + Ok(HydrusFile::from_metadata(self.client.clone(), metadata)) + } + + /// Starts a request to bulk add tags to files + pub fn tagging(&self) -> TaggingBuilder { + TaggingBuilder::new(self.client.clone()) + } + + /// Searches for files that have the given tags and returns a list of hydrus files as a result + pub async fn search( + &self, + location: FileSearchLocation, + tags: Vec, + ) -> Result> { + let search_result = self + .client + .search_files(tag_list_to_string_list(tags), location) + .await?; + let files = search_result + .file_ids + .into_iter() + .map(|id| 
HydrusFile::from_id(self.client.clone(), id)) + .collect(); + + Ok(files) + } +} diff --git a/src/wrapper/hydrus_file.rs b/src/wrapper/hydrus_file.rs new file mode 100644 index 0000000..12036cc --- /dev/null +++ b/src/wrapper/hydrus_file.rs @@ -0,0 +1,264 @@ +use crate::api_core::adding_tags::{AddTagsRequestBuilder, TagAction}; +use crate::api_core::common::{FileIdentifier, FileMetadataInfo, FileRecord}; +use crate::error::{Error, Result}; +use crate::utils::tag_list_to_string_list; +use crate::wrapper::service::ServiceName; +use crate::wrapper::tag::Tag; +use crate::Client; +use mime::Mime; +use std::collections::HashMap; + +#[derive(Clone, Debug, PartialOrd, PartialEq)] +pub enum FileStatus { + ReadyForImport, + InDatabase, + Deleted, + Unknown, +} + +impl Eq for FileStatus {} + +#[derive(Clone)] +pub struct HydrusFile { + pub(crate) client: Client, + pub id: FileIdentifier, + pub status: FileStatus, + pub(crate) metadata: Option, +} + +impl HydrusFile { + pub(crate) fn from_id(client: Client, id: u64) -> Self { + Self { + client, + id: FileIdentifier::ID(id), + status: FileStatus::Unknown, + metadata: None, + } + } + + pub(crate) fn from_raw_status_and_hash( + client: Client, + status: u8, + hash: S, + ) -> Self { + let status = if status == 3 { + FileStatus::Deleted + } else if status == 0 { + FileStatus::ReadyForImport + } else { + FileStatus::InDatabase + }; + Self { + client, + id: FileIdentifier::Hash(hash.to_string()), + status, + metadata: None, + } + } + + pub(crate) fn from_metadata(client: Client, metadata: FileMetadataInfo) -> Self { + let status = if metadata.is_trashed { + FileStatus::Deleted + } else { + FileStatus::InDatabase + }; + + Self { + client, + id: FileIdentifier::Hash(metadata.hash.clone()), + status, + metadata: Some(metadata), + } + } + + /// Deletes the internally stored metadata about the file and retrieves it again + pub async fn update(&mut self) -> Result<()> { + self.metadata = None; + self.metadata().await?; + Ok(()) + } + + /// 
Returns the hash of the file + /// if the file identifier is an id it calls hydrus to resolve the file + pub async fn hash(&mut self) -> Result { + match &self.id { + FileIdentifier::ID(_) => { + let metadata = self.metadata().await?; + Ok(metadata.hash.clone()) + } + FileIdentifier::Hash(hash) => Ok(hash.clone()), + } + } + + /// Returns the file size in bytes + pub async fn size(&mut self) -> Result> { + let metadata = self.metadata().await?; + + Ok(metadata.size.clone()) + } + + /// Returns the mime of the file + pub async fn mime(&mut self) -> Result { + let metadata = self.metadata().await?; + let mime = metadata + .mime + .as_str() + .parse() + .map_err(|_| Error::InvalidMime(metadata.mime.clone()))?; + + Ok(mime) + } + + /// Return the file extension + pub async fn ext(&mut self) -> Result { + let metadata = self.metadata().await?; + + Ok(metadata.ext.clone()) + } + + /// Returns the dimensions of the file in pixels + pub async fn dimensions(&mut self) -> Result> { + let metadata = self.metadata().await?; + if let (Some(width), Some(height)) = (&metadata.width, &metadata.height) { + Ok(Some((*width, *height))) + } else { + Ok(None) + } + } + + /// Returns the duration of the file in seconds if it's a video + pub async fn duration(&mut self) -> Result> { + let metadata = self.metadata().await?; + + Ok(metadata.duration.clone()) + } + + /// Returns the number of frames of the file if it's a video + pub async fn num_frames(&mut self) -> Result> { + let metadata = self.metadata().await?; + + Ok(metadata.num_frames.clone()) + } + + /// Returns if the file has audio + pub async fn has_audio(&mut self) -> Result { + let metadata = self.metadata().await?; + + Ok(metadata.has_audio.unwrap_or(false)) + } + + /// Returns if the file is currently in the inbox + pub async fn in_inbox(&mut self) -> Result { + let metadata = self.metadata().await?; + + Ok(metadata.is_inbox) + } + + /// Returns if the file is stored locally + pub async fn stored_locally(&mut self) -> Result 
{ + let metadata = self.metadata().await?; + + Ok(metadata.is_local) + } + + /// Returns if the file has been moved to the trash + pub async fn moved_to_trashed(&mut self) -> Result { + let metadata = self.metadata().await?; + + Ok(metadata.is_trashed) + } + + /// Associates the file with a list of urls + pub async fn associate_urls(&mut self, urls: Vec) -> Result<()> { + let hash = self.hash().await?; + self.client.associate_urls(urls, vec![hash]).await + } + + /// Disassociates the file from a list of urls + pub async fn disassociate_urls(&mut self, urls: Vec) -> Result<()> { + let hash = self.hash().await?; + self.client.disassociate_urls(urls, vec![hash]).await + } + + /// Returns a map of each service to the list of tags assigned within that service + pub async fn services_with_tags(&mut self) -> Result>> { + let metadata = self.metadata().await?; + let mut tag_mappings = HashMap::new(); + + for (service, status_tags) in &metadata.service_names_to_statuses_to_tags { + let mut tag_list = Vec::new(); + + for (_, tags) in status_tags { + tag_list.append(&mut tags.into_iter().map(|t| t.into()).collect()) + } + tag_mappings.insert(ServiceName(service.clone()), tag_list); + } + + Ok(tag_mappings) + } + + /// Returns a list of all tags assigned to the file + pub async fn tags(&mut self) -> Result> { + let mut tag_list = Vec::new(); + let tag_mappings = self.services_with_tags().await?; + + for (_, mut tags) in tag_mappings { + tag_list.append(&mut tags); + } + + Ok(tag_list) + } + + /// Adds tags for a specific service to the file + pub async fn add_tags(&mut self, service: ServiceName, tags: Vec) -> Result<()> { + let hash = self.hash().await?; + let request = AddTagsRequestBuilder::default() + .add_hash(hash) + .add_tags(service.0, tag_list_to_string_list(tags)) + .build(); + + self.client.add_tags(request).await + } + + /// Allows modification of tags by using the defined tag actions + pub async fn modify_tags( + &mut self, + service: ServiceName, + action: TagAction, + tags: Vec, + ) -> 
Result<()> { + let hash = self.hash().await?; + let mut reqwest = AddTagsRequestBuilder::default().add_hash(hash); + + for tag in tags { + reqwest = + reqwest.add_tag_with_action(service.0.clone(), tag.to_string(), action.clone()); + } + + self.client.add_tags(reqwest.build()).await + } + + /// Retrieves the file record bytes + pub async fn retrieve(&self) -> Result { + self.client.get_file(self.id.clone()).await + } + + /// Returns the metadata for the given file + /// if there's already known metadata about the file it uses that + async fn metadata(&mut self) -> Result<&FileMetadataInfo> { + if self.metadata.is_none() { + let metadata = self + .client + .get_file_metadata_by_identifier(self.id.clone()) + .await?; + self.status = if metadata.is_trashed { + FileStatus::Deleted + } else { + FileStatus::InDatabase + }; + self.metadata = Some(metadata); + } + + Ok(self.metadata.as_ref().unwrap()) + } +} diff --git a/src/wrapper/mod.rs b/src/wrapper/mod.rs new file mode 100644 index 0000000..1b67080 --- /dev/null +++ b/src/wrapper/mod.rs @@ -0,0 +1,8 @@ +pub mod builders; +pub mod hydrus; +pub mod hydrus_file; +pub mod page; +pub mod service; +pub mod tag; +pub mod url; +pub mod version; diff --git a/src/wrapper/page.rs b/src/wrapper/page.rs new file mode 100644 index 0000000..de4e914 --- /dev/null +++ b/src/wrapper/page.rs @@ -0,0 +1,20 @@ +#[derive(Clone)] +pub struct HydrusPage { + pub id: PageIdentifier, +} + +#[derive(Clone)] +pub enum PageIdentifier { + Name(String), + Key(String), +} + +impl PageIdentifier { + pub fn name(name: S) -> Self { + Self::Name(name.to_string()) + } + + pub fn key(key: S) -> Self { + Self::Key(key.to_string()) + } +} diff --git a/src/wrapper/service.rs b/src/wrapper/service.rs new file mode 100644 index 0000000..1109616 --- /dev/null +++ b/src/wrapper/service.rs @@ -0,0 +1,145 @@ +use crate::api_core::access_management::GetServicesResponse; +use crate::api_core::access_management::{ + SERVICE_TYPE_ALL_KNOWN_FILES, 
SERVICE_TYPE_ALL_KNOWN_TAGS, SERVICE_TYPE_ALL_LOCAL_FILES, + SERVICE_TYPE_FILE_REPOSITORIES, SERVICE_TYPE_LOCAL_FILES, SERVICE_TYPE_LOCAL_TAGS, + SERVICE_TYPE_TAG_REPOSITORIES, SERVICE_TYPE_TRASH, +}; +use crate::error::Error; +use crate::Client; +use std::collections::HashMap; +use std::convert::TryFrom; + +#[derive(Clone, PartialOrd, PartialEq, Hash)] +pub enum ServiceType { + LocalTags, + TagRepositories, + LocalFiles, + FileRepositories, + AllLocalFiles, + AllKnownFiles, + AllKnownTags, + Trash, +} + +impl Eq for ServiceType {} + +impl TryFrom for ServiceType { + type Error = Error; + + fn try_from(value: String) -> Result { + match value.as_str() { + s if s == SERVICE_TYPE_LOCAL_TAGS => Ok(Self::LocalTags), + s if s == SERVICE_TYPE_TAG_REPOSITORIES => Ok(Self::TagRepositories), + s if s == SERVICE_TYPE_LOCAL_FILES => Ok(Self::LocalFiles), + s if s == SERVICE_TYPE_FILE_REPOSITORIES => Ok(Self::FileRepositories), + s if s == SERVICE_TYPE_ALL_LOCAL_FILES => Ok(Self::AllLocalFiles), + s if s == SERVICE_TYPE_ALL_KNOWN_FILES => Ok(Self::AllKnownFiles), + s if s == SERVICE_TYPE_ALL_KNOWN_TAGS => Ok(Self::AllKnownTags), + s if s == SERVICE_TYPE_TRASH => Ok(Self::Trash), + _ => Err(Error::InvalidServiceType(value)), + } + } +} + +impl ToString for ServiceType { + fn to_string(&self) -> String { + match self { + ServiceType::LocalTags => String::from(SERVICE_TYPE_LOCAL_TAGS), + ServiceType::TagRepositories => String::from(SERVICE_TYPE_TAG_REPOSITORIES), + ServiceType::LocalFiles => String::from(SERVICE_TYPE_LOCAL_FILES), + ServiceType::FileRepositories => String::from(SERVICE_TYPE_FILE_REPOSITORIES), + ServiceType::AllLocalFiles => String::from(SERVICE_TYPE_ALL_LOCAL_FILES), + ServiceType::AllKnownFiles => String::from(SERVICE_TYPE_ALL_KNOWN_FILES), + ServiceType::AllKnownTags => String::from(SERVICE_TYPE_ALL_KNOWN_TAGS), + ServiceType::Trash => String::from(SERVICE_TYPE_TRASH), + } + } +} + +#[derive(Clone, PartialOrd, PartialEq, Hash)] +pub struct ServiceName(pub 
String); + +impl Eq for ServiceName {} + +impl ServiceName { + pub fn my_tags() -> Self { + Self(String::from("my tags")) + } + + pub fn my_files() -> Self { + Self(String::from("my files")) + } + + pub fn public_tag_repository() -> Self { + Self(String::from("public tag repository")) + } + + pub fn all_local_files() -> Self { + Self(String::from("all local files")) + } + + pub fn all_known_tags() -> Self { + Self(String::from("all known tags")) + } + + pub fn all_known_files() -> Self { + Self(String::from("all known files")) + } +} + +#[derive(Clone)] +pub struct Service { + client: Client, + pub name: ServiceName, + pub key: String, + pub service_type: ServiceType, +} + +#[derive(Clone)] +pub struct Services { + inner: HashMap>, +} + +impl Services { + /// Creates the services list from a given hydrus response + pub fn from_response(client: Client, response: GetServicesResponse) -> Self { + let mut response = response.0; + let mut mapped_types = HashMap::with_capacity(response.keys().len()); + let keys = response.keys().cloned().collect::>().clone(); + + for service_type in &keys { + if let Ok(mapped_type) = ServiceType::try_from(service_type.clone()) { + let basic_services = response.remove(service_type).unwrap(); + let mut service_list = Vec::new(); + + for basic_service in basic_services { + service_list.push(Service { + service_type: mapped_type.clone(), + name: ServiceName(basic_service.name), + key: basic_service.service_key, + client: client.clone(), + }) + } + + mapped_types.insert(mapped_type, service_list); + } + } + + Self { + inner: mapped_types, + } + } + + /// Returns a list of all services of the given type + pub fn get_services(&self, service_type: ServiceType) -> Vec<&Service> { + if let Some(services) = self.inner.get(&service_type) { + let mut borrowed_services = Vec::with_capacity(services.len()); + for service in services { + borrowed_services.push(service) + } + borrowed_services + } else { + Vec::with_capacity(0) + } + } +} diff --git 
a/src/wrapper/tag.rs b/src/wrapper/tag.rs new file mode 100644 index 0000000..85ab7d1 --- /dev/null +++ b/src/wrapper/tag.rs @@ -0,0 +1,35 @@ +#[derive(Clone, Debug)] +pub struct Tag { + pub name: String, + pub namespace: Option, +} + +impl From for Tag +where + S: AsRef, +{ + fn from(value: S) -> Self { + let value = value.as_ref(); + if let Some((namespace, tag)) = value.split_once(":") { + Self { + namespace: Some(namespace.to_string()), + name: tag.to_string(), + } + } else { + Self { + name: value.to_string(), + namespace: None, + } + } + } +} + +impl ToString for Tag { + fn to_string(&self) -> String { + if let Some(namespace) = &self.namespace { + format!("{}:{}", namespace, self.name) + } else { + self.name.clone() + } + } +} diff --git a/src/wrapper/url.rs b/src/wrapper/url.rs new file mode 100644 index 0000000..bd82056 --- /dev/null +++ b/src/wrapper/url.rs @@ -0,0 +1,75 @@ +use crate::api_core::adding_urls::{ + URL_TYPE_FILE, URL_TYPE_GALLERY, URL_TYPE_POST, URL_TYPE_WATCHABLE, +}; +use crate::error::Result; +use crate::wrapper::builders::import_builder::UrlImportBuilder; +use crate::wrapper::hydrus_file::HydrusFile; +use crate::Client; + +#[derive(Clone, Debug, PartialOrd, PartialEq)] +pub enum UrlType { + Post, + File, + Gallery, + Watchable, + Unknown, +} + +impl Eq for UrlType {} + +impl From for UrlType { + fn from(value: u8) -> Self { + match value { + v if v == URL_TYPE_POST => Self::Post, + v if v == URL_TYPE_FILE => Self::File, + v if v == URL_TYPE_GALLERY => Self::Gallery, + v if v == URL_TYPE_WATCHABLE => Self::Watchable, + _ => Self::Unknown, + } + } +} + +#[derive(Clone)] +pub struct Url { + pub url: String, + pub(crate) client: Client, + pub normalised_url: String, + pub url_type: UrlType, + pub match_name: String, + pub can_parse: bool, +} + +impl Url { + /// Returns a list of files associated with the url + pub async fn files(&mut self) -> Result> { + let response = self.client.get_url_files(&self.url).await?; + let files = response + 
.url_file_statuses + .into_iter() + .map(|file| { + HydrusFile::from_raw_status_and_hash(self.client.clone(), file.status, file.hash) + }) + .collect(); + + Ok(files) + } + + /// Creates an import builder for the url + pub fn import(&mut self) -> UrlImportBuilder { + UrlImportBuilder::new(self.client.clone(), &self.url) + } + + /// Associates the url with a list of file hashes + pub async fn associate(&mut self, hashes: Vec) -> Result<()> { + self.client + .associate_urls(vec![self.url.clone()], hashes) + .await + } + + /// Disassociates the url with a list of file hashes + pub async fn disassociate(&mut self, hashes: Vec) -> Result<()> { + self.client + .disassociate_urls(vec![self.url.clone()], hashes) + .await + } +} diff --git a/src/wrapper/version.rs b/src/wrapper/version.rs new file mode 100644 index 0000000..5150d26 --- /dev/null +++ b/src/wrapper/version.rs @@ -0,0 +1,4 @@ +pub struct Version { + pub api: u32, + pub hydrus: u32, +} diff --git a/tests/client/mod.rs b/tests/client/mod.rs new file mode 100644 index 0000000..3c81da6 --- /dev/null +++ b/tests/client/mod.rs @@ -0,0 +1,5 @@ +mod test_access_management; +mod test_adding_files; +mod test_adding_tags; +mod test_adding_urls; +mod test_searching_and_fetching_files; diff --git a/tests/test_access_management.rs b/tests/client/test_access_management.rs similarity index 82% rename from tests/test_access_management.rs rename to tests/client/test_access_management.rs index a06313f..0ff5101 100644 --- a/tests/test_access_management.rs +++ b/tests/client/test_access_management.rs @@ -1,8 +1,8 @@ -mod common; +use super::super::common; #[tokio::test] async fn it_returns_the_api_version() { - let mut client = common::get_client(); + let client = common::get_client(); let api_version = client.api_version().await.unwrap(); assert!(api_version.hydrus_version > 0); assert!(api_version.version > 0); @@ -10,14 +10,14 @@ async fn it_returns_the_api_version() { #[tokio::test] async fn it_returns_the_session_key() { - 
let mut client = common::get_client(); + let client = common::get_client(); let session_key = client.session_key().await.unwrap(); assert!(session_key.session_key.len() > 0); } #[tokio::test] async fn it_verifies_the_access_key() { - let mut client = common::get_client(); + let client = common::get_client(); let verification_response = client.verify_access_key().await.unwrap(); assert!(verification_response.basic_permissions.len() > 0); // needs to be configured in the client but we want at least some permissions for the test assert!(verification_response.human_description.len() > 0); @@ -25,7 +25,7 @@ async fn it_verifies_the_access_key() { #[tokio::test] async fn it_returns_a_list_of_services() { - let mut client = common::get_client(); + let client = common::get_client(); let services_response = client.get_services().await.unwrap(); assert!(services_response.0.keys().len() > 0); } diff --git a/tests/test_adding_files.rs b/tests/client/test_adding_files.rs similarity index 74% rename from tests/test_adding_files.rs rename to tests/client/test_adding_files.rs index 5c4bc2d..37b1a7e 100644 --- a/tests/test_adding_files.rs +++ b/tests/client/test_adding_files.rs @@ -1,15 +1,15 @@ -mod common; +use super::super::common; #[tokio::test] async fn it_adds_files() { - let mut client = common::get_client(); + let client = common::get_client(); let result = client.add_file("/does/not/exist").await; assert!(result.is_err()); // because the path does not exist } #[tokio::test] async fn it_adds_binary_files() { - let mut client = common::get_client(); + let client = common::get_client(); let result = client .add_binary_file(vec![0u8, 0u8, 0u8, 0u8]) .await @@ -19,24 +19,24 @@ async fn it_adds_binary_files() { #[tokio::test] async fn it_deletes_files() { - let mut client = common::get_client(); + let client = common::get_client(); client.delete_files(vec![]).await.unwrap(); } #[tokio::test] async fn it_undeletes_files() { - let mut client = common::get_client(); + let client = 
common::get_client(); client.undelete_files(vec![]).await.unwrap(); } #[tokio::test] async fn it_archives_files() { - let mut client = common::get_client(); + let client = common::get_client(); client.archive_files(vec![]).await.unwrap(); } #[tokio::test] async fn it_unarchives_files() { - let mut client = common::get_client(); + let client = common::get_client(); client.unarchive_files(vec![]).await.unwrap(); } diff --git a/tests/test_adding_tags.rs b/tests/client/test_adding_tags.rs similarity index 80% rename from tests/test_adding_tags.rs rename to tests/client/test_adding_tags.rs index eb98b11..2dacb13 100644 --- a/tests/test_adding_tags.rs +++ b/tests/client/test_adding_tags.rs @@ -1,9 +1,9 @@ -use hydrus_api::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction}; -mod common; +use super::super::common; +use hydrus_api::api_core::adding_tags::{AddTagsRequestBuilder, TagAction}; #[tokio::test] async fn it_cleans_tags() { - let mut client = common::get_client(); + let client = common::get_client(); let response = client .clean_tags(vec![ "summer".into(), @@ -18,7 +18,7 @@ async fn it_cleans_tags() { #[tokio::test] async fn it_adds_tags() { - let mut client = common::get_client(); + let client = common::get_client(); let request = AddTagsRequestBuilder::default() .add_hash("0000000000000000000000000000000000000000000000000000000000000000") // valid hash, I hope no files are affected .add_tags("my tags", vec!["beach".into(), "summer".into()]) diff --git a/tests/test_adding_urls.rs b/tests/client/test_adding_urls.rs similarity index 85% rename from tests/test_adding_urls.rs rename to tests/client/test_adding_urls.rs index f8672ef..8b1f9aa 100644 --- a/tests/test_adding_urls.rs +++ b/tests/client/test_adding_urls.rs @@ -1,10 +1,9 @@ -use hydrus_api::endpoints::adding_urls::{AddUrlRequestBuilder, URL_TYPE_POST}; - -mod common; +use super::super::common; +use hydrus_api::api_core::adding_urls::{AddUrlRequestBuilder, URL_TYPE_POST}; #[tokio::test] async fn 
it_returns_files_for_an_url() { - let mut client = common::get_client(); + let client = common::get_client(); let response = client .get_url_files("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium") .await @@ -15,7 +14,7 @@ async fn it_returns_files_for_an_url() { #[tokio::test] async fn it_returns_url_information() { - let mut client = common::get_client(); + let client = common::get_client(); let info = client .get_url_info("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium") .await @@ -26,7 +25,7 @@ async fn it_returns_url_information() { #[tokio::test] async fn it_adds_urls() { - let mut client = common::get_client(); + let client = common::get_client(); let request = AddUrlRequestBuilder::default() .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium") .add_tags( @@ -42,7 +41,7 @@ async fn it_adds_urls() { #[tokio::test] async fn it_associates_urls() { - let mut client = common::get_client(); + let client = common::get_client(); client .associate_urls( vec![ @@ -57,7 +56,7 @@ async fn it_associates_urls() { #[tokio::test] async fn it_disassociates_urls() { - let mut client = common::get_client(); + let client = common::get_client(); client .disassociate_urls( vec![ diff --git a/tests/test_searching_and_fetching_files.rs b/tests/client/test_searching_and_fetching_files.rs similarity index 61% rename from tests/test_searching_and_fetching_files.rs rename to tests/client/test_searching_and_fetching_files.rs index 3403579..5d4b146 100644 --- a/tests/test_searching_and_fetching_files.rs +++ b/tests/client/test_searching_and_fetching_files.rs @@ -1,11 +1,10 @@ -use hydrus_api::endpoints::common::FileIdentifier; -use hydrus_api::endpoints::searching_and_fetching_files::FileSearchLocation; - -mod common; +use super::super::common; +use hydrus_api::api_core::common::FileIdentifier; +use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation; #[tokio::test] async fn is_searches_files() { 
- let mut client = common::get_client(); + let client = common::get_client(); client .search_files(vec!["beach".to_string()], FileSearchLocation::Archive) .await @@ -14,19 +13,19 @@ async fn is_searches_files() { #[tokio::test] async fn it_fetches_file_metadata() { - let mut client = common::get_client(); - client + let client = common::get_client(); + let response = client .get_file_metadata( vec![], vec!["0000000000000000000000000000000000000000000000000000000000000000".to_string()], ) - .await - .unwrap(); + .await; + assert!(response.is_ok()); // Even if the file doesn't exist it still returns some information about it } #[tokio::test] async fn it_fetches_single_files() { - let mut client = common::get_client(); + let client = common::get_client(); let response = client .get_file(FileIdentifier::Hash( "0000000000000000000000000000000000000000000000000000000000000000".to_string(), diff --git a/tests/common.rs b/tests/common.rs index d84f128..dba8290 100644 --- a/tests/common.rs +++ b/tests/common.rs @@ -1,4 +1,5 @@ -use hydrus_api::client::Client; +use hydrus_api::api_core::client::Client; +use hydrus_api::Hydrus; use log::LevelFilter; use std::env; use std::sync::atomic::{AtomicBool, Ordering}; @@ -15,9 +16,15 @@ pub fn setup() { pub fn get_client() -> Client { setup(); + Client::new( env::var("HYDRUS_URL").unwrap(), env::var("HYDRUS_ACCESS_KEY").unwrap(), ) - .unwrap() +} + +pub fn get_hydrus() -> Hydrus { + let client = get_client(); + + Hydrus::new(client) } diff --git a/tests/mod.rs b/tests/mod.rs new file mode 100644 index 0000000..aad7eab --- /dev/null +++ b/tests/mod.rs @@ -0,0 +1,3 @@ +mod client; +mod common; +mod wrapper; diff --git a/tests/wrapper/mod.rs b/tests/wrapper/mod.rs new file mode 100644 index 0000000..44e1c9a --- /dev/null +++ b/tests/wrapper/mod.rs @@ -0,0 +1,4 @@ +mod test_files; +mod test_hydrus; +mod test_import; +mod test_url; diff --git a/tests/wrapper/test_files.rs b/tests/wrapper/test_files.rs new file mode 100644 index 
0000000..5291bfe --- /dev/null +++ b/tests/wrapper/test_files.rs @@ -0,0 +1,90 @@ +use super::super::common; +use hydrus_api::api_core::adding_tags::TagAction; +use hydrus_api::api_core::common::FileIdentifier; +use hydrus_api::wrapper::hydrus_file::HydrusFile; +use hydrus_api::wrapper::service::ServiceName; + +async fn get_file() -> HydrusFile { + let hydrus = common::get_hydrus(); + hydrus + .file(FileIdentifier::hash( + "277a138cd1ee79fc1fdb2869c321b848d4861e45b82184487139ef66dd40b62d", // needs to exist + )) + .await + .unwrap() +} + +#[tokio::test] +async fn it_associates_with_urls() { + let mut file = get_file().await; + file.associate_urls(vec![ + "https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium".to_string(), + ]) + .await + .unwrap(); +} + +#[tokio::test] +async fn it_disassociates_with_urls() { + let mut file = get_file().await; + file.disassociate_urls(vec![ + "https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium".to_string(), + ]) + .await + .unwrap(); +} + +#[tokio::test] +async fn it_has_tags_with_services() { + let mut file = get_file().await; + let tags = file.services_with_tags().await.unwrap(); + + assert!(tags.keys().len() > 0) +} + +#[tokio::test] +async fn it_has_tags() { + let mut file = get_file().await; + let tags = file.tags().await.unwrap(); + + assert!(tags.len() > 0) // test data needs to be prepared this way +} + +#[tokio::test] +async fn it_adds_tags() { + let mut file = get_file().await; + file.add_tags( + ServiceName::public_tag_repository(), + vec!["character:megumin".into(), "ark mage".into()], + ) + .await + .unwrap(); +} + +#[tokio::test] +async fn it_modifies_tags() { + let mut file = get_file().await; + file.modify_tags( + ServiceName::public_tag_repository(), + TagAction::RescindPendFromRepository, + vec!["ark mage".into()], + ) + .await + .unwrap(); +} + +#[tokio::test] +async fn it_retrieves_content() { + let file = get_file().await; + let file = file.retrieve().await.unwrap(); + + 
assert!(file.bytes.len() > 0) // assuming it exists +} + +#[tokio::test] +async fn it_retrieves_metadata() { + let mut file = get_file().await; + assert!(file.dimensions().await.unwrap().is_some()); + assert!(file.stored_locally().await.unwrap()); + assert!(file.duration().await.unwrap().is_none()); +} diff --git a/tests/wrapper/test_hydrus.rs b/tests/wrapper/test_hydrus.rs new file mode 100644 index 0000000..794c347 --- /dev/null +++ b/tests/wrapper/test_hydrus.rs @@ -0,0 +1,62 @@ +use super::super::common; +use hydrus_api::api_core::adding_tags::TagAction; +use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation; +use hydrus_api::wrapper::service::{ServiceName, ServiceType}; +use hydrus_api::wrapper::url::UrlType; + +#[tokio::test] +async fn it_retrieves_version_info() { + let hydrus = common::get_hydrus(); + let version = hydrus.version().await.unwrap(); + assert!(version.hydrus > 0); + assert!(version.api > 0); +} + +#[tokio::test] +async fn it_retrieves_services() { + let hydrus = common::get_hydrus(); + let services = hydrus.services().await.unwrap(); + + // assuming hydrus is configured correctly + assert!(services.get_services(ServiceType::AllKnownFiles).len() > 0); + assert!(services.get_services(ServiceType::AllKnownTags).len() > 0); +} + +#[tokio::test] +async fn it_retrieves_url_information() { + let hydrus = common::get_hydrus(); + let url = hydrus + .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium") + .await + .unwrap(); + + assert_eq!(url.url_type, UrlType::Post) +} + +#[tokio::test] +async fn it_searches() { + let hydrus = common::get_hydrus(); + hydrus + .search( + FileSearchLocation::Archive, + vec!["character:megumin".into()], + ) + .await + .unwrap(); +} + +#[tokio::test] +async fn it_adds_tags() { + let hydrus = common::get_hydrus(); + hydrus + .tagging() + .add_tag( + ServiceName::my_tags(), + TagAction::AddToLocalService, + "summer".into(), + ) + 
.add_file("0000000000000000000000000000000000000000000000000000000000000000") + .run() + .await + .unwrap(); +} diff --git a/tests/wrapper/test_import.rs b/tests/wrapper/test_import.rs new file mode 100644 index 0000000..21a988e --- /dev/null +++ b/tests/wrapper/test_import.rs @@ -0,0 +1,50 @@ +use super::super::common; +use hydrus_api::wrapper::builders::import_builder::FileImport; +use hydrus_api::wrapper::page::PageIdentifier; +use hydrus_api::wrapper::service::ServiceName; +use hydrus_api::wrapper::tag::Tag; +use hydrus_api::wrapper::url::UrlType; + +#[tokio::test] +async fn it_imports_file_paths() { + let hydrus = common::get_hydrus(); + let result = hydrus + .import() + .file(FileImport::path("/does/not/exist/sadly")) + .run() + .await; + + assert!(result.is_err()) // file does not exist +} + +#[tokio::test] +async fn it_imports_binary_files() { + let hydrus = common::get_hydrus(); + let bytes = [0u8, 0u8, 0u8, 0u8]; + let result = hydrus + .import() + .file(FileImport::binary(&mut &bytes[..])) + .run() + .await; + + assert!(result.is_err()) // return status should be 4 +} + +#[tokio::test] +async fn it_imports_urls() { + let hydrus = common::get_hydrus(); + + let result = hydrus + .import() + .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium") + .page(PageIdentifier::name("Rusty Import")) + .show_page(true) + .add_additional_tag(ServiceName::my_tags(), Tag::from("ark mage")) + .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin")) + .run() + .await + .unwrap(); + + assert!(result.normalised_url.len() > 0); + assert_eq!(result.url_type, UrlType::Post) +} diff --git a/tests/wrapper/test_url.rs b/tests/wrapper/test_url.rs new file mode 100644 index 0000000..700bad5 --- /dev/null +++ b/tests/wrapper/test_url.rs @@ -0,0 +1,47 @@ +use super::super::common; +use hydrus_api::wrapper::page::PageIdentifier; +use hydrus_api::wrapper::service::ServiceName; +use hydrus_api::wrapper::tag::Tag; +use 
hydrus_api::wrapper::url::Url; + +async fn get_url() -> Url { + let hydrus = common::get_hydrus(); + hydrus + .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium") + .await + .unwrap() +} + +#[tokio::test] +async fn it_imports() { + let mut url = get_url().await; + + url.import() + .page(PageIdentifier::name("Rusty Import")) + .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin")) + .run() + .await + .unwrap(); +} + +#[tokio::test] +async fn it_associates() { + let mut url = get_url().await; + + url.associate(vec![ + "0000000000000000000000000000000000000000000000000000000000000000".to_string(), + ]) + .await + .unwrap(); +} + +#[tokio::test] +async fn it_disassociates() { + let mut url = get_url().await; + + url.disassociate(vec![ + "0000000000000000000000000000000000000000000000000000000000000000".to_string(), + ]) + .await + .unwrap(); +}