diff --git a/src/endpoints/adding_files.rs b/src/endpoints/adding_files.rs
index 2ff9892..09da721 100644
--- a/src/endpoints/adding_files.rs
+++ b/src/endpoints/adding_files.rs
@@ -1,6 +1,12 @@
 use crate::endpoints::common::BasicHashList;
 use crate::endpoints::Endpoint;
 
+pub static STATUS_IMPORT_SUCCESS: u8 = 1;
+pub static STATUS_IMPORT_ALREADY_EXISTS: u8 = 2;
+pub static STATUS_IMPORT_PREVIOUSLY_DELETED: u8 = 3;
+pub static STATUS_IMPORT_FAILED: u8 = 4;
+pub static STATUS_IMPORT_VETOED: u8 = 5;
+
 #[derive(Debug, Clone, Serialize)]
 pub struct AddFileRequest {
     pub path: String,
diff --git a/src/endpoints/common.rs b/src/endpoints/common.rs
index b4e5862..f3d368d 100644
--- a/src/endpoints/common.rs
+++ b/src/endpoints/common.rs
@@ -32,11 +32,13 @@ pub struct FileMetadataInfo {
     pub service_names_to_statuses_to_display_tags: HashMap<String, HashMap<String, Vec<String>>>,
 }
 
+#[derive(Clone)]
 pub enum FileIdentifier {
     ID(u64),
     Hash(String),
 }
 
+#[derive(Clone)]
 pub struct FileRecord {
     pub bytes: Vec<u8>,
     pub mime_type: String,
diff --git a/src/error.rs b/src/error.rs
index a0cb93f..24010d0 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -8,6 +8,8 @@ pub enum Error {
     Reqwest(reqwest::Error),
     Hydrus(String),
     InvalidServiceType(String),
+    ImportVetoed(String),
+    ImportFailed(String),
 }
 
 impl fmt::Display for Error {
@@ -18,6 +20,8 @@ impl fmt::Display for Error {
             Self::InvalidServiceType(service_type) => {
                 write!(f, "Invalid Service Type '{}'", service_type)
             }
+            Self::ImportFailed(msg) => write!(f, "File import failed: {}", msg),
+            Self::ImportVetoed(msg) => write!(f, "File import vetoed: {}", msg),
         }
     }
 }
@@ -28,6 +32,8 @@ impl StdError for Error {
             Self::Reqwest(e) => e.source(),
             Self::Hydrus(_) => None,
             Self::InvalidServiceType(_) => None,
+            Self::ImportVetoed(_) => None,
+            Self::ImportFailed(_) => None,
         }
     }
 }
diff --git a/src/models/builders/import_builder.rs b/src/models/builders/import_builder.rs
new file mode 100644
index 0000000..dbf8ff6
--- /dev/null
+++ b/src/models/builders/import_builder.rs
@@ -0,0 +1,158 @@
+use crate::endpoints::adding_files::{STATUS_IMPORT_FAILED, STATUS_IMPORT_VETOED};
+use crate::endpoints::adding_urls::AddUrlRequestBuilder;
+use crate::endpoints::common::FileIdentifier;
+use crate::error::{Error, Result};
+use crate::hydrus_file::HydrusFile;
+use crate::models::url::Url;
+use crate::page::PageIdentifier;
+use crate::service::ServiceName;
+use crate::tag::Tag;
+use crate::utils::tag_list_to_string_list;
+use crate::Client;
+use std::collections::HashMap;
+use std::io::Read;
+
+pub struct ImportBuilder {
+    pub(crate) client: Client,
+}
+
+impl ImportBuilder {
+    pub fn file(self, file: FileImport) -> FileImportBuilder {
+        FileImportBuilder {
+            client: self.client,
+            file,
+        }
+    }
+
+    pub fn url<S: ToString>(self, url: S) -> UrlImportBuilder {
+        UrlImportBuilder {
+            client: self.client,
+            url: url.to_string(),
+            page: None,
+            show_page: false,
+            filter_tags: vec![],
+            service_tag_mappings: Default::default(),
+        }
+    }
+}
+
+pub enum FileImport {
+    Path(String),
+    Binary(Vec<u8>),
+}
+
+impl FileImport {
+    pub fn path<S: ToString>(path: S) -> Self {
+        Self::Path(path.to_string())
+    }
+
+    pub fn binary<R: Read>(reader: &mut R) -> Self {
+        let mut bytes = Vec::new();
+        let _ = reader.read_to_end(&mut bytes);
+        Self::Binary(bytes)
+    }
+}
+
+pub struct FileImportBuilder {
+    client: Client,
+    file: FileImport,
+}
+
+pub struct UrlImportBuilder {
+    client: Client,
+    url: String,
+    page: Option<PageIdentifier>,
+    show_page: bool,
+    filter_tags: Vec<Tag>,
+    service_tag_mappings: HashMap<String, Vec<Tag>>,
+}
+
+impl FileImportBuilder {
+    pub async fn run(mut self) -> Result<HydrusFile> {
+        let response = match self.file {
+            FileImport::Path(path) => self.client.add_file(path).await?,
+            FileImport::Binary(b) => self.client.add_binary_file(b).await?,
+        };
+
+        if response.status == STATUS_IMPORT_FAILED {
+            Err(Error::ImportFailed(response.note))
+        } else if response.status == STATUS_IMPORT_VETOED {
+            Err(Error::ImportVetoed(response.note))
+        } else {
+            Ok(HydrusFile {
+                client: self.client,
+                id: FileIdentifier::Hash(response.hash),
+            })
+        }
+    }
+}
+
+impl UrlImportBuilder {
+    /// Sets the destination page of the import
+    pub fn set_page(mut self, page: PageIdentifier) -> Self {
+        self.page = Some(page);
+
+        self
+    }
+
+    /// If the destination page of the import should be focussed
+    pub fn show_page(mut self, show: bool) -> Self {
+        self.show_page = show;
+
+        self
+    }
+
+    /// Adds a tag that should be filtered
+    pub fn add_filter_tag(mut self, tag: Tag) -> Self {
+        self.filter_tags.push(tag);
+
+        self
+    }
+
+    /// Adds multiple tags that should be filtered
+    pub fn add_filter_tags(mut self, mut tags: Vec<Tag>) -> Self {
+        self.filter_tags.append(&mut tags);
+
+        self
+    }
+
+    /// Adds an additional tag for the imported file
+    pub fn add_additional_tag(self, service: ServiceName, tag: Tag) -> Self {
+        self.add_additional_tags(service, vec![tag])
+    }
+
+    /// Adds multiple additional tags for the import
+    pub fn add_additional_tags(mut self, service: ServiceName, mut tags: Vec<Tag>) -> Self {
+        if let Some(service_tags) = self.service_tag_mappings.get_mut(&service.0) {
+            service_tags.append(&mut tags);
+        } else {
+            self.service_tag_mappings.insert(service.0, tags);
+        }
+
+        self
+    }
+
+    /// Imports the URL
+    pub async fn run(mut self) -> Result<Url> {
+        let mut request = AddUrlRequestBuilder::default().url(self.url.clone());
+
+        for (service, tags) in self.service_tag_mappings {
+            request = request.add_tags(service, tag_list_to_string_list(tags));
+        }
+        request = request.add_filter_tags(tag_list_to_string_list(self.filter_tags));
+        if let Some(page) = self.page {
+            request = match page {
+                PageIdentifier::Name(n) => request.destination_page_name(n),
+                PageIdentifier::Key(k) => request.destination_page_key(k),
+            };
+        }
+        request = request.show_destination_page(self.show_page);
+
+        let response = self.client.add_url(request.build()).await?;
+
+        Ok(Url {
+            url: self.url,
+            normalised_url: Some(response.normalised_url),
+        })
+    }
+}
diff --git a/src/models/builders/mod.rs b/src/models/builders/mod.rs
index e69de29..8645e63 100644
--- a/src/models/builders/mod.rs
+++ b/src/models/builders/mod.rs
@@ -0,0 +1 @@
+pub mod import_builder;
diff --git a/src/models/hydrus.rs b/src/models/hydrus.rs
index 19137ee..270380a 100644
--- a/src/models/hydrus.rs
+++ b/src/models/hydrus.rs
@@ -1,3 +1,4 @@
+use crate::builders::import_builder::ImportBuilder;
 use crate::error::Result;
 use crate::models::version::Version;
 use crate::service::Services;
@@ -28,4 +29,11 @@ impl Hydrus {
 
         Ok(Services::from_response(self.client.clone(), response))
     }
+
+    /// Creates an import builder to build an import request to hydrus
+    pub fn import(&mut self) -> ImportBuilder {
+        ImportBuilder {
+            client: self.client.clone(),
+        }
+    }
 }
diff --git a/src/models/hydrus_file.rs b/src/models/hydrus_file.rs
new file mode 100644
index 0000000..92beef7
--- /dev/null
+++ b/src/models/hydrus_file.rs
@@ -0,0 +1,8 @@
+use crate::endpoints::common::FileIdentifier;
+use crate::Client;
+
+#[derive(Clone)]
+pub struct HydrusFile {
+    pub(crate) client: Client,
+    pub id: FileIdentifier,
+}
diff --git a/src/models/mod.rs b/src/models/mod.rs
index 7a1c167..1b67080 100644
--- a/src/models/mod.rs
+++ b/src/models/mod.rs
@@ -1,4 +1,8 @@
 pub mod builders;
 pub mod hydrus;
+pub mod hydrus_file;
+pub mod page;
 pub mod service;
+pub mod tag;
+pub mod url;
 pub mod version;
diff --git a/src/models/page.rs b/src/models/page.rs
new file mode 100644
index 0000000..de4e914
--- /dev/null
+++ b/src/models/page.rs
@@ -0,0 +1,20 @@
+#[derive(Clone)]
+pub struct HydrusPage {
+    pub id: PageIdentifier,
+}
+
+#[derive(Clone)]
+pub enum PageIdentifier {
+    Name(String),
+    Key(String),
+}
+
+impl PageIdentifier {
+    pub fn name<S: ToString>(name: S) -> Self {
+        Self::Name(name.to_string())
+    }
+
+    pub fn key<S: ToString>(key: S) -> Self {
+        Self::Key(key.to_string())
+    }
+}
diff --git a/src/models/service.rs b/src/models/service.rs
index a0f36d3..d39df71 100644
--- a/src/models/service.rs
+++ b/src/models/service.rs
@@ -56,10 +56,39 @@ impl ToString for ServiceType {
     }
 }
 
+#[derive(Clone)]
+pub struct ServiceName(pub String);
+
+impl ServiceName {
+    pub fn my_tags() -> Self {
+        Self(String::from("my tags"))
+    }
+
+    pub fn my_files() -> Self {
+        Self(String::from("my files"))
+    }
+
+    pub fn public_tag_repository() -> Self {
+        Self(String::from("public tag repository"))
+    }
+
+    pub fn all_local_files() -> Self {
+        Self(String::from("all local files"))
+    }
+
+    pub fn all_known_tags() -> Self {
+        Self(String::from("all known tags"))
+    }
+
+    pub fn all_known_files() -> Self {
+        Self(String::from("all known files"))
+    }
+}
+
 #[derive(Clone)]
 pub struct Service {
     client: Client,
-    pub name: String,
+    pub name: ServiceName,
     pub key: String,
     pub service_type: ServiceType,
 }
@@ -70,6 +99,7 @@ pub struct Services {
 }
 
 impl Services {
+    /// Creates the services list from a given hydrus response
    pub fn from_response(client: Client, response: GetServicesResponse) -> Self {
         let mut response = response.0;
         let mut mapped_types = HashMap::with_capacity(response.keys().len());
@@ -83,7 +113,7 @@ impl Services {
         for basic_service in basic_services {
             service_list.push(Service {
                 service_type: mapped_type.clone(),
-                name: basic_service.name,
+                name: ServiceName(basic_service.name),
                 key: basic_service.service_key,
                 client: client.clone(),
             })
diff --git a/src/models/tag.rs b/src/models/tag.rs
new file mode 100644
index 0000000..85ab7d1
--- /dev/null
+++ b/src/models/tag.rs
@@ -0,0 +1,35 @@
+#[derive(Clone, Debug)]
+pub struct Tag {
+    pub name: String,
+    pub namespace: Option<String>,
+}
+
+impl<S> From<S> for Tag
+where
+    S: AsRef<str>,
+{
+    fn from(value: S) -> Self {
+        let value = value.as_ref();
+        if let Some((namespace, tag)) = value.split_once(":") {
+            Self {
+                namespace: Some(namespace.to_string()),
+                name: tag.to_string(),
+            }
+        } else {
+            Self {
+                name: value.to_string(),
+                namespace: None,
+            }
+        }
+    }
+}
+
+impl ToString for Tag {
+    fn to_string(&self) -> String {
+        if let Some(namespace) = &self.namespace {
+            format!("{}:{}", namespace, self.name)
+        } else {
+            self.name.clone()
+        }
+    }
+}
diff --git a/src/models/url.rs b/src/models/url.rs
new file mode 100644
index 0000000..027fcbf
--- /dev/null
+++ b/src/models/url.rs
@@ -0,0 +1,5 @@
+#[derive(Clone)]
+pub struct Url {
+    pub url: String,
+    pub normalised_url: Option<String>,
+}
diff --git a/src/utils.rs b/src/utils.rs
index da0e527..b06ba26 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,3 +1,5 @@
+use crate::models::tag::Tag;
+
 pub fn string_list_to_json_array(l: Vec<String>) -> String {
     format!("[\"{}\"]", l.join("\",\""))
 }
@@ -12,3 +14,8 @@ pub fn number_list_to_json_array(l: Vec<u64>) -> String {
         ))
     )
 }
+
+/// Converts a list of tags into a list of string tags
+pub fn tag_list_to_string_list(tags: Vec<Tag>) -> Vec<String> {
+    tags.into_iter().map(|t| t.to_string()).collect()
+}
diff --git a/tests/wrapper/mod.rs b/tests/wrapper/mod.rs
index bcb74a6..f418e02 100644
--- a/tests/wrapper/mod.rs
+++ b/tests/wrapper/mod.rs
@@ -1 +1,2 @@
 mod test_hydrus;
+mod test_import;
diff --git a/tests/wrapper/test_import.rs b/tests/wrapper/test_import.rs
new file mode 100644
index 0000000..0a6bfbf
--- /dev/null
+++ b/tests/wrapper/test_import.rs
@@ -0,0 +1,48 @@
+use super::super::common;
+use hydrus_api::builders::import_builder::FileImport;
+use hydrus_api::page::PageIdentifier;
+use hydrus_api::service::ServiceName;
+use hydrus_api::tag::Tag;
+
+#[tokio::test]
+async fn it_imports_file_paths() {
+    let mut hydrus = common::get_hydrus();
+    let result = hydrus
+        .import()
+        .file(FileImport::path("/does/not/exist/sadly"))
+        .run()
+        .await;
+
+    assert!(result.is_err()) // file does not exist
+}
+
+#[tokio::test]
+async fn it_imports_binary_files() {
+    let mut hydrus = common::get_hydrus();
+    let bytes = [0u8, 0u8, 0u8, 0u8];
+    let result = hydrus
+        .import()
+        .file(FileImport::binary(&mut &bytes[..]))
+        .run()
+        .await;
+
+    assert!(result.is_err()) // return status should be 4
+}
+
+#[tokio::test]
+async fn it_imports_urls() {
+    let mut hydrus = common::get_hydrus();
+
+    let result = hydrus
+        .import()
+        .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
+        .set_page(PageIdentifier::name("Rusty Import"))
+        .show_page(true)
+        .add_additional_tag(ServiceName::my_tags(), Tag::from("ark mage"))
+        .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin"))
+        .run()
+        .await
+        .unwrap();
+
+    assert!(result.normalised_url.is_some()) // because it's returned by the import
+}
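
Below is a minimal usage sketch of the import API this patch adds, mirroring the calls exercised in tests/wrapper/test_import.rs. It assumes `Hydrus` is re-exported at the crate root and that the `error` module (with its `Result` alias) is public; neither re-export appears in this diff, and the tests only obtain a handle through a `common::get_hydrus()` helper that is also not shown. The file path is a placeholder.

use hydrus_api::builders::import_builder::FileImport;
use hydrus_api::page::PageIdentifier;
use hydrus_api::service::ServiceName;
use hydrus_api::tag::Tag;
use hydrus_api::Hydrus; // assumed re-export; not part of this diff

// Drives both import flows against an already-constructed Hydrus handle.
async fn import_examples(mut hydrus: Hydrus) -> hydrus_api::error::Result<()> {
    // File import: a returned status of 4 or 5 surfaces as Error::ImportFailed
    // or Error::ImportVetoed; success yields a HydrusFile identified by hash.
    let _file = hydrus
        .import()
        .file(FileImport::path("/path/to/some/file.png")) // placeholder path
        .run()
        .await?;

    // URL import: filter tags, per-service tag mappings and the destination
    // page are accumulated on the builder and sent as a single add_url request.
    let url = hydrus
        .import()
        .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
        .set_page(PageIdentifier::name("Rusty Import"))
        .show_page(true)
        .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin"))
        .run()
        .await?;

    // The normalised URL comes back from the add_url response.
    assert!(url.normalised_url.is_some());
    Ok(())
}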