diff --git a/src/endpoints/adding_urls.rs b/src/endpoints/adding_urls.rs
index 5b3068b..9c6432c 100644
--- a/src/endpoints/adding_urls.rs
+++ b/src/endpoints/adding_urls.rs
@@ -16,7 +16,7 @@ pub struct GetUrlFilesResponse {
 
 #[derive(Clone, Debug, Deserialize)]
 pub struct UrlFileStatus {
-    pub status: u32,
+    pub status: u8,
     pub hash: String,
     pub note: String,
 }
diff --git a/src/models/builders/import_builder.rs b/src/models/builders/import_builder.rs
index dbf8ff6..66442c7 100644
--- a/src/models/builders/import_builder.rs
+++ b/src/models/builders/import_builder.rs
@@ -1,6 +1,5 @@
 use crate::endpoints::adding_files::{STATUS_IMPORT_FAILED, STATUS_IMPORT_VETOED};
 use crate::endpoints::adding_urls::AddUrlRequestBuilder;
-use crate::endpoints::common::FileIdentifier;
 use crate::error::{Error, Result};
 use crate::hydrus_file::HydrusFile;
 use crate::models::url::Url;
@@ -25,14 +24,7 @@ impl ImportBuilder {
     }
 
     pub fn url<S: ToString>(self, url: S) -> UrlImportBuilder {
-        UrlImportBuilder {
-            client: self.client,
-            url: url.to_string(),
-            page: None,
-            show_page: false,
-            filter_tags: vec![],
-            service_tag_mappings: Default::default(),
-        }
+        UrlImportBuilder::new(self.client.clone(), url)
     }
 }
 
@@ -58,15 +50,6 @@ pub struct FileImportBuilder {
     file: FileImport,
 }
 
-pub struct UrlImportBuilder {
-    client: Client,
-    url: String,
-    page: Option<PageIdentifier>,
-    show_page: bool,
-    filter_tags: Vec<Tag>,
-    service_tag_mappings: HashMap<ServiceName, Vec<Tag>>,
-}
-
 impl FileImportBuilder {
     pub async fn run(mut self) -> Result<HydrusFile> {
         let response = match self.file {
@@ -79,17 +62,38 @@ impl FileImportBuilder {
         } else if response.status == STATUS_IMPORT_VETOED {
             Err(Error::ImportVetoed(response.note))
         } else {
-            Ok(HydrusFile {
-                client: self.client,
-                id: FileIdentifier::Hash(response.hash),
-            })
+            Ok(HydrusFile::from_raw_status_and_hash(
+                self.client,
+                response.status,
+                response.hash,
+            ))
         }
     }
 }
 
+pub struct UrlImportBuilder {
+    client: Client,
+    url: String,
+    page: Option<PageIdentifier>,
+    show_page: bool,
+    filter_tags: Vec<Tag>,
+    service_tag_mappings: HashMap<ServiceName, Vec<Tag>>,
+}
+
 impl UrlImportBuilder {
+    pub fn new<S: ToString>(client: Client, url: S) -> Self {
+        Self {
+            client,
+            url: url.to_string(),
+            page: None,
+            show_page: false,
+            filter_tags: vec![],
+            service_tag_mappings: Default::default(),
+        }
+    }
+
     /// Sets the destination page of the import
-    pub fn set_page(mut self, page: PageIdentifier) -> Self {
+    pub fn page(mut self, page: PageIdentifier) -> Self {
         self.page = Some(page);
 
         self
@@ -134,7 +138,7 @@ impl UrlImportBuilder {
 
     /// Imports the URL
     pub async fn run(mut self) -> Result<Url> {
-        let mut request = AddUrlRequestBuilder::default().url(self.url.clone());
+        let mut request = AddUrlRequestBuilder::default().url(&self.url);
 
         for (service, tags) in self.service_tag_mappings {
             request = request.add_tags(service, tag_list_to_string_list(tags));
@@ -149,10 +153,15 @@ impl UrlImportBuilder {
         request = request.show_destination_page(self.show_page);
 
         let response = self.client.add_url(request.build()).await?;
+        let url_info = self.client.get_url_info(&self.url).await?;
 
         Ok(Url {
             url: self.url,
-            normalised_url: Some(response.normalised_url),
+            client: self.client,
+            normalised_url: response.normalised_url,
+            url_type: url_info.url_type.into(),
+            match_name: url_info.match_name,
+            can_parse: url_info.can_parse,
         })
     }
 }
diff --git a/src/models/hydrus.rs b/src/models/hydrus.rs
index 270380a..4522a37 100644
--- a/src/models/hydrus.rs
+++ b/src/models/hydrus.rs
@@ -1,5 +1,6 @@
 use crate::builders::import_builder::ImportBuilder;
 use crate::error::Result;
+use crate::models::url::Url;
 use crate::models::version::Version;
 use crate::service::Services;
 use crate::Client;
@@ -36,4 +37,19 @@ impl Hydrus {
             client: self.client.clone(),
         }
     }
+
+    /// Returns information about a given url in an object that allows
+    /// further operations with that url
+    pub async fn url<S: AsRef<str>>(&mut self, url: S) -> Result<Url> {
+        let info = self.client.get_url_info(&url).await?;
+
+        Ok(Url {
+            client: self.client.clone(),
+            normalised_url: info.normalised_url,
+            url_type: info.url_type.into(),
+            match_name: info.match_name,
+            url: url.as_ref().to_string(),
+            can_parse: info.can_parse,
+        })
+    }
 }
diff --git a/src/models/hydrus_file.rs b/src/models/hydrus_file.rs
index 92beef7..6c7af26 100644
--- a/src/models/hydrus_file.rs
+++ b/src/models/hydrus_file.rs
@@ -1,8 +1,40 @@
 use crate::endpoints::common::FileIdentifier;
 use crate::Client;
 
+#[derive(Clone, Debug, PartialOrd, PartialEq)]
+pub enum FileStatus {
+    ReadyForImport,
+    InDatabase,
+    Deleted,
+    Unknown,
+}
+
+impl Eq for FileStatus {}
+
 #[derive(Clone)]
 pub struct HydrusFile {
     pub(crate) client: Client,
     pub id: FileIdentifier,
+    pub status: FileStatus,
+}
+
+impl HydrusFile {
+    pub(crate) fn from_raw_status_and_hash<S: ToString>(
+        client: Client,
+        status: u8,
+        hash: S,
+    ) -> Self {
+        let status = if status == 3 {
+            FileStatus::Deleted
+        } else if status == 0 {
+            FileStatus::ReadyForImport
+        } else {
+            FileStatus::InDatabase
+        };
+        Self {
+            client,
+            id: FileIdentifier::Hash(hash.to_string()),
+            status,
+        }
+    }
 }
diff --git a/src/models/url.rs b/src/models/url.rs
index 027fcbf..71363d0 100644
--- a/src/models/url.rs
+++ b/src/models/url.rs
@@ -1,5 +1,61 @@
+use crate::builders::import_builder::UrlImportBuilder;
+use crate::endpoints::adding_urls::{
+    URL_TYPE_FILE, URL_TYPE_GALLERY, URL_TYPE_POST, URL_TYPE_WATCHABLE,
+};
+use crate::error::Result;
+use crate::hydrus_file::HydrusFile;
+use crate::Client;
+
+#[derive(Clone, Debug, PartialOrd, PartialEq)]
+pub enum UrlType {
+    Post,
+    File,
+    Gallery,
+    Watchable,
+    Unknown,
+}
+
+impl Eq for UrlType {}
+
+impl From<u8> for UrlType {
+    fn from(value: u8) -> Self {
+        match value {
+            v if v == URL_TYPE_POST => Self::Post,
+            v if v == URL_TYPE_FILE => Self::File,
+            v if v == URL_TYPE_GALLERY => Self::Gallery,
+            v if v == URL_TYPE_WATCHABLE => Self::Watchable,
+            _ => Self::Unknown,
+        }
+    }
+}
+
 #[derive(Clone)]
 pub struct Url {
     pub url: String,
-    pub normalised_url: Option<String>,
+    pub(crate) client: Client,
+    pub normalised_url: String,
+    pub url_type: UrlType,
+    pub match_name: String,
+    pub can_parse: bool,
+}
+
+impl Url {
+    /// Returns a list of files associated with the url
+    pub async fn files(&mut self) -> Result<Vec<HydrusFile>> {
+        let response = self.client.get_url_files(&self.url).await?;
+        let files = response
+            .url_file_statuses
+            .into_iter()
+            .map(|file| {
+                HydrusFile::from_raw_status_and_hash(self.client.clone(), file.status, file.hash)
+            })
+            .collect();
+
+        Ok(files)
+    }
+
+    /// Creates an import builder for the url
+    pub fn import(&mut self) -> UrlImportBuilder {
+        UrlImportBuilder::new(self.client.clone(), &self.url)
+    }
 }
diff --git a/tests/wrapper/mod.rs b/tests/wrapper/mod.rs
index f418e02..fe956cc 100644
--- a/tests/wrapper/mod.rs
+++ b/tests/wrapper/mod.rs
@@ -1,2 +1,3 @@
 mod test_hydrus;
 mod test_import;
+mod test_url;
diff --git a/tests/wrapper/test_hydrus.rs b/tests/wrapper/test_hydrus.rs
index 4f46687..f9ec065 100644
--- a/tests/wrapper/test_hydrus.rs
+++ b/tests/wrapper/test_hydrus.rs
@@ -1,5 +1,6 @@
 use super::super::common;
 use hydrus_api::service::ServiceType;
+use hydrus_api::url::UrlType;
 
 #[tokio::test]
 async fn it_retrieves_version_info() {
@@ -18,3 +19,14 @@ async fn it_retrieves_services() {
     assert!(services.get_services(ServiceType::AllKnownFiles).len() > 0);
     assert!(services.get_services(ServiceType::AllKnownTags).len() > 0);
 }
+
+#[tokio::test]
+async fn it_retrieves_url_information() {
+    let mut hydrus = common::get_hydrus();
+    let url = hydrus
+        .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
+        .await
+        .unwrap();
+
+    assert_eq!(url.url_type, UrlType::Post)
+}
diff --git a/tests/wrapper/test_import.rs b/tests/wrapper/test_import.rs
index 0a6bfbf..f1e2140 100644
--- a/tests/wrapper/test_import.rs
+++ b/tests/wrapper/test_import.rs
@@ -3,6 +3,7 @@ use hydrus_api::builders::import_builder::FileImport;
 use hydrus_api::page::PageIdentifier;
 use hydrus_api::service::ServiceName;
 use hydrus_api::tag::Tag;
+use hydrus_api::url::UrlType;
 
 #[tokio::test]
 async fn it_imports_file_paths() {
@@ -36,7 +37,7 @@ async fn it_imports_urls() {
     let result = hydrus
         .import()
         .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
-        .set_page(PageIdentifier::name("Rusty Import"))
+        .page(PageIdentifier::name("Rusty Import"))
         .show_page(true)
         .add_additional_tag(ServiceName::my_tags(), Tag::from("ark mage"))
         .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin"))
@@ -44,5 +45,6 @@ async fn it_imports_urls() {
         .await
         .unwrap();
 
-    assert!(result.normalised_url.is_some()) // because it's returned by the import
+    assert!(result.normalised_url.len() > 0);
+    assert_eq!(result.url_type, UrlType::Post)
 }
diff --git a/tests/wrapper/test_url.rs b/tests/wrapper/test_url.rs
new file mode 100644
index 0000000..89122f4
--- /dev/null
+++ b/tests/wrapper/test_url.rs
@@ -0,0 +1,25 @@
+use super::super::common;
+use hydrus_api::page::PageIdentifier;
+use hydrus_api::service::ServiceName;
+use hydrus_api::tag::Tag;
+use hydrus_api::url::Url;
+
+async fn get_url() -> Url {
+    let mut hydrus = common::get_hydrus();
+    hydrus
+        .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
+        .await
+        .unwrap()
+}
+
+#[tokio::test]
+async fn it_imports() {
+    let mut url = get_url().await;
+
+    url.import()
+        .page(PageIdentifier::name("Rusty Import"))
+        .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin"))
+        .run()
+        .await
+        .unwrap();
+}
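
For reference, a minimal usage sketch of the surface this change introduces (Hydrus::url, Url::import, Url::files and the UrlType/FileStatus enums). It is not part of the change set: the re-export paths hydrus_api::Hydrus and hydrus_api::error::Result and the helper name import_if_post are assumptions for illustration, while the builder calls mirror the updated tests above.

// Sketch only: `hydrus_api::Hydrus` and `hydrus_api::error::Result` are assumed
// re-export paths, and `import_if_post` is a hypothetical helper.
use hydrus_api::page::PageIdentifier;
use hydrus_api::service::ServiceName;
use hydrus_api::tag::Tag;
use hydrus_api::url::UrlType;
use hydrus_api::Hydrus;

async fn import_if_post(hydrus: &mut Hydrus, target: &str) -> hydrus_api::error::Result<()> {
    // Look the URL up first; the returned `Url` now carries its type and match name.
    let mut url = hydrus.url(target).await?;

    if url.url_type == UrlType::Post {
        // Same builder that `ImportBuilder::url` delegates to via `UrlImportBuilder::new`.
        url.import()
            .page(PageIdentifier::name("Rusty Import"))
            .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin"))
            .run()
            .await?;
    }

    // Files already known for this URL come back as `HydrusFile`s whose raw
    // status codes have been mapped to the new `FileStatus` enum.
    for file in url.files().await? {
        println!("{:?}", file.status);
    }

    Ok(())
}

As in tests/wrapper/test_url.rs, ServiceName::my_tags() is used as the target tag service.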