diff --git a/.github/workflows/public.yml b/.github/workflows/public.yml index 53c5e7f..1902572 100644 --- a/.github/workflows/public.yml +++ b/.github/workflows/public.yml @@ -20,5 +20,5 @@ jobs: CRATES_IO_TOKEN: ${{secrets.CRATES_IO_TOKEN}} run: cargo login "$CRATES_IO_TOKEN" - - name: Publish to crates.io - run: cargo publish --all-features \ No newline at end of file + - name: Publish json to crates.io + run: cargo publish \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index 326b368..bcad5e6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hydrus-api" -version = "0.8.0" +version = "0.9.0" authors = ["trivernis "] edition = "2018" license = "Apache-2.0" diff --git a/src/api_core/client.rs b/src/api_core/client.rs index fe616c1..8c08187 100644 --- a/src/api_core/client.rs +++ b/src/api_core/client.rs @@ -1,32 +1,38 @@ -use crate::api_core::access_management::{ +use crate::api_core::common::{ + FileIdentifier, FileRecord, FileSelection, FileServiceSelection, OptionalStringNumber, +}; +use crate::api_core::endpoints::access_management::{ ApiVersion, ApiVersionResponse, GetServices, GetServicesResponse, SessionKey, SessionKeyResponse, VerifyAccessKey, VerifyAccessKeyResponse, }; -use crate::api_core::adding_files::{ +use crate::api_core::endpoints::adding_files::{ AddFile, AddFileRequest, AddFileResponse, ArchiveFiles, ArchiveFilesRequest, DeleteFiles, DeleteFilesRequest, UnarchiveFiles, UnarchiveFilesRequest, UndeleteFiles, UndeleteFilesRequest, }; -use crate::api_core::adding_notes::{DeleteNotes, DeleteNotesRequest, SetNotes, SetNotesRequest}; -use crate::api_core::adding_tags::{AddTags, AddTagsRequest, CleanTags, CleanTagsResponse}; -use crate::api_core::adding_urls::{ +use crate::api_core::endpoints::adding_notes::{ + DeleteNotes, DeleteNotesRequest, SetNotes, SetNotesRequest, +}; +use crate::api_core::endpoints::adding_tags::{ + AddTags, AddTagsRequest, CleanTags, CleanTagsResponse, +}; +use crate::api_core::endpoints::adding_urls::{ AddUrl, AddUrlRequest, AddUrlResponse, AssociateUrl, AssociateUrlRequest, GetUrlFiles, GetUrlFilesResponse, GetUrlInfo, GetUrlInfoResponse, }; -use crate::api_core::client_builder::ClientBuilder; -use crate::api_core::common::{FileIdentifier, FileMetadataInfo, FileRecord, OptionalStringNumber}; -use crate::api_core::managing_cookies_and_http_headers::{ +use crate::api_core::endpoints::client_builder::ClientBuilder; +use crate::api_core::endpoints::managing_cookies_and_http_headers::{ GetCookies, GetCookiesResponse, SetCookies, SetCookiesRequest, SetUserAgent, SetUserAgentRequest, }; -use crate::api_core::managing_pages::{ +use crate::api_core::endpoints::managing_pages::{ AddFiles, AddFilesRequest, FocusPage, FocusPageRequest, GetPageInfo, GetPageInfoResponse, GetPages, GetPagesResponse, }; -use crate::api_core::searching_and_fetching_files::{ - FileMetadata, FileMetadataResponse, FileSearchOptions, GetFile, SearchFileHashes, - SearchFileHashesResponse, SearchFiles, SearchFilesResponse, SearchQueryEntry, +use crate::api_core::endpoints::searching_and_fetching_files::{ + FileMetadata, FileMetadataResponse, FileMetadataType, FileSearchOptions, GetFile, + SearchFileHashes, SearchFileHashesResponse, SearchFiles, SearchFilesResponse, SearchQueryEntry, }; -use crate::api_core::Endpoint; +use crate::api_core::endpoints::Endpoint; use crate::error::{Error, Result}; use bytes::Buf; use reqwest::Response; @@ -106,36 +112,66 @@ impl Client { /// Moves files with matching hashes to the trash #[tracing::instrument(skip(self), 
level = "debug")] - pub async fn delete_files(&self, hashes: Vec) -> Result<()> { - self.post::(DeleteFilesRequest { hashes }) - .await?; + pub async fn delete_files( + &self, + files: FileSelection, + service: FileServiceSelection, + reason: Option, + ) -> Result<()> { + self.post::(DeleteFilesRequest { + file_selection: files, + service_selection: service, + reason, + }) + .await?; Ok(()) } /// Pulls files out of the trash by hash #[tracing::instrument(skip(self), level = "debug")] - pub async fn undelete_files(&self, hashes: Vec) -> Result<()> { - self.post::(UndeleteFilesRequest { hashes }) - .await?; + pub async fn undelete_files( + &self, + files: FileSelection, + service: FileServiceSelection, + ) -> Result<()> { + self.post::(UndeleteFilesRequest { + file_selection: files, + service_selection: service, + }) + .await?; Ok(()) } /// Moves files from the inbox into the archive #[tracing::instrument(skip(self), level = "debug")] - pub async fn archive_files(&self, hashes: Vec) -> Result<()> { - self.post::(ArchiveFilesRequest { hashes }) - .await?; + pub async fn archive_files( + &self, + files: FileSelection, + service: FileServiceSelection, + ) -> Result<()> { + self.post::(ArchiveFilesRequest { + file_selection: files, + service_selection: service, + }) + .await?; Ok(()) } /// Moves files from the archive into the inbox #[tracing::instrument(skip(self), level = "debug")] - pub async fn unarchive_files(&self, hashes: Vec) -> Result<()> { - self.post::(UnarchiveFilesRequest { hashes }) - .await?; + pub async fn unarchive_files( + &self, + files: FileSelection, + service: FileServiceSelection, + ) -> Result<()> { + self.post::(UnarchiveFilesRequest { + file_selection: files, + service_selection: service, + }) + .await?; Ok(()) } @@ -180,36 +216,47 @@ impl Client { ) -> Result { let mut args = options.into_query_args(); args.push(("tags", Self::serialize_query_object(query)?)); - args.push(("return_hashes", String::from("true"))); + args.push(("return_hashes", Self::serialize_query_object(true)?)); self.get_and_parse::(&args) .await } /// Returns the metadata for a given list of file_ids or hashes #[tracing::instrument(skip(self), level = "debug")] - pub async fn get_file_metadata( + pub async fn get_file_metadata( &self, file_ids: Vec, hashes: Vec, - ) -> Result { - let query = if file_ids.len() > 0 { + ) -> Result> { + let id_query = if file_ids.len() > 0 { ("file_ids", Self::serialize_query_object(file_ids)?) } else { ("hashes", Self::serialize_query_object(hashes)?) 
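// Usage sketch (aside, not part of the patch): the deletion endpoints now take a
// FileSelection and a FileServiceSelection instead of a bare hash list, as exercised by the
// integration tests further down in this diff. The hash and the reason are placeholders.
use hydrus_api::api_core::common::FileSelection;
use hydrus_api::wrapper::service::ServiceName;
use hydrus_api::Client;

async fn delete_files_example(client: &Client) {
    client
        .delete_files(
            FileSelection::by_hash(
                "0000000000000000000000000000000000000000000000000000000000000000",
            ),
            ServiceName::my_files().into(),
            Some("accidental import".to_string()),
        )
        .await
        .unwrap();
}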
}; - self.get_and_parse::(&[query]) + let query = [ + id_query, + ( + "only_return_identifiers", + Self::serialize_query_object(M::only_identifiers())?, + ), + ( + "only_return_basic_information", + Self::serialize_query_object(M::only_basic_information())?, + ), + ]; + self.get_and_parse::, [(&str, String)]>(&query) .await } /// Returns the metadata for a single file identifier #[tracing::instrument(skip(self), level = "debug")] - pub async fn get_file_metadata_by_identifier( + pub async fn get_file_metadata_by_identifier( &self, id: FileIdentifier, - ) -> Result { + ) -> Result { let mut response = match id.clone() { - FileIdentifier::ID(id) => self.get_file_metadata(vec![id], vec![]).await?, - FileIdentifier::Hash(hash) => self.get_file_metadata(vec![], vec![hash]).await?, + FileIdentifier::ID(id) => self.get_file_metadata::(vec![id], vec![]).await?, + FileIdentifier::Hash(hash) => self.get_file_metadata::(vec![], vec![hash]).await?, }; response @@ -432,11 +479,13 @@ impl Client { fn serialize_query_object(obj: S) -> Result { #[cfg(feature = "json")] { + tracing::trace!("Serializing query to JSON"); serde_json::ser::to_string(&obj).map_err(|e| Error::Serialization(e.to_string())) } #[cfg(feature = "cbor")] { + tracing::trace!("Serializing query to CBOR"); let mut buf = Vec::new(); ciborium::ser::into_writer(&obj, &mut buf) .map_err(|e| Error::Serialization(e.to_string()))?; @@ -471,11 +520,19 @@ impl Client { #[tracing::instrument(skip(body), level = "trace")] fn serialize_body(body: S) -> Result> { let mut buf = Vec::new(); - #[cfg(feature = "cbor")] - ciborium::ser::into_writer(&body, &mut buf) - .map_err(|e| Error::Serialization(e.to_string()))?; + #[cfg(feature = "json")] - serde_json::to_writer(&mut buf, &body).map_err(|e| Error::Serialization(e.to_string()))?; + { + tracing::trace!("Serializing body to JSON"); + serde_json::to_writer(&mut buf, &body) + .map_err(|e| Error::Serialization(e.to_string()))?; + } + #[cfg(feature = "cbor")] + { + tracing::trace!("Serializing body to CBOR"); + ciborium::ser::into_writer(&body, &mut buf) + .map_err(|e| Error::Serialization(e.to_string()))?; + } Ok(buf) } @@ -525,11 +582,16 @@ impl Client { let bytes = response.bytes().await?; let reader = bytes.reader(); #[cfg(feature = "json")] - let content = serde_json::from_reader::<_, T>(reader) - .map_err(|e| Error::Deserialization(e.to_string()))?; + let content = { + tracing::trace!("Deserializing content from JSON"); + serde_json::from_reader::<_, T>(reader) + .map_err(|e| Error::Deserialization(e.to_string()))? + }; #[cfg(feature = "cbor")] - let content = - ciborium::de::from_reader(reader).map_err(|e| Error::Deserialization(e.to_string()))?; + let content = { + tracing::trace!("Deserializing content from CBOR"); + ciborium::de::from_reader(reader).map_err(|e| Error::Deserialization(e.to_string()))? 
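// Usage sketch (aside, not part of the patch): get_file_metadata is now generic over a
// FileMetadataType marker, letting the caller choose between full records, basic records,
// or bare identifiers. Mirrors the updated tests later in this diff; the file id is a placeholder.
use hydrus_api::api_core::endpoints::searching_and_fetching_files::{
    BasicMetadata, FullMetadata, Identifiers,
};
use hydrus_api::Client;

async fn metadata_example(client: &Client) {
    let _full = client.get_file_metadata::<FullMetadata>(vec![1], vec![]).await.unwrap();
    let _basic = client.get_file_metadata::<BasicMetadata>(vec![1], vec![]).await.unwrap();
    let _ids = client.get_file_metadata::<Identifiers>(vec![1], vec![]).await.unwrap();
}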
+ }; tracing::trace!("response content: {:?}", content); Ok(content) diff --git a/src/api_core/common.rs b/src/api_core/common.rs index 0d8937d..4a227d5 100644 --- a/src/api_core/common.rs +++ b/src/api_core/common.rs @@ -1,3 +1,5 @@ +use crate::wrapper::service::ServiceName; +use serde::Serialize; use std::collections::HashMap; #[derive(Debug, Clone, Serialize, Deserialize)] @@ -43,33 +45,6 @@ pub struct BasicHashList { pub hashes: Vec, } -#[derive(Clone, Debug, Default, Deserialize)] -pub struct FileMetadataInfo { - pub file_id: u64, - pub hash: String, - pub size: Option, - pub mime: String, - pub ext: String, - pub width: Option, - pub height: Option, - pub duration: Option, - pub time_modified: Option, - pub file_services: FileMetadataServices, - pub has_audio: Option, - pub num_frames: Option, - pub num_words: Option, - pub is_inbox: bool, - pub is_local: bool, - pub is_trashed: bool, - pub known_urls: Vec, - #[deprecated] - pub service_names_to_statuses_to_tags: HashMap>>, - pub service_keys_to_statuses_to_tags: HashMap>>, - #[deprecated] - pub service_names_to_statuses_to_display_tags: HashMap>>, - pub service_keys_to_statuses_to_display_tags: HashMap>>, -} - #[derive(Clone, Debug)] pub enum FileIdentifier { ID(u64), @@ -98,6 +73,119 @@ impl FileIdentifier { } } +/// A generic selection for one or multiple files +#[derive(Clone, Debug, Serialize, Default)] +pub struct FileSelection { + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) hash: Option, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub(crate) hashes: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) file_id: Option, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub(crate) file_ids: Vec, +} + +impl From for FileSelection { + fn from(id: FileIdentifier) -> Self { + let mut selection = Self::default(); + match id { + FileIdentifier::ID(id) => selection.file_id = Some(id), + FileIdentifier::Hash(hash) => selection.hash = Some(hash), + } + selection + } +} + +impl FileSelection { + /// Creates a new single hash file selection + pub fn by_hash(hash: S) -> Self { + Self { + hash: Some(hash.to_string()), + ..Default::default() + } + } + + /// Creates a new file selection with a single file id + pub fn by_file_id(file_id: u64) -> Self { + Self { + file_id: Some(file_id), + ..Default::default() + } + } + + /// Creates a new file selection with several hashes + pub fn by_hashes(mut hashes: Vec) -> Self { + if hashes.len() == 1 { + Self::by_hash(hashes.pop().unwrap()) + } else { + Self { + hashes, + ..Default::default() + } + } + } + + /// Creates a new file selection with several IDs + pub fn by_file_ids(mut file_ids: Vec) -> Self { + if file_ids.len() == 1 { + Self::by_file_id(file_ids.pop().unwrap()) + } else { + Self { + file_ids, + ..Default::default() + } + } + } +} + +/// A selection for a single file service +#[derive(Clone, Debug, Serialize, Default)] +pub struct FileServiceSelection { + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) file_service_name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) file_service_key: Option, +} + +impl FileServiceSelection { + /// Creates a new file service selection by name + pub fn by_name(name: S) -> Self { + Self { + file_service_name: Some(name.to_string()), + ..Default::default() + } + } + + /// Creates a new file service selection by service key + pub fn by_key(key: S) -> Self { + Self { + file_service_key: Some(key.to_string()), + ..Default::default() + } + } + + /// Selects no service + pub fn 
none() -> Self { + Self::default() + } +} + +impl From for FileServiceSelection { + fn from(id: ServiceIdentifier) -> Self { + match id { + ServiceIdentifier::Name(n) => Self::by_name(n), + ServiceIdentifier::Key(k) => Self::by_key(k), + } + } +} + +impl From for FileServiceSelection { + fn from(name: ServiceName) -> Self { + Self::by_name(name) + } +} + #[derive(Clone)] pub struct FileRecord { pub bytes: Vec, diff --git a/src/api_core/access_management.rs b/src/api_core/endpoints/access_management.rs similarity index 97% rename from src/api_core/access_management.rs rename to src/api_core/endpoints/access_management.rs index 8c5ff11..1e2f275 100644 --- a/src/api_core/access_management.rs +++ b/src/api_core/endpoints/access_management.rs @@ -1,5 +1,5 @@ use crate::api_core::common::BasicServiceInfo; -use crate::api_core::Endpoint; +use crate::api_core::endpoints::Endpoint; use std::collections::HashMap; pub static SERVICE_TYPE_LOCAL_TAGS: &str = "local_tags"; diff --git a/src/api_core/adding_files.rs b/src/api_core/endpoints/adding_files.rs similarity index 58% rename from src/api_core/adding_files.rs rename to src/api_core/endpoints/adding_files.rs index fce22b4..135f49e 100644 --- a/src/api_core/adding_files.rs +++ b/src/api_core/endpoints/adding_files.rs @@ -1,5 +1,6 @@ -use crate::api_core::common::BasicHashList; -use crate::api_core::Endpoint; +use crate::api_core::common::{FileSelection, FileServiceSelection}; +use crate::api_core::endpoints::Endpoint; +use serde::Serialize; pub static STATUS_IMPORT_SUCCESS: u8 = 1; pub static STATUS_IMPORT_ALREADY_EXISTS: u8 = 2; @@ -30,7 +31,14 @@ impl Endpoint for AddFile { } } -pub type DeleteFilesRequest = BasicHashList; +#[derive(Clone, Debug, Serialize)] +pub struct DeleteFilesRequest { + #[serde(flatten)] + pub file_selection: FileSelection, + #[serde(flatten)] + pub service_selection: FileServiceSelection, + pub reason: Option, +} pub struct DeleteFiles; @@ -43,7 +51,14 @@ impl Endpoint for DeleteFiles { } } -pub type UndeleteFilesRequest = BasicHashList; +#[derive(Clone, Debug, Serialize)] +pub struct UndeleteFilesRequest { + #[serde(flatten)] + pub file_selection: FileSelection, + #[serde(flatten)] + pub service_selection: FileServiceSelection, +} + pub struct UndeleteFiles; impl Endpoint for UndeleteFiles { @@ -55,7 +70,14 @@ impl Endpoint for UndeleteFiles { } } -pub type ArchiveFilesRequest = BasicHashList; +#[derive(Clone, Debug, Serialize)] +pub struct ArchiveFilesRequest { + #[serde(flatten)] + pub file_selection: FileSelection, + #[serde(flatten)] + pub service_selection: FileServiceSelection, +} + pub struct ArchiveFiles; impl Endpoint for ArchiveFiles { @@ -67,11 +89,18 @@ impl Endpoint for ArchiveFiles { } } -pub type UnarchiveFilesRequest = BasicHashList; +#[derive(Clone, Debug, Serialize)] +pub struct UnarchiveFilesRequest { + #[serde(flatten)] + pub file_selection: FileSelection, + #[serde(flatten)] + pub service_selection: FileServiceSelection, +} + pub struct UnarchiveFiles; impl Endpoint for UnarchiveFiles { - type Request = UndeleteFilesRequest; + type Request = UnarchiveFilesRequest; type Response = (); fn path() -> String { diff --git a/src/api_core/adding_notes.rs b/src/api_core/endpoints/adding_notes.rs similarity index 97% rename from src/api_core/adding_notes.rs rename to src/api_core/endpoints/adding_notes.rs index 794d958..d7c875a 100644 --- a/src/api_core/adding_notes.rs +++ b/src/api_core/endpoints/adding_notes.rs @@ -1,5 +1,5 @@ use crate::api_core::common::FileIdentifier; -use crate::api_core::Endpoint; +use 
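// Usage sketch (aside, not part of the patch): the new selection types can be built from
// hashes, file ids, or the existing identifier types; all values below are placeholders.
use hydrus_api::api_core::common::{FileIdentifier, FileSelection, FileServiceSelection};

fn selection_examples() {
    let _by_hashes = FileSelection::by_hashes(vec!["00ff".to_string(), "11aa".to_string()]);
    let _single_id = FileSelection::by_file_id(42);
    let _from_identifier: FileSelection = FileIdentifier::ID(42).into();
    let _service_by_name = FileServiceSelection::by_name("my files");
    let _service_by_key = FileServiceSelection::by_key("6c6f63616c2074616773");
    let _all_services = FileServiceSelection::none();
}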
crate::api_core::endpoints::Endpoint; use std::collections::HashMap; pub struct SetNotes; diff --git a/src/api_core/adding_tags.rs b/src/api_core/endpoints/adding_tags.rs similarity index 99% rename from src/api_core/adding_tags.rs rename to src/api_core/endpoints/adding_tags.rs index 1c9aafe..8ac4363 100644 --- a/src/api_core/adding_tags.rs +++ b/src/api_core/endpoints/adding_tags.rs @@ -1,5 +1,5 @@ use crate::api_core::common::ServiceIdentifier; -use crate::api_core::Endpoint; +use crate::api_core::endpoints::Endpoint; use std::collections::HashMap; #[derive(Debug, Clone, Deserialize)] diff --git a/src/api_core/adding_urls.rs b/src/api_core/endpoints/adding_urls.rs similarity index 97% rename from src/api_core/adding_urls.rs rename to src/api_core/endpoints/adding_urls.rs index 33aae0b..c083e37 100644 --- a/src/api_core/adding_urls.rs +++ b/src/api_core/endpoints/adding_urls.rs @@ -1,5 +1,5 @@ use crate::api_core::common::ServiceIdentifier; -use crate::api_core::Endpoint; +use crate::api_core::endpoints::Endpoint; use serde::Serialize; use std::collections::HashMap; @@ -74,7 +74,7 @@ pub struct AddUrlRequest { /// /// Example: /// ``` -/// use hydrus_api::api_core::adding_urls::AddUrlRequestBuilder; +/// use hydrus_api::api_core::endpoints::adding_urls::AddUrlRequestBuilder; /// use hydrus_api::api_core::common::ServiceIdentifier; /// /// let request = AddUrlRequestBuilder::default() diff --git a/src/api_core/client_builder.rs b/src/api_core/endpoints/client_builder.rs similarity index 100% rename from src/api_core/client_builder.rs rename to src/api_core/endpoints/client_builder.rs diff --git a/src/api_core/managing_cookies_and_http_headers.rs b/src/api_core/endpoints/managing_cookies_and_http_headers.rs similarity index 98% rename from src/api_core/managing_cookies_and_http_headers.rs rename to src/api_core/endpoints/managing_cookies_and_http_headers.rs index 1587657..c8fc4ea 100644 --- a/src/api_core/managing_cookies_and_http_headers.rs +++ b/src/api_core/endpoints/managing_cookies_and_http_headers.rs @@ -1,5 +1,5 @@ use crate::api_core::common::OptionalStringNumber; -use crate::api_core::Endpoint; +use crate::api_core::endpoints::Endpoint; #[derive(Clone, Debug, Deserialize)] pub struct GetCookiesResponse { diff --git a/src/api_core/managing_pages.rs b/src/api_core/endpoints/managing_pages.rs similarity index 97% rename from src/api_core/managing_pages.rs rename to src/api_core/endpoints/managing_pages.rs index f42556d..119047c 100644 --- a/src/api_core/managing_pages.rs +++ b/src/api_core/endpoints/managing_pages.rs @@ -1,5 +1,5 @@ use crate::api_core::common::PageInformation; -use crate::api_core::Endpoint; +use crate::api_core::endpoints::Endpoint; #[derive(Clone, Debug, Deserialize)] pub struct GetPagesResponse { diff --git a/src/api_core/endpoints/mod.rs b/src/api_core/endpoints/mod.rs new file mode 100644 index 0000000..a1130ed --- /dev/null +++ b/src/api_core/endpoints/mod.rs @@ -0,0 +1,20 @@ +use serde::de::DeserializeOwned; +use serde::Serialize; +use std::fmt::Debug; + +pub mod access_management; +pub mod adding_files; +pub mod adding_notes; +pub mod adding_tags; +pub mod adding_urls; +pub mod client_builder; +pub mod managing_cookies_and_http_headers; +pub mod managing_pages; +pub mod searching_and_fetching_files; + +pub(crate) trait Endpoint { + type Request: Serialize + Debug; + type Response: DeserializeOwned + Debug; + + fn path() -> String; +} diff --git a/src/api_core/searching_and_fetching_files.rs b/src/api_core/endpoints/searching_and_fetching_files.rs similarity 
index 60% rename from src/api_core/searching_and_fetching_files.rs rename to src/api_core/endpoints/searching_and_fetching_files.rs index 42ca9f8..e7324c6 100644 --- a/src/api_core/searching_and_fetching_files.rs +++ b/src/api_core/endpoints/searching_and_fetching_files.rs @@ -1,5 +1,10 @@ -use crate::api_core::common::FileMetadataInfo; -use crate::api_core::Endpoint; +use crate::api_core::common::FileMetadataServices; +use crate::api_core::endpoints::Endpoint; +use serde::de::DeserializeOwned; +use serde::Deserialize; +use std::collections::HashMap; +use std::fmt::Debug; +use std::marker::PhantomData; pub mod file_sort_type { pub const SORT_FILE_SIZE: u8 = 0; @@ -129,15 +134,15 @@ impl Endpoint for SearchFileHashes { } #[derive(Clone, Debug, Default, Deserialize)] -pub struct FileMetadataResponse { - pub metadata: Vec, +pub struct FileMetadataResponse { + pub metadata: Vec, } -pub struct FileMetadata; +pub struct FileMetadata(PhantomData); -impl Endpoint for FileMetadata { +impl Endpoint for FileMetadata { type Request = (); - type Response = FileMetadataResponse; + type Response = FileMetadataResponse; fn path() -> String { String::from("get_files/file_metadata") @@ -169,3 +174,94 @@ where Self::Tag(s.to_string()) } } + +#[derive(Clone, Debug, Default, Deserialize)] +pub struct FileMetadataIdentifiers { + pub file_id: u64, + pub hash: String, +} + +#[derive(Clone, Debug, Default, Deserialize)] +pub struct FileBasicMetadata { + #[serde(flatten)] + pub identifiers: FileMetadataIdentifiers, + pub size: Option, + pub mime: String, + pub ext: String, + pub width: Option, + pub height: Option, + pub duration: Option, + pub time_modified: Option, + pub file_services: FileMetadataServices, + pub has_audio: Option, + pub num_frames: Option, + pub num_words: Option, +} + +#[derive(Clone, Debug, Default, Deserialize)] +pub struct FileFullMetadata { + #[serde(flatten)] + pub basic_metadata: FileBasicMetadata, + pub is_inbox: bool, + pub is_local: bool, + pub is_trashed: bool, + pub known_urls: Vec, + #[deprecated] + pub service_names_to_statuses_to_tags: HashMap>>, + pub service_keys_to_statuses_to_tags: HashMap>>, + #[deprecated] + pub service_names_to_statuses_to_display_tags: HashMap>>, + pub service_keys_to_statuses_to_display_tags: HashMap>>, +} + +pub trait FileMetadataType: Clone + Debug { + type Response: DeserializeOwned + Clone + Debug; + + fn only_identifiers() -> bool; + fn only_basic_information() -> bool; +} + +#[derive(Clone, Debug)] +pub struct FullMetadata; + +impl FileMetadataType for FullMetadata { + type Response = FileFullMetadata; + + fn only_identifiers() -> bool { + false + } + + fn only_basic_information() -> bool { + false + } +} + +#[derive(Clone, Debug)] +pub struct BasicMetadata; + +impl FileMetadataType for BasicMetadata { + type Response = FileBasicMetadata; + + fn only_identifiers() -> bool { + false + } + + fn only_basic_information() -> bool { + true + } +} + +#[derive(Clone, Debug)] +pub struct Identifiers; + +impl FileMetadataType for Identifiers { + type Response = FileMetadataIdentifiers; + + fn only_identifiers() -> bool { + true + } + + fn only_basic_information() -> bool { + false + } +} diff --git a/src/api_core/mod.rs b/src/api_core/mod.rs index b07f339..8891c59 100644 --- a/src/api_core/mod.rs +++ b/src/api_core/mod.rs @@ -1,24 +1,3 @@ -use serde::de::DeserializeOwned; -use serde::Serialize; -use std::fmt::Debug; - -pub mod access_management; -pub mod adding_files; -pub mod adding_tags; -pub mod adding_urls; pub mod client; -pub mod client_builder; pub mod 
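// Aside (not part of the patch): the three marker types differ only in the two flags they
// report, which in turn select which of the record structs above gets deserialized
// (FileFullMetadata, FileBasicMetadata, or FileMetadataIdentifiers).
use hydrus_api::api_core::endpoints::searching_and_fetching_files::{
    BasicMetadata, FileMetadataType, FullMetadata, Identifiers,
};

fn metadata_flags() {
    assert!(!FullMetadata::only_identifiers() && !FullMetadata::only_basic_information());
    assert!(!BasicMetadata::only_identifiers() && BasicMetadata::only_basic_information());
    assert!(Identifiers::only_identifiers() && !Identifiers::only_basic_information());
}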
common; -pub mod managing_cookies_and_http_headers; -pub mod managing_pages; -pub mod searching_and_fetching_files; -pub mod adding_notes; - -pub use searching_and_fetching_files::file_sort_type; - -pub(crate) trait Endpoint { - type Request: Serialize + Debug; - type Response: DeserializeOwned + Debug; - - fn path() -> String; -} +pub mod endpoints; diff --git a/src/lib.rs b/src/lib.rs index d7b6884..5f3f53c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -56,7 +56,7 @@ //! ## Client Usage Example //! ``` //! use hydrus_api::Client; -//! use hydrus_api::api_core::adding_tags::{AddTagsRequestBuilder, TagAction}; +//! use hydrus_api::api_core::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction}; //! use std::env; //! use hydrus_api::api_core::common::ServiceIdentifier; //! # #[tokio::test] diff --git a/src/wrapper/address.rs b/src/wrapper/address.rs index b3f13c7..85830a2 100644 --- a/src/wrapper/address.rs +++ b/src/wrapper/address.rs @@ -1,5 +1,5 @@ use crate::api_core::common::OptionalStringNumber; -use crate::api_core::managing_cookies_and_http_headers::CookieBuilder; +use crate::api_core::endpoints::managing_cookies_and_http_headers::CookieBuilder; use crate::error::Result; use crate::Client; use std::time::{Duration, SystemTime, UNIX_EPOCH}; diff --git a/src/wrapper/builders/delete_files_builder.rs b/src/wrapper/builders/delete_files_builder.rs new file mode 100644 index 0000000..8333379 --- /dev/null +++ b/src/wrapper/builders/delete_files_builder.rs @@ -0,0 +1,71 @@ +use crate::api_core::common::{ + FileIdentifier, FileSelection, FileServiceSelection, ServiceIdentifier, +}; +use crate::error::Result; +use crate::Client; + +pub struct DeleteFilesBuilder { + client: Client, + hashes: Vec, + ids: Vec, + reason: Option, + service: Option, +} + +impl DeleteFilesBuilder { + pub(crate) fn new(client: Client) -> Self { + Self { + client, + hashes: Vec::new(), + ids: Vec::new(), + reason: None, + service: None, + } + } + + /// Adds a file to be deleted + pub fn add_file(mut self, identifier: FileIdentifier) -> Self { + match identifier { + FileIdentifier::ID(id) => self.ids.push(id), + FileIdentifier::Hash(hash) => self.hashes.push(hash), + } + + self + } + + /// Adds multiple files to be deleted + pub fn add_files(self, ids: Vec) -> Self { + ids.into_iter().fold(self, |acc, id| acc.add_file(id)) + } + + /// Restricts deletion to a single file service + pub fn service(mut self, service: ServiceIdentifier) -> Self { + self.service = Some(service); + + self + } + + /// Adds a reason for why the file was deleted + pub fn reason(mut self, reason: S) -> Self { + self.reason = Some(reason.to_string()); + + self + } + + /// Deletes all files specified in this builder + pub async fn run(self) -> Result<()> { + let file_selection = FileSelection { + hashes: self.hashes, + file_ids: self.ids, + ..Default::default() + }; + let service_selection = self + .service + .map(FileServiceSelection::from) + .unwrap_or_default(); + + self.client + .delete_files(file_selection, service_selection, self.reason) + .await + } +} diff --git a/src/wrapper/builders/import_builder.rs b/src/wrapper/builders/import_builder.rs index c8969e8..ec9aae0 100644 --- a/src/wrapper/builders/import_builder.rs +++ b/src/wrapper/builders/import_builder.rs @@ -1,6 +1,6 @@ -use crate::api_core::adding_files::{STATUS_IMPORT_FAILED, STATUS_IMPORT_VETOED}; -use crate::api_core::adding_urls::AddUrlRequestBuilder; use crate::api_core::common::ServiceIdentifier; +use crate::api_core::endpoints::adding_files::{STATUS_IMPORT_FAILED, 
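// Usage sketch (aside, not part of the patch): DeleteFilesBuilder collects hashes and file ids
// and forwards them to Client::delete_files. Its constructor is crate-internal, so callers
// reach it through the wrapper methods added further down in this diff (Hydrus::delete and
// HydrusFile::delete). The ids and the reason are placeholders.
use hydrus_api::api_core::common::FileIdentifier;
use hydrus_api::Hydrus;

async fn builder_example(hydrus: &Hydrus) {
    hydrus
        .delete()
        .await
        .add_files(vec![FileIdentifier::ID(1), FileIdentifier::ID(2)])
        .reason("cleaning up test data")
        .run()
        .await
        .unwrap();
}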
STATUS_IMPORT_VETOED}; +use crate::api_core::endpoints::adding_urls::AddUrlRequestBuilder; use crate::error::{Error, Result}; use crate::utils::tag_list_to_string_list; use crate::wrapper::hydrus_file::HydrusFile; diff --git a/src/wrapper/builders/mod.rs b/src/wrapper/builders/mod.rs index a58dde9..c3426b8 100644 --- a/src/wrapper/builders/mod.rs +++ b/src/wrapper/builders/mod.rs @@ -1,6 +1,7 @@ +pub mod delete_files_builder; pub mod import_builder; +pub mod notes_builder; pub mod or_chain_builder; pub mod search_builder; pub mod tag_builder; pub mod tagging_builder; -pub mod notes_builder; diff --git a/src/wrapper/builders/search_builder.rs b/src/wrapper/builders/search_builder.rs index 86a5d06..4443138 100644 --- a/src/wrapper/builders/search_builder.rs +++ b/src/wrapper/builders/search_builder.rs @@ -1,4 +1,6 @@ -use crate::api_core::searching_and_fetching_files::{FileSearchOptions, SearchQueryEntry}; +use crate::api_core::endpoints::searching_and_fetching_files::{ + FileSearchOptions, SearchQueryEntry, +}; use crate::error::Result; use crate::wrapper::hydrus_file::HydrusFile; use crate::wrapper::or_chain::OrChain; diff --git a/src/wrapper/builders/tagging_builder.rs b/src/wrapper/builders/tagging_builder.rs index 07f85b0..ad0c748 100644 --- a/src/wrapper/builders/tagging_builder.rs +++ b/src/wrapper/builders/tagging_builder.rs @@ -1,5 +1,5 @@ -use crate::api_core::adding_tags::{AddTagsRequestBuilder, TagAction}; use crate::api_core::common::ServiceIdentifier; +use crate::api_core::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction}; use crate::error::Result; use crate::wrapper::tag::Tag; use crate::Client; diff --git a/src/wrapper/hydrus.rs b/src/wrapper/hydrus.rs index 13dbeae..52a3294 100644 --- a/src/wrapper/hydrus.rs +++ b/src/wrapper/hydrus.rs @@ -1,6 +1,8 @@ use crate::api_core::common::FileIdentifier; +use crate::api_core::endpoints::searching_and_fetching_files::FullMetadata; use crate::error::Result; use crate::wrapper::address::Address; +use crate::wrapper::builders::delete_files_builder::DeleteFilesBuilder; use crate::wrapper::builders::import_builder::ImportBuilder; use crate::wrapper::builders::search_builder::SearchBuilder; use crate::wrapper::builders::tagging_builder::TaggingBuilder; @@ -71,12 +73,17 @@ impl Hydrus { pub async fn file(&self, identifier: FileIdentifier) -> Result { let metadata = self .client - .get_file_metadata_by_identifier(identifier) + .get_file_metadata_by_identifier::(identifier) .await?; Ok(HydrusFile::from_metadata(self.client.clone(), metadata)) } + /// Creates a builder to delete files + pub async fn delete(&self) -> DeleteFilesBuilder { + DeleteFilesBuilder::new(self.client.clone()) + } + /// Starts a request to bulk add tags to files pub fn tagging(&self) -> TaggingBuilder { TaggingBuilder::new(self.client.clone()) diff --git a/src/wrapper/hydrus_file.rs b/src/wrapper/hydrus_file.rs index 70e2b4c..ff61b93 100644 --- a/src/wrapper/hydrus_file.rs +++ b/src/wrapper/hydrus_file.rs @@ -1,7 +1,11 @@ -use crate::api_core::adding_tags::{AddTagsRequestBuilder, TagAction}; -use crate::api_core::common::{FileIdentifier, FileMetadataInfo, FileRecord, ServiceIdentifier}; +use crate::api_core::common::{ + FileIdentifier, FileRecord, FileSelection, FileServiceSelection, ServiceIdentifier, +}; +use crate::api_core::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction}; +use crate::api_core::endpoints::searching_and_fetching_files::{FileFullMetadata, FullMetadata}; use crate::error::{Error, Result}; use crate::utils::tag_list_to_string_list; +use 
crate::wrapper::builders::delete_files_builder::DeleteFilesBuilder; use crate::wrapper::builders::notes_builder::AddNotesBuilder; use crate::wrapper::service::ServiceName; use crate::wrapper::tag::Tag; @@ -35,7 +39,7 @@ pub struct HydrusFile { pub(crate) client: Client, pub id: FileIdentifier, pub status: FileStatus, - pub(crate) metadata: Option, + pub(crate) metadata: Option, } impl HydrusFile { @@ -61,7 +65,7 @@ impl HydrusFile { } } - pub(crate) fn from_metadata(client: Client, metadata: FileMetadataInfo) -> Self { + pub(crate) fn from_metadata(client: Client, metadata: FileFullMetadata) -> Self { let status = if metadata.is_trashed { FileStatus::Deleted } else { @@ -70,7 +74,7 @@ impl HydrusFile { Self { client, - id: FileIdentifier::Hash(metadata.hash.clone()), + id: FileIdentifier::Hash(metadata.basic_metadata.identifiers.hash.clone()), status, metadata: Some(metadata), } @@ -89,7 +93,7 @@ impl HydrusFile { match &self.id { FileIdentifier::ID(_) => { let metadata = self.metadata().await?; - Ok(metadata.hash.clone()) + Ok(metadata.basic_metadata.identifiers.hash.clone()) } FileIdentifier::Hash(hash) => Ok(hash.clone()), } @@ -99,17 +103,18 @@ impl HydrusFile { pub async fn size(&mut self) -> Result> { let metadata = self.metadata().await?; - Ok(metadata.size.clone()) + Ok(metadata.basic_metadata.size.clone()) } /// Returns the mime of the file pub async fn mime(&mut self) -> Result { let metadata = self.metadata().await?; let mime = metadata + .basic_metadata .mime .as_str() .parse() - .map_err(|_| Error::InvalidMime(metadata.mime.clone()))?; + .map_err(|_| Error::InvalidMime(metadata.basic_metadata.mime.clone()))?; Ok(mime) } @@ -118,13 +123,16 @@ impl HydrusFile { pub async fn ext(&mut self) -> Result { let metadata = self.metadata().await?; - Ok(metadata.ext.clone()) + Ok(metadata.basic_metadata.ext.clone()) } /// Returns the dimensions of the file in pixels pub async fn dimensions(&mut self) -> Result> { let metadata = self.metadata().await?; - if let (Some(width), Some(height)) = (&metadata.width, &metadata.height) { + if let (Some(width), Some(height)) = ( + &metadata.basic_metadata.width, + &metadata.basic_metadata.height, + ) { Ok(Some((*width, *height))) } else { Ok(None) @@ -135,21 +143,21 @@ impl HydrusFile { pub async fn duration(&mut self) -> Result> { let metadata = self.metadata().await?; - Ok(metadata.duration.clone()) + Ok(metadata.basic_metadata.duration.clone()) } /// Returns the number of frames of the file if it's a video pub async fn num_frames(&mut self) -> Result> { let metadata = self.metadata().await?; - Ok(metadata.num_frames.clone()) + Ok(metadata.basic_metadata.num_frames.clone()) } /// Returns if the file has audio pub async fn has_audio(&mut self) -> Result { let metadata = self.metadata().await?; - Ok(metadata.has_audio.unwrap_or(false)) + Ok(metadata.basic_metadata.has_audio.unwrap_or(false)) } /// Returns if the file is currently in the inbox @@ -184,6 +192,7 @@ impl HydrusFile { pub async fn time_modified(&mut self) -> Result> { let metadata = self.metadata().await?; let naive_time_modified = metadata + .basic_metadata .time_modified .map(|m| Utc.timestamp_millis(m as i64).naive_utc()); @@ -197,12 +206,14 @@ impl HydrusFile { ) -> Result> { let metadata = self.metadata().await?; let naive_time_imported = metadata + .basic_metadata .file_services .current .get(service_key.as_ref()) .map(|s| s.time_imported) .or_else(|| { metadata + .basic_metadata .file_services .deleted .get(service_key.as_ref()) @@ -220,6 +231,7 @@ impl HydrusFile { ) -> Result> { 
let metadata = self.metadata().await?; let naive_time_deleted = metadata + .basic_metadata .file_services .deleted .get(service_key.as_ref()) @@ -229,6 +241,41 @@ impl HydrusFile { Ok(naive_time_deleted) } + /// Creates a request builder to delete the file + pub fn delete(&mut self) -> DeleteFilesBuilder { + self.metadata = None; + DeleteFilesBuilder::new(self.client.clone()).add_file(self.id.clone()) + } + + /// Undeletes the file for the given service or all services + /// if `FileServiceSelection::none` is passed + pub async fn undelete(&mut self, service_selection: FileServiceSelection) -> Result<()> { + let hash = self.hash().await?; + self.metadata = None; + self.client + .undelete_files(FileSelection::by_hash(hash), service_selection) + .await + } + + /// Archives the file in all passed file services or all configured services + /// if no selection is passed + pub async fn archive(&mut self, service_selection: FileServiceSelection) -> Result<()> { + let hash = self.hash().await?; + self.metadata = None; + self.client + .archive_files(FileSelection::by_hash(hash), service_selection) + .await + } + + /// Unarchives the file for the given services + pub async fn unarchive(&mut self, service_selection: FileServiceSelection) -> Result<()> { + let hash = self.hash().await?; + self.metadata = None; + self.client + .unarchive_files(FileSelection::by_hash(hash), service_selection) + .await + } + /// Associates the file with a list of urls pub async fn associate_urls(&mut self, urls: Vec) -> Result<()> { let hash = self.hash().await?; @@ -347,11 +394,11 @@ impl HydrusFile { /// Returns the metadata for the given file /// if there's already known metadata about the file it uses that - async fn metadata(&mut self) -> Result<&FileMetadataInfo> { + async fn metadata(&mut self) -> Result<&FileFullMetadata> { if self.metadata.is_none() { let metadata = self .client - .get_file_metadata_by_identifier(self.id.clone()) + .get_file_metadata_by_identifier::(self.id.clone()) .await?; self.status = if metadata.is_trashed { FileStatus::Deleted diff --git a/src/wrapper/page.rs b/src/wrapper/page.rs index 48f37ea..cc8f68d 100644 --- a/src/wrapper/page.rs +++ b/src/wrapper/page.rs @@ -1,4 +1,5 @@ use crate::api_core::common::{FileIdentifier, PageInformation}; +use crate::api_core::endpoints::searching_and_fetching_files::Identifiers; use crate::error::Result; use crate::utils::split_file_identifiers_into_hashes_and_ids; use crate::Client; @@ -56,7 +57,7 @@ impl HydrusPage { for id in ids { let metadata = self .client - .get_file_metadata_by_identifier(FileIdentifier::ID(id)) + .get_file_metadata_by_identifier::(FileIdentifier::ID(id)) .await?; hashes.push(metadata.hash); } diff --git a/src/wrapper/service.rs b/src/wrapper/service.rs index c8b9ab1..87664f7 100644 --- a/src/wrapper/service.rs +++ b/src/wrapper/service.rs @@ -1,5 +1,5 @@ -use crate::api_core::access_management::GetServicesResponse; -use crate::api_core::access_management::{ +use crate::api_core::endpoints::access_management::GetServicesResponse; +use crate::api_core::endpoints::access_management::{ SERVICE_TYPE_ALL_KNOWN_FILES, SERVICE_TYPE_ALL_KNOWN_TAGS, SERVICE_TYPE_ALL_LOCAL_FILES, SERVICE_TYPE_FILE_REPOSITORIES, SERVICE_TYPE_LOCAL_FILES, SERVICE_TYPE_LOCAL_TAGS, SERVICE_TYPE_TAG_REPOSITORIES, SERVICE_TYPE_TRASH, diff --git a/src/wrapper/url.rs b/src/wrapper/url.rs index dc7060f..e1582fd 100644 --- a/src/wrapper/url.rs +++ b/src/wrapper/url.rs @@ -1,4 +1,4 @@ -use crate::api_core::adding_urls::{ +use 
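// Usage sketch (aside, not part of the patch): the wrapper file type exposes the same
// selection-based operations; passing FileServiceSelection::none() targets all configured
// services. Assumes a HydrusFile obtained via Hydrus::file as in the tests below;
// the service name and reason are placeholders.
use hydrus_api::api_core::common::FileServiceSelection;
use hydrus_api::wrapper::hydrus_file::HydrusFile;

async fn file_example(mut file: HydrusFile) {
    file.archive(FileServiceSelection::by_name("my files")).await.unwrap();
    file.unarchive(FileServiceSelection::none()).await.unwrap();
    file.delete().reason("no longer needed").run().await.unwrap();
    file.undelete(FileServiceSelection::none()).await.unwrap();
}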
crate::api_core::endpoints::adding_urls::{ URL_TYPE_FILE, URL_TYPE_GALLERY, URL_TYPE_POST, URL_TYPE_WATCHABLE, }; use crate::error::Result; diff --git a/tests/client/test_adding_files.rs b/tests/client/test_adding_files.rs index 285c038..8cafa81 100644 --- a/tests/client/test_adding_files.rs +++ b/tests/client/test_adding_files.rs @@ -1,6 +1,8 @@ use crate::common; use crate::common::create_testdata; -use crate::common::test_data::get_test_hashes; +use crate::common::test_data::{get_test_hashes, TEST_HASH_1}; +use hydrus_api::api_core::common::FileSelection; +use hydrus_api::wrapper::service::ServiceName; #[tokio::test] async fn it_adds_files() { @@ -22,26 +24,52 @@ async fn it_adds_binary_files() { #[tokio::test] async fn it_deletes_files() { let client = common::get_client(); - client.delete_files(get_test_hashes()).await.unwrap(); + create_testdata(&client).await; + client + .delete_files( + FileSelection::by_hashes(get_test_hashes()), + ServiceName::my_files().into(), + Some("Test".to_string()), + ) + .await + .unwrap(); } #[tokio::test] async fn it_undeletes_files() { let client = common::get_client(); create_testdata(&client).await; - client.undelete_files(get_test_hashes()).await.unwrap(); + client + .undelete_files( + FileSelection::by_hashes(get_test_hashes()), + ServiceName::my_files().into(), + ) + .await + .unwrap(); } #[tokio::test] async fn it_archives_files() { let client = common::get_client(); create_testdata(&client).await; - client.archive_files(get_test_hashes()).await.unwrap(); + client + .archive_files( + FileSelection::by_hashes(vec![TEST_HASH_1.to_string()]), + ServiceName::my_files().into(), + ) + .await + .unwrap(); } #[tokio::test] async fn it_unarchives_files() { let client = common::get_client(); create_testdata(&client).await; - client.unarchive_files(get_test_hashes()).await.unwrap(); + client + .unarchive_files( + FileSelection::by_hashes(get_test_hashes()), + ServiceName::my_files().into(), + ) + .await + .unwrap(); } diff --git a/tests/client/test_adding_tags.rs b/tests/client/test_adding_tags.rs index 3b34952..5c43792 100644 --- a/tests/client/test_adding_tags.rs +++ b/tests/client/test_adding_tags.rs @@ -1,7 +1,7 @@ use super::super::common; use crate::common::test_data::EMPTY_HASH; -use hydrus_api::api_core::adding_tags::{AddTagsRequestBuilder, TagAction}; use hydrus_api::api_core::common::ServiceIdentifier; +use hydrus_api::api_core::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction}; #[tokio::test] async fn it_cleans_tags() { diff --git a/tests/client/test_adding_urls.rs b/tests/client/test_adding_urls.rs index 4e7a629..03f0b24 100644 --- a/tests/client/test_adding_urls.rs +++ b/tests/client/test_adding_urls.rs @@ -1,7 +1,7 @@ use super::super::common; use crate::common::test_data::{get_test_hashes, get_test_urls, TEST_URL_1}; -use hydrus_api::api_core::adding_urls::{AddUrlRequestBuilder, URL_TYPE_POST}; use hydrus_api::api_core::common::ServiceIdentifier; +use hydrus_api::api_core::endpoints::adding_urls::{AddUrlRequestBuilder, URL_TYPE_POST}; #[tokio::test] async fn it_returns_files_for_an_url() { diff --git a/tests/client/test_managing_cookies_and_http_headers.rs b/tests/client/test_managing_cookies_and_http_headers.rs index 31e7b1e..a308850 100644 --- a/tests/client/test_managing_cookies_and_http_headers.rs +++ b/tests/client/test_managing_cookies_and_http_headers.rs @@ -1,5 +1,5 @@ use super::super::common; -use hydrus_api::api_core::managing_cookies_and_http_headers::CookieBuilder; +use 
hydrus_api::api_core::endpoints::managing_cookies_and_http_headers::CookieBuilder; #[tokio::test] async fn it_returns_cookies_for_a_domain() { diff --git a/tests/client/test_searching_and_fetching_files.rs b/tests/client/test_searching_and_fetching_files.rs index dde77ad..134912b 100644 --- a/tests/client/test_searching_and_fetching_files.rs +++ b/tests/client/test_searching_and_fetching_files.rs @@ -1,7 +1,9 @@ use super::super::common; use hydrus_api::api_core::common::FileIdentifier; -use hydrus_api::api_core::file_sort_type::SORT_FILE_PIXEL_COUNT; -use hydrus_api::api_core::searching_and_fetching_files::{FileSearchOptions, SearchQueryEntry}; +use hydrus_api::api_core::endpoints::searching_and_fetching_files::file_sort_type::SORT_FILE_PIXEL_COUNT; +use hydrus_api::api_core::endpoints::searching_and_fetching_files::{ + BasicMetadata, FileSearchOptions, FullMetadata, Identifiers, SearchQueryEntry, +}; #[tokio::test] async fn is_searches_files() { @@ -45,7 +47,7 @@ async fn is_searches_file_hashes() { async fn it_fetches_file_metadata() { let client = common::get_client(); client - .get_file_metadata( + .get_file_metadata::( vec![], vec!["0000000000000000000000000000000000000000000000000000000000000000".to_string()], ) @@ -56,8 +58,18 @@ async fn it_fetches_file_metadata() { #[tokio::test] async fn it_fetches_file_metadata_by_id() { let client = common::get_client(); - let response = client.get_file_metadata(vec![1], vec![]).await; - assert!(response.is_ok()); // Even if the file doesn't exist it still returns some information about it + let response = client + .get_file_metadata::(vec![1], vec![]) + .await; + assert!(response.is_ok()); + let response = client + .get_file_metadata::(vec![1], vec![]) + .await; + assert!(response.is_ok()); + let response = client + .get_file_metadata::(vec![1], vec![]) + .await; + assert!(response.is_ok()); } #[tokio::test] diff --git a/tests/common/mod.rs b/tests/common/mod.rs index cac4110..575d449 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -1,5 +1,5 @@ -use hydrus_api::api_core::adding_urls::AddUrlRequestBuilder; use hydrus_api::api_core::client::Client; +use hydrus_api::api_core::endpoints::adding_urls::AddUrlRequestBuilder; use hydrus_api::Hydrus; use std::env; use std::sync::{Arc, Mutex, MutexGuard}; diff --git a/tests/wrapper/test_files.rs b/tests/wrapper/test_files.rs index 536ff4a..ee2699c 100644 --- a/tests/wrapper/test_files.rs +++ b/tests/wrapper/test_files.rs @@ -1,14 +1,18 @@ use super::super::common; -use hydrus_api::api_core::adding_tags::TagAction; +use crate::common::test_data::TEST_HASH_2; +use crate::common::{create_testdata, get_client}; use hydrus_api::api_core::common::FileIdentifier; +use hydrus_api::api_core::endpoints::adding_tags::TagAction; use hydrus_api::wrapper::hydrus_file::HydrusFile; use hydrus_api::wrapper::service::ServiceName; async fn get_file() -> HydrusFile { + let client = get_client(); + create_testdata(&client).await; let hydrus = common::get_hydrus(); hydrus .file(FileIdentifier::hash( - "277a138cd1ee79fc1fdb2869c321b848d4861e45b82184487139ef66dd40b62d", // needs to exist + TEST_HASH_2, // needs to exist )) .await .unwrap() @@ -102,9 +106,19 @@ async fn it_retrieves_content() { async fn it_retrieves_metadata() { let mut file = get_file().await; assert!(file.dimensions().await.unwrap().is_some()); - assert!(file.stored_locally().await.unwrap()); assert!(file.duration().await.unwrap().is_none()); assert!(file.time_modified().await.is_ok()); assert!(file.time_deleted("000").await.is_ok()); 
    assert!(file.time_imported("000").await.is_ok());
 }
+
+#[tokio::test]
+async fn it_deletes() {
+    let mut file = get_file().await;
+    file.delete()
+        .reason("I just don't like that file")
+        .run()
+        .await
+        .unwrap();
+    file.undelete(ServiceName::my_files().into()).await.unwrap();
+}
diff --git a/tests/wrapper/test_hydrus.rs b/tests/wrapper/test_hydrus.rs
index 777d1d3..4abd3b8 100644
--- a/tests/wrapper/test_hydrus.rs
+++ b/tests/wrapper/test_hydrus.rs
@@ -1,5 +1,5 @@
 use super::super::common;
-use hydrus_api::api_core::adding_tags::TagAction;
+use hydrus_api::api_core::endpoints::adding_tags::TagAction;
 use hydrus_api::wrapper::builders::or_chain_builder::OrChainBuilder;
 use hydrus_api::wrapper::builders::search_builder::SortType;
 use hydrus_api::wrapper::builders::tag_builder::TagBuilder;
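Taken together, the new pieces let the deletion flow be driven entirely from the wrapper layer. The sketch below is not part of the patch; it mirrors the it_deletes test above and assumes a configured Hydrus handle plus a hash the client actually knows about.

use hydrus_api::api_core::common::FileIdentifier;
use hydrus_api::wrapper::service::ServiceName;
use hydrus_api::Hydrus;

async fn delete_and_restore(hydrus: &Hydrus, hash: &str) {
    let mut file = hydrus.file(FileIdentifier::hash(hash)).await.unwrap();
    file.delete()
        .reason("example deletion")
        .run()
        .await
        .unwrap();
    // Undelete into the local file service again, matching the test above.
    file.undelete(ServiceName::my_files().into()).await.unwrap();
}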