Merge pull request #15 from Trivernis/develop

Develop
main
Julius Riegel 3 years ago committed by GitHub
commit 2ad46cc423
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -20,5 +20,5 @@ jobs:
CRATES_IO_TOKEN: ${{secrets.CRATES_IO_TOKEN}} CRATES_IO_TOKEN: ${{secrets.CRATES_IO_TOKEN}}
run: cargo login "$CRATES_IO_TOKEN" run: cargo login "$CRATES_IO_TOKEN"
- name: Publish to crates.io - name: Publish json to crates.io
run: cargo publish --all-features run: cargo publish

@ -1,6 +1,6 @@
[package] [package]
name = "hydrus-api" name = "hydrus-api"
version = "0.8.0" version = "0.9.0"
authors = ["trivernis <trivernis@protonmail.com>"] authors = ["trivernis <trivernis@protonmail.com>"]
edition = "2018" edition = "2018"
license = "Apache-2.0" license = "Apache-2.0"

@ -1,32 +1,38 @@
use crate::api_core::access_management::{ use crate::api_core::common::{
FileIdentifier, FileRecord, FileSelection, FileServiceSelection, OptionalStringNumber,
};
use crate::api_core::endpoints::access_management::{
ApiVersion, ApiVersionResponse, GetServices, GetServicesResponse, SessionKey, ApiVersion, ApiVersionResponse, GetServices, GetServicesResponse, SessionKey,
SessionKeyResponse, VerifyAccessKey, VerifyAccessKeyResponse, SessionKeyResponse, VerifyAccessKey, VerifyAccessKeyResponse,
}; };
use crate::api_core::adding_files::{ use crate::api_core::endpoints::adding_files::{
AddFile, AddFileRequest, AddFileResponse, ArchiveFiles, ArchiveFilesRequest, DeleteFiles, AddFile, AddFileRequest, AddFileResponse, ArchiveFiles, ArchiveFilesRequest, DeleteFiles,
DeleteFilesRequest, UnarchiveFiles, UnarchiveFilesRequest, UndeleteFiles, UndeleteFilesRequest, DeleteFilesRequest, UnarchiveFiles, UnarchiveFilesRequest, UndeleteFiles, UndeleteFilesRequest,
}; };
use crate::api_core::adding_notes::{DeleteNotes, DeleteNotesRequest, SetNotes, SetNotesRequest}; use crate::api_core::endpoints::adding_notes::{
use crate::api_core::adding_tags::{AddTags, AddTagsRequest, CleanTags, CleanTagsResponse}; DeleteNotes, DeleteNotesRequest, SetNotes, SetNotesRequest,
use crate::api_core::adding_urls::{ };
use crate::api_core::endpoints::adding_tags::{
AddTags, AddTagsRequest, CleanTags, CleanTagsResponse,
};
use crate::api_core::endpoints::adding_urls::{
AddUrl, AddUrlRequest, AddUrlResponse, AssociateUrl, AssociateUrlRequest, GetUrlFiles, AddUrl, AddUrlRequest, AddUrlResponse, AssociateUrl, AssociateUrlRequest, GetUrlFiles,
GetUrlFilesResponse, GetUrlInfo, GetUrlInfoResponse, GetUrlFilesResponse, GetUrlInfo, GetUrlInfoResponse,
}; };
use crate::api_core::client_builder::ClientBuilder; use crate::api_core::endpoints::client_builder::ClientBuilder;
use crate::api_core::common::{FileIdentifier, FileMetadataInfo, FileRecord, OptionalStringNumber}; use crate::api_core::endpoints::managing_cookies_and_http_headers::{
use crate::api_core::managing_cookies_and_http_headers::{
GetCookies, GetCookiesResponse, SetCookies, SetCookiesRequest, SetUserAgent, GetCookies, GetCookiesResponse, SetCookies, SetCookiesRequest, SetUserAgent,
SetUserAgentRequest, SetUserAgentRequest,
}; };
use crate::api_core::managing_pages::{ use crate::api_core::endpoints::managing_pages::{
AddFiles, AddFilesRequest, FocusPage, FocusPageRequest, GetPageInfo, GetPageInfoResponse, AddFiles, AddFilesRequest, FocusPage, FocusPageRequest, GetPageInfo, GetPageInfoResponse,
GetPages, GetPagesResponse, GetPages, GetPagesResponse,
}; };
use crate::api_core::searching_and_fetching_files::{ use crate::api_core::endpoints::searching_and_fetching_files::{
FileMetadata, FileMetadataResponse, FileSearchOptions, GetFile, SearchFileHashes, FileMetadata, FileMetadataResponse, FileMetadataType, FileSearchOptions, GetFile,
SearchFileHashesResponse, SearchFiles, SearchFilesResponse, SearchQueryEntry, SearchFileHashes, SearchFileHashesResponse, SearchFiles, SearchFilesResponse, SearchQueryEntry,
}; };
use crate::api_core::Endpoint; use crate::api_core::endpoints::Endpoint;
use crate::error::{Error, Result}; use crate::error::{Error, Result};
use bytes::Buf; use bytes::Buf;
use reqwest::Response; use reqwest::Response;
@ -106,36 +112,66 @@ impl Client {
/// Moves files with matching hashes to the trash /// Moves files with matching hashes to the trash
#[tracing::instrument(skip(self), level = "debug")] #[tracing::instrument(skip(self), level = "debug")]
pub async fn delete_files(&self, hashes: Vec<String>) -> Result<()> { pub async fn delete_files(
self.post::<DeleteFiles>(DeleteFilesRequest { hashes }) &self,
.await?; files: FileSelection,
service: FileServiceSelection,
reason: Option<String>,
) -> Result<()> {
self.post::<DeleteFiles>(DeleteFilesRequest {
file_selection: files,
service_selection: service,
reason,
})
.await?;
Ok(()) Ok(())
} }
/// Pulls files out of the trash by hash /// Pulls files out of the trash by hash
#[tracing::instrument(skip(self), level = "debug")] #[tracing::instrument(skip(self), level = "debug")]
pub async fn undelete_files(&self, hashes: Vec<String>) -> Result<()> { pub async fn undelete_files(
self.post::<UndeleteFiles>(UndeleteFilesRequest { hashes }) &self,
.await?; files: FileSelection,
service: FileServiceSelection,
) -> Result<()> {
self.post::<UndeleteFiles>(UndeleteFilesRequest {
file_selection: files,
service_selection: service,
})
.await?;
Ok(()) Ok(())
} }
/// Moves files from the inbox into the archive /// Moves files from the inbox into the archive
#[tracing::instrument(skip(self), level = "debug")] #[tracing::instrument(skip(self), level = "debug")]
pub async fn archive_files(&self, hashes: Vec<String>) -> Result<()> { pub async fn archive_files(
self.post::<ArchiveFiles>(ArchiveFilesRequest { hashes }) &self,
.await?; files: FileSelection,
service: FileServiceSelection,
) -> Result<()> {
self.post::<ArchiveFiles>(ArchiveFilesRequest {
file_selection: files,
service_selection: service,
})
.await?;
Ok(()) Ok(())
} }
/// Moves files from the archive into the inbox /// Moves files from the archive into the inbox
#[tracing::instrument(skip(self), level = "debug")] #[tracing::instrument(skip(self), level = "debug")]
pub async fn unarchive_files(&self, hashes: Vec<String>) -> Result<()> { pub async fn unarchive_files(
self.post::<UnarchiveFiles>(UnarchiveFilesRequest { hashes }) &self,
.await?; files: FileSelection,
service: FileServiceSelection,
) -> Result<()> {
self.post::<UnarchiveFiles>(UnarchiveFilesRequest {
file_selection: files,
service_selection: service,
})
.await?;
Ok(()) Ok(())
} }
@ -180,36 +216,47 @@ impl Client {
) -> Result<SearchFileHashesResponse> { ) -> Result<SearchFileHashesResponse> {
let mut args = options.into_query_args(); let mut args = options.into_query_args();
args.push(("tags", Self::serialize_query_object(query)?)); args.push(("tags", Self::serialize_query_object(query)?));
args.push(("return_hashes", String::from("true"))); args.push(("return_hashes", Self::serialize_query_object(true)?));
self.get_and_parse::<SearchFileHashes, [(&str, String)]>(&args) self.get_and_parse::<SearchFileHashes, [(&str, String)]>(&args)
.await .await
} }
/// Returns the metadata for a given list of file_ids or hashes /// Returns the metadata for a given list of file_ids or hashes
#[tracing::instrument(skip(self), level = "debug")] #[tracing::instrument(skip(self), level = "debug")]
pub async fn get_file_metadata( pub async fn get_file_metadata<M: FileMetadataType>(
&self, &self,
file_ids: Vec<u64>, file_ids: Vec<u64>,
hashes: Vec<String>, hashes: Vec<String>,
) -> Result<FileMetadataResponse> { ) -> Result<FileMetadataResponse<M>> {
let query = if file_ids.len() > 0 { let id_query = if file_ids.len() > 0 {
("file_ids", Self::serialize_query_object(file_ids)?) ("file_ids", Self::serialize_query_object(file_ids)?)
} else { } else {
("hashes", Self::serialize_query_object(hashes)?) ("hashes", Self::serialize_query_object(hashes)?)
}; };
self.get_and_parse::<FileMetadata, [(&str, String)]>(&[query]) let query = [
id_query,
(
"only_return_identifiers",
Self::serialize_query_object(M::only_identifiers())?,
),
(
"only_return_basic_information",
Self::serialize_query_object(M::only_basic_information())?,
),
];
self.get_and_parse::<FileMetadata<M>, [(&str, String)]>(&query)
.await .await
} }
/// Returns the metadata for a single file identifier /// Returns the metadata for a single file identifier
#[tracing::instrument(skip(self), level = "debug")] #[tracing::instrument(skip(self), level = "debug")]
pub async fn get_file_metadata_by_identifier( pub async fn get_file_metadata_by_identifier<M: FileMetadataType>(
&self, &self,
id: FileIdentifier, id: FileIdentifier,
) -> Result<FileMetadataInfo> { ) -> Result<M::Response> {
let mut response = match id.clone() { let mut response = match id.clone() {
FileIdentifier::ID(id) => self.get_file_metadata(vec![id], vec![]).await?, FileIdentifier::ID(id) => self.get_file_metadata::<M>(vec![id], vec![]).await?,
FileIdentifier::Hash(hash) => self.get_file_metadata(vec![], vec![hash]).await?, FileIdentifier::Hash(hash) => self.get_file_metadata::<M>(vec![], vec![hash]).await?,
}; };
response response
@ -432,11 +479,13 @@ impl Client {
fn serialize_query_object<S: Serialize>(obj: S) -> Result<String> { fn serialize_query_object<S: Serialize>(obj: S) -> Result<String> {
#[cfg(feature = "json")] #[cfg(feature = "json")]
{ {
tracing::trace!("Serializing query to JSON");
serde_json::ser::to_string(&obj).map_err(|e| Error::Serialization(e.to_string())) serde_json::ser::to_string(&obj).map_err(|e| Error::Serialization(e.to_string()))
} }
#[cfg(feature = "cbor")] #[cfg(feature = "cbor")]
{ {
tracing::trace!("Serializing query to CBOR");
let mut buf = Vec::new(); let mut buf = Vec::new();
ciborium::ser::into_writer(&obj, &mut buf) ciborium::ser::into_writer(&obj, &mut buf)
.map_err(|e| Error::Serialization(e.to_string()))?; .map_err(|e| Error::Serialization(e.to_string()))?;
@ -471,11 +520,19 @@ impl Client {
#[tracing::instrument(skip(body), level = "trace")] #[tracing::instrument(skip(body), level = "trace")]
fn serialize_body<S: Serialize>(body: S) -> Result<Vec<u8>> { fn serialize_body<S: Serialize>(body: S) -> Result<Vec<u8>> {
let mut buf = Vec::new(); let mut buf = Vec::new();
#[cfg(feature = "cbor")]
ciborium::ser::into_writer(&body, &mut buf)
.map_err(|e| Error::Serialization(e.to_string()))?;
#[cfg(feature = "json")] #[cfg(feature = "json")]
serde_json::to_writer(&mut buf, &body).map_err(|e| Error::Serialization(e.to_string()))?; {
tracing::trace!("Serializing body to JSON");
serde_json::to_writer(&mut buf, &body)
.map_err(|e| Error::Serialization(e.to_string()))?;
}
#[cfg(feature = "cbor")]
{
tracing::trace!("Serializing body to CBOR");
ciborium::ser::into_writer(&body, &mut buf)
.map_err(|e| Error::Serialization(e.to_string()))?;
}
Ok(buf) Ok(buf)
} }
@ -525,11 +582,16 @@ impl Client {
let bytes = response.bytes().await?; let bytes = response.bytes().await?;
let reader = bytes.reader(); let reader = bytes.reader();
#[cfg(feature = "json")] #[cfg(feature = "json")]
let content = serde_json::from_reader::<_, T>(reader) let content = {
.map_err(|e| Error::Deserialization(e.to_string()))?; tracing::trace!("Deserializing content from JSON");
serde_json::from_reader::<_, T>(reader)
.map_err(|e| Error::Deserialization(e.to_string()))?
};
#[cfg(feature = "cbor")] #[cfg(feature = "cbor")]
let content = let content = {
ciborium::de::from_reader(reader).map_err(|e| Error::Deserialization(e.to_string()))?; tracing::trace!("Deserializing content from CBOR");
ciborium::de::from_reader(reader).map_err(|e| Error::Deserialization(e.to_string()))?
};
tracing::trace!("response content: {:?}", content); tracing::trace!("response content: {:?}", content);
Ok(content) Ok(content)

@ -1,3 +1,5 @@
use crate::wrapper::service::ServiceName;
use serde::Serialize;
use std::collections::HashMap; use std::collections::HashMap;
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@ -43,33 +45,6 @@ pub struct BasicHashList {
pub hashes: Vec<String>, pub hashes: Vec<String>,
} }
/// Metadata entry returned for each file by the file metadata endpoint
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileMetadataInfo {
    /// Numeric id of the file
    pub file_id: u64,
    /// Hash identifying the file
    pub hash: String,
    /// File size in bytes (absent when unknown)
    pub size: Option<u64>,
    /// Mime type of the file
    pub mime: String,
    /// File extension
    pub ext: String,
    /// Pixel width, for media that has dimensions
    pub width: Option<u32>,
    /// Pixel height, for media that has dimensions
    pub height: Option<u32>,
    /// Duration of audio/video — units not evident here; TODO confirm (ms?)
    pub duration: Option<u64>,
    /// Last modification time — presumably a unix timestamp; TODO confirm
    pub time_modified: Option<u64>,
    /// File services the file belongs to
    pub file_services: FileMetadataServices,
    pub has_audio: Option<bool>,
    /// Frame count for animations/videos
    pub num_frames: Option<u64>,
    pub num_words: Option<u64>,
    pub is_inbox: bool,
    pub is_local: bool,
    pub is_trashed: bool,
    /// URLs known to be associated with this file
    pub known_urls: Vec<String>,
    /// service name -> tag status -> tags; superseded by the key-based map below
    #[deprecated]
    pub service_names_to_statuses_to_tags: HashMap<String, HashMap<String, Vec<String>>>,
    /// service key -> tag status -> tags
    pub service_keys_to_statuses_to_tags: HashMap<String, HashMap<String, Vec<String>>>,
    /// service name -> tag status -> display tags; superseded by the key-based map below
    #[deprecated]
    pub service_names_to_statuses_to_display_tags: HashMap<String, HashMap<String, Vec<String>>>,
    /// service key -> tag status -> display tags
    pub service_keys_to_statuses_to_display_tags: HashMap<String, HashMap<String, Vec<String>>>,
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum FileIdentifier { pub enum FileIdentifier {
ID(u64), ID(u64),
@ -98,6 +73,119 @@ impl FileIdentifier {
} }
} }
/// A generic selection for one or multiple files
///
/// Unset/empty fields are omitted from the serialized request; more than
/// one field may be populated at once (e.g. both `hashes` and `file_ids`).
#[derive(Clone, Debug, Serialize, Default)]
pub struct FileSelection {
    // single-file selection by hash
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) hash: Option<String>,
    // multi-file selection by hashes
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub(crate) hashes: Vec<String>,
    // single-file selection by numeric id
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) file_id: Option<u64>,
    // multi-file selection by numeric ids
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub(crate) file_ids: Vec<u64>,
}
impl From<FileIdentifier> for FileSelection {
    /// Converts a single file identifier into the corresponding
    /// one-file selection
    fn from(id: FileIdentifier) -> Self {
        match id {
            FileIdentifier::ID(file_id) => Self {
                file_id: Some(file_id),
                ..Self::default()
            },
            FileIdentifier::Hash(hash) => Self {
                hash: Some(hash),
                ..Self::default()
            },
        }
    }
}
impl FileSelection {
    /// Creates a new single hash file selection
    pub fn by_hash<S: ToString>(hash: S) -> Self {
        let mut selection = Self::default();
        selection.hash = Some(hash.to_string());

        selection
    }

    /// Creates a new file selection with a single file id
    pub fn by_file_id(file_id: u64) -> Self {
        let mut selection = Self::default();
        selection.file_id = Some(file_id);

        selection
    }

    /// Creates a new file selection with several hashes
    pub fn by_hashes(mut hashes: Vec<String>) -> Self {
        match hashes.len() {
            // a single hash goes into the scalar field instead
            1 => Self::by_hash(hashes.remove(0)),
            _ => Self {
                hashes,
                ..Self::default()
            },
        }
    }

    /// Creates a new file selection with several IDs
    pub fn by_file_ids(mut file_ids: Vec<u64>) -> Self {
        match file_ids.len() {
            // a single id goes into the scalar field instead
            1 => Self::by_file_id(file_ids.remove(0)),
            _ => Self {
                file_ids,
                ..Self::default()
            },
        }
    }
}
/// A selection for a single file service
///
/// At most one of the two fields is set; `None` fields are omitted
/// from the serialized request.
#[derive(Clone, Debug, Serialize, Default)]
pub struct FileServiceSelection {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) file_service_name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) file_service_key: Option<String>,
}
impl FileServiceSelection {
    /// Creates a new file service selection by name
    pub fn by_name<S: ToString>(name: S) -> Self {
        let mut selection = Self::none();
        selection.file_service_name = Some(name.to_string());

        selection
    }

    /// Creates a new file service selection by service key
    pub fn by_key<S: ToString>(key: S) -> Self {
        let mut selection = Self::none();
        selection.file_service_key = Some(key.to_string());

        selection
    }

    /// Selects no service
    pub fn none() -> Self {
        Self::default()
    }
}
impl From<ServiceIdentifier> for FileServiceSelection {
    /// Converts a service identifier into the matching selection variant
    fn from(id: ServiceIdentifier) -> Self {
        match id {
            ServiceIdentifier::Name(n) => Self::by_name(n),
            ServiceIdentifier::Key(k) => Self::by_key(k),
        }
    }
}

impl From<ServiceName> for FileServiceSelection {
    /// Selects a file service by its name
    fn from(name: ServiceName) -> Self {
        Self::by_name(name)
    }
}
#[derive(Clone)] #[derive(Clone)]
pub struct FileRecord { pub struct FileRecord {
pub bytes: Vec<u8>, pub bytes: Vec<u8>,

@ -1,5 +1,5 @@
use crate::api_core::common::BasicServiceInfo; use crate::api_core::common::BasicServiceInfo;
use crate::api_core::Endpoint; use crate::api_core::endpoints::Endpoint;
use std::collections::HashMap; use std::collections::HashMap;
pub static SERVICE_TYPE_LOCAL_TAGS: &str = "local_tags"; pub static SERVICE_TYPE_LOCAL_TAGS: &str = "local_tags";

@ -1,5 +1,6 @@
use crate::api_core::common::BasicHashList; use crate::api_core::common::{FileSelection, FileServiceSelection};
use crate::api_core::Endpoint; use crate::api_core::endpoints::Endpoint;
use serde::Serialize;
pub static STATUS_IMPORT_SUCCESS: u8 = 1; pub static STATUS_IMPORT_SUCCESS: u8 = 1;
pub static STATUS_IMPORT_ALREADY_EXISTS: u8 = 2; pub static STATUS_IMPORT_ALREADY_EXISTS: u8 = 2;
@ -30,7 +31,14 @@ impl Endpoint for AddFile {
} }
} }
pub type DeleteFilesRequest = BasicHashList; #[derive(Clone, Debug, Serialize)]
pub struct DeleteFilesRequest {
#[serde(flatten)]
pub file_selection: FileSelection,
#[serde(flatten)]
pub service_selection: FileServiceSelection,
pub reason: Option<String>,
}
pub struct DeleteFiles; pub struct DeleteFiles;
@ -43,7 +51,14 @@ impl Endpoint for DeleteFiles {
} }
} }
pub type UndeleteFilesRequest = BasicHashList; #[derive(Clone, Debug, Serialize)]
pub struct UndeleteFilesRequest {
#[serde(flatten)]
pub file_selection: FileSelection,
#[serde(flatten)]
pub service_selection: FileServiceSelection,
}
pub struct UndeleteFiles; pub struct UndeleteFiles;
impl Endpoint for UndeleteFiles { impl Endpoint for UndeleteFiles {
@ -55,7 +70,14 @@ impl Endpoint for UndeleteFiles {
} }
} }
pub type ArchiveFilesRequest = BasicHashList; #[derive(Clone, Debug, Serialize)]
pub struct ArchiveFilesRequest {
#[serde(flatten)]
pub file_selection: FileSelection,
#[serde(flatten)]
pub service_selection: FileServiceSelection,
}
pub struct ArchiveFiles; pub struct ArchiveFiles;
impl Endpoint for ArchiveFiles { impl Endpoint for ArchiveFiles {
@ -67,11 +89,18 @@ impl Endpoint for ArchiveFiles {
} }
} }
pub type UnarchiveFilesRequest = BasicHashList; #[derive(Clone, Debug, Serialize)]
pub struct UnarchiveFilesRequest {
#[serde(flatten)]
pub file_selection: FileSelection,
#[serde(flatten)]
pub service_selection: FileServiceSelection,
}
pub struct UnarchiveFiles; pub struct UnarchiveFiles;
impl Endpoint for UnarchiveFiles { impl Endpoint for UnarchiveFiles {
type Request = UndeleteFilesRequest; type Request = UnarchiveFilesRequest;
type Response = (); type Response = ();
fn path() -> String { fn path() -> String {

@ -1,5 +1,5 @@
use crate::api_core::common::FileIdentifier; use crate::api_core::common::FileIdentifier;
use crate::api_core::Endpoint; use crate::api_core::endpoints::Endpoint;
use std::collections::HashMap; use std::collections::HashMap;
pub struct SetNotes; pub struct SetNotes;

@ -1,5 +1,5 @@
use crate::api_core::common::ServiceIdentifier; use crate::api_core::common::ServiceIdentifier;
use crate::api_core::Endpoint; use crate::api_core::endpoints::Endpoint;
use std::collections::HashMap; use std::collections::HashMap;
#[derive(Debug, Clone, Deserialize)] #[derive(Debug, Clone, Deserialize)]

@ -1,5 +1,5 @@
use crate::api_core::common::ServiceIdentifier; use crate::api_core::common::ServiceIdentifier;
use crate::api_core::Endpoint; use crate::api_core::endpoints::Endpoint;
use serde::Serialize; use serde::Serialize;
use std::collections::HashMap; use std::collections::HashMap;
@ -74,7 +74,7 @@ pub struct AddUrlRequest {
/// ///
/// Example: /// Example:
/// ``` /// ```
/// use hydrus_api::api_core::adding_urls::AddUrlRequestBuilder; /// use hydrus_api::api_core::endpoints::adding_urls::AddUrlRequestBuilder;
/// use hydrus_api::api_core::common::ServiceIdentifier; /// use hydrus_api::api_core::common::ServiceIdentifier;
/// ///
/// let request = AddUrlRequestBuilder::default() /// let request = AddUrlRequestBuilder::default()

@ -1,5 +1,5 @@
use crate::api_core::common::OptionalStringNumber; use crate::api_core::common::OptionalStringNumber;
use crate::api_core::Endpoint; use crate::api_core::endpoints::Endpoint;
#[derive(Clone, Debug, Deserialize)] #[derive(Clone, Debug, Deserialize)]
pub struct GetCookiesResponse { pub struct GetCookiesResponse {

@ -1,5 +1,5 @@
use crate::api_core::common::PageInformation; use crate::api_core::common::PageInformation;
use crate::api_core::Endpoint; use crate::api_core::endpoints::Endpoint;
#[derive(Clone, Debug, Deserialize)] #[derive(Clone, Debug, Deserialize)]
pub struct GetPagesResponse { pub struct GetPagesResponse {

@ -0,0 +1,20 @@
use serde::de::DeserializeOwned;
use serde::Serialize;
use std::fmt::Debug;
pub mod access_management;
pub mod adding_files;
pub mod adding_notes;
pub mod adding_tags;
pub mod adding_urls;
pub mod client_builder;
pub mod managing_cookies_and_http_headers;
pub mod managing_pages;
pub mod searching_and_fetching_files;
/// Common interface implemented by every API endpoint definition
pub(crate) trait Endpoint {
    /// Request body type serialized and sent to the endpoint
    type Request: Serialize + Debug;
    /// Type the endpoint's response is deserialized into
    type Response: DeserializeOwned + Debug;

    /// URL path of the endpoint, e.g. "get_files/file_metadata"
    fn path() -> String;
}

@ -1,5 +1,10 @@
use crate::api_core::common::FileMetadataInfo; use crate::api_core::common::FileMetadataServices;
use crate::api_core::Endpoint; use crate::api_core::endpoints::Endpoint;
use serde::de::DeserializeOwned;
use serde::Deserialize;
use std::collections::HashMap;
use std::fmt::Debug;
use std::marker::PhantomData;
pub mod file_sort_type { pub mod file_sort_type {
pub const SORT_FILE_SIZE: u8 = 0; pub const SORT_FILE_SIZE: u8 = 0;
@ -129,15 +134,15 @@ impl Endpoint for SearchFileHashes {
} }
#[derive(Clone, Debug, Default, Deserialize)] #[derive(Clone, Debug, Default, Deserialize)]
pub struct FileMetadataResponse { pub struct FileMetadataResponse<M: FileMetadataType> {
pub metadata: Vec<FileMetadataInfo>, pub metadata: Vec<M::Response>,
} }
pub struct FileMetadata; pub struct FileMetadata<M: FileMetadataType>(PhantomData<M>);
impl Endpoint for FileMetadata { impl<M: FileMetadataType> Endpoint for FileMetadata<M> {
type Request = (); type Request = ();
type Response = FileMetadataResponse; type Response = FileMetadataResponse<M>;
fn path() -> String { fn path() -> String {
String::from("get_files/file_metadata") String::from("get_files/file_metadata")
@ -169,3 +174,94 @@ where
Self::Tag(s.to_string()) Self::Tag(s.to_string())
} }
} }
/// Bare identifiers of a file: its numeric id and its hash
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileMetadataIdentifiers {
    pub file_id: u64,
    pub hash: String,
}
/// Basic per-file metadata: identifiers plus file properties,
/// without status flags or tag mappings
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileBasicMetadata {
    /// id and hash of the file, inlined into the surrounding object
    #[serde(flatten)]
    pub identifiers: FileMetadataIdentifiers,
    /// File size in bytes (absent when unknown)
    pub size: Option<u64>,
    /// Mime type of the file
    pub mime: String,
    /// File extension
    pub ext: String,
    /// Pixel width, for media that has dimensions
    pub width: Option<u32>,
    /// Pixel height, for media that has dimensions
    pub height: Option<u32>,
    /// Duration of audio/video — units not evident here; TODO confirm (ms?)
    pub duration: Option<u64>,
    /// Last modification time — presumably a unix timestamp; TODO confirm
    pub time_modified: Option<u64>,
    /// File services the file belongs to
    pub file_services: FileMetadataServices,
    pub has_audio: Option<bool>,
    /// Frame count for animations/videos
    pub num_frames: Option<u64>,
    pub num_words: Option<u64>,
}
/// Complete per-file metadata: everything in [FileBasicMetadata]
/// plus status flags, known urls and tag mappings
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileFullMetadata {
    /// Basic file properties, inlined into the surrounding object
    #[serde(flatten)]
    pub basic_metadata: FileBasicMetadata,
    pub is_inbox: bool,
    pub is_local: bool,
    pub is_trashed: bool,
    /// URLs known to be associated with this file
    pub known_urls: Vec<String>,
    /// service name -> tag status -> tags; superseded by the key-based map below
    #[deprecated]
    pub service_names_to_statuses_to_tags: HashMap<String, HashMap<String, Vec<String>>>,
    /// service key -> tag status -> tags
    pub service_keys_to_statuses_to_tags: HashMap<String, HashMap<String, Vec<String>>>,
    /// service name -> tag status -> display tags; superseded by the key-based map below
    #[deprecated]
    pub service_names_to_statuses_to_display_tags: HashMap<String, HashMap<String, Vec<String>>>,
    /// service key -> tag status -> display tags
    pub service_keys_to_statuses_to_display_tags: HashMap<String, HashMap<String, Vec<String>>>,
}
/// Selects, at the type level, how much metadata the file metadata
/// endpoint returns and what type each entry is parsed into
pub trait FileMetadataType: Clone + Debug {
    /// Type a single metadata entry deserializes into
    type Response: DeserializeOwned + Clone + Debug;

    /// Value sent as the `only_return_identifiers` query parameter
    fn only_identifiers() -> bool;
    /// Value sent as the `only_return_basic_information` query parameter
    fn only_basic_information() -> bool;
}
/// Marker requesting the complete metadata entry for each file
#[derive(Clone, Debug)]
pub struct FullMetadata;

impl FileMetadataType for FullMetadata {
    type Response = FileFullMetadata;

    fn only_identifiers() -> bool {
        false
    }

    fn only_basic_information() -> bool {
        false
    }
}

/// Marker requesting only basic metadata (no status flags or tags)
#[derive(Clone, Debug)]
pub struct BasicMetadata;

impl FileMetadataType for BasicMetadata {
    type Response = FileBasicMetadata;

    fn only_identifiers() -> bool {
        false
    }

    fn only_basic_information() -> bool {
        true
    }
}

/// Marker requesting only file ids and hashes
#[derive(Clone, Debug)]
pub struct Identifiers;

impl FileMetadataType for Identifiers {
    type Response = FileMetadataIdentifiers;

    fn only_identifiers() -> bool {
        true
    }

    fn only_basic_information() -> bool {
        false
    }
}

@ -1,24 +1,3 @@
use serde::de::DeserializeOwned;
use serde::Serialize;
use std::fmt::Debug;
pub mod access_management;
pub mod adding_files;
pub mod adding_tags;
pub mod adding_urls;
pub mod client; pub mod client;
pub mod client_builder;
pub mod common; pub mod common;
pub mod managing_cookies_and_http_headers; pub mod endpoints;
pub mod managing_pages;
pub mod searching_and_fetching_files;
pub mod adding_notes;
pub use searching_and_fetching_files::file_sort_type;
pub(crate) trait Endpoint {
type Request: Serialize + Debug;
type Response: DeserializeOwned + Debug;
fn path() -> String;
}

@ -56,7 +56,7 @@
//! ## Client Usage Example //! ## Client Usage Example
//! ``` //! ```
//! use hydrus_api::Client; //! use hydrus_api::Client;
//! use hydrus_api::api_core::adding_tags::{AddTagsRequestBuilder, TagAction}; //! use hydrus_api::api_core::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction};
//! use std::env; //! use std::env;
//! use hydrus_api::api_core::common::ServiceIdentifier; //! use hydrus_api::api_core::common::ServiceIdentifier;
//! # #[tokio::test] //! # #[tokio::test]

@ -1,5 +1,5 @@
use crate::api_core::common::OptionalStringNumber; use crate::api_core::common::OptionalStringNumber;
use crate::api_core::managing_cookies_and_http_headers::CookieBuilder; use crate::api_core::endpoints::managing_cookies_and_http_headers::CookieBuilder;
use crate::error::Result; use crate::error::Result;
use crate::Client; use crate::Client;
use std::time::{Duration, SystemTime, UNIX_EPOCH}; use std::time::{Duration, SystemTime, UNIX_EPOCH};

@ -0,0 +1,71 @@
use crate::api_core::common::{
FileIdentifier, FileSelection, FileServiceSelection, ServiceIdentifier,
};
use crate::error::Result;
use crate::Client;
/// Builder to incrementally assemble and execute a file deletion request
pub struct DeleteFilesBuilder {
    client: Client,
    // hashes of the files to delete
    hashes: Vec<String>,
    // numeric ids of the files to delete
    ids: Vec<u64>,
    // optional reason attached to the deletion
    reason: Option<String>,
    // optional file service the deletion is restricted to
    service: Option<ServiceIdentifier>,
}
impl DeleteFilesBuilder {
pub(crate) fn new(client: Client) -> Self {
Self {
client,
hashes: Vec::new(),
ids: Vec::new(),
reason: None,
service: None,
}
}
/// Adds a file to be deleted
pub fn add_file(mut self, identifier: FileIdentifier) -> Self {
match identifier {
FileIdentifier::ID(id) => self.ids.push(id),
FileIdentifier::Hash(hash) => self.hashes.push(hash),
}
self
}
/// Adds multiple files to be deleted
pub fn add_files(self, ids: Vec<FileIdentifier>) -> Self {
ids.into_iter().fold(self, |acc, id| acc.add_file(id))
}
/// Restricts deletion to a single file service
pub fn service(mut self, service: ServiceIdentifier) -> Self {
self.service = Some(service);
self
}
/// Adds a reason for why the file was deleted
pub fn reason<S: ToString>(mut self, reason: S) -> Self {
self.reason = Some(reason.to_string());
self
}
/// Deletes all files specified in this builder
pub async fn run(self) -> Result<()> {
let file_selection = FileSelection {
hashes: self.hashes,
file_ids: self.ids,
..Default::default()
};
let service_selection = self
.service
.map(FileServiceSelection::from)
.unwrap_or_default();
self.client
.delete_files(file_selection, service_selection, self.reason)
.await
}
}

@ -1,6 +1,6 @@
use crate::api_core::adding_files::{STATUS_IMPORT_FAILED, STATUS_IMPORT_VETOED};
use crate::api_core::adding_urls::AddUrlRequestBuilder;
use crate::api_core::common::ServiceIdentifier; use crate::api_core::common::ServiceIdentifier;
use crate::api_core::endpoints::adding_files::{STATUS_IMPORT_FAILED, STATUS_IMPORT_VETOED};
use crate::api_core::endpoints::adding_urls::AddUrlRequestBuilder;
use crate::error::{Error, Result}; use crate::error::{Error, Result};
use crate::utils::tag_list_to_string_list; use crate::utils::tag_list_to_string_list;
use crate::wrapper::hydrus_file::HydrusFile; use crate::wrapper::hydrus_file::HydrusFile;

@ -1,6 +1,7 @@
pub mod delete_files_builder;
pub mod import_builder; pub mod import_builder;
pub mod notes_builder;
pub mod or_chain_builder; pub mod or_chain_builder;
pub mod search_builder; pub mod search_builder;
pub mod tag_builder; pub mod tag_builder;
pub mod tagging_builder; pub mod tagging_builder;
pub mod notes_builder;

@ -1,4 +1,6 @@
use crate::api_core::searching_and_fetching_files::{FileSearchOptions, SearchQueryEntry}; use crate::api_core::endpoints::searching_and_fetching_files::{
FileSearchOptions, SearchQueryEntry,
};
use crate::error::Result; use crate::error::Result;
use crate::wrapper::hydrus_file::HydrusFile; use crate::wrapper::hydrus_file::HydrusFile;
use crate::wrapper::or_chain::OrChain; use crate::wrapper::or_chain::OrChain;

@ -1,5 +1,5 @@
use crate::api_core::adding_tags::{AddTagsRequestBuilder, TagAction};
use crate::api_core::common::ServiceIdentifier; use crate::api_core::common::ServiceIdentifier;
use crate::api_core::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction};
use crate::error::Result; use crate::error::Result;
use crate::wrapper::tag::Tag; use crate::wrapper::tag::Tag;
use crate::Client; use crate::Client;

@ -1,6 +1,8 @@
use crate::api_core::common::FileIdentifier; use crate::api_core::common::FileIdentifier;
use crate::api_core::endpoints::searching_and_fetching_files::FullMetadata;
use crate::error::Result; use crate::error::Result;
use crate::wrapper::address::Address; use crate::wrapper::address::Address;
use crate::wrapper::builders::delete_files_builder::DeleteFilesBuilder;
use crate::wrapper::builders::import_builder::ImportBuilder; use crate::wrapper::builders::import_builder::ImportBuilder;
use crate::wrapper::builders::search_builder::SearchBuilder; use crate::wrapper::builders::search_builder::SearchBuilder;
use crate::wrapper::builders::tagging_builder::TaggingBuilder; use crate::wrapper::builders::tagging_builder::TaggingBuilder;
@ -71,12 +73,17 @@ impl Hydrus {
pub async fn file(&self, identifier: FileIdentifier) -> Result<HydrusFile> { pub async fn file(&self, identifier: FileIdentifier) -> Result<HydrusFile> {
let metadata = self let metadata = self
.client .client
.get_file_metadata_by_identifier(identifier) .get_file_metadata_by_identifier::<FullMetadata>(identifier)
.await?; .await?;
Ok(HydrusFile::from_metadata(self.client.clone(), metadata)) Ok(HydrusFile::from_metadata(self.client.clone(), metadata))
} }
    /// Creates a builder to delete files
    ///
    /// NOTE(review): this fn is declared `async` but awaits nothing — it only
    /// clones the client. Dropping `async` would be cleaner but would break
    /// callers that `.await` it, so it is left unchanged here; consider
    /// removing it in the next breaking release.
    pub async fn delete(&self) -> DeleteFilesBuilder {
        DeleteFilesBuilder::new(self.client.clone())
    }
/// Starts a request to bulk add tags to files /// Starts a request to bulk add tags to files
pub fn tagging(&self) -> TaggingBuilder { pub fn tagging(&self) -> TaggingBuilder {
TaggingBuilder::new(self.client.clone()) TaggingBuilder::new(self.client.clone())

@ -1,7 +1,11 @@
use crate::api_core::adding_tags::{AddTagsRequestBuilder, TagAction}; use crate::api_core::common::{
use crate::api_core::common::{FileIdentifier, FileMetadataInfo, FileRecord, ServiceIdentifier}; FileIdentifier, FileRecord, FileSelection, FileServiceSelection, ServiceIdentifier,
};
use crate::api_core::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction};
use crate::api_core::endpoints::searching_and_fetching_files::{FileFullMetadata, FullMetadata};
use crate::error::{Error, Result}; use crate::error::{Error, Result};
use crate::utils::tag_list_to_string_list; use crate::utils::tag_list_to_string_list;
use crate::wrapper::builders::delete_files_builder::DeleteFilesBuilder;
use crate::wrapper::builders::notes_builder::AddNotesBuilder; use crate::wrapper::builders::notes_builder::AddNotesBuilder;
use crate::wrapper::service::ServiceName; use crate::wrapper::service::ServiceName;
use crate::wrapper::tag::Tag; use crate::wrapper::tag::Tag;
@ -35,7 +39,7 @@ pub struct HydrusFile {
pub(crate) client: Client, pub(crate) client: Client,
pub id: FileIdentifier, pub id: FileIdentifier,
pub status: FileStatus, pub status: FileStatus,
pub(crate) metadata: Option<FileMetadataInfo>, pub(crate) metadata: Option<FileFullMetadata>,
} }
impl HydrusFile { impl HydrusFile {
@ -61,7 +65,7 @@ impl HydrusFile {
} }
} }
pub(crate) fn from_metadata(client: Client, metadata: FileMetadataInfo) -> Self { pub(crate) fn from_metadata(client: Client, metadata: FileFullMetadata) -> Self {
let status = if metadata.is_trashed { let status = if metadata.is_trashed {
FileStatus::Deleted FileStatus::Deleted
} else { } else {
@ -70,7 +74,7 @@ impl HydrusFile {
Self { Self {
client, client,
id: FileIdentifier::Hash(metadata.hash.clone()), id: FileIdentifier::Hash(metadata.basic_metadata.identifiers.hash.clone()),
status, status,
metadata: Some(metadata), metadata: Some(metadata),
} }
@ -89,7 +93,7 @@ impl HydrusFile {
match &self.id { match &self.id {
FileIdentifier::ID(_) => { FileIdentifier::ID(_) => {
let metadata = self.metadata().await?; let metadata = self.metadata().await?;
Ok(metadata.hash.clone()) Ok(metadata.basic_metadata.identifiers.hash.clone())
} }
FileIdentifier::Hash(hash) => Ok(hash.clone()), FileIdentifier::Hash(hash) => Ok(hash.clone()),
} }
@ -99,17 +103,18 @@ impl HydrusFile {
pub async fn size(&mut self) -> Result<Option<u64>> { pub async fn size(&mut self) -> Result<Option<u64>> {
let metadata = self.metadata().await?; let metadata = self.metadata().await?;
Ok(metadata.size.clone()) Ok(metadata.basic_metadata.size.clone())
} }
/// Returns the mime of the file /// Returns the mime of the file
pub async fn mime(&mut self) -> Result<Mime> { pub async fn mime(&mut self) -> Result<Mime> {
let metadata = self.metadata().await?; let metadata = self.metadata().await?;
let mime = metadata let mime = metadata
.basic_metadata
.mime .mime
.as_str() .as_str()
.parse() .parse()
.map_err(|_| Error::InvalidMime(metadata.mime.clone()))?; .map_err(|_| Error::InvalidMime(metadata.basic_metadata.mime.clone()))?;
Ok(mime) Ok(mime)
} }
@ -118,13 +123,16 @@ impl HydrusFile {
pub async fn ext(&mut self) -> Result<String> { pub async fn ext(&mut self) -> Result<String> {
let metadata = self.metadata().await?; let metadata = self.metadata().await?;
Ok(metadata.ext.clone()) Ok(metadata.basic_metadata.ext.clone())
} }
/// Returns the dimensions of the file in pixels /// Returns the dimensions of the file in pixels
pub async fn dimensions(&mut self) -> Result<Option<(u32, u32)>> { pub async fn dimensions(&mut self) -> Result<Option<(u32, u32)>> {
let metadata = self.metadata().await?; let metadata = self.metadata().await?;
if let (Some(width), Some(height)) = (&metadata.width, &metadata.height) { if let (Some(width), Some(height)) = (
&metadata.basic_metadata.width,
&metadata.basic_metadata.height,
) {
Ok(Some((*width, *height))) Ok(Some((*width, *height)))
} else { } else {
Ok(None) Ok(None)
@ -135,21 +143,21 @@ impl HydrusFile {
pub async fn duration(&mut self) -> Result<Option<u64>> { pub async fn duration(&mut self) -> Result<Option<u64>> {
let metadata = self.metadata().await?; let metadata = self.metadata().await?;
Ok(metadata.duration.clone()) Ok(metadata.basic_metadata.duration.clone())
} }
/// Returns the number of frames of the file if it's a video /// Returns the number of frames of the file if it's a video
pub async fn num_frames(&mut self) -> Result<Option<u64>> { pub async fn num_frames(&mut self) -> Result<Option<u64>> {
let metadata = self.metadata().await?; let metadata = self.metadata().await?;
Ok(metadata.num_frames.clone()) Ok(metadata.basic_metadata.num_frames.clone())
} }
/// Returns if the file has audio /// Returns if the file has audio
pub async fn has_audio(&mut self) -> Result<bool> { pub async fn has_audio(&mut self) -> Result<bool> {
let metadata = self.metadata().await?; let metadata = self.metadata().await?;
Ok(metadata.has_audio.unwrap_or(false)) Ok(metadata.basic_metadata.has_audio.unwrap_or(false))
} }
/// Returns if the file is currently in the inbox /// Returns if the file is currently in the inbox
@ -184,6 +192,7 @@ impl HydrusFile {
pub async fn time_modified(&mut self) -> Result<Option<NaiveDateTime>> { pub async fn time_modified(&mut self) -> Result<Option<NaiveDateTime>> {
let metadata = self.metadata().await?; let metadata = self.metadata().await?;
let naive_time_modified = metadata let naive_time_modified = metadata
.basic_metadata
.time_modified .time_modified
.map(|m| Utc.timestamp_millis(m as i64).naive_utc()); .map(|m| Utc.timestamp_millis(m as i64).naive_utc());
@ -197,12 +206,14 @@ impl HydrusFile {
) -> Result<Option<NaiveDateTime>> { ) -> Result<Option<NaiveDateTime>> {
let metadata = self.metadata().await?; let metadata = self.metadata().await?;
let naive_time_imported = metadata let naive_time_imported = metadata
.basic_metadata
.file_services .file_services
.current .current
.get(service_key.as_ref()) .get(service_key.as_ref())
.map(|s| s.time_imported) .map(|s| s.time_imported)
.or_else(|| { .or_else(|| {
metadata metadata
.basic_metadata
.file_services .file_services
.deleted .deleted
.get(service_key.as_ref()) .get(service_key.as_ref())
@ -220,6 +231,7 @@ impl HydrusFile {
) -> Result<Option<NaiveDateTime>> { ) -> Result<Option<NaiveDateTime>> {
let metadata = self.metadata().await?; let metadata = self.metadata().await?;
let naive_time_deleted = metadata let naive_time_deleted = metadata
.basic_metadata
.file_services .file_services
.deleted .deleted
.get(service_key.as_ref()) .get(service_key.as_ref())
@ -229,6 +241,41 @@ impl HydrusFile {
Ok(naive_time_deleted) Ok(naive_time_deleted)
} }
/// Creates a request builder to delete the file
pub fn delete(&mut self) -> DeleteFilesBuilder {
self.metadata = None;
DeleteFilesBuilder::new(self.client.clone()).add_file(self.id.clone())
}
/// Undeletes the file for the given service or all services
/// if `FileServiceSelection::none` is passed
pub async fn undelete(&mut self, service_selection: FileServiceSelection) -> Result<()> {
let hash = self.hash().await?;
self.metadata = None;
self.client
.undelete_files(FileSelection::by_hash(hash), service_selection)
.await
}
/// Archives the file in all passed file services or all configured services
/// if no selection is passed
pub async fn archive(&mut self, service_selection: FileServiceSelection) -> Result<()> {
let hash = self.hash().await?;
self.metadata = None;
self.client
.archive_files(FileSelection::by_hash(hash), service_selection)
.await
}
/// Unarchives the file for the given services
pub async fn unarchive(&mut self, service_selection: FileServiceSelection) -> Result<()> {
let hash = self.hash().await?;
self.metadata = None;
self.client
.unarchive_files(FileSelection::by_hash(hash), service_selection)
.await
}
/// Associates the file with a list of urls /// Associates the file with a list of urls
pub async fn associate_urls(&mut self, urls: Vec<String>) -> Result<()> { pub async fn associate_urls(&mut self, urls: Vec<String>) -> Result<()> {
let hash = self.hash().await?; let hash = self.hash().await?;
@ -347,11 +394,11 @@ impl HydrusFile {
/// Returns the metadata for the given file /// Returns the metadata for the given file
/// if there's already known metadata about the file it uses that /// if there's already known metadata about the file it uses that
async fn metadata(&mut self) -> Result<&FileMetadataInfo> { async fn metadata(&mut self) -> Result<&FileFullMetadata> {
if self.metadata.is_none() { if self.metadata.is_none() {
let metadata = self let metadata = self
.client .client
.get_file_metadata_by_identifier(self.id.clone()) .get_file_metadata_by_identifier::<FullMetadata>(self.id.clone())
.await?; .await?;
self.status = if metadata.is_trashed { self.status = if metadata.is_trashed {
FileStatus::Deleted FileStatus::Deleted

@ -1,4 +1,5 @@
use crate::api_core::common::{FileIdentifier, PageInformation}; use crate::api_core::common::{FileIdentifier, PageInformation};
use crate::api_core::endpoints::searching_and_fetching_files::Identifiers;
use crate::error::Result; use crate::error::Result;
use crate::utils::split_file_identifiers_into_hashes_and_ids; use crate::utils::split_file_identifiers_into_hashes_and_ids;
use crate::Client; use crate::Client;
@ -56,7 +57,7 @@ impl HydrusPage {
for id in ids { for id in ids {
let metadata = self let metadata = self
.client .client
.get_file_metadata_by_identifier(FileIdentifier::ID(id)) .get_file_metadata_by_identifier::<Identifiers>(FileIdentifier::ID(id))
.await?; .await?;
hashes.push(metadata.hash); hashes.push(metadata.hash);
} }

@ -1,5 +1,5 @@
use crate::api_core::access_management::GetServicesResponse; use crate::api_core::endpoints::access_management::GetServicesResponse;
use crate::api_core::access_management::{ use crate::api_core::endpoints::access_management::{
SERVICE_TYPE_ALL_KNOWN_FILES, SERVICE_TYPE_ALL_KNOWN_TAGS, SERVICE_TYPE_ALL_LOCAL_FILES, SERVICE_TYPE_ALL_KNOWN_FILES, SERVICE_TYPE_ALL_KNOWN_TAGS, SERVICE_TYPE_ALL_LOCAL_FILES,
SERVICE_TYPE_FILE_REPOSITORIES, SERVICE_TYPE_LOCAL_FILES, SERVICE_TYPE_LOCAL_TAGS, SERVICE_TYPE_FILE_REPOSITORIES, SERVICE_TYPE_LOCAL_FILES, SERVICE_TYPE_LOCAL_TAGS,
SERVICE_TYPE_TAG_REPOSITORIES, SERVICE_TYPE_TRASH, SERVICE_TYPE_TAG_REPOSITORIES, SERVICE_TYPE_TRASH,

@ -1,4 +1,4 @@
use crate::api_core::adding_urls::{ use crate::api_core::endpoints::adding_urls::{
URL_TYPE_FILE, URL_TYPE_GALLERY, URL_TYPE_POST, URL_TYPE_WATCHABLE, URL_TYPE_FILE, URL_TYPE_GALLERY, URL_TYPE_POST, URL_TYPE_WATCHABLE,
}; };
use crate::error::Result; use crate::error::Result;

@ -1,6 +1,8 @@
use crate::common; use crate::common;
use crate::common::create_testdata; use crate::common::create_testdata;
use crate::common::test_data::get_test_hashes; use crate::common::test_data::{get_test_hashes, TEST_HASH_1};
use hydrus_api::api_core::common::FileSelection;
use hydrus_api::wrapper::service::ServiceName;
#[tokio::test] #[tokio::test]
async fn it_adds_files() { async fn it_adds_files() {
@ -22,26 +24,52 @@ async fn it_adds_binary_files() {
#[tokio::test] #[tokio::test]
async fn it_deletes_files() { async fn it_deletes_files() {
let client = common::get_client(); let client = common::get_client();
client.delete_files(get_test_hashes()).await.unwrap(); create_testdata(&client).await;
client
.delete_files(
FileSelection::by_hashes(get_test_hashes()),
ServiceName::my_files().into(),
Some("Test".to_string()),
)
.await
.unwrap();
} }
#[tokio::test] #[tokio::test]
async fn it_undeletes_files() { async fn it_undeletes_files() {
let client = common::get_client(); let client = common::get_client();
create_testdata(&client).await; create_testdata(&client).await;
client.undelete_files(get_test_hashes()).await.unwrap(); client
.undelete_files(
FileSelection::by_hashes(get_test_hashes()),
ServiceName::my_files().into(),
)
.await
.unwrap();
} }
#[tokio::test] #[tokio::test]
async fn it_archives_files() { async fn it_archives_files() {
let client = common::get_client(); let client = common::get_client();
create_testdata(&client).await; create_testdata(&client).await;
client.archive_files(get_test_hashes()).await.unwrap(); client
.archive_files(
FileSelection::by_hashes(vec![TEST_HASH_1.to_string()]),
ServiceName::my_files().into(),
)
.await
.unwrap();
} }
#[tokio::test] #[tokio::test]
async fn it_unarchives_files() { async fn it_unarchives_files() {
let client = common::get_client(); let client = common::get_client();
create_testdata(&client).await; create_testdata(&client).await;
client.unarchive_files(get_test_hashes()).await.unwrap(); client
.unarchive_files(
FileSelection::by_hashes(get_test_hashes()),
ServiceName::my_files().into(),
)
.await
.unwrap();
} }

@ -1,7 +1,7 @@
use super::super::common; use super::super::common;
use crate::common::test_data::EMPTY_HASH; use crate::common::test_data::EMPTY_HASH;
use hydrus_api::api_core::adding_tags::{AddTagsRequestBuilder, TagAction};
use hydrus_api::api_core::common::ServiceIdentifier; use hydrus_api::api_core::common::ServiceIdentifier;
use hydrus_api::api_core::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction};
#[tokio::test] #[tokio::test]
async fn it_cleans_tags() { async fn it_cleans_tags() {

@ -1,7 +1,7 @@
use super::super::common; use super::super::common;
use crate::common::test_data::{get_test_hashes, get_test_urls, TEST_URL_1}; use crate::common::test_data::{get_test_hashes, get_test_urls, TEST_URL_1};
use hydrus_api::api_core::adding_urls::{AddUrlRequestBuilder, URL_TYPE_POST};
use hydrus_api::api_core::common::ServiceIdentifier; use hydrus_api::api_core::common::ServiceIdentifier;
use hydrus_api::api_core::endpoints::adding_urls::{AddUrlRequestBuilder, URL_TYPE_POST};
#[tokio::test] #[tokio::test]
async fn it_returns_files_for_an_url() { async fn it_returns_files_for_an_url() {

@ -1,5 +1,5 @@
use super::super::common; use super::super::common;
use hydrus_api::api_core::managing_cookies_and_http_headers::CookieBuilder; use hydrus_api::api_core::endpoints::managing_cookies_and_http_headers::CookieBuilder;
#[tokio::test] #[tokio::test]
async fn it_returns_cookies_for_a_domain() { async fn it_returns_cookies_for_a_domain() {

@ -1,7 +1,9 @@
use super::super::common; use super::super::common;
use hydrus_api::api_core::common::FileIdentifier; use hydrus_api::api_core::common::FileIdentifier;
use hydrus_api::api_core::file_sort_type::SORT_FILE_PIXEL_COUNT; use hydrus_api::api_core::endpoints::searching_and_fetching_files::file_sort_type::SORT_FILE_PIXEL_COUNT;
use hydrus_api::api_core::searching_and_fetching_files::{FileSearchOptions, SearchQueryEntry}; use hydrus_api::api_core::endpoints::searching_and_fetching_files::{
BasicMetadata, FileSearchOptions, FullMetadata, Identifiers, SearchQueryEntry,
};
#[tokio::test] #[tokio::test]
async fn is_searches_files() { async fn is_searches_files() {
@ -45,7 +47,7 @@ async fn is_searches_file_hashes() {
async fn it_fetches_file_metadata() { async fn it_fetches_file_metadata() {
let client = common::get_client(); let client = common::get_client();
client client
.get_file_metadata( .get_file_metadata::<FullMetadata>(
vec![], vec![],
vec!["0000000000000000000000000000000000000000000000000000000000000000".to_string()], vec!["0000000000000000000000000000000000000000000000000000000000000000".to_string()],
) )
@ -56,8 +58,18 @@ async fn it_fetches_file_metadata() {
#[tokio::test] #[tokio::test]
async fn it_fetches_file_metadata_by_id() { async fn it_fetches_file_metadata_by_id() {
let client = common::get_client(); let client = common::get_client();
let response = client.get_file_metadata(vec![1], vec![]).await; let response = client
assert!(response.is_ok()); // Even if the file doesn't exist it still returns some information about it .get_file_metadata::<Identifiers>(vec![1], vec![])
.await;
assert!(response.is_ok());
let response = client
.get_file_metadata::<BasicMetadata>(vec![1], vec![])
.await;
assert!(response.is_ok());
let response = client
.get_file_metadata::<FullMetadata>(vec![1], vec![])
.await;
assert!(response.is_ok());
} }
#[tokio::test] #[tokio::test]

@ -1,5 +1,5 @@
use hydrus_api::api_core::adding_urls::AddUrlRequestBuilder;
use hydrus_api::api_core::client::Client; use hydrus_api::api_core::client::Client;
use hydrus_api::api_core::endpoints::adding_urls::AddUrlRequestBuilder;
use hydrus_api::Hydrus; use hydrus_api::Hydrus;
use std::env; use std::env;
use std::sync::{Arc, Mutex, MutexGuard}; use std::sync::{Arc, Mutex, MutexGuard};

@ -1,14 +1,18 @@
use super::super::common; use super::super::common;
use hydrus_api::api_core::adding_tags::TagAction; use crate::common::test_data::TEST_HASH_2;
use crate::common::{create_testdata, get_client};
use hydrus_api::api_core::common::FileIdentifier; use hydrus_api::api_core::common::FileIdentifier;
use hydrus_api::api_core::endpoints::adding_tags::TagAction;
use hydrus_api::wrapper::hydrus_file::HydrusFile; use hydrus_api::wrapper::hydrus_file::HydrusFile;
use hydrus_api::wrapper::service::ServiceName; use hydrus_api::wrapper::service::ServiceName;
async fn get_file() -> HydrusFile { async fn get_file() -> HydrusFile {
let client = get_client();
create_testdata(&client).await;
let hydrus = common::get_hydrus(); let hydrus = common::get_hydrus();
hydrus hydrus
.file(FileIdentifier::hash( .file(FileIdentifier::hash(
"277a138cd1ee79fc1fdb2869c321b848d4861e45b82184487139ef66dd40b62d", // needs to exist TEST_HASH_2, // needs to exist
)) ))
.await .await
.unwrap() .unwrap()
@ -102,9 +106,19 @@ async fn it_retrieves_content() {
async fn it_retrieves_metadata() { async fn it_retrieves_metadata() {
let mut file = get_file().await; let mut file = get_file().await;
assert!(file.dimensions().await.unwrap().is_some()); assert!(file.dimensions().await.unwrap().is_some());
assert!(file.stored_locally().await.unwrap());
assert!(file.duration().await.unwrap().is_none()); assert!(file.duration().await.unwrap().is_none());
assert!(file.time_modified().await.is_ok()); assert!(file.time_modified().await.is_ok());
assert!(file.time_deleted("000").await.is_ok()); assert!(file.time_deleted("000").await.is_ok());
assert!(file.time_imported("000").await.is_ok()); assert!(file.time_imported("000").await.is_ok());
} }
#[tokio::test]
async fn it_deletes() {
let mut file = get_file().await;
file.delete()
.reason("I just don't like that file")
.run()
.await
.unwrap();
file.undelete(ServiceName::my_files().into()).await.unwrap();
}

@ -1,5 +1,5 @@
use super::super::common; use super::super::common;
use hydrus_api::api_core::adding_tags::TagAction; use hydrus_api::api_core::endpoints::adding_tags::TagAction;
use hydrus_api::wrapper::builders::or_chain_builder::OrChainBuilder; use hydrus_api::wrapper::builders::or_chain_builder::OrChainBuilder;
use hydrus_api::wrapper::builders::search_builder::SortType; use hydrus_api::wrapper::builders::search_builder::SortType;
use hydrus_api::wrapper::builders::tag_builder::TagBuilder; use hydrus_api::wrapper::builders::tag_builder::TagBuilder;

Loading…
Cancel
Save