Merge pull request #15 from Trivernis/develop

Develop
main
Julius Riegel 2 years ago committed by GitHub
commit 2ad46cc423
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -20,5 +20,5 @@ jobs:
CRATES_IO_TOKEN: ${{secrets.CRATES_IO_TOKEN}}
run: cargo login "$CRATES_IO_TOKEN"
- name: Publish to crates.io
run: cargo publish --all-features
- name: Publish json to crates.io
run: cargo publish

@ -1,6 +1,6 @@
[package]
name = "hydrus-api"
version = "0.8.0"
version = "0.9.0"
authors = ["trivernis <trivernis@protonmail.com>"]
edition = "2018"
license = "Apache-2.0"

@ -1,32 +1,38 @@
use crate::api_core::access_management::{
use crate::api_core::common::{
FileIdentifier, FileRecord, FileSelection, FileServiceSelection, OptionalStringNumber,
};
use crate::api_core::endpoints::access_management::{
ApiVersion, ApiVersionResponse, GetServices, GetServicesResponse, SessionKey,
SessionKeyResponse, VerifyAccessKey, VerifyAccessKeyResponse,
};
use crate::api_core::adding_files::{
use crate::api_core::endpoints::adding_files::{
AddFile, AddFileRequest, AddFileResponse, ArchiveFiles, ArchiveFilesRequest, DeleteFiles,
DeleteFilesRequest, UnarchiveFiles, UnarchiveFilesRequest, UndeleteFiles, UndeleteFilesRequest,
};
use crate::api_core::adding_notes::{DeleteNotes, DeleteNotesRequest, SetNotes, SetNotesRequest};
use crate::api_core::adding_tags::{AddTags, AddTagsRequest, CleanTags, CleanTagsResponse};
use crate::api_core::adding_urls::{
use crate::api_core::endpoints::adding_notes::{
DeleteNotes, DeleteNotesRequest, SetNotes, SetNotesRequest,
};
use crate::api_core::endpoints::adding_tags::{
AddTags, AddTagsRequest, CleanTags, CleanTagsResponse,
};
use crate::api_core::endpoints::adding_urls::{
AddUrl, AddUrlRequest, AddUrlResponse, AssociateUrl, AssociateUrlRequest, GetUrlFiles,
GetUrlFilesResponse, GetUrlInfo, GetUrlInfoResponse,
};
use crate::api_core::client_builder::ClientBuilder;
use crate::api_core::common::{FileIdentifier, FileMetadataInfo, FileRecord, OptionalStringNumber};
use crate::api_core::managing_cookies_and_http_headers::{
use crate::api_core::endpoints::client_builder::ClientBuilder;
use crate::api_core::endpoints::managing_cookies_and_http_headers::{
GetCookies, GetCookiesResponse, SetCookies, SetCookiesRequest, SetUserAgent,
SetUserAgentRequest,
};
use crate::api_core::managing_pages::{
use crate::api_core::endpoints::managing_pages::{
AddFiles, AddFilesRequest, FocusPage, FocusPageRequest, GetPageInfo, GetPageInfoResponse,
GetPages, GetPagesResponse,
};
use crate::api_core::searching_and_fetching_files::{
FileMetadata, FileMetadataResponse, FileSearchOptions, GetFile, SearchFileHashes,
SearchFileHashesResponse, SearchFiles, SearchFilesResponse, SearchQueryEntry,
use crate::api_core::endpoints::searching_and_fetching_files::{
FileMetadata, FileMetadataResponse, FileMetadataType, FileSearchOptions, GetFile,
SearchFileHashes, SearchFileHashesResponse, SearchFiles, SearchFilesResponse, SearchQueryEntry,
};
use crate::api_core::Endpoint;
use crate::api_core::endpoints::Endpoint;
use crate::error::{Error, Result};
use bytes::Buf;
use reqwest::Response;
@ -106,36 +112,66 @@ impl Client {
/// Moves files with matching hashes to the trash
#[tracing::instrument(skip(self), level = "debug")]
pub async fn delete_files(&self, hashes: Vec<String>) -> Result<()> {
self.post::<DeleteFiles>(DeleteFilesRequest { hashes })
.await?;
pub async fn delete_files(
    &self,
    files: FileSelection,
    service: FileServiceSelection,
    reason: Option<String>,
) -> Result<()> {
    // Both selections are flattened into the request body; `reason` is an
    // optional human-readable deletion reason forwarded to the endpoint.
    self.post::<DeleteFiles>(DeleteFilesRequest {
        file_selection: files,
        service_selection: service,
        reason,
    })
    .await?;
    Ok(())
}
/// Pulls files out of the trash by hash
#[tracing::instrument(skip(self), level = "debug")]
pub async fn undelete_files(&self, hashes: Vec<String>) -> Result<()> {
self.post::<UndeleteFiles>(UndeleteFilesRequest { hashes })
.await?;
pub async fn undelete_files(
    &self,
    files: FileSelection,
    service: FileServiceSelection,
) -> Result<()> {
    // `files` picks the targets by hash(es) or id(s); `service` restricts
    // the operation to a single file service.
    self.post::<UndeleteFiles>(UndeleteFilesRequest {
        file_selection: files,
        service_selection: service,
    })
    .await?;
    Ok(())
}
/// Moves files from the inbox into the archive
#[tracing::instrument(skip(self), level = "debug")]
pub async fn archive_files(&self, hashes: Vec<String>) -> Result<()> {
self.post::<ArchiveFiles>(ArchiveFilesRequest { hashes })
.await?;
pub async fn archive_files(
    &self,
    files: FileSelection,
    service: FileServiceSelection,
) -> Result<()> {
    // `files` picks the targets by hash(es) or id(s); `service` restricts
    // the operation to a single file service.
    self.post::<ArchiveFiles>(ArchiveFilesRequest {
        file_selection: files,
        service_selection: service,
    })
    .await?;
    Ok(())
}
/// Moves files from the archive into the inbox
#[tracing::instrument(skip(self), level = "debug")]
pub async fn unarchive_files(&self, hashes: Vec<String>) -> Result<()> {
self.post::<UnarchiveFiles>(UnarchiveFilesRequest { hashes })
.await?;
pub async fn unarchive_files(
    &self,
    files: FileSelection,
    service: FileServiceSelection,
) -> Result<()> {
    // `files` picks the targets by hash(es) or id(s); `service` restricts
    // the operation to a single file service.
    self.post::<UnarchiveFiles>(UnarchiveFilesRequest {
        file_selection: files,
        service_selection: service,
    })
    .await?;
    Ok(())
}
@ -180,36 +216,47 @@ impl Client {
) -> Result<SearchFileHashesResponse> {
let mut args = options.into_query_args();
args.push(("tags", Self::serialize_query_object(query)?));
args.push(("return_hashes", String::from("true")));
args.push(("return_hashes", Self::serialize_query_object(true)?));
self.get_and_parse::<SearchFileHashes, [(&str, String)]>(&args)
.await
}
/// Returns the metadata for a given list of file_ids or hashes
#[tracing::instrument(skip(self), level = "debug")]
pub async fn get_file_metadata(
pub async fn get_file_metadata<M: FileMetadataType>(
&self,
file_ids: Vec<u64>,
hashes: Vec<String>,
) -> Result<FileMetadataResponse> {
let query = if file_ids.len() > 0 {
) -> Result<FileMetadataResponse<M>> {
let id_query = if file_ids.len() > 0 {
("file_ids", Self::serialize_query_object(file_ids)?)
} else {
("hashes", Self::serialize_query_object(hashes)?)
};
self.get_and_parse::<FileMetadata, [(&str, String)]>(&[query])
let query = [
id_query,
(
"only_return_identifiers",
Self::serialize_query_object(M::only_identifiers())?,
),
(
"only_return_basic_information",
Self::serialize_query_object(M::only_basic_information())?,
),
];
self.get_and_parse::<FileMetadata<M>, [(&str, String)]>(&query)
.await
}
/// Returns the metadata for a single file identifier
#[tracing::instrument(skip(self), level = "debug")]
pub async fn get_file_metadata_by_identifier(
pub async fn get_file_metadata_by_identifier<M: FileMetadataType>(
&self,
id: FileIdentifier,
) -> Result<FileMetadataInfo> {
) -> Result<M::Response> {
let mut response = match id.clone() {
FileIdentifier::ID(id) => self.get_file_metadata(vec![id], vec![]).await?,
FileIdentifier::Hash(hash) => self.get_file_metadata(vec![], vec![hash]).await?,
FileIdentifier::ID(id) => self.get_file_metadata::<M>(vec![id], vec![]).await?,
FileIdentifier::Hash(hash) => self.get_file_metadata::<M>(vec![], vec![hash]).await?,
};
response
@ -432,11 +479,13 @@ impl Client {
fn serialize_query_object<S: Serialize>(obj: S) -> Result<String> {
#[cfg(feature = "json")]
{
tracing::trace!("Serializing query to JSON");
serde_json::ser::to_string(&obj).map_err(|e| Error::Serialization(e.to_string()))
}
#[cfg(feature = "cbor")]
{
tracing::trace!("Serializing query to CBOR");
let mut buf = Vec::new();
ciborium::ser::into_writer(&obj, &mut buf)
.map_err(|e| Error::Serialization(e.to_string()))?;
@ -471,11 +520,19 @@ impl Client {
#[tracing::instrument(skip(body), level = "trace")]
fn serialize_body<S: Serialize>(body: S) -> Result<Vec<u8>> {
let mut buf = Vec::new();
#[cfg(feature = "cbor")]
ciborium::ser::into_writer(&body, &mut buf)
.map_err(|e| Error::Serialization(e.to_string()))?;
#[cfg(feature = "json")]
serde_json::to_writer(&mut buf, &body).map_err(|e| Error::Serialization(e.to_string()))?;
{
tracing::trace!("Serializing body to JSON");
serde_json::to_writer(&mut buf, &body)
.map_err(|e| Error::Serialization(e.to_string()))?;
}
#[cfg(feature = "cbor")]
{
tracing::trace!("Serializing body to CBOR");
ciborium::ser::into_writer(&body, &mut buf)
.map_err(|e| Error::Serialization(e.to_string()))?;
}
Ok(buf)
}
@ -525,11 +582,16 @@ impl Client {
let bytes = response.bytes().await?;
let reader = bytes.reader();
#[cfg(feature = "json")]
let content = serde_json::from_reader::<_, T>(reader)
.map_err(|e| Error::Deserialization(e.to_string()))?;
let content = {
tracing::trace!("Deserializing content from JSON");
serde_json::from_reader::<_, T>(reader)
.map_err(|e| Error::Deserialization(e.to_string()))?
};
#[cfg(feature = "cbor")]
let content =
ciborium::de::from_reader(reader).map_err(|e| Error::Deserialization(e.to_string()))?;
let content = {
tracing::trace!("Deserializing content from CBOR");
ciborium::de::from_reader(reader).map_err(|e| Error::Deserialization(e.to_string()))?
};
tracing::trace!("response content: {:?}", content);
Ok(content)

@ -1,3 +1,5 @@
use crate::wrapper::service::ServiceName;
use serde::Serialize;
use std::collections::HashMap;
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -43,33 +45,6 @@ pub struct BasicHashList {
pub hashes: Vec<String>,
}
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileMetadataInfo {
pub file_id: u64,
pub hash: String,
pub size: Option<u64>,
pub mime: String,
pub ext: String,
pub width: Option<u32>,
pub height: Option<u32>,
pub duration: Option<u64>,
pub time_modified: Option<u64>,
pub file_services: FileMetadataServices,
pub has_audio: Option<bool>,
pub num_frames: Option<u64>,
pub num_words: Option<u64>,
pub is_inbox: bool,
pub is_local: bool,
pub is_trashed: bool,
pub known_urls: Vec<String>,
#[deprecated]
pub service_names_to_statuses_to_tags: HashMap<String, HashMap<String, Vec<String>>>,
pub service_keys_to_statuses_to_tags: HashMap<String, HashMap<String, Vec<String>>>,
#[deprecated]
pub service_names_to_statuses_to_display_tags: HashMap<String, HashMap<String, Vec<String>>>,
pub service_keys_to_statuses_to_display_tags: HashMap<String, HashMap<String, Vec<String>>>,
}
#[derive(Clone, Debug)]
pub enum FileIdentifier {
ID(u64),
@ -98,6 +73,119 @@ impl FileIdentifier {
}
}
/// A generic selection for one or multiple files
///
/// Files can be addressed by a single hash, a list of hashes, a single
/// numeric file id or a list of file ids. Unset fields are omitted from
/// the serialized request entirely (see the `skip_serializing_if`
/// attributes), so only the populated selector(s) reach the API.
#[derive(Clone, Debug, Serialize, Default)]
pub struct FileSelection {
    // a single file addressed by its hash
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) hash: Option<String>,
    // multiple files addressed by hash
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub(crate) hashes: Vec<String>,
    // a single file addressed by its numeric id
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) file_id: Option<u64>,
    // multiple files addressed by numeric id
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub(crate) file_ids: Vec<u64>,
}
impl From<FileIdentifier> for FileSelection {
    /// Converts a single-file identifier into the matching single-file
    /// selection (either the `file_id` or the `hash` slot is filled).
    fn from(id: FileIdentifier) -> Self {
        match id {
            FileIdentifier::ID(file_id) => Self {
                file_id: Some(file_id),
                ..Self::default()
            },
            FileIdentifier::Hash(hash) => Self {
                hash: Some(hash),
                ..Self::default()
            },
        }
    }
}
impl FileSelection {
/// Creates a new single hash file selection
pub fn by_hash<S: ToString>(hash: S) -> Self {
Self {
hash: Some(hash.to_string()),
..Default::default()
}
}
/// Creates a new file selection with a single file id
pub fn by_file_id(file_id: u64) -> Self {
Self {
file_id: Some(file_id),
..Default::default()
}
}
/// Creates a new file selection with several hashes
pub fn by_hashes(mut hashes: Vec<String>) -> Self {
if hashes.len() == 1 {
Self::by_hash(hashes.pop().unwrap())
} else {
Self {
hashes,
..Default::default()
}
}
}
/// Creates a new file selection with several IDs
pub fn by_file_ids(mut file_ids: Vec<u64>) -> Self {
if file_ids.len() == 1 {
Self::by_file_id(file_ids.pop().unwrap())
} else {
Self {
file_ids,
..Default::default()
}
}
}
}
/// A selection for a single file service
///
/// A service can be addressed either by name or by service key; unset
/// fields are omitted from the serialized request. The all-`None` default
/// (see [`FileServiceSelection::none`]) selects no particular service.
#[derive(Clone, Debug, Serialize, Default)]
pub struct FileServiceSelection {
    // address the service by its human-readable name
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) file_service_name: Option<String>,
    // address the service by its key
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) file_service_key: Option<String>,
}
impl FileServiceSelection {
/// Creates a new file service selection by name
pub fn by_name<S: ToString>(name: S) -> Self {
Self {
file_service_name: Some(name.to_string()),
..Default::default()
}
}
/// Creates a new file service selection by service key
pub fn by_key<S: ToString>(key: S) -> Self {
Self {
file_service_key: Some(key.to_string()),
..Default::default()
}
}
/// Selects no service
pub fn none() -> Self {
Self::default()
}
}
impl From<ServiceIdentifier> for FileServiceSelection {
fn from(id: ServiceIdentifier) -> Self {
match id {
ServiceIdentifier::Name(n) => Self::by_name(n),
ServiceIdentifier::Key(k) => Self::by_key(k),
}
}
}
impl From<ServiceName> for FileServiceSelection {
fn from(name: ServiceName) -> Self {
Self::by_name(name)
}
}
#[derive(Clone)]
pub struct FileRecord {
pub bytes: Vec<u8>,

@ -1,5 +1,5 @@
use crate::api_core::common::BasicServiceInfo;
use crate::api_core::Endpoint;
use crate::api_core::endpoints::Endpoint;
use std::collections::HashMap;
pub static SERVICE_TYPE_LOCAL_TAGS: &str = "local_tags";

@ -1,5 +1,6 @@
use crate::api_core::common::BasicHashList;
use crate::api_core::Endpoint;
use crate::api_core::common::{FileSelection, FileServiceSelection};
use crate::api_core::endpoints::Endpoint;
use serde::Serialize;
pub static STATUS_IMPORT_SUCCESS: u8 = 1;
pub static STATUS_IMPORT_ALREADY_EXISTS: u8 = 2;
@ -30,7 +31,14 @@ impl Endpoint for AddFile {
}
}
pub type DeleteFilesRequest = BasicHashList;
/// Request body for the `delete_files` endpoint.
///
/// Both selections are flattened into a single top-level object.
#[derive(Clone, Debug, Serialize)]
pub struct DeleteFilesRequest {
    /// which files to delete
    #[serde(flatten)]
    pub file_selection: FileSelection,
    /// which file service to delete from
    #[serde(flatten)]
    pub service_selection: FileServiceSelection,
    /// optional human-readable deletion reason; skipped when `None` so the
    /// payload stays consistent with the optional fields of the flattened
    /// selections instead of sending an explicit `null`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reason: Option<String>,
}
pub struct DeleteFiles;
@ -43,7 +51,14 @@ impl Endpoint for DeleteFiles {
}
}
pub type UndeleteFilesRequest = BasicHashList;
/// Request body for the `undelete_files` endpoint; both selections are
/// flattened into a single top-level object.
#[derive(Clone, Debug, Serialize)]
pub struct UndeleteFilesRequest {
    // which files to undelete
    #[serde(flatten)]
    pub file_selection: FileSelection,
    // which file service to undelete them in
    #[serde(flatten)]
    pub service_selection: FileServiceSelection,
}
pub struct UndeleteFiles;
impl Endpoint for UndeleteFiles {
@ -55,7 +70,14 @@ impl Endpoint for UndeleteFiles {
}
}
pub type ArchiveFilesRequest = BasicHashList;
/// Request body for the `archive_files` endpoint; both selections are
/// flattened into a single top-level object.
#[derive(Clone, Debug, Serialize)]
pub struct ArchiveFilesRequest {
    // which files to archive
    #[serde(flatten)]
    pub file_selection: FileSelection,
    // which file service to archive them in
    #[serde(flatten)]
    pub service_selection: FileServiceSelection,
}
pub struct ArchiveFiles;
impl Endpoint for ArchiveFiles {
@ -67,11 +89,18 @@ impl Endpoint for ArchiveFiles {
}
}
pub type UnarchiveFilesRequest = BasicHashList;
/// Request body for the `unarchive_files` endpoint; both selections are
/// flattened into a single top-level object.
#[derive(Clone, Debug, Serialize)]
pub struct UnarchiveFilesRequest {
    // which files to move back to the inbox
    #[serde(flatten)]
    pub file_selection: FileSelection,
    // which file service to unarchive them in
    #[serde(flatten)]
    pub service_selection: FileServiceSelection,
}
pub struct UnarchiveFiles;
impl Endpoint for UnarchiveFiles {
type Request = UndeleteFilesRequest;
type Request = UnarchiveFilesRequest;
type Response = ();
fn path() -> String {

@ -1,5 +1,5 @@
use crate::api_core::common::FileIdentifier;
use crate::api_core::Endpoint;
use crate::api_core::endpoints::Endpoint;
use std::collections::HashMap;
pub struct SetNotes;

@ -1,5 +1,5 @@
use crate::api_core::common::ServiceIdentifier;
use crate::api_core::Endpoint;
use crate::api_core::endpoints::Endpoint;
use std::collections::HashMap;
#[derive(Debug, Clone, Deserialize)]

@ -1,5 +1,5 @@
use crate::api_core::common::ServiceIdentifier;
use crate::api_core::Endpoint;
use crate::api_core::endpoints::Endpoint;
use serde::Serialize;
use std::collections::HashMap;
@ -74,7 +74,7 @@ pub struct AddUrlRequest {
///
/// Example:
/// ```
/// use hydrus_api::api_core::adding_urls::AddUrlRequestBuilder;
/// use hydrus_api::api_core::endpoints::adding_urls::AddUrlRequestBuilder;
/// use hydrus_api::api_core::common::ServiceIdentifier;
///
/// let request = AddUrlRequestBuilder::default()

@ -1,5 +1,5 @@
use crate::api_core::common::OptionalStringNumber;
use crate::api_core::Endpoint;
use crate::api_core::endpoints::Endpoint;
#[derive(Clone, Debug, Deserialize)]
pub struct GetCookiesResponse {

@ -1,5 +1,5 @@
use crate::api_core::common::PageInformation;
use crate::api_core::Endpoint;
use crate::api_core::endpoints::Endpoint;
#[derive(Clone, Debug, Deserialize)]
pub struct GetPagesResponse {

@ -0,0 +1,20 @@
use serde::de::DeserializeOwned;
use serde::Serialize;
use std::fmt::Debug;
pub mod access_management;
pub mod adding_files;
pub mod adding_notes;
pub mod adding_tags;
pub mod adding_urls;
pub mod client_builder;
pub mod managing_cookies_and_http_headers;
pub mod managing_pages;
pub mod searching_and_fetching_files;
/// Describes a single client API endpoint: its request payload type, its
/// response payload type and its URL path.
pub(crate) trait Endpoint {
    /// Serializable body sent to the endpoint.
    type Request: Serialize + Debug;
    /// Deserializable body received from the endpoint.
    type Response: DeserializeOwned + Debug;
    /// Returns the endpoint's path (relative to the API base URL —
    /// TODO confirm against the client's URL construction).
    fn path() -> String;
}

@ -1,5 +1,10 @@
use crate::api_core::common::FileMetadataInfo;
use crate::api_core::Endpoint;
use crate::api_core::common::FileMetadataServices;
use crate::api_core::endpoints::Endpoint;
use serde::de::DeserializeOwned;
use serde::Deserialize;
use std::collections::HashMap;
use std::fmt::Debug;
use std::marker::PhantomData;
pub mod file_sort_type {
pub const SORT_FILE_SIZE: u8 = 0;
@ -129,15 +134,15 @@ impl Endpoint for SearchFileHashes {
}
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileMetadataResponse {
pub metadata: Vec<FileMetadataInfo>,
pub struct FileMetadataResponse<M: FileMetadataType> {
pub metadata: Vec<M::Response>,
}
pub struct FileMetadata;
pub struct FileMetadata<M: FileMetadataType>(PhantomData<M>);
impl Endpoint for FileMetadata {
impl<M: FileMetadataType> Endpoint for FileMetadata<M> {
type Request = ();
type Response = FileMetadataResponse;
type Response = FileMetadataResponse<M>;
fn path() -> String {
String::from("get_files/file_metadata")
@ -169,3 +174,94 @@ where
Self::Tag(s.to_string())
}
}
/// The minimal identifying metadata of a file: its numeric id and its hash.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileMetadataIdentifiers {
    pub file_id: u64,
    pub hash: String,
}
/// Basic per-file metadata, with the identifying fields of
/// [`FileMetadataIdentifiers`] flattened into this struct.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileBasicMetadata {
    #[serde(flatten)]
    pub identifiers: FileMetadataIdentifiers,
    // size presumably in bytes — TODO confirm against the API docs
    pub size: Option<u64>,
    pub mime: String,
    pub ext: String,
    pub width: Option<u32>,
    pub height: Option<u32>,
    // duration unit not established here — confirm against the API docs
    pub duration: Option<u64>,
    // interpreted as a millisecond timestamp by the wrapper layer
    pub time_modified: Option<u64>,
    pub file_services: FileMetadataServices,
    pub has_audio: Option<bool>,
    pub num_frames: Option<u64>,
    pub num_words: Option<u64>,
}
/// Full per-file metadata: everything in [`FileBasicMetadata`] (flattened)
/// plus status flags, known URLs and per-service tag mappings.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileFullMetadata {
    #[serde(flatten)]
    pub basic_metadata: FileBasicMetadata,
    pub is_inbox: bool,
    pub is_local: bool,
    pub is_trashed: bool,
    pub known_urls: Vec<String>,
    // deprecated in favor of the service-key keyed field below
    #[deprecated]
    pub service_names_to_statuses_to_tags: HashMap<String, HashMap<String, Vec<String>>>,
    pub service_keys_to_statuses_to_tags: HashMap<String, HashMap<String, Vec<String>>>,
    // deprecated in favor of the service-key keyed field below
    #[deprecated]
    pub service_names_to_statuses_to_display_tags: HashMap<String, HashMap<String, Vec<String>>>,
    pub service_keys_to_statuses_to_display_tags: HashMap<String, HashMap<String, Vec<String>>>,
}
/// Selects the level of detail of a file metadata query and the response
/// type it deserializes into. The two flags map onto the
/// `only_return_identifiers` and `only_return_basic_information` query
/// parameters built by `Client::get_file_metadata`.
pub trait FileMetadataType: Clone + Debug {
    /// Concrete metadata type returned at this level of detail.
    type Response: DeserializeOwned + Clone + Debug;
    /// Value for the `only_return_identifiers` query flag.
    fn only_identifiers() -> bool;
    /// Value for the `only_return_basic_information` query flag.
    fn only_basic_information() -> bool;
}
/// Marker type requesting the full metadata set ([`FileFullMetadata`]).
#[derive(Clone, Debug)]
pub struct FullMetadata;

impl FileMetadataType for FullMetadata {
    type Response = FileFullMetadata;

    fn only_identifiers() -> bool {
        false
    }

    fn only_basic_information() -> bool {
        false
    }
}

/// Marker type requesting only basic metadata ([`FileBasicMetadata`]).
#[derive(Clone, Debug)]
pub struct BasicMetadata;

impl FileMetadataType for BasicMetadata {
    type Response = FileBasicMetadata;

    fn only_identifiers() -> bool {
        false
    }

    fn only_basic_information() -> bool {
        true
    }
}

/// Marker type requesting only file identifiers
/// ([`FileMetadataIdentifiers`]).
#[derive(Clone, Debug)]
pub struct Identifiers;

impl FileMetadataType for Identifiers {
    type Response = FileMetadataIdentifiers;

    fn only_identifiers() -> bool {
        true
    }

    fn only_basic_information() -> bool {
        false
    }
}

@ -1,24 +1,3 @@
use serde::de::DeserializeOwned;
use serde::Serialize;
use std::fmt::Debug;
pub mod access_management;
pub mod adding_files;
pub mod adding_tags;
pub mod adding_urls;
pub mod client;
pub mod client_builder;
pub mod common;
pub mod managing_cookies_and_http_headers;
pub mod managing_pages;
pub mod searching_and_fetching_files;
pub mod adding_notes;
pub use searching_and_fetching_files::file_sort_type;
pub(crate) trait Endpoint {
type Request: Serialize + Debug;
type Response: DeserializeOwned + Debug;
fn path() -> String;
}
pub mod endpoints;

@ -56,7 +56,7 @@
//! ## Client Usage Example
//! ```
//! use hydrus_api::Client;
//! use hydrus_api::api_core::adding_tags::{AddTagsRequestBuilder, TagAction};
//! use hydrus_api::api_core::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction};
//! use std::env;
//! use hydrus_api::api_core::common::ServiceIdentifier;
//! # #[tokio::test]

@ -1,5 +1,5 @@
use crate::api_core::common::OptionalStringNumber;
use crate::api_core::managing_cookies_and_http_headers::CookieBuilder;
use crate::api_core::endpoints::managing_cookies_and_http_headers::CookieBuilder;
use crate::error::Result;
use crate::Client;
use std::time::{Duration, SystemTime, UNIX_EPOCH};

@ -0,0 +1,71 @@
use crate::api_core::common::{
FileIdentifier, FileSelection, FileServiceSelection, ServiceIdentifier,
};
use crate::error::Result;
use crate::Client;
pub struct DeleteFilesBuilder {
client: Client,
hashes: Vec<String>,
ids: Vec<u64>,
reason: Option<String>,
service: Option<ServiceIdentifier>,
}
impl DeleteFilesBuilder {
pub(crate) fn new(client: Client) -> Self {
Self {
client,
hashes: Vec::new(),
ids: Vec::new(),
reason: None,
service: None,
}
}
/// Adds a file to be deleted
pub fn add_file(mut self, identifier: FileIdentifier) -> Self {
match identifier {
FileIdentifier::ID(id) => self.ids.push(id),
FileIdentifier::Hash(hash) => self.hashes.push(hash),
}
self
}
/// Adds multiple files to be deleted
pub fn add_files(self, ids: Vec<FileIdentifier>) -> Self {
ids.into_iter().fold(self, |acc, id| acc.add_file(id))
}
/// Restricts deletion to a single file service
pub fn service(mut self, service: ServiceIdentifier) -> Self {
self.service = Some(service);
self
}
/// Adds a reason for why the file was deleted
pub fn reason<S: ToString>(mut self, reason: S) -> Self {
self.reason = Some(reason.to_string());
self
}
/// Deletes all files specified in this builder
pub async fn run(self) -> Result<()> {
let file_selection = FileSelection {
hashes: self.hashes,
file_ids: self.ids,
..Default::default()
};
let service_selection = self
.service
.map(FileServiceSelection::from)
.unwrap_or_default();
self.client
.delete_files(file_selection, service_selection, self.reason)
.await
}
}

@ -1,6 +1,6 @@
use crate::api_core::adding_files::{STATUS_IMPORT_FAILED, STATUS_IMPORT_VETOED};
use crate::api_core::adding_urls::AddUrlRequestBuilder;
use crate::api_core::common::ServiceIdentifier;
use crate::api_core::endpoints::adding_files::{STATUS_IMPORT_FAILED, STATUS_IMPORT_VETOED};
use crate::api_core::endpoints::adding_urls::AddUrlRequestBuilder;
use crate::error::{Error, Result};
use crate::utils::tag_list_to_string_list;
use crate::wrapper::hydrus_file::HydrusFile;

@ -1,6 +1,7 @@
pub mod delete_files_builder;
pub mod import_builder;
pub mod notes_builder;
pub mod or_chain_builder;
pub mod search_builder;
pub mod tag_builder;
pub mod tagging_builder;
pub mod notes_builder;

@ -1,4 +1,6 @@
use crate::api_core::searching_and_fetching_files::{FileSearchOptions, SearchQueryEntry};
use crate::api_core::endpoints::searching_and_fetching_files::{
FileSearchOptions, SearchQueryEntry,
};
use crate::error::Result;
use crate::wrapper::hydrus_file::HydrusFile;
use crate::wrapper::or_chain::OrChain;

@ -1,5 +1,5 @@
use crate::api_core::adding_tags::{AddTagsRequestBuilder, TagAction};
use crate::api_core::common::ServiceIdentifier;
use crate::api_core::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction};
use crate::error::Result;
use crate::wrapper::tag::Tag;
use crate::Client;

@ -1,6 +1,8 @@
use crate::api_core::common::FileIdentifier;
use crate::api_core::endpoints::searching_and_fetching_files::FullMetadata;
use crate::error::Result;
use crate::wrapper::address::Address;
use crate::wrapper::builders::delete_files_builder::DeleteFilesBuilder;
use crate::wrapper::builders::import_builder::ImportBuilder;
use crate::wrapper::builders::search_builder::SearchBuilder;
use crate::wrapper::builders::tagging_builder::TaggingBuilder;
@ -71,12 +73,17 @@ impl Hydrus {
pub async fn file(&self, identifier: FileIdentifier) -> Result<HydrusFile> {
let metadata = self
.client
.get_file_metadata_by_identifier(identifier)
.get_file_metadata_by_identifier::<FullMetadata>(identifier)
.await?;
Ok(HydrusFile::from_metadata(self.client.clone(), metadata))
}
/// Creates a builder to delete files
// NOTE(review): this fn is marked `async` but contains no `.await`, so
// callers must await it for no benefit — confirm whether it should be a
// plain fn (removing `async` would change the public interface).
pub async fn delete(&self) -> DeleteFilesBuilder {
    DeleteFilesBuilder::new(self.client.clone())
}
/// Starts a request to bulk add tags to files
pub fn tagging(&self) -> TaggingBuilder {
TaggingBuilder::new(self.client.clone())

@ -1,7 +1,11 @@
use crate::api_core::adding_tags::{AddTagsRequestBuilder, TagAction};
use crate::api_core::common::{FileIdentifier, FileMetadataInfo, FileRecord, ServiceIdentifier};
use crate::api_core::common::{
FileIdentifier, FileRecord, FileSelection, FileServiceSelection, ServiceIdentifier,
};
use crate::api_core::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction};
use crate::api_core::endpoints::searching_and_fetching_files::{FileFullMetadata, FullMetadata};
use crate::error::{Error, Result};
use crate::utils::tag_list_to_string_list;
use crate::wrapper::builders::delete_files_builder::DeleteFilesBuilder;
use crate::wrapper::builders::notes_builder::AddNotesBuilder;
use crate::wrapper::service::ServiceName;
use crate::wrapper::tag::Tag;
@ -35,7 +39,7 @@ pub struct HydrusFile {
pub(crate) client: Client,
pub id: FileIdentifier,
pub status: FileStatus,
pub(crate) metadata: Option<FileMetadataInfo>,
pub(crate) metadata: Option<FileFullMetadata>,
}
impl HydrusFile {
@ -61,7 +65,7 @@ impl HydrusFile {
}
}
pub(crate) fn from_metadata(client: Client, metadata: FileMetadataInfo) -> Self {
pub(crate) fn from_metadata(client: Client, metadata: FileFullMetadata) -> Self {
let status = if metadata.is_trashed {
FileStatus::Deleted
} else {
@ -70,7 +74,7 @@ impl HydrusFile {
Self {
client,
id: FileIdentifier::Hash(metadata.hash.clone()),
id: FileIdentifier::Hash(metadata.basic_metadata.identifiers.hash.clone()),
status,
metadata: Some(metadata),
}
@ -89,7 +93,7 @@ impl HydrusFile {
match &self.id {
FileIdentifier::ID(_) => {
let metadata = self.metadata().await?;
Ok(metadata.hash.clone())
Ok(metadata.basic_metadata.identifiers.hash.clone())
}
FileIdentifier::Hash(hash) => Ok(hash.clone()),
}
@ -99,17 +103,18 @@ impl HydrusFile {
pub async fn size(&mut self) -> Result<Option<u64>> {
let metadata = self.metadata().await?;
Ok(metadata.size.clone())
Ok(metadata.basic_metadata.size.clone())
}
/// Returns the mime of the file
pub async fn mime(&mut self) -> Result<Mime> {
let metadata = self.metadata().await?;
let mime = metadata
.basic_metadata
.mime
.as_str()
.parse()
.map_err(|_| Error::InvalidMime(metadata.mime.clone()))?;
.map_err(|_| Error::InvalidMime(metadata.basic_metadata.mime.clone()))?;
Ok(mime)
}
@ -118,13 +123,16 @@ impl HydrusFile {
pub async fn ext(&mut self) -> Result<String> {
let metadata = self.metadata().await?;
Ok(metadata.ext.clone())
Ok(metadata.basic_metadata.ext.clone())
}
/// Returns the dimensions of the file in pixels
pub async fn dimensions(&mut self) -> Result<Option<(u32, u32)>> {
let metadata = self.metadata().await?;
if let (Some(width), Some(height)) = (&metadata.width, &metadata.height) {
if let (Some(width), Some(height)) = (
&metadata.basic_metadata.width,
&metadata.basic_metadata.height,
) {
Ok(Some((*width, *height)))
} else {
Ok(None)
@ -135,21 +143,21 @@ impl HydrusFile {
pub async fn duration(&mut self) -> Result<Option<u64>> {
let metadata = self.metadata().await?;
Ok(metadata.duration.clone())
Ok(metadata.basic_metadata.duration.clone())
}
/// Returns the number of frames of the file if it's a video
pub async fn num_frames(&mut self) -> Result<Option<u64>> {
let metadata = self.metadata().await?;
Ok(metadata.num_frames.clone())
Ok(metadata.basic_metadata.num_frames.clone())
}
/// Returns if the file has audio
pub async fn has_audio(&mut self) -> Result<bool> {
let metadata = self.metadata().await?;
Ok(metadata.has_audio.unwrap_or(false))
Ok(metadata.basic_metadata.has_audio.unwrap_or(false))
}
/// Returns if the file is currently in the inbox
@ -184,6 +192,7 @@ impl HydrusFile {
pub async fn time_modified(&mut self) -> Result<Option<NaiveDateTime>> {
let metadata = self.metadata().await?;
let naive_time_modified = metadata
.basic_metadata
.time_modified
.map(|m| Utc.timestamp_millis(m as i64).naive_utc());
@ -197,12 +206,14 @@ impl HydrusFile {
) -> Result<Option<NaiveDateTime>> {
let metadata = self.metadata().await?;
let naive_time_imported = metadata
.basic_metadata
.file_services
.current
.get(service_key.as_ref())
.map(|s| s.time_imported)
.or_else(|| {
metadata
.basic_metadata
.file_services
.deleted
.get(service_key.as_ref())
@ -220,6 +231,7 @@ impl HydrusFile {
) -> Result<Option<NaiveDateTime>> {
let metadata = self.metadata().await?;
let naive_time_deleted = metadata
.basic_metadata
.file_services
.deleted
.get(service_key.as_ref())
@ -229,6 +241,41 @@ impl HydrusFile {
Ok(naive_time_deleted)
}
/// Creates a request builder to delete the file
pub fn delete(&mut self) -> DeleteFilesBuilder {
    // drop the cached metadata so the next metadata access re-fetches it
    self.metadata = None;
    DeleteFilesBuilder::new(self.client.clone()).add_file(self.id.clone())
}
/// Undeletes the file for the given service or all services
/// if `FileServiceSelection::none` is passed
pub async fn undelete(&mut self, service_selection: FileServiceSelection) -> Result<()> {
    let hash = self.hash().await?;
    // drop the cached metadata so the next metadata access re-fetches it
    self.metadata = None;
    self.client
        .undelete_files(FileSelection::by_hash(hash), service_selection)
        .await
}
/// Archives the file in all passed file services or all configured services
/// if no selection is passed
pub async fn archive(&mut self, service_selection: FileServiceSelection) -> Result<()> {
    let hash = self.hash().await?;
    // drop the cached metadata so the next metadata access re-fetches it
    self.metadata = None;
    self.client
        .archive_files(FileSelection::by_hash(hash), service_selection)
        .await
}
/// Unarchives the file for the given services
pub async fn unarchive(&mut self, service_selection: FileServiceSelection) -> Result<()> {
    let hash = self.hash().await?;
    // drop the cached metadata so the next metadata access re-fetches it
    self.metadata = None;
    self.client
        .unarchive_files(FileSelection::by_hash(hash), service_selection)
        .await
}
/// Associates the file with a list of urls
pub async fn associate_urls(&mut self, urls: Vec<String>) -> Result<()> {
let hash = self.hash().await?;
@ -347,11 +394,11 @@ impl HydrusFile {
/// Returns the metadata for the given file
/// if there's already known metadata about the file it uses that
async fn metadata(&mut self) -> Result<&FileMetadataInfo> {
async fn metadata(&mut self) -> Result<&FileFullMetadata> {
if self.metadata.is_none() {
let metadata = self
.client
.get_file_metadata_by_identifier(self.id.clone())
.get_file_metadata_by_identifier::<FullMetadata>(self.id.clone())
.await?;
self.status = if metadata.is_trashed {
FileStatus::Deleted

@ -1,4 +1,5 @@
use crate::api_core::common::{FileIdentifier, PageInformation};
use crate::api_core::endpoints::searching_and_fetching_files::Identifiers;
use crate::error::Result;
use crate::utils::split_file_identifiers_into_hashes_and_ids;
use crate::Client;
@ -56,7 +57,7 @@ impl HydrusPage {
for id in ids {
let metadata = self
.client
.get_file_metadata_by_identifier(FileIdentifier::ID(id))
.get_file_metadata_by_identifier::<Identifiers>(FileIdentifier::ID(id))
.await?;
hashes.push(metadata.hash);
}

@ -1,5 +1,5 @@
use crate::api_core::access_management::GetServicesResponse;
use crate::api_core::access_management::{
use crate::api_core::endpoints::access_management::GetServicesResponse;
use crate::api_core::endpoints::access_management::{
SERVICE_TYPE_ALL_KNOWN_FILES, SERVICE_TYPE_ALL_KNOWN_TAGS, SERVICE_TYPE_ALL_LOCAL_FILES,
SERVICE_TYPE_FILE_REPOSITORIES, SERVICE_TYPE_LOCAL_FILES, SERVICE_TYPE_LOCAL_TAGS,
SERVICE_TYPE_TAG_REPOSITORIES, SERVICE_TYPE_TRASH,

@ -1,4 +1,4 @@
use crate::api_core::adding_urls::{
use crate::api_core::endpoints::adding_urls::{
URL_TYPE_FILE, URL_TYPE_GALLERY, URL_TYPE_POST, URL_TYPE_WATCHABLE,
};
use crate::error::Result;

@ -1,6 +1,8 @@
use crate::common;
use crate::common::create_testdata;
use crate::common::test_data::get_test_hashes;
use crate::common::test_data::{get_test_hashes, TEST_HASH_1};
use hydrus_api::api_core::common::FileSelection;
use hydrus_api::wrapper::service::ServiceName;
#[tokio::test]
async fn it_adds_files() {
@ -22,26 +24,52 @@ async fn it_adds_binary_files() {
#[tokio::test]
async fn it_deletes_files() {
let client = common::get_client();
client.delete_files(get_test_hashes()).await.unwrap();
create_testdata(&client).await;
client
.delete_files(
FileSelection::by_hashes(get_test_hashes()),
ServiceName::my_files().into(),
Some("Test".to_string()),
)
.await
.unwrap();
}
#[tokio::test]
async fn it_undeletes_files() {
let client = common::get_client();
create_testdata(&client).await;
client.undelete_files(get_test_hashes()).await.unwrap();
client
.undelete_files(
FileSelection::by_hashes(get_test_hashes()),
ServiceName::my_files().into(),
)
.await
.unwrap();
}
#[tokio::test]
async fn it_archives_files() {
let client = common::get_client();
create_testdata(&client).await;
client.archive_files(get_test_hashes()).await.unwrap();
client
.archive_files(
FileSelection::by_hashes(vec![TEST_HASH_1.to_string()]),
ServiceName::my_files().into(),
)
.await
.unwrap();
}
#[tokio::test]
async fn it_unarchives_files() {
let client = common::get_client();
create_testdata(&client).await;
client.unarchive_files(get_test_hashes()).await.unwrap();
client
.unarchive_files(
FileSelection::by_hashes(get_test_hashes()),
ServiceName::my_files().into(),
)
.await
.unwrap();
}

@ -1,7 +1,7 @@
use super::super::common;
use crate::common::test_data::EMPTY_HASH;
use hydrus_api::api_core::adding_tags::{AddTagsRequestBuilder, TagAction};
use hydrus_api::api_core::common::ServiceIdentifier;
use hydrus_api::api_core::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction};
#[tokio::test]
async fn it_cleans_tags() {

@ -1,7 +1,7 @@
use super::super::common;
use crate::common::test_data::{get_test_hashes, get_test_urls, TEST_URL_1};
use hydrus_api::api_core::adding_urls::{AddUrlRequestBuilder, URL_TYPE_POST};
use hydrus_api::api_core::common::ServiceIdentifier;
use hydrus_api::api_core::endpoints::adding_urls::{AddUrlRequestBuilder, URL_TYPE_POST};
#[tokio::test]
async fn it_returns_files_for_an_url() {

@ -1,5 +1,5 @@
use super::super::common;
use hydrus_api::api_core::managing_cookies_and_http_headers::CookieBuilder;
use hydrus_api::api_core::endpoints::managing_cookies_and_http_headers::CookieBuilder;
#[tokio::test]
async fn it_returns_cookies_for_a_domain() {

@ -1,7 +1,9 @@
use super::super::common;
use hydrus_api::api_core::common::FileIdentifier;
use hydrus_api::api_core::file_sort_type::SORT_FILE_PIXEL_COUNT;
use hydrus_api::api_core::searching_and_fetching_files::{FileSearchOptions, SearchQueryEntry};
use hydrus_api::api_core::endpoints::searching_and_fetching_files::file_sort_type::SORT_FILE_PIXEL_COUNT;
use hydrus_api::api_core::endpoints::searching_and_fetching_files::{
BasicMetadata, FileSearchOptions, FullMetadata, Identifiers, SearchQueryEntry,
};
#[tokio::test]
async fn is_searches_files() {
@ -45,7 +47,7 @@ async fn is_searches_file_hashes() {
async fn it_fetches_file_metadata() {
let client = common::get_client();
client
.get_file_metadata(
.get_file_metadata::<FullMetadata>(
vec![],
vec!["0000000000000000000000000000000000000000000000000000000000000000".to_string()],
)
@ -56,8 +58,18 @@ async fn it_fetches_file_metadata() {
#[tokio::test]
async fn it_fetches_file_metadata_by_id() {
let client = common::get_client();
let response = client.get_file_metadata(vec![1], vec![]).await;
assert!(response.is_ok()); // Even if the file doesn't exist it still returns some information about it
let response = client
.get_file_metadata::<Identifiers>(vec![1], vec![])
.await;
assert!(response.is_ok());
let response = client
.get_file_metadata::<BasicMetadata>(vec![1], vec![])
.await;
assert!(response.is_ok());
let response = client
.get_file_metadata::<FullMetadata>(vec![1], vec![])
.await;
assert!(response.is_ok());
}
#[tokio::test]

@ -1,5 +1,5 @@
use hydrus_api::api_core::adding_urls::AddUrlRequestBuilder;
use hydrus_api::api_core::client::Client;
use hydrus_api::api_core::endpoints::adding_urls::AddUrlRequestBuilder;
use hydrus_api::Hydrus;
use std::env;
use std::sync::{Arc, Mutex, MutexGuard};

@ -1,14 +1,18 @@
use super::super::common;
use hydrus_api::api_core::adding_tags::TagAction;
use crate::common::test_data::TEST_HASH_2;
use crate::common::{create_testdata, get_client};
use hydrus_api::api_core::common::FileIdentifier;
use hydrus_api::api_core::endpoints::adding_tags::TagAction;
use hydrus_api::wrapper::hydrus_file::HydrusFile;
use hydrus_api::wrapper::service::ServiceName;
async fn get_file() -> HydrusFile {
let client = get_client();
create_testdata(&client).await;
let hydrus = common::get_hydrus();
hydrus
.file(FileIdentifier::hash(
"277a138cd1ee79fc1fdb2869c321b848d4861e45b82184487139ef66dd40b62d", // needs to exist
TEST_HASH_2, // needs to exist
))
.await
.unwrap()
@ -102,9 +106,19 @@ async fn it_retrieves_content() {
async fn it_retrieves_metadata() {
let mut file = get_file().await;
assert!(file.dimensions().await.unwrap().is_some());
assert!(file.stored_locally().await.unwrap());
assert!(file.duration().await.unwrap().is_none());
assert!(file.time_modified().await.is_ok());
assert!(file.time_deleted("000").await.is_ok());
assert!(file.time_imported("000").await.is_ok());
}
#[tokio::test]
async fn it_deletes() {
let mut file = get_file().await;
file.delete()
.reason("I just don't like that file")
.run()
.await
.unwrap();
file.undelete(ServiceName::my_files().into()).await.unwrap();
}

@ -1,5 +1,5 @@
use super::super::common;
use hydrus_api::api_core::adding_tags::TagAction;
use hydrus_api::api_core::endpoints::adding_tags::TagAction;
use hydrus_api::wrapper::builders::or_chain_builder::OrChainBuilder;
use hydrus_api::wrapper::builders::search_builder::SortType;
use hydrus_api::wrapper::builders::tag_builder::TagBuilder;

Loading…
Cancel
Save