Merge pull request #1 from Trivernis/feature/high-level-wrapper

Feature/high level wrapper
pull/2/head
Julius Riegel 3 years ago committed by GitHub
commit df5f36204c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -1,6 +1,6 @@
[package]
name = "hydrus-api"
version = "0.1.0"
version = "0.3.0"
authors = ["trivernis <trivernis@protonmail.com>"]
edition = "2018"
license = "Apache-2.0"
@ -14,6 +14,7 @@ repository = "https://github.com/trivernis/hydrus-api-rs"
serde = {version = "^1.0", features = ["derive"]}
reqwest = {version = "0.11.4", features = ["json"]}
log = "0.4.14"
mime = "0.3.16"
[dev-dependencies]
env_logger = "0.8.4"

@ -1,9 +1,51 @@
# Hydrus Rust API
<h1 align="center">
Hydrus Rust API
</h1>
<p align="center">
<a href="https://crates.io/crates/hydrus-api">
<img src="https://img.shields.io/crates/v/hydrus-api?style=for-the-badge">
</a>
<a href="https://docs.rs/hydrus-api">
<img src="https://img.shields.io/docsrs/hydrus-api?style=for-the-badge">
</a>
</p>
This is a WIP Rust Wrapper for the Hydrus Client API.
The official API documentation can be found [here](https://hydrusnetwork.github.io/hydrus/help/client_api.html).
## Example
## Example with Wrapper
```rust
use std::env;
use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation;
use hydrus_api::wrapper::tag::Tag;
use hydrus_api::wrapper::service::ServiceName;
use hydrus_api::wrapper::hydrus_file::FileStatus;
use hydrus_api::wrapper::page::PageIdentifier;
#[tokio::main]
async fn main() {
let hydrus_url = env::var("HYDRUS_URL").unwrap();
let access_key = env::var("HYDRUS_ACCESS_KEY").unwrap();
let hydrus = Hydrus::new(Client::new(hydrus_url, access_key));
let files = hydrus.search(FileSearchLocation::Archive,vec![Tag::from("character:megumin")]).await.unwrap();
for mut file in files {
file.add_tags(ServiceName::my_tags(), vec![Tag::from("ark mage")]).await.unwrap();
}
let url = hydrus.import()
.url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
.page(PageIdentifier::name("My Import Page"))
.add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin"))
.show_page(true)
.run().await.unwrap();
}
```
## Example with Client
```rust
use hydrus_api::Client;
@ -15,7 +57,7 @@ async fn main() {
Client::new(
env::var("HYDRUS_URL").unwrap(),
env::var("HYDRUS_ACCESS_KEY").unwrap(),
).unwrap();
);
// let's first import a file
let hash = client.add_file("/path/to/my/file").await.unwrap().hash;

@ -1,7 +1,16 @@
use crate::endpoints::common::BasicServiceInfo;
use crate::endpoints::Endpoint;
use crate::api_core::common::BasicServiceInfo;
use crate::api_core::Endpoint;
use std::collections::HashMap;
pub static SERVICE_TYPE_LOCAL_TAGS: &str = "local_tags";
pub static SERVICE_TYPE_TAG_REPOSITORIES: &str = "tag_repositories";
pub static SERVICE_TYPE_LOCAL_FILES: &str = "local_files";
pub static SERVICE_TYPE_FILE_REPOSITORIES: &str = "file_repositories";
pub static SERVICE_TYPE_ALL_LOCAL_FILES: &str = "all_local_files";
pub static SERVICE_TYPE_ALL_KNOWN_FILES: &str = "all_known_files";
pub static SERVICE_TYPE_ALL_KNOWN_TAGS: &str = "all_known_tags";
pub static SERVICE_TYPE_TRASH: &str = "trash";
#[derive(Debug, Clone, Deserialize)]
pub struct ApiVersionResponse {
pub version: u32,
@ -14,7 +23,7 @@ impl Endpoint for ApiVersion {
type Request = ();
type Response = ApiVersionResponse;
fn get_path() -> String {
fn path() -> String {
String::from("api_version")
}
}
@ -30,7 +39,7 @@ impl Endpoint for SessionKey {
type Request = ();
type Response = SessionKeyResponse;
fn get_path() -> String {
fn path() -> String {
String::from("session_key")
}
}
@ -47,7 +56,7 @@ impl Endpoint for VerifyAccessKey {
type Request = ();
type Response = VerifyAccessKeyResponse;
fn get_path() -> String {
fn path() -> String {
String::from("verify_access_key")
}
}
@ -61,7 +70,7 @@ impl Endpoint for GetServices {
type Request = ();
type Response = GetServicesResponse;
fn get_path() -> String {
fn path() -> String {
String::from("get_services")
}
}

@ -1,5 +1,11 @@
use crate::endpoints::common::BasicHashList;
use crate::endpoints::Endpoint;
use crate::api_core::common::BasicHashList;
use crate::api_core::Endpoint;
pub static STATUS_IMPORT_SUCCESS: u8 = 1;
pub static STATUS_IMPORT_ALREADY_EXISTS: u8 = 2;
pub static STATUS_IMPORT_PREVIOUSLY_DELETED: u8 = 3;
pub static STATUS_IMPORT_FAILED: u8 = 4;
pub static STATUS_IMPORT_VETOED: u8 = 5;
#[derive(Debug, Clone, Serialize)]
pub struct AddFileRequest {
@ -19,7 +25,7 @@ impl Endpoint for AddFile {
type Request = AddFileRequest;
type Response = AddFileResponse;
fn get_path() -> String {
fn path() -> String {
String::from("add_files/add_file")
}
}
@ -32,7 +38,7 @@ impl Endpoint for DeleteFiles {
type Request = DeleteFilesRequest;
type Response = ();
fn get_path() -> String {
fn path() -> String {
String::from("add_files/delete_files")
}
}
@ -44,7 +50,7 @@ impl Endpoint for UndeleteFiles {
type Request = UndeleteFilesRequest;
type Response = ();
fn get_path() -> String {
fn path() -> String {
String::from("add_files/undelete_files")
}
}
@ -56,7 +62,7 @@ impl Endpoint for ArchiveFiles {
type Request = ArchiveFilesRequest;
type Response = ();
fn get_path() -> String {
fn path() -> String {
String::from("add_files/archive_files")
}
}
@ -68,7 +74,7 @@ impl Endpoint for UnarchiveFiles {
type Request = UndeleteFilesRequest;
type Response = ();
fn get_path() -> String {
fn path() -> String {
String::from("add_files/unarchive_files")
}
}

@ -1,4 +1,4 @@
use crate::endpoints::Endpoint;
use crate::api_core::Endpoint;
use std::collections::HashMap;
#[derive(Debug, Clone, Deserialize)]
@ -12,7 +12,7 @@ impl Endpoint for CleanTags {
type Request = ();
type Response = CleanTagsResponse;
fn get_path() -> String {
fn path() -> String {
String::from("add_tags/clean_tags")
}
}
@ -30,7 +30,7 @@ impl Endpoint for AddTags {
type Request = AddTagsRequest;
type Response = ();
fn get_path() -> String {
fn path() -> String {
String::from("add_tags/add_tags")
}
}
@ -42,6 +42,7 @@ pub struct AddTagsRequestBuilder {
}
/// List of actions for a given tag
#[derive(Clone, Debug, PartialOrd, PartialEq, Hash)]
pub enum TagAction {
/// Add to a local tag service.
AddToLocalService,
@ -62,6 +63,8 @@ pub enum TagAction {
RescindPetitionFromRepository,
}
impl Eq for TagAction {}
impl TagAction {
fn into_id(self) -> u8 {
match self {

@ -1,4 +1,4 @@
use crate::endpoints::Endpoint;
use crate::api_core::Endpoint;
use serde::Serialize;
use std::collections::HashMap;
@ -16,7 +16,7 @@ pub struct GetUrlFilesResponse {
#[derive(Clone, Debug, Deserialize)]
pub struct UrlFileStatus {
pub status: u32,
pub status: u8,
pub hash: String,
pub note: String,
}
@ -27,7 +27,7 @@ impl Endpoint for GetUrlFiles {
type Request = ();
type Response = GetUrlFilesResponse;
fn get_path() -> String {
fn path() -> String {
String::from("add_urls/get_url_files")
}
}
@ -47,7 +47,7 @@ impl Endpoint for GetUrlInfo {
type Request = ();
type Response = GetUrlInfoResponse;
fn get_path() -> String {
fn path() -> String {
String::from("add_urls/get_url_info")
}
}
@ -72,7 +72,7 @@ pub struct AddUrlRequest {
///
/// Example:
/// ```
/// use hydrus_api::endpoints::adding_urls::AddUrlRequestBuilder;
/// use hydrus_api::api_core::adding_urls::AddUrlRequestBuilder;
///
/// let request = AddUrlRequestBuilder::default()
/// .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
@ -164,7 +164,7 @@ impl Endpoint for AddUrl {
type Request = AddUrlRequest;
type Response = AddUrlResponse;
fn get_path() -> String {
fn path() -> String {
String::from("add_urls/add_url")
}
}
@ -181,7 +181,7 @@ impl Endpoint for AssociateUrl {
type Request = AssociateUrlRequest;
type Response = ();
fn get_path() -> String {
fn path() -> String {
String::from("add_urls/associate_url")
}
}

@ -1,22 +1,22 @@
use crate::endpoints::access_management::{
use crate::api_core::access_management::{
ApiVersion, ApiVersionResponse, GetServices, GetServicesResponse, SessionKey,
SessionKeyResponse, VerifyAccessKey, VerifyAccessKeyResponse,
};
use crate::endpoints::adding_files::{
use crate::api_core::adding_files::{
AddFile, AddFileRequest, AddFileResponse, ArchiveFiles, ArchiveFilesRequest, DeleteFiles,
DeleteFilesRequest, UnarchiveFiles, UnarchiveFilesRequest, UndeleteFiles, UndeleteFilesRequest,
};
use crate::endpoints::adding_tags::{AddTags, AddTagsRequest, CleanTags, CleanTagsResponse};
use crate::endpoints::adding_urls::{
use crate::api_core::adding_tags::{AddTags, AddTagsRequest, CleanTags, CleanTagsResponse};
use crate::api_core::adding_urls::{
AddUrl, AddUrlRequest, AddUrlResponse, AssociateUrl, AssociateUrlRequest, GetUrlFiles,
GetUrlFilesResponse, GetUrlInfo, GetUrlInfoResponse,
};
use crate::endpoints::common::{FileIdentifier, FileRecord};
use crate::endpoints::searching_and_fetching_files::{
use crate::api_core::common::{FileIdentifier, FileMetadataInfo, FileRecord};
use crate::api_core::searching_and_fetching_files::{
FileMetadata, FileMetadataResponse, FileSearchLocation, GetFile, SearchFiles,
SearchFilesResponse,
};
use crate::endpoints::Endpoint;
use crate::api_core::Endpoint;
use crate::error::{Error, Result};
use crate::utils::{number_list_to_json_array, string_list_to_json_array};
use reqwest::Response;
@ -25,6 +25,9 @@ use serde::Serialize;
static ACCESS_KEY_HEADER: &str = "Hydrus-Client-API-Access-Key";
#[derive(Clone)]
/// A low level Client for the hydrus API. It provides basic abstraction
/// over the REST api.
pub struct Client {
inner: reqwest::Client,
base_url: String,
@ -33,19 +36,19 @@ pub struct Client {
impl Client {
/// Creates a new client to start requests against the hydrus api.
pub fn new<S: AsRef<str>>(url: S, access_key: S) -> Result<Self> {
Ok(Self {
pub fn new<S: AsRef<str>>(url: S, access_key: S) -> Self {
Self {
inner: reqwest::Client::new(),
access_key: access_key.as_ref().to_string(),
base_url: url.as_ref().to_string(),
})
}
}
/// Starts a get request to the path
async fn get<E: Endpoint, Q: Serialize + ?Sized>(&mut self, query: &Q) -> Result<Response> {
async fn get<E: Endpoint, Q: Serialize + ?Sized>(&self, query: &Q) -> Result<Response> {
let response = self
.inner
.get(format!("{}/{}", self.base_url, E::get_path()))
.get(format!("{}/{}", self.base_url, E::path()))
.header(ACCESS_KEY_HEADER, &self.access_key)
.query(query)
.send()
@ -56,7 +59,7 @@ impl Client {
/// Starts a get request to the path associated with the Endpoint Type
async fn get_and_parse<E: Endpoint, Q: Serialize + ?Sized>(
&mut self,
&self,
query: &Q,
) -> Result<E::Response> {
let response = self.get::<E, Q>(query).await?;
@ -65,10 +68,10 @@ impl Client {
}
/// Stats a post request to the path associated with the Endpoint Type
async fn post<E: Endpoint>(&mut self, body: E::Request) -> Result<Response> {
async fn post<E: Endpoint>(&self, body: E::Request) -> Result<Response> {
let response = self
.inner
.post(format!("{}/{}", self.base_url, E::get_path()))
.post(format!("{}/{}", self.base_url, E::path()))
.json(&body)
.header(ACCESS_KEY_HEADER, &self.access_key)
.send()
@ -78,17 +81,17 @@ impl Client {
}
/// Stats a post request and parses the body as json
async fn post_and_parse<E: Endpoint>(&mut self, body: E::Request) -> Result<E::Response> {
async fn post_and_parse<E: Endpoint>(&self, body: E::Request) -> Result<E::Response> {
let response = self.post::<E>(body).await?;
Self::extract_content(response).await
}
/// Stats a post request to the path associated with the return type
async fn post_binary<E: Endpoint>(&mut self, data: Vec<u8>) -> Result<E::Response> {
async fn post_binary<E: Endpoint>(&self, data: Vec<u8>) -> Result<E::Response> {
let response = self
.inner
.post(format!("{}/{}", self.base_url, E::get_path()))
.post(format!("{}/{}", self.base_url, E::path()))
.body(data)
.header(ACCESS_KEY_HEADER, &self.access_key)
.header("Content-Type", "application/octet-stream")
@ -115,27 +118,27 @@ impl Client {
}
/// Returns the current API version. It's being incremented every time the API changes.
pub async fn api_version(&mut self) -> Result<ApiVersionResponse> {
pub async fn api_version(&self) -> Result<ApiVersionResponse> {
self.get_and_parse::<ApiVersion, ()>(&()).await
}
/// Creates a new session key
pub async fn session_key(&mut self) -> Result<SessionKeyResponse> {
pub async fn session_key(&self) -> Result<SessionKeyResponse> {
self.get_and_parse::<SessionKey, ()>(&()).await
}
/// Verifies if the access key is valid and returns some information about its permissions
pub async fn verify_access_key(&mut self) -> Result<VerifyAccessKeyResponse> {
pub async fn verify_access_key(&self) -> Result<VerifyAccessKeyResponse> {
self.get_and_parse::<VerifyAccessKey, ()>(&()).await
}
/// Returns the list of tag and file services of the client
pub async fn get_services(&mut self) -> Result<GetServicesResponse> {
pub async fn get_services(&self) -> Result<GetServicesResponse> {
self.get_and_parse::<GetServices, ()>(&()).await
}
/// Adds a file to hydrus
pub async fn add_file<S: AsRef<str>>(&mut self, path: S) -> Result<AddFileResponse> {
pub async fn add_file<S: AsRef<str>>(&self, path: S) -> Result<AddFileResponse> {
self.post_and_parse::<AddFile>(AddFileRequest {
path: path.as_ref().to_string(),
})
@ -143,12 +146,12 @@ impl Client {
}
/// Adds a file from binary data to hydrus
pub async fn add_binary_file(&mut self, data: Vec<u8>) -> Result<AddFileResponse> {
pub async fn add_binary_file(&self, data: Vec<u8>) -> Result<AddFileResponse> {
self.post_binary::<AddFile>(data).await
}
/// Moves files with matching hashes to the trash
pub async fn delete_files(&mut self, hashes: Vec<String>) -> Result<()> {
pub async fn delete_files(&self, hashes: Vec<String>) -> Result<()> {
self.post::<DeleteFiles>(DeleteFilesRequest { hashes })
.await?;
@ -156,7 +159,7 @@ impl Client {
}
/// Pulls files out of the trash by hash
pub async fn undelete_files(&mut self, hashes: Vec<String>) -> Result<()> {
pub async fn undelete_files(&self, hashes: Vec<String>) -> Result<()> {
self.post::<UndeleteFiles>(UndeleteFilesRequest { hashes })
.await?;
@ -164,7 +167,7 @@ impl Client {
}
/// Moves files from the inbox into the archive
pub async fn archive_files(&mut self, hashes: Vec<String>) -> Result<()> {
pub async fn archive_files(&self, hashes: Vec<String>) -> Result<()> {
self.post::<ArchiveFiles>(ArchiveFilesRequest { hashes })
.await?;
@ -172,7 +175,7 @@ impl Client {
}
/// Moves files from the archive into the inbox
pub async fn unarchive_files(&mut self, hashes: Vec<String>) -> Result<()> {
pub async fn unarchive_files(&self, hashes: Vec<String>) -> Result<()> {
self.post::<UnarchiveFiles>(UnarchiveFilesRequest { hashes })
.await?;
@ -180,7 +183,7 @@ impl Client {
}
/// Returns the list of tags as the client would see them in a human friendly order
pub async fn clean_tags(&mut self, tags: Vec<String>) -> Result<CleanTagsResponse> {
pub async fn clean_tags(&self, tags: Vec<String>) -> Result<CleanTagsResponse> {
self.get_and_parse::<CleanTags, [(&str, String)]>(&[(
"tags",
string_list_to_json_array(tags),
@ -189,7 +192,7 @@ impl Client {
}
/// Adds tags to files with the given hashes
pub async fn add_tags(&mut self, request: AddTagsRequest) -> Result<()> {
pub async fn add_tags(&self, request: AddTagsRequest) -> Result<()> {
self.post::<AddTags>(request).await?;
Ok(())
@ -197,7 +200,7 @@ impl Client {
/// Searches for files in the inbox, the archive or both
pub async fn search_files(
&mut self,
&self,
tags: Vec<String>,
location: FileSearchLocation,
) -> Result<SearchFilesResponse> {
@ -211,19 +214,37 @@ impl Client {
/// Returns the metadata for a given list of file_ids or hashes
pub async fn get_file_metadata(
&mut self,
&self,
file_ids: Vec<u64>,
hashes: Vec<String>,
) -> Result<FileMetadataResponse> {
self.get_and_parse::<FileMetadata, [(&str, String)]>(&[
("file_ids", number_list_to_json_array(file_ids)),
("hashes", string_list_to_json_array(hashes)),
])
let query = if file_ids.len() > 0 {
("file_ids", number_list_to_json_array(file_ids))
} else {
("hashes", string_list_to_json_array(hashes))
};
self.get_and_parse::<FileMetadata, [(&str, String)]>(&[query])
.await
}
/// Returns the metadata for a single file identifier
pub async fn get_file_metadata_by_identifier(
&self,
identifier: FileIdentifier,
) -> Result<FileMetadataInfo> {
let mut response = match identifier.clone() {
FileIdentifier::ID(id) => self.get_file_metadata(vec![id], vec![]).await?,
FileIdentifier::Hash(hash) => self.get_file_metadata(vec![], vec![hash]).await?,
};
response
.metadata
.pop()
.ok_or_else(|| Error::FileNotFound(identifier))
}
/// Returns the bytes of a file from hydrus
pub async fn get_file(&mut self, id: FileIdentifier) -> Result<FileRecord> {
pub async fn get_file(&self, id: FileIdentifier) -> Result<FileRecord> {
let response = match id {
FileIdentifier::ID(id) => {
self.get::<GetFile, [(&str, u64)]>(&[("file_id", id)])
@ -247,24 +268,24 @@ impl Client {
}
/// Returns all files associated with the given url
pub async fn get_url_files<S: AsRef<str>>(&mut self, url: S) -> Result<GetUrlFilesResponse> {
pub async fn get_url_files<S: AsRef<str>>(&self, url: S) -> Result<GetUrlFilesResponse> {
self.get_and_parse::<GetUrlFiles, [(&str, &str)]>(&[("url", url.as_ref())])
.await
}
/// Returns information about the given url
pub async fn get_url_info<S: AsRef<str>>(&mut self, url: S) -> Result<GetUrlInfoResponse> {
pub async fn get_url_info<S: AsRef<str>>(&self, url: S) -> Result<GetUrlInfoResponse> {
self.get_and_parse::<GetUrlInfo, [(&str, &str)]>(&[("url", url.as_ref())])
.await
}
/// Adds an url to hydrus, optionally with additional tags and a destination page
pub async fn add_url(&mut self, request: AddUrlRequest) -> Result<AddUrlResponse> {
pub async fn add_url(&self, request: AddUrlRequest) -> Result<AddUrlResponse> {
self.post_and_parse::<AddUrl>(request).await
}
/// Associates urls with the given file hashes
pub async fn associate_urls(&mut self, urls: Vec<String>, hashes: Vec<String>) -> Result<()> {
pub async fn associate_urls(&self, urls: Vec<String>, hashes: Vec<String>) -> Result<()> {
self.post::<AssociateUrl>(AssociateUrlRequest {
hashes,
urls_to_add: urls,
@ -276,11 +297,7 @@ impl Client {
}
/// Disassociates urls with the given file hashes
pub async fn disassociate_urls(
&mut self,
urls: Vec<String>,
hashes: Vec<String>,
) -> Result<()> {
pub async fn disassociate_urls(&self, urls: Vec<String>, hashes: Vec<String>) -> Result<()> {
self.post::<AssociateUrl>(AssociateUrlRequest {
hashes,
urls_to_add: vec![],

@ -11,18 +11,18 @@ pub struct BasicHashList {
pub hashes: Vec<String>,
}
#[derive(Clone, Default, Deserialize)]
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileMetadataInfo {
pub file_id: u64,
pub hash: String,
pub size: u64,
pub size: Option<u64>,
pub mime: String,
pub ext: String,
pub width: u32,
pub height: u32,
pub width: Option<u32>,
pub height: Option<u32>,
pub duration: Option<u64>,
pub has_audio: bool,
pub num_frames: Option<u16>,
pub has_audio: Option<bool>,
pub num_frames: Option<u64>,
pub num_words: Option<u64>,
pub is_inbox: bool,
pub is_local: bool,
@ -32,11 +32,19 @@ pub struct FileMetadataInfo {
pub service_names_to_statuses_to_display_tags: HashMap<String, HashMap<String, Vec<String>>>,
}
/// Identifier for a file known to hydrus: either its numeric file id
/// or its hash.
#[derive(Clone, Debug)]
pub enum FileIdentifier {
    ID(u64),
    Hash(String),
}

impl FileIdentifier {
    /// Creates an identifier from a hash-like value.
    pub fn hash<S: ToString>(hash: S) -> Self {
        Self::Hash(hash.to_string())
    }

    /// Creates an identifier from a numeric file id (mirrors [`Self::hash`]).
    pub fn id(id: u64) -> Self {
        Self::ID(id)
    }
}
#[derive(Clone)]
pub struct FileRecord {
pub bytes: Vec<u8>,
pub mime_type: String,

@ -5,12 +5,13 @@ pub mod access_management;
pub mod adding_files;
pub mod adding_tags;
pub mod adding_urls;
pub mod client;
pub mod common;
pub mod searching_and_fetching_files;
pub trait Endpoint {
pub(crate) trait Endpoint {
type Request: Serialize;
type Response: DeserializeOwned;
fn get_path() -> String;
fn path() -> String;
}

@ -1,5 +1,5 @@
use crate::endpoints::common::FileMetadataInfo;
use crate::endpoints::Endpoint;
use crate::api_core::common::FileMetadataInfo;
use crate::api_core::Endpoint;
#[derive(Debug, Clone, Deserialize)]
pub struct SearchFilesResponse {
@ -7,7 +7,6 @@ pub struct SearchFilesResponse {
}
pub enum FileSearchLocation {
All,
Inbox,
Archive,
}
@ -16,14 +15,6 @@ impl FileSearchLocation {
pub fn is_inbox(&self) -> bool {
if let &Self::Inbox = &self {
true
} else {
self.is_all()
}
}
pub fn is_all(&self) -> bool {
if let &Self::All = &self {
true
} else {
false
}
@ -33,7 +24,7 @@ impl FileSearchLocation {
if let &Self::Archive = &self {
true
} else {
self.is_all()
false
}
}
}
@ -44,14 +35,14 @@ impl Endpoint for SearchFiles {
type Request = ();
type Response = SearchFilesResponse;
fn get_path() -> String {
fn path() -> String {
String::from("get_files/search_files")
}
}
#[derive(Clone, Default, Deserialize)]
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileMetadataResponse {
metadata: Vec<FileMetadataInfo>,
pub metadata: Vec<FileMetadataInfo>,
}
pub struct FileMetadata;
@ -60,7 +51,7 @@ impl Endpoint for FileMetadata {
type Request = ();
type Response = FileMetadataResponse;
fn get_path() -> String {
fn path() -> String {
String::from("get_files/file_metadata")
}
}
@ -71,7 +62,7 @@ impl Endpoint for GetFile {
type Request = ();
type Response = ();
fn get_path() -> String {
fn path() -> String {
String::from("get_files/file")
}
}

@ -1,3 +1,4 @@
use crate::api_core::common::FileIdentifier;
use std::error::Error as StdError;
use std::fmt;
@ -7,6 +8,11 @@ pub type Result<T> = std::result::Result<T, Error>;
pub enum Error {
Reqwest(reqwest::Error),
Hydrus(String),
InvalidServiceType(String),
ImportVetoed(String),
ImportFailed(String),
FileNotFound(FileIdentifier),
InvalidMime(String),
}
impl fmt::Display for Error {
@ -14,6 +20,13 @@ impl fmt::Display for Error {
match self {
Self::Reqwest(e) => e.fmt(f),
Self::Hydrus(msg) => msg.fmt(f),
Self::InvalidServiceType(service_type) => {
write!(f, "Invalid Service Type '{}'", service_type)
}
Self::ImportFailed(msg) => write!(f, "File import failed: {}", msg),
Self::ImportVetoed(msg) => write!(f, "File import vetoed: {}", msg),
Self::FileNotFound(id) => write!(f, "File {:?} not found", id),
Self::InvalidMime(mime) => write!(f, "Failed to parse invalid mime {}", mime),
}
}
}
@ -22,7 +35,7 @@ impl StdError for Error {
fn source(&self) -> Option<&(dyn StdError + 'static)> {
match self {
Self::Reqwest(e) => e.source(),
Self::Hydrus(_) => None,
_ => None,
}
}
}

@ -3,10 +3,41 @@
//! token that can be retrieved in the hydrus client from the *review services* dialog.
//! Different actions require different permissions, you can read about it in the [official docs](https://hydrusnetwork.github.io/hydrus/help/client_api.html).
//!
//! ## Usage Example
//! ## Hydrus Usage Example
//!
//! ```
//! # use hydrus_api::{Hydrus, Client};
//! use std::env;
//! use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation;
//! use hydrus_api::wrapper::tag::Tag;
//! use hydrus_api::wrapper::service::ServiceName;
//! use hydrus_api::wrapper::hydrus_file::FileStatus;
//! use hydrus_api::wrapper::page::PageIdentifier;
//!
//! # #[tokio::test]
//! # async fn doctest() {
//! let hydrus_url = env::var("HYDRUS_URL").unwrap();
//! let access_key = env::var("HYDRUS_ACCESS_KEY").unwrap();
//! let hydrus = Hydrus::new(Client::new(hydrus_url, access_key));
//! let files = hydrus.search(FileSearchLocation::Archive,vec![Tag::from("character:megumin")]).await.unwrap();
//!
//! for mut file in files {
//! file.add_tags(ServiceName::my_tags(), vec![Tag::from("ark mage")]).await.unwrap();
//! }
//!
//! let url = hydrus.import()
//! .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
//! .page(PageIdentifier::name("My Import Page"))
//! .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin"))
//! .show_page(true)
//! .run().await.unwrap();
//! # }
//! ```
//!
//! ## Client Usage Example
//! ```
//! use hydrus_api::Client;
//! use hydrus_api::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction};
//! use hydrus_api::api_core::adding_tags::{AddTagsRequestBuilder, TagAction};
//! use std::env;
//! # #[tokio::test]
//! # async fn doctest() {
@ -14,7 +45,7 @@
//! Client::new(
//! env::var("HYDRUS_URL").unwrap(),
//! env::var("HYDRUS_ACCESS_KEY").unwrap(),
//! ).unwrap();
//! );
//! // let's first import a file
//! let hash = client.add_file("/path/to/my/file").await.unwrap().hash;
//!
@ -34,9 +65,10 @@
#[macro_use]
extern crate serde;
pub mod client;
pub mod endpoints;
pub use api_core::client::Client;
pub use wrapper::hydrus::Hydrus;
pub mod api_core;
pub mod error;
pub(crate) mod utils;
pub use client::Client;
pub mod wrapper;

@ -1,3 +1,5 @@
use crate::wrapper::tag::Tag;
/// Serializes a list of strings into a JSON array string.
///
/// NOTE(review): entries are not escaped; a value containing `"` or `\`
/// produces invalid JSON — confirm inputs are plain tags/hashes.
pub fn string_list_to_json_array(l: Vec<String>) -> String {
    // An empty list must serialize to `[]`; the join-based format string
    // alone would yield `[""]` (a one-element array with an empty string).
    if l.is_empty() {
        String::from("[]")
    } else {
        format!("[\"{}\"]", l.join("\",\""))
    }
}
@ -12,3 +14,8 @@ pub fn number_list_to_json_array<T: ToString>(l: Vec<T>) -> String {
))
)
}
/// Converts a list of tags into their string representations.
pub fn tag_list_to_string_list(tags: Vec<Tag>) -> Vec<String> {
    let mut strings = Vec::with_capacity(tags.len());

    for tag in tags {
        strings.push(tag.to_string());
    }

    strings
}

@ -0,0 +1,167 @@
use crate::api_core::adding_files::{STATUS_IMPORT_FAILED, STATUS_IMPORT_VETOED};
use crate::api_core::adding_urls::AddUrlRequestBuilder;
use crate::error::{Error, Result};
use crate::utils::tag_list_to_string_list;
use crate::wrapper::hydrus_file::HydrusFile;
use crate::wrapper::page::PageIdentifier;
use crate::wrapper::service::ServiceName;
use crate::wrapper::tag::Tag;
use crate::wrapper::url::Url;
use crate::Client;
use std::collections::HashMap;
use std::io::Read;
/// Entry point for building import requests, obtained from `Hydrus::import`.
pub struct ImportBuilder {
    pub(crate) client: Client,
}

impl ImportBuilder {
    /// Creates a builder for importing a file from a path or from raw bytes.
    pub fn file(self, file: FileImport) -> FileImportBuilder {
        FileImportBuilder {
            client: self.client,
            file,
        }
    }

    /// Creates a builder for importing a file from an url.
    pub fn url<S: ToString>(self, url: S) -> UrlImportBuilder {
        // `self` is consumed here, so the client can be moved instead of
        // cloned (the original `self.client.clone()` was a redundant clone).
        UrlImportBuilder::new(self.client, url)
    }
}
/// Source of a file import: a filesystem path or in-memory bytes.
pub enum FileImport {
    Path(String),
    Binary(Vec<u8>),
}

impl FileImport {
    /// Creates an import source from a filesystem path.
    pub fn path<S: ToString>(path: S) -> Self {
        FileImport::Path(path.to_string())
    }

    /// Creates an import source by reading all bytes from the given reader.
    pub fn binary<R: Read>(reader: &mut R) -> Self {
        let mut buf = Vec::new();
        // Read errors are deliberately ignored; whatever was read so far
        // (possibly nothing) becomes the binary payload.
        let _ = reader.read_to_end(&mut buf);

        FileImport::Binary(buf)
    }
}
pub struct FileImportBuilder {
client: Client,
file: FileImport,
}
impl FileImportBuilder {
pub async fn run(self) -> Result<HydrusFile> {
let response = match self.file {
FileImport::Path(path) => self.client.add_file(path).await?,
FileImport::Binary(b) => self.client.add_binary_file(b).await?,
};
if response.status == STATUS_IMPORT_FAILED {
Err(Error::ImportFailed(response.note))
} else if response.status == STATUS_IMPORT_VETOED {
Err(Error::ImportVetoed(response.note))
} else {
Ok(HydrusFile::from_raw_status_and_hash(
self.client,
response.status,
response.hash,
))
}
}
}
/// Builder for importing a file into hydrus via an URL.
///
/// Collects the url, an optional destination page, tags to filter and
/// per-service additional tags before sending the request in [Self::run].
pub struct UrlImportBuilder {
    // client used to send the add_url / get_url_info requests
    client: Client,
    // the url to import
    url: String,
    // optional destination page, identified by name or key
    page: Option<PageIdentifier>,
    // whether the destination page should be focussed in the client
    show_page: bool,
    // tags to be filtered out of the parsed tags
    filter_tags: Vec<Tag>,
    // service name -> additional tags to add to the imported file
    service_tag_mappings: HashMap<String, Vec<Tag>>,
}

impl UrlImportBuilder {
    /// Creates a new url import builder for the given client and url.
    pub fn new<S: ToString>(client: Client, url: S) -> Self {
        Self {
            client,
            url: url.to_string(),
            page: None,
            show_page: false,
            filter_tags: vec![],
            service_tag_mappings: Default::default(),
        }
    }

    /// Sets the destination page of the import
    pub fn page(mut self, page: PageIdentifier) -> Self {
        self.page = Some(page);
        self
    }

    /// If the destination page of the import should be focussed
    pub fn show_page(mut self, show: bool) -> Self {
        self.show_page = show;
        self
    }

    /// Adds a tag that should be filtered
    pub fn add_filter_tag(mut self, tag: Tag) -> Self {
        self.filter_tags.push(tag);
        self
    }

    /// Adds multiple tags that should be filtered
    pub fn add_filter_tags(mut self, mut tags: Vec<Tag>) -> Self {
        self.filter_tags.append(&mut tags);
        self
    }

    /// Adds an additional tag for the imported file
    pub fn add_additional_tag(self, service: ServiceName, tag: Tag) -> Self {
        self.add_additional_tags(service, vec![tag])
    }

    /// Adds multiple additional tags for the import
    pub fn add_additional_tags(mut self, service: ServiceName, mut tags: Vec<Tag>) -> Self {
        // append to an existing mapping for this service or start a new one
        if let Some(service_tags) = self.service_tag_mappings.get_mut(&service.0) {
            service_tags.append(&mut tags);
        } else {
            self.service_tag_mappings.insert(service.0, tags);
        }

        self
    }

    /// Imports the URL
    pub async fn run(self) -> Result<Url> {
        // assemble the add_url request from everything collected above
        let mut request = AddUrlRequestBuilder::default().url(&self.url);

        for (service, tags) in self.service_tag_mappings {
            request = request.add_tags(service, tag_list_to_string_list(tags));
        }
        request = request.add_filter_tags(tag_list_to_string_list(self.filter_tags));
        if let Some(page) = self.page {
            request = match page {
                PageIdentifier::Name(n) => request.destination_page_name(n),
                PageIdentifier::Key(k) => request.destination_page_key(k),
            };
        }
        request = request.show_destination_page(self.show_page);

        let response = self.client.add_url(request.build()).await?;
        // a second request retrieves metadata about the url itself
        let url_info = self.client.get_url_info(&self.url).await?;

        Ok(Url {
            url: self.url,
            client: self.client,
            normalised_url: response.normalised_url,
            url_type: url_info.url_type.into(),
            match_name: url_info.match_name,
            can_parse: url_info.can_parse,
        })
    }
}

@ -0,0 +1,2 @@
pub mod import_builder;
pub mod tagging_builder;

@ -0,0 +1,70 @@
use crate::api_core::adding_tags::{AddTagsRequestBuilder, TagAction};
use crate::error::Result;
use crate::wrapper::service::ServiceName;
use crate::wrapper::tag::Tag;
use crate::Client;
use std::collections::HashMap;
/// Builder for bulk-adding tags to a set of files.
pub struct TaggingBuilder {
    // client used to send the final add_tags request
    client: Client,
    // hashes of the files that receive the tags
    hashes: Vec<String>,
    // service -> tag action -> tags collected so far
    tag_mappings: HashMap<ServiceName, HashMap<TagAction, Vec<Tag>>>,
}
impl TaggingBuilder {
pub(crate) fn new(client: Client) -> Self {
Self {
client,
hashes: Vec::new(),
tag_mappings: Default::default(),
}
}
/// Adds a file that should get the tags defined for this request
pub fn add_file<S: ToString>(mut self, hash: S) -> Self {
self.hashes.push(hash.to_string());
self
}
/// Adds a single tag for a given service
pub fn add_tag(self, service: ServiceName, action: TagAction, tag: Tag) -> Self {
self.add_tags(service, action, vec![tag])
}
/// Adds tags with actions for the given service
pub fn add_tags(mut self, service: ServiceName, action: TagAction, mut tags: Vec<Tag>) -> Self {
let service_action_mappings =
if let Some(service_action_mappings) = self.tag_mappings.get_mut(&service) {
service_action_mappings
} else {
self.tag_mappings.insert(service.clone(), HashMap::new());
self.tag_mappings.get_mut(&service).unwrap()
};
if let Some(action_tag_mappings) = service_action_mappings.get_mut(&action) {
action_tag_mappings.append(&mut tags)
} else {
service_action_mappings.insert(action, tags);
}
self
}
/// Executes the request
pub async fn run(self) -> Result<()> {
let mut request = AddTagsRequestBuilder::default().add_hashes(self.hashes);
for (service, action_tag_mappings) in self.tag_mappings {
for (action, tags) in action_tag_mappings {
for tag in tags {
request = request.add_tag_with_action(
service.0.clone(),
tag.to_string(),
action.clone(),
);
}
}
}
self.client.add_tags(request.build()).await
}
}

@ -0,0 +1,97 @@
use crate::api_core::common::FileIdentifier;
use crate::api_core::searching_and_fetching_files::FileSearchLocation;
use crate::error::Result;
use crate::utils::tag_list_to_string_list;
use crate::wrapper::builders::import_builder::ImportBuilder;
use crate::wrapper::builders::tagging_builder::TaggingBuilder;
use crate::wrapper::hydrus_file::HydrusFile;
use crate::wrapper::service::Services;
use crate::wrapper::tag::Tag;
use crate::wrapper::url::Url;
use crate::wrapper::version::Version;
use crate::Client;
/// A high level wrapper for the hydrus API for easier management of files, tags
/// urls etc.
pub struct Hydrus {
    client: Client,
}

impl Hydrus {
    /// Creates a new high level Hydrus API client
    pub fn new(client: Client) -> Self {
        Self { client }
    }

    /// Returns the Hydrus and API Version
    pub async fn version(&self) -> Result<Version> {
        let info = self.client.api_version().await?;

        Ok(Version {
            api: info.version,
            hydrus: info.hydrus_version,
        })
    }

    /// Returns a list of available services
    pub async fn services(&self) -> Result<Services> {
        let info = self.client.get_services().await?;

        Ok(Services::from_response(self.client.clone(), info))
    }

    /// Creates an import builder to build an import request to hydrus
    pub fn import(&self) -> ImportBuilder {
        let client = self.client.clone();

        ImportBuilder { client }
    }

    /// Returns information about a given url in an object that allows
    /// further operations with that url
    pub async fn url<S: AsRef<str>>(&self, url: S) -> Result<Url> {
        let info = self.client.get_url_info(&url).await?;

        Ok(Url {
            url: url.as_ref().to_string(),
            client: self.client.clone(),
            normalised_url: info.normalised_url,
            url_type: info.url_type.into(),
            match_name: info.match_name,
            can_parse: info.can_parse,
        })
    }

    /// Returns a file by identifier to perform further operations on
    pub async fn file(&self, identifier: FileIdentifier) -> Result<HydrusFile> {
        let metadata = self
            .client
            .get_file_metadata_by_identifier(identifier)
            .await?;

        Ok(HydrusFile::from_metadata(self.client.clone(), metadata))
    }

    /// Starts a request to bulk add tags to files
    pub fn tagging(&self) -> TaggingBuilder {
        TaggingBuilder::new(self.client.clone())
    }

    /// Searches for files that have the given tags and returns a list of hydrus files as a result
    pub async fn search(
        &self,
        location: FileSearchLocation,
        tags: Vec<Tag>,
    ) -> Result<Vec<HydrusFile>> {
        let result = self
            .client
            .search_files(tag_list_to_string_list(tags), location)
            .await?;
        let mut files = Vec::with_capacity(result.file_ids.len());

        for id in result.file_ids {
            files.push(HydrusFile::from_id(self.client.clone(), id));
        }

        Ok(files)
    }
}

@ -0,0 +1,264 @@
use crate::api_core::adding_tags::{AddTagsRequestBuilder, TagAction};
use crate::api_core::common::{FileIdentifier, FileMetadataInfo, FileRecord};
use crate::error::{Error, Result};
use crate::utils::tag_list_to_string_list;
use crate::wrapper::service::ServiceName;
use crate::wrapper::tag::Tag;
use crate::Client;
use mime::Mime;
use std::collections::HashMap;
/// Import status of a file as tracked by this wrapper.
///
/// `Eq` is derived instead of the previous empty manual impl; `Copy` and
/// `Hash` are added since all variants are unit variants.
#[derive(Clone, Copy, Debug, PartialOrd, PartialEq, Eq, Hash)]
pub enum FileStatus {
    /// The file is known but not yet imported into the database
    ReadyForImport,
    /// The file is stored in the hydrus database
    InDatabase,
    /// The file has been deleted
    Deleted,
    /// The status has not been determined yet (no metadata fetched)
    Unknown,
}
/// A high level wrapper around a single file known to hydrus.
#[derive(Clone)]
pub struct HydrusFile {
    pub(crate) client: Client,
    /// identifier (numeric id or hash) used to address the file in API calls
    pub id: FileIdentifier,
    /// last known import status of the file
    pub status: FileStatus,
    // lazily fetched metadata cache; populated and reused by `metadata()`
    pub(crate) metadata: Option<FileMetadataInfo>,
}
impl HydrusFile {
    /// Creates a wrapper for a file addressed by its numeric file id.
    /// The status is unknown until metadata has been fetched.
    pub(crate) fn from_id(client: Client, id: u64) -> Self {
        Self {
            client,
            id: FileIdentifier::ID(id),
            status: FileStatus::Unknown,
            metadata: None,
        }
    }

    /// Creates a wrapper for a file addressed by its hash from a raw
    /// import status code.
    pub(crate) fn from_raw_status_and_hash<S: ToString>(
        client: Client,
        status: u8,
        hash: S,
    ) -> Self {
        // NOTE(review): mapping assumes the hydrus import status codes
        // (3 = deleted, 0 = not in the database yet) — confirm against the API docs.
        let status = if status == 3 {
            FileStatus::Deleted
        } else if status == 0 {
            FileStatus::ReadyForImport
        } else {
            FileStatus::InDatabase
        };
        Self {
            client,
            id: FileIdentifier::Hash(hash.to_string()),
            status,
            metadata: None,
        }
    }

    /// Creates a wrapper from already fetched metadata, caching the
    /// metadata so the first accessor call doesn't fetch it again.
    pub(crate) fn from_metadata(client: Client, metadata: FileMetadataInfo) -> Self {
        let status = if metadata.is_trashed {
            FileStatus::Deleted
        } else {
            FileStatus::InDatabase
        };
        Self {
            client,
            id: FileIdentifier::Hash(metadata.hash.clone()),
            status,
            metadata: Some(metadata),
        }
    }

    /// Deletes the internally stored metadata about the file and retrieves it again
    pub async fn update(&mut self) -> Result<()> {
        self.metadata = None;
        self.metadata().await?;
        Ok(())
    }

    /// Returns the hash of the file
    /// if the file identifier is an id it calls hydrus to resolve the file
    pub async fn hash(&mut self) -> Result<String> {
        match &self.id {
            FileIdentifier::ID(_) => {
                let metadata = self.metadata().await?;
                Ok(metadata.hash.clone())
            }
            FileIdentifier::Hash(hash) => Ok(hash.clone()),
        }
    }

    /// Returns the file size in bytes
    pub async fn size(&mut self) -> Result<Option<u64>> {
        let metadata = self.metadata().await?;
        Ok(metadata.size.clone())
    }

    /// Returns the mime of the file
    pub async fn mime(&mut self) -> Result<Mime> {
        let metadata = self.metadata().await?;
        // the mime string comes from hydrus; a parse failure is surfaced as
        // an InvalidMime error carrying the original string
        let mime = metadata
            .mime
            .as_str()
            .parse()
            .map_err(|_| Error::InvalidMime(metadata.mime.clone()))?;
        Ok(mime)
    }

    /// Return the file extension
    pub async fn ext(&mut self) -> Result<String> {
        let metadata = self.metadata().await?;
        Ok(metadata.ext.clone())
    }

    /// Returns the dimensions of the file in pixels
    pub async fn dimensions(&mut self) -> Result<Option<(u32, u32)>> {
        let metadata = self.metadata().await?;
        // both dimensions must be present; otherwise the file has none
        if let (Some(width), Some(height)) = (&metadata.width, &metadata.height) {
            Ok(Some((*width, *height)))
        } else {
            Ok(None)
        }
    }

    /// Returns the duration of the file in seconds if it's a video
    pub async fn duration(&mut self) -> Result<Option<u64>> {
        let metadata = self.metadata().await?;
        Ok(metadata.duration.clone())
    }

    /// Returns the number of frames of the file if it's a video
    pub async fn num_frames(&mut self) -> Result<Option<u64>> {
        let metadata = self.metadata().await?;
        Ok(metadata.num_frames.clone())
    }

    /// Returns if the file has audio
    pub async fn has_audio(&mut self) -> Result<bool> {
        let metadata = self.metadata().await?;
        // a missing value is treated as "no audio"
        Ok(metadata.has_audio.unwrap_or(false))
    }

    /// Returns if the file is currently in the inbox
    pub async fn in_inbox(&mut self) -> Result<bool> {
        let metadata = self.metadata().await?;
        Ok(metadata.is_inbox)
    }

    /// Returns if the file is stored locally
    pub async fn stored_locally(&mut self) -> Result<bool> {
        let metadata = self.metadata().await?;
        Ok(metadata.is_local)
    }

    /// Returns if the file has been moved to the trash
    pub async fn moved_to_trashed(&mut self) -> Result<bool> {
        let metadata = self.metadata().await?;
        Ok(metadata.is_trashed)
    }

    /// Associates the file with a list of urls
    pub async fn associate_urls(&mut self, urls: Vec<String>) -> Result<()> {
        let hash = self.hash().await?;
        self.client.associate_urls(urls, vec![hash]).await
    }

    /// Disassociates the file with a list of urls
    pub async fn disassociate_urls(&mut self, urls: Vec<String>) -> Result<()> {
        let hash = self.hash().await?;
        self.client.disassociate_urls(urls, vec![hash]).await
    }

    /// Returns map mapping lists of tags to services
    pub async fn services_with_tags(&mut self) -> Result<HashMap<ServiceName, Vec<Tag>>> {
        let metadata = self.metadata().await?;
        let mut tag_mappings = HashMap::new();

        // flatten the per-status tag lists of each service into one list per service
        for (service, status_tags) in &metadata.service_names_to_statuses_to_tags {
            let mut tag_list = Vec::new();

            for (_, tags) in status_tags {
                tag_list.append(&mut tags.into_iter().map(|t| t.into()).collect())
            }
            tag_mappings.insert(ServiceName(service.clone()), tag_list);
        }

        Ok(tag_mappings)
    }

    /// Returns a list of all tags assigned to the file
    pub async fn tags(&mut self) -> Result<Vec<Tag>> {
        let mut tag_list = Vec::new();
        let tag_mappings = self.services_with_tags().await?;

        for (_, mut tags) in tag_mappings {
            tag_list.append(&mut tags);
        }

        Ok(tag_list)
    }

    /// Adds tags for a specific service to the file
    pub async fn add_tags(&mut self, service: ServiceName, tags: Vec<Tag>) -> Result<()> {
        let hash = self.hash().await?;
        let request = AddTagsRequestBuilder::default()
            .add_hash(hash)
            .add_tags(service.0, tag_list_to_string_list(tags))
            .build();

        self.client.add_tags(request).await
    }

    /// Allows modification of tags by using the defined tag actions
    pub async fn modify_tags(
        &mut self,
        service: ServiceName,
        action: TagAction,
        tags: Vec<Tag>,
    ) -> Result<()> {
        let hash = self.hash().await?;
        // note: the local is named `reqwest` but holds an AddTagsRequestBuilder;
        // one request entry is added per tag, all with the same action
        let mut reqwest = AddTagsRequestBuilder::default().add_hash(hash);

        for tag in tags {
            reqwest =
                reqwest.add_tag_with_action(service.0.clone(), tag.to_string(), action.clone());
        }

        self.client.add_tags(reqwest.build()).await
    }

    /// Retrieves the file record bytes
    pub async fn retrieve(&self) -> Result<FileRecord> {
        self.client.get_file(self.id.clone()).await
    }

    /// Returns the metadata for the given file
    /// if there's already known metadata about the file it uses that
    async fn metadata(&mut self) -> Result<&FileMetadataInfo> {
        if self.metadata.is_none() {
            let metadata = self
                .client
                .get_file_metadata_by_identifier(self.id.clone())
                .await?;
            // refresh the cached status alongside the metadata
            self.status = if metadata.is_trashed {
                FileStatus::Deleted
            } else {
                FileStatus::InDatabase
            };
            self.metadata = Some(metadata);
        }

        Ok(self.metadata.as_ref().unwrap())
    }
}

@ -0,0 +1,8 @@
// High level wrapper modules layered on top of the raw API client.
pub mod builders;
pub mod hydrus;
pub mod hydrus_file;
pub mod page;
pub mod service;
pub mod tag;
pub mod url;
pub mod version;

@ -0,0 +1,20 @@
/// A page in the hydrus client, addressed by a `PageIdentifier`.
#[derive(Clone)]
pub struct HydrusPage {
    /// identifier (name or key) of the page
    pub id: PageIdentifier,
}
/// Identifies a hydrus client page either by its display name or by its page key.
///
/// `Debug`, `PartialEq`, `Eq` and `Hash` are derived so identifiers can be
/// compared and used as map keys.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum PageIdentifier {
    /// The page's display name
    Name(String),
    /// The page's unique key
    Key(String),
}

impl PageIdentifier {
    /// Creates a page identifier from a page name
    pub fn name<S: ToString>(name: S) -> Self {
        Self::Name(name.to_string())
    }

    /// Creates a page identifier from a page key
    pub fn key<S: ToString>(key: S) -> Self {
        Self::Key(key.to_string())
    }
}

@ -0,0 +1,145 @@
use crate::api_core::access_management::GetServicesResponse;
use crate::api_core::access_management::{
SERVICE_TYPE_ALL_KNOWN_FILES, SERVICE_TYPE_ALL_KNOWN_TAGS, SERVICE_TYPE_ALL_LOCAL_FILES,
SERVICE_TYPE_FILE_REPOSITORIES, SERVICE_TYPE_LOCAL_FILES, SERVICE_TYPE_LOCAL_TAGS,
SERVICE_TYPE_TAG_REPOSITORIES, SERVICE_TYPE_TRASH,
};
use crate::error::Error;
use crate::Client;
use std::collections::HashMap;
use std::convert::TryFrom;
/// The type of a hydrus service (tag/file repositories, local storage, trash, ...).
///
/// `Eq` is derived instead of the previous empty manual impl; `Copy` and
/// `Debug` are added since all variants are unit variants.
#[derive(Clone, Copy, Debug, PartialOrd, PartialEq, Eq, Hash)]
pub enum ServiceType {
    LocalTags,
    TagRepositories,
    LocalFiles,
    FileRepositories,
    AllLocalFiles,
    AllKnownFiles,
    AllKnownTags,
    Trash,
}
impl TryFrom<String> for ServiceType {
type Error = Error;
fn try_from(value: String) -> Result<Self, Self::Error> {
match value.as_str() {
s if s == SERVICE_TYPE_LOCAL_TAGS => Ok(Self::LocalTags),
s if s == SERVICE_TYPE_TAG_REPOSITORIES => Ok(Self::TagRepositories),
s if s == SERVICE_TYPE_LOCAL_FILES => Ok(Self::LocalFiles),
s if s == SERVICE_TYPE_FILE_REPOSITORIES => Ok(Self::FileRepositories),
s if s == SERVICE_TYPE_ALL_LOCAL_FILES => Ok(Self::AllLocalFiles),
s if s == SERVICE_TYPE_ALL_KNOWN_FILES => Ok(Self::AllKnownFiles),
s if s == SERVICE_TYPE_ALL_KNOWN_TAGS => Ok(Self::AllKnownTags),
s if s == SERVICE_TYPE_TRASH => Ok(Self::Trash),
_ => Err(Error::InvalidServiceType(value)),
}
}
}
/// Formats the service type as its raw hydrus service type identifier.
///
/// `Display` is implemented instead of `ToString` directly (the idiomatic
/// form); the standard blanket impl still provides `to_string()` for all
/// existing callers, so this is backward-compatible.
impl std::fmt::Display for ServiceType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let identifier = match self {
            ServiceType::LocalTags => String::from(SERVICE_TYPE_LOCAL_TAGS),
            ServiceType::TagRepositories => String::from(SERVICE_TYPE_TAG_REPOSITORIES),
            ServiceType::LocalFiles => String::from(SERVICE_TYPE_LOCAL_FILES),
            ServiceType::FileRepositories => String::from(SERVICE_TYPE_FILE_REPOSITORIES),
            ServiceType::AllLocalFiles => String::from(SERVICE_TYPE_ALL_LOCAL_FILES),
            ServiceType::AllKnownFiles => String::from(SERVICE_TYPE_ALL_KNOWN_FILES),
            ServiceType::AllKnownTags => String::from(SERVICE_TYPE_ALL_KNOWN_TAGS),
            ServiceType::Trash => String::from(SERVICE_TYPE_TRASH),
        };

        f.write_str(&identifier)
    }
}
/// The name of a hydrus service as displayed in the client.
///
/// `Eq` is derived instead of the previous empty manual impl and `Debug`
/// is added for diagnostics.
#[derive(Clone, Debug, PartialOrd, PartialEq, Eq, Hash)]
pub struct ServiceName(pub String);

impl ServiceName {
    /// The `my tags` service
    pub fn my_tags() -> Self {
        Self(String::from("my tags"))
    }

    /// The `my files` service
    pub fn my_files() -> Self {
        Self(String::from("my files"))
    }

    /// The `public tag repository` service
    pub fn public_tag_repository() -> Self {
        Self(String::from("public tag repository"))
    }

    /// The `all local files` service
    pub fn all_local_files() -> Self {
        Self(String::from("all local files"))
    }

    /// The `all known tags` service
    pub fn all_known_tags() -> Self {
        Self(String::from("all known tags"))
    }

    /// The `all known files` service
    pub fn all_known_files() -> Self {
        Self(String::from("all known files"))
    }
}
/// A single hydrus service with its name, key and type.
#[derive(Clone)]
pub struct Service {
    // client handle kept for future service-level operations
    client: Client,
    /// display name of the service
    pub name: ServiceName,
    /// unique service key used to address the service in API calls
    pub key: String,
    /// the type of this service
    pub service_type: ServiceType,
}
/// All services known to the client, grouped by service type.
#[derive(Clone)]
pub struct Services {
    // services grouped by their type
    inner: HashMap<ServiceType, Vec<Service>>,
}
impl Services {
    /// Creates the services list from a given hydrus response.
    ///
    /// Consumes the response map directly instead of cloning the full key
    /// list (the previous version collected and redundantly cloned all keys,
    /// then removed each entry in a second lookup).
    pub fn from_response(client: Client, response: GetServicesResponse) -> Self {
        let services_by_type_name = response.0;
        let mut inner = HashMap::with_capacity(services_by_type_name.len());

        for (type_name, basic_services) in services_by_type_name {
            // service types unknown to this wrapper are silently skipped
            if let Ok(service_type) = ServiceType::try_from(type_name) {
                let service_list: Vec<Service> = basic_services
                    .into_iter()
                    .map(|basic_service| Service {
                        service_type: service_type.clone(),
                        name: ServiceName(basic_service.name),
                        key: basic_service.service_key,
                        client: client.clone(),
                    })
                    .collect();
                inner.insert(service_type, service_list);
            }
        }

        Self { inner }
    }

    /// Returns a list of all services of the given type;
    /// an empty list if no service of that type is known.
    pub fn get_services(&self, service_type: ServiceType) -> Vec<&Service> {
        self.inner
            .get(&service_type)
            .map(|services| services.iter().collect())
            .unwrap_or_default()
    }
}

@ -0,0 +1,35 @@
/// A hydrus tag, optionally prefixed with a namespace (e.g. `character:megumin`).
///
/// `PartialEq`, `Eq` and `Hash` are derived so tags can be compared and
/// deduplicated.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Tag {
    /// the tag name without its namespace
    pub name: String,
    /// the namespace part before the first `:`, if any
    pub namespace: Option<String>,
}

impl<S> From<S> for Tag
where
    S: AsRef<str>,
{
    /// Parses a tag from its textual form, splitting off an optional
    /// `namespace:` prefix at the first colon.
    fn from(value: S) -> Self {
        let value = value.as_ref();

        match value.split_once(':') {
            Some((namespace, name)) => Self {
                namespace: Some(namespace.to_string()),
                name: name.to_string(),
            },
            None => Self {
                name: value.to_string(),
                namespace: None,
            },
        }
    }
}

/// Formats the tag back into its textual `namespace:name` form.
///
/// `Display` is implemented instead of `ToString` directly (the idiomatic
/// form); the standard blanket impl still provides `to_string()` for all
/// existing callers.
impl std::fmt::Display for Tag {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.namespace {
            Some(namespace) => write!(f, "{}:{}", namespace, self.name),
            None => f.write_str(&self.name),
        }
    }
}

@ -0,0 +1,75 @@
use crate::api_core::adding_urls::{
URL_TYPE_FILE, URL_TYPE_GALLERY, URL_TYPE_POST, URL_TYPE_WATCHABLE,
};
use crate::error::Result;
use crate::wrapper::builders::import_builder::UrlImportBuilder;
use crate::wrapper::hydrus_file::HydrusFile;
use crate::Client;
/// The type of a url as classified by hydrus.
///
/// `Eq` is derived instead of the previous empty manual impl; `Copy` and
/// `Hash` are added since all variants are unit variants.
#[derive(Clone, Copy, Debug, PartialOrd, PartialEq, Eq, Hash)]
pub enum UrlType {
    Post,
    File,
    Gallery,
    Watchable,
    /// the raw value did not match any known url type
    Unknown,
}
impl From<u8> for UrlType {
    /// Converts the numeric url type reported by the API into the enum;
    /// values outside the known constants map to `UrlType::Unknown`.
    fn from(value: u8) -> Self {
        if value == URL_TYPE_POST {
            Self::Post
        } else if value == URL_TYPE_FILE {
            Self::File
        } else if value == URL_TYPE_GALLERY {
            Self::Gallery
        } else if value == URL_TYPE_WATCHABLE {
            Self::Watchable
        } else {
            Self::Unknown
        }
    }
}
/// A url known to or resolvable by hydrus.
#[derive(Clone)]
pub struct Url {
    /// the url as originally passed by the caller
    pub url: String,
    pub(crate) client: Client,
    /// the url in the normalised form hydrus uses internally
    pub normalised_url: String,
    /// the type (post, file, gallery, ...) hydrus assigned to the url
    pub url_type: UrlType,
    /// the name of the url match hydrus used to classify it
    pub match_name: String,
    /// whether hydrus has a parser for this url
    pub can_parse: bool,
}
impl Url {
    /// Returns a list of files associated with the url
    pub async fn files(&mut self) -> Result<Vec<HydrusFile>> {
        let statuses = self.client.get_url_files(&self.url).await?.url_file_statuses;
        let mut files = Vec::with_capacity(statuses.len());

        for file_status in statuses {
            files.push(HydrusFile::from_raw_status_and_hash(
                self.client.clone(),
                file_status.status,
                file_status.hash,
            ));
        }

        Ok(files)
    }

    /// Creates an import builder for the url
    pub fn import(&mut self) -> UrlImportBuilder {
        UrlImportBuilder::new(self.client.clone(), &self.url)
    }

    /// Associates the url with a list of file hashes
    pub async fn associate(&mut self, hashes: Vec<String>) -> Result<()> {
        let url = self.url.clone();

        self.client.associate_urls(vec![url], hashes).await
    }

    /// Disassociates the url with a list of file hashes
    pub async fn disassociate(&mut self, hashes: Vec<String>) -> Result<()> {
        let url = self.url.clone();

        self.client.disassociate_urls(vec![url], hashes).await
    }
}

@ -0,0 +1,4 @@
/// The version of the connected hydrus client and its API.
///
/// Common derives are added so versions can be copied, printed and compared.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct Version {
    /// version of the client API
    pub api: u32,
    /// version of the hydrus client itself
    pub hydrus: u32,
}

@ -0,0 +1,5 @@
// Integration tests for the low level client, one module per API area.
mod test_access_management;
mod test_adding_files;
mod test_adding_tags;
mod test_adding_urls;
mod test_searching_and_fetching_files;

@ -1,8 +1,8 @@
mod common;
use super::super::common;
#[tokio::test]
async fn it_returns_the_api_version() {
let mut client = common::get_client();
let client = common::get_client();
let api_version = client.api_version().await.unwrap();
assert!(api_version.hydrus_version > 0);
assert!(api_version.version > 0);
@ -10,14 +10,14 @@ async fn it_returns_the_api_version() {
#[tokio::test]
async fn it_returns_the_session_key() {
let mut client = common::get_client();
let client = common::get_client();
let session_key = client.session_key().await.unwrap();
assert!(session_key.session_key.len() > 0);
}
#[tokio::test]
async fn it_verifies_the_access_key() {
let mut client = common::get_client();
let client = common::get_client();
let verification_response = client.verify_access_key().await.unwrap();
assert!(verification_response.basic_permissions.len() > 0); // needs to be configured in the client but we want at least some permissions for the test
assert!(verification_response.human_description.len() > 0);
@ -25,7 +25,7 @@ async fn it_verifies_the_access_key() {
#[tokio::test]
async fn it_returns_a_list_of_services() {
let mut client = common::get_client();
let client = common::get_client();
let services_response = client.get_services().await.unwrap();
assert!(services_response.0.keys().len() > 0);
}

@ -1,15 +1,15 @@
mod common;
use super::super::common;
#[tokio::test]
async fn it_adds_files() {
let mut client = common::get_client();
let client = common::get_client();
let result = client.add_file("/does/not/exist").await;
assert!(result.is_err()); // because the path does not exist
}
#[tokio::test]
async fn it_adds_binary_files() {
let mut client = common::get_client();
let client = common::get_client();
let result = client
.add_binary_file(vec![0u8, 0u8, 0u8, 0u8])
.await
@ -19,24 +19,24 @@ async fn it_adds_binary_files() {
#[tokio::test]
async fn it_deletes_files() {
let mut client = common::get_client();
let client = common::get_client();
client.delete_files(vec![]).await.unwrap();
}
#[tokio::test]
async fn it_undeletes_files() {
let mut client = common::get_client();
let client = common::get_client();
client.undelete_files(vec![]).await.unwrap();
}
#[tokio::test]
async fn it_archives_files() {
let mut client = common::get_client();
let client = common::get_client();
client.archive_files(vec![]).await.unwrap();
}
#[tokio::test]
async fn it_unarchives_files() {
let mut client = common::get_client();
let client = common::get_client();
client.unarchive_files(vec![]).await.unwrap();
}

@ -1,9 +1,9 @@
use hydrus_api::endpoints::adding_tags::{AddTagsRequestBuilder, TagAction};
mod common;
use super::super::common;
use hydrus_api::api_core::adding_tags::{AddTagsRequestBuilder, TagAction};
#[tokio::test]
async fn it_cleans_tags() {
let mut client = common::get_client();
let client = common::get_client();
let response = client
.clean_tags(vec![
"summer".into(),
@ -18,7 +18,7 @@ async fn it_cleans_tags() {
#[tokio::test]
async fn it_adds_tags() {
let mut client = common::get_client();
let client = common::get_client();
let request = AddTagsRequestBuilder::default()
.add_hash("0000000000000000000000000000000000000000000000000000000000000000") // valid hash, I hope no files are affected
.add_tags("my tags", vec!["beach".into(), "summer".into()])

@ -1,10 +1,9 @@
use hydrus_api::endpoints::adding_urls::{AddUrlRequestBuilder, URL_TYPE_POST};
mod common;
use super::super::common;
use hydrus_api::api_core::adding_urls::{AddUrlRequestBuilder, URL_TYPE_POST};
#[tokio::test]
async fn it_returns_files_for_an_url() {
let mut client = common::get_client();
let client = common::get_client();
let response = client
.get_url_files("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
.await
@ -15,7 +14,7 @@ async fn it_returns_files_for_an_url() {
#[tokio::test]
async fn it_returns_url_information() {
let mut client = common::get_client();
let client = common::get_client();
let info = client
.get_url_info("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
.await
@ -26,7 +25,7 @@ async fn it_returns_url_information() {
#[tokio::test]
async fn it_adds_urls() {
let mut client = common::get_client();
let client = common::get_client();
let request = AddUrlRequestBuilder::default()
.url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
.add_tags(
@ -42,7 +41,7 @@ async fn it_adds_urls() {
#[tokio::test]
async fn it_associates_urls() {
let mut client = common::get_client();
let client = common::get_client();
client
.associate_urls(
vec![
@ -57,7 +56,7 @@ async fn it_associates_urls() {
#[tokio::test]
async fn it_disassociates_urls() {
let mut client = common::get_client();
let client = common::get_client();
client
.disassociate_urls(
vec![

@ -1,11 +1,10 @@
use hydrus_api::endpoints::common::FileIdentifier;
use hydrus_api::endpoints::searching_and_fetching_files::FileSearchLocation;
mod common;
use super::super::common;
use hydrus_api::api_core::common::FileIdentifier;
use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation;
#[tokio::test]
async fn is_searches_files() {
let mut client = common::get_client();
let client = common::get_client();
client
.search_files(vec!["beach".to_string()], FileSearchLocation::Archive)
.await
@ -14,19 +13,19 @@ async fn is_searches_files() {
#[tokio::test]
async fn it_fetches_file_metadata() {
let mut client = common::get_client();
client
let client = common::get_client();
let response = client
.get_file_metadata(
vec![],
vec!["0000000000000000000000000000000000000000000000000000000000000000".to_string()],
)
.await
.unwrap();
.await;
assert!(response.is_ok()); // Even if the file doesn't exist it still returns some information about it
}
#[tokio::test]
async fn it_fetches_single_files() {
let mut client = common::get_client();
let client = common::get_client();
let response = client
.get_file(FileIdentifier::Hash(
"0000000000000000000000000000000000000000000000000000000000000000".to_string(),

@ -1,4 +1,5 @@
use hydrus_api::client::Client;
use hydrus_api::api_core::client::Client;
use hydrus_api::Hydrus;
use log::LevelFilter;
use std::env;
use std::sync::atomic::{AtomicBool, Ordering};
@ -15,9 +16,15 @@ pub fn setup() {
pub fn get_client() -> Client {
setup();
Client::new(
env::var("HYDRUS_URL").unwrap(),
env::var("HYDRUS_ACCESS_KEY").unwrap(),
)
.unwrap()
}
pub fn get_hydrus() -> Hydrus {
let client = get_client();
Hydrus::new(client)
}

@ -0,0 +1,3 @@
// Integration test root: low level client tests, shared helpers, wrapper tests.
mod client;
mod common;
mod wrapper;

@ -0,0 +1,4 @@
// Integration tests for the high level wrapper.
mod test_files;
mod test_hydrus;
mod test_import;
mod test_url;

@ -0,0 +1,90 @@
use super::super::common;
use hydrus_api::api_core::adding_tags::TagAction;
use hydrus_api::api_core::common::FileIdentifier;
use hydrus_api::wrapper::hydrus_file::HydrusFile;
use hydrus_api::wrapper::service::ServiceName;
/// Fetches the shared test file by hash; the hash must exist in the
/// hydrus instance these integration tests run against.
async fn get_file() -> HydrusFile {
    let hydrus = common::get_hydrus();
    hydrus
        .file(FileIdentifier::hash(
            "277a138cd1ee79fc1fdb2869c321b848d4861e45b82184487139ef66dd40b62d", // needs to exist
        ))
        .await
        .unwrap()
}
// Associating a url with the test file should succeed against a live client.
#[tokio::test]
async fn it_associates_with_urls() {
    let mut file = get_file().await;
    file.associate_urls(vec![
        "https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium".to_string(),
    ])
    .await
    .unwrap();
}
// Disassociating a url from the test file should succeed against a live client.
#[tokio::test]
async fn it_disassociates_with_urls() {
    let mut file = get_file().await;
    file.disassociate_urls(vec![
        "https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium".to_string(),
    ])
    .await
    .unwrap();
}
// The test file is expected to have tags on at least one service.
#[tokio::test]
async fn it_has_tags_with_services() {
    let mut file = get_file().await;
    let tags = file.services_with_tags().await.unwrap();
    assert!(tags.keys().len() > 0)
}
// The flattened tag list of the test file must be non-empty.
#[tokio::test]
async fn it_has_tags() {
    let mut file = get_file().await;
    let tags = file.tags().await.unwrap();
    assert!(tags.len() > 0) // test data needs to be prepared this way
}
// Adds both a namespaced and an unnamespaced tag to the test file.
#[tokio::test]
async fn it_adds_tags() {
    let mut file = get_file().await;
    file.add_tags(
        ServiceName::public_tag_repository(),
        vec!["character:megumin".into(), "ark mage".into()],
    )
    .await
    .unwrap();
}
// Exercises a non-default tag action (rescinding a pend) on the test file.
#[tokio::test]
async fn it_modifies_tags() {
    let mut file = get_file().await;
    file.modify_tags(
        ServiceName::public_tag_repository(),
        TagAction::RescindPendFromRepository,
        vec!["ark mage".into()],
    )
    .await
    .unwrap();
}
// Downloading the file record should return a non-empty byte payload.
#[tokio::test]
async fn it_retrieves_content() {
    let file = get_file().await;
    let file = file.retrieve().await.unwrap();
    assert!(file.bytes.len() > 0) // assuming it exists
}
// Metadata accessors: the test file is assumed to be a locally stored image
// (has dimensions, no duration).
#[tokio::test]
async fn it_retrieves_metadata() {
    let mut file = get_file().await;
    assert!(file.dimensions().await.unwrap().is_some());
    assert!(file.stored_locally().await.unwrap());
    assert!(file.duration().await.unwrap().is_none());
}

@ -0,0 +1,62 @@
use super::super::common;
use hydrus_api::api_core::adding_tags::TagAction;
use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation;
use hydrus_api::wrapper::service::{ServiceName, ServiceType};
use hydrus_api::wrapper::url::UrlType;
// Both the hydrus version and the API version must be positive.
#[tokio::test]
async fn it_retrieves_version_info() {
    let hydrus = common::get_hydrus();
    let version = hydrus.version().await.unwrap();
    assert!(version.hydrus > 0);
    assert!(version.api > 0);
}
// The default service types should always be present on a stock client.
#[tokio::test]
async fn it_retrieves_services() {
    let hydrus = common::get_hydrus();
    let services = hydrus.services().await.unwrap();

    // assuming hydrus is configured correctly
    assert!(services.get_services(ServiceType::AllKnownFiles).len() > 0);
    assert!(services.get_services(ServiceType::AllKnownTags).len() > 0);
}
// A pixiv artwork url should be classified as a post url.
#[tokio::test]
async fn it_retrieves_url_information() {
    let hydrus = common::get_hydrus();
    let url = hydrus
        .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
        .await
        .unwrap();

    assert_eq!(url.url_type, UrlType::Post)
}
// A tag search in the archive should complete without error.
#[tokio::test]
async fn it_searches() {
    let hydrus = common::get_hydrus();
    hydrus
        .search(
            FileSearchLocation::Archive,
            vec!["character:megumin".into()],
        )
        .await
        .unwrap();
}
// Bulk tagging via the TaggingBuilder against an all-zero placeholder hash.
#[tokio::test]
async fn it_adds_tags() {
    let hydrus = common::get_hydrus();
    hydrus
        .tagging()
        .add_tag(
            ServiceName::my_tags(),
            TagAction::AddToLocalService,
            "summer".into(),
        )
        .add_file("0000000000000000000000000000000000000000000000000000000000000000")
        .run()
        .await
        .unwrap();
}

@ -0,0 +1,50 @@
use super::super::common;
use hydrus_api::wrapper::builders::import_builder::FileImport;
use hydrus_api::wrapper::page::PageIdentifier;
use hydrus_api::wrapper::service::ServiceName;
use hydrus_api::wrapper::tag::Tag;
use hydrus_api::wrapper::url::UrlType;
// Importing a nonexistent path must surface an error.
#[tokio::test]
async fn it_imports_file_paths() {
    let hydrus = common::get_hydrus();
    let result = hydrus
        .import()
        .file(FileImport::path("/does/not/exist/sadly"))
        .run()
        .await;

    assert!(result.is_err()) // file does not exist
}
// Importing a few junk bytes should be rejected by hydrus.
#[tokio::test]
async fn it_imports_binary_files() {
    let hydrus = common::get_hydrus();
    let bytes = [0u8, 0u8, 0u8, 0u8];
    let result = hydrus
        .import()
        .file(FileImport::binary(&mut &bytes[..]))
        .run()
        .await;

    assert!(result.is_err()) // return status should be 4
}
// Full url import flow: destination page, visibility and additional tags.
#[tokio::test]
async fn it_imports_urls() {
    let hydrus = common::get_hydrus();
    let result = hydrus
        .import()
        .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
        .page(PageIdentifier::name("Rusty Import"))
        .show_page(true)
        .add_additional_tag(ServiceName::my_tags(), Tag::from("ark mage"))
        .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin"))
        .run()
        .await
        .unwrap();

    assert!(result.normalised_url.len() > 0);
    assert_eq!(result.url_type, UrlType::Post)
}

@ -0,0 +1,47 @@
use super::super::common;
use hydrus_api::wrapper::page::PageIdentifier;
use hydrus_api::wrapper::service::ServiceName;
use hydrus_api::wrapper::tag::Tag;
use hydrus_api::wrapper::url::Url;
/// Resolves the shared test url via the live hydrus client.
async fn get_url() -> Url {
    let hydrus = common::get_hydrus();
    hydrus
        .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
        .await
        .unwrap()
}
// Importing the test url with a destination page and an additional tag.
#[tokio::test]
async fn it_imports() {
    let mut url = get_url().await;
    url.import()
        .page(PageIdentifier::name("Rusty Import"))
        .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin"))
        .run()
        .await
        .unwrap();
}
// Associating the test url with a placeholder hash should succeed.
#[tokio::test]
async fn it_associates() {
    let mut url = get_url().await;
    url.associate(vec![
        "0000000000000000000000000000000000000000000000000000000000000000".to_string(),
    ])
    .await
    .unwrap();
}
// Disassociating the test url from a placeholder hash should succeed.
#[tokio::test]
async fn it_disassociates() {
    let mut url = get_url().await;
    url.disassociate(vec![
        "0000000000000000000000000000000000000000000000000000000000000000".to_string(),
    ])
    .await
    .unwrap();
}
Loading…
Cancel
Save