Merge pull request #1 from Trivernis/feature/high-level-wrapper
Feature/high level wrapper (pull/2/head)
commit
df5f36204c
@ -0,0 +1,167 @@
use crate::api_core::adding_files::{STATUS_IMPORT_FAILED, STATUS_IMPORT_VETOED};
use crate::api_core::adding_urls::AddUrlRequestBuilder;
use crate::error::{Error, Result};
use crate::utils::tag_list_to_string_list;
use crate::wrapper::hydrus_file::HydrusFile;
use crate::wrapper::page::PageIdentifier;
use crate::wrapper::service::ServiceName;
use crate::wrapper::tag::Tag;
use crate::wrapper::url::Url;
use crate::Client;
use std::collections::HashMap;
use std::io::Read;

pub struct ImportBuilder {
    pub(crate) client: Client,
}

impl ImportBuilder {
    pub fn file(self, file: FileImport) -> FileImportBuilder {
        FileImportBuilder {
            client: self.client,
            file,
        }
    }

    pub fn url<S: ToString>(self, url: S) -> UrlImportBuilder {
        UrlImportBuilder::new(self.client.clone(), url)
    }
}

pub enum FileImport {
    Path(String),
    Binary(Vec<u8>),
}

impl FileImport {
    pub fn path<S: ToString>(path: S) -> Self {
        Self::Path(path.to_string())
    }

    pub fn binary<R: Read>(reader: &mut R) -> Self {
        let mut bytes = Vec::new();
        let _ = reader.read_to_end(&mut bytes);
        Self::Binary(bytes)
    }
}

pub struct FileImportBuilder {
    client: Client,
    file: FileImport,
}

impl FileImportBuilder {
    pub async fn run(self) -> Result<HydrusFile> {
        let response = match self.file {
            FileImport::Path(path) => self.client.add_file(path).await?,
            FileImport::Binary(b) => self.client.add_binary_file(b).await?,
        };

        if response.status == STATUS_IMPORT_FAILED {
            Err(Error::ImportFailed(response.note))
        } else if response.status == STATUS_IMPORT_VETOED {
            Err(Error::ImportVetoed(response.note))
        } else {
            Ok(HydrusFile::from_raw_status_and_hash(
                self.client,
                response.status,
                response.hash,
            ))
        }
    }
}

pub struct UrlImportBuilder {
    client: Client,
    url: String,
    page: Option<PageIdentifier>,
    show_page: bool,
    filter_tags: Vec<Tag>,
    service_tag_mappings: HashMap<String, Vec<Tag>>,
}

impl UrlImportBuilder {
    pub fn new<S: ToString>(client: Client, url: S) -> Self {
        Self {
            client,
            url: url.to_string(),
            page: None,
            show_page: false,
            filter_tags: vec![],
            service_tag_mappings: Default::default(),
        }
    }

    /// Sets the destination page of the import
    pub fn page(mut self, page: PageIdentifier) -> Self {
        self.page = Some(page);

        self
    }

    /// Sets whether the destination page of the import should be focused
    pub fn show_page(mut self, show: bool) -> Self {
        self.show_page = show;

        self
    }

    /// Adds a tag that should be filtered
    pub fn add_filter_tag(mut self, tag: Tag) -> Self {
        self.filter_tags.push(tag);

        self
    }

    /// Adds multiple tags that should be filtered
    pub fn add_filter_tags(mut self, mut tags: Vec<Tag>) -> Self {
        self.filter_tags.append(&mut tags);

        self
    }

    /// Adds an additional tag for the imported file
    pub fn add_additional_tag(self, service: ServiceName, tag: Tag) -> Self {
        self.add_additional_tags(service, vec![tag])
    }

    /// Adds multiple additional tags for the import
    pub fn add_additional_tags(mut self, service: ServiceName, mut tags: Vec<Tag>) -> Self {
        if let Some(service_tags) = self.service_tag_mappings.get_mut(&service.0) {
            service_tags.append(&mut tags);
        } else {
            self.service_tag_mappings.insert(service.0, tags);
        }

        self
    }

    /// Imports the URL
    pub async fn run(self) -> Result<Url> {
        let mut request = AddUrlRequestBuilder::default().url(&self.url);

        for (service, tags) in self.service_tag_mappings {
            request = request.add_tags(service, tag_list_to_string_list(tags));
        }
        request = request.add_filter_tags(tag_list_to_string_list(self.filter_tags));
        if let Some(page) = self.page {
            request = match page {
                PageIdentifier::Name(n) => request.destination_page_name(n),
                PageIdentifier::Key(k) => request.destination_page_key(k),
            };
        }
        request = request.show_destination_page(self.show_page);

        let response = self.client.add_url(request.build()).await?;
        let url_info = self.client.get_url_info(&self.url).await?;

        Ok(Url {
            url: self.url,
            client: self.client,
            normalised_url: response.normalised_url,
            url_type: url_info.url_type.into(),
            match_name: url_info.match_name,
            can_parse: url_info.can_parse,
        })
    }
}
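A minimal usage sketch for the import builders above, assuming an already-configured Client and a reachable hydrus instance; it goes through the Hydrus wrapper introduced later in this pull request, and the file path and url are placeholders.

use hydrus_api::wrapper::builders::import_builder::FileImport;
use hydrus_api::wrapper::hydrus::Hydrus;
use hydrus_api::wrapper::page::PageIdentifier;
use hydrus_api::wrapper::service::ServiceName;
use hydrus_api::Client;

async fn import_examples(client: Client) {
    let hydrus = Hydrus::new(client);

    // Import a file from disk; the returned HydrusFile can be queried or tagged further.
    let _file = hydrus
        .import()
        .file(FileImport::path("/tmp/example.png")) // placeholder path
        .run()
        .await
        .unwrap();

    // Import a url into a named page and attach an additional tag on "my tags".
    let _url = hydrus
        .import()
        .url("https://example.com/post/123") // placeholder url
        .page(PageIdentifier::name("Rusty Import"))
        .show_page(true)
        .add_additional_tag(ServiceName::my_tags(), "character:megumin".into())
        .run()
        .await
        .unwrap();
}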
@ -0,0 +1,2 @@
pub mod import_builder;
pub mod tagging_builder;
@ -0,0 +1,70 @@
use crate::api_core::adding_tags::{AddTagsRequestBuilder, TagAction};
use crate::error::Result;
use crate::wrapper::service::ServiceName;
use crate::wrapper::tag::Tag;
use crate::Client;
use std::collections::HashMap;

pub struct TaggingBuilder {
    client: Client,
    hashes: Vec<String>,
    tag_mappings: HashMap<ServiceName, HashMap<TagAction, Vec<Tag>>>,
}

impl TaggingBuilder {
    pub(crate) fn new(client: Client) -> Self {
        Self {
            client,
            hashes: Vec::new(),
            tag_mappings: Default::default(),
        }
    }

    /// Adds a file that should get the tags defined for this request
    pub fn add_file<S: ToString>(mut self, hash: S) -> Self {
        self.hashes.push(hash.to_string());

        self
    }

    /// Adds a single tag for a given service
    pub fn add_tag(self, service: ServiceName, action: TagAction, tag: Tag) -> Self {
        self.add_tags(service, action, vec![tag])
    }

    /// Adds tags with actions for the given service
    pub fn add_tags(mut self, service: ServiceName, action: TagAction, mut tags: Vec<Tag>) -> Self {
        let service_action_mappings =
            if let Some(service_action_mappings) = self.tag_mappings.get_mut(&service) {
                service_action_mappings
            } else {
                self.tag_mappings.insert(service.clone(), HashMap::new());
                self.tag_mappings.get_mut(&service).unwrap()
            };
        if let Some(action_tag_mappings) = service_action_mappings.get_mut(&action) {
            action_tag_mappings.append(&mut tags)
        } else {
            service_action_mappings.insert(action, tags);
        }

        self
    }

    /// Executes the request
    pub async fn run(self) -> Result<()> {
        let mut request = AddTagsRequestBuilder::default().add_hashes(self.hashes);
        for (service, action_tag_mappings) in self.tag_mappings {
            for (action, tags) in action_tag_mappings {
                for tag in tags {
                    request = request.add_tag_with_action(
                        service.0.clone(),
                        tag.to_string(),
                        action.clone(),
                    );
                }
            }
        }

        self.client.add_tags(request.build()).await
    }
}
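A sketch of how the tagging builder is meant to be driven, via Hydrus::tagging() from the wrapper added later in this pull request; the hash is a placeholder and a running hydrus instance is assumed.

use hydrus_api::api_core::adding_tags::TagAction;
use hydrus_api::wrapper::hydrus::Hydrus;
use hydrus_api::wrapper::service::ServiceName;
use hydrus_api::Client;

async fn tagging_example(client: Client) {
    let hydrus = Hydrus::new(client);

    // Tags are grouped internally per service and action, then sent as one add_tags request.
    hydrus
        .tagging()
        .add_file("0000000000000000000000000000000000000000000000000000000000000000") // placeholder hash
        .add_tags(
            ServiceName::my_tags(),
            TagAction::AddToLocalService,
            vec!["summer".into(), "character:megumin".into()],
        )
        .run()
        .await
        .unwrap();
}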
@ -0,0 +1,97 @@
use crate::api_core::common::FileIdentifier;
use crate::api_core::searching_and_fetching_files::FileSearchLocation;
use crate::error::Result;
use crate::utils::tag_list_to_string_list;
use crate::wrapper::builders::import_builder::ImportBuilder;
use crate::wrapper::builders::tagging_builder::TaggingBuilder;
use crate::wrapper::hydrus_file::HydrusFile;
use crate::wrapper::service::Services;
use crate::wrapper::tag::Tag;
use crate::wrapper::url::Url;
use crate::wrapper::version::Version;
use crate::Client;

/// A high level wrapper around the hydrus API for easier management of files, tags,
/// urls etc.
pub struct Hydrus {
    client: Client,
}

impl Hydrus {
    /// Creates a new high level Hydrus API client
    pub fn new(client: Client) -> Self {
        Self { client }
    }

    /// Returns the hydrus and API version
    pub async fn version(&self) -> Result<Version> {
        let response = self.client.api_version().await?;
        Ok(Version {
            api: response.version,
            hydrus: response.hydrus_version,
        })
    }

    /// Returns a list of available services
    pub async fn services(&self) -> Result<Services> {
        let response = self.client.get_services().await?;

        Ok(Services::from_response(self.client.clone(), response))
    }

    /// Creates an import builder to build an import request to hydrus
    pub fn import(&self) -> ImportBuilder {
        ImportBuilder {
            client: self.client.clone(),
        }
    }

    /// Returns information about a given url in an object that allows
    /// further operations with that url
    pub async fn url<S: AsRef<str>>(&self, url: S) -> Result<Url> {
        let info = self.client.get_url_info(&url).await?;

        Ok(Url {
            client: self.client.clone(),
            normalised_url: info.normalised_url,
            url_type: info.url_type.into(),
            match_name: info.match_name,
            url: url.as_ref().to_string(),
            can_parse: info.can_parse,
        })
    }

    /// Returns a file by identifier to perform further operations on
    pub async fn file(&self, identifier: FileIdentifier) -> Result<HydrusFile> {
        let metadata = self
            .client
            .get_file_metadata_by_identifier(identifier)
            .await?;

        Ok(HydrusFile::from_metadata(self.client.clone(), metadata))
    }

    /// Starts a request to bulk add tags to files
    pub fn tagging(&self) -> TaggingBuilder {
        TaggingBuilder::new(self.client.clone())
    }

    /// Searches for files that have the given tags and returns a list of hydrus files as a result
    pub async fn search(
        &self,
        location: FileSearchLocation,
        tags: Vec<Tag>,
    ) -> Result<Vec<HydrusFile>> {
        let search_result = self
            .client
            .search_files(tag_list_to_string_list(tags), location)
            .await?;
        let files = search_result
            .file_ids
            .into_iter()
            .map(|id| HydrusFile::from_id(self.client.clone(), id))
            .collect();

        Ok(files)
    }
}
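A sketch of the high level entry point above, assuming an authenticated Client pointing at a reachable hydrus instance.

use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation;
use hydrus_api::wrapper::hydrus::Hydrus;
use hydrus_api::wrapper::service::ServiceType;
use hydrus_api::Client;

async fn wrapper_overview(client: Client) {
    let hydrus = Hydrus::new(client);

    // Version of the connected client and its API.
    let version = hydrus.version().await.unwrap();
    println!("hydrus {} / api {}", version.hydrus, version.api);

    // Services grouped by type.
    let services = hydrus.services().await.unwrap();
    println!(
        "{} local tag services",
        services.get_services(ServiceType::LocalTags).len()
    );

    // Tag search returning lazily resolved HydrusFile handles.
    let files = hydrus
        .search(FileSearchLocation::Archive, vec!["character:megumin".into()])
        .await
        .unwrap();
    println!("found {} files", files.len());
}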
@ -0,0 +1,264 @@
use crate::api_core::adding_tags::{AddTagsRequestBuilder, TagAction};
use crate::api_core::common::{FileIdentifier, FileMetadataInfo, FileRecord};
use crate::error::{Error, Result};
use crate::utils::tag_list_to_string_list;
use crate::wrapper::service::ServiceName;
use crate::wrapper::tag::Tag;
use crate::Client;
use mime::Mime;
use std::collections::HashMap;

#[derive(Clone, Debug, PartialOrd, PartialEq)]
pub enum FileStatus {
    ReadyForImport,
    InDatabase,
    Deleted,
    Unknown,
}

impl Eq for FileStatus {}

#[derive(Clone)]
pub struct HydrusFile {
    pub(crate) client: Client,
    pub id: FileIdentifier,
    pub status: FileStatus,
    pub(crate) metadata: Option<FileMetadataInfo>,
}

impl HydrusFile {
    pub(crate) fn from_id(client: Client, id: u64) -> Self {
        Self {
            client,
            id: FileIdentifier::ID(id),
            status: FileStatus::Unknown,
            metadata: None,
        }
    }

    pub(crate) fn from_raw_status_and_hash<S: ToString>(
        client: Client,
        status: u8,
        hash: S,
    ) -> Self {
        let status = if status == 3 {
            FileStatus::Deleted
        } else if status == 0 {
            FileStatus::ReadyForImport
        } else {
            FileStatus::InDatabase
        };
        Self {
            client,
            id: FileIdentifier::Hash(hash.to_string()),
            status,
            metadata: None,
        }
    }

    pub(crate) fn from_metadata(client: Client, metadata: FileMetadataInfo) -> Self {
        let status = if metadata.is_trashed {
            FileStatus::Deleted
        } else {
            FileStatus::InDatabase
        };

        Self {
            client,
            id: FileIdentifier::Hash(metadata.hash.clone()),
            status,
            metadata: Some(metadata),
        }
    }

    /// Deletes the internally stored metadata about the file and retrieves it again
    pub async fn update(&mut self) -> Result<()> {
        self.metadata = None;
        self.metadata().await?;
        Ok(())
    }

    /// Returns the hash of the file.
    /// If the file identifier is an id, hydrus is called to resolve the hash.
    pub async fn hash(&mut self) -> Result<String> {
        match &self.id {
            FileIdentifier::ID(_) => {
                let metadata = self.metadata().await?;
                Ok(metadata.hash.clone())
            }
            FileIdentifier::Hash(hash) => Ok(hash.clone()),
        }
    }

    /// Returns the file size in bytes
    pub async fn size(&mut self) -> Result<Option<u64>> {
        let metadata = self.metadata().await?;

        Ok(metadata.size.clone())
    }

    /// Returns the mime of the file
    pub async fn mime(&mut self) -> Result<Mime> {
        let metadata = self.metadata().await?;
        let mime = metadata
            .mime
            .as_str()
            .parse()
            .map_err(|_| Error::InvalidMime(metadata.mime.clone()))?;

        Ok(mime)
    }

    /// Returns the file extension
    pub async fn ext(&mut self) -> Result<String> {
        let metadata = self.metadata().await?;

        Ok(metadata.ext.clone())
    }

    /// Returns the dimensions of the file in pixels
    pub async fn dimensions(&mut self) -> Result<Option<(u32, u32)>> {
        let metadata = self.metadata().await?;
        if let (Some(width), Some(height)) = (&metadata.width, &metadata.height) {
            Ok(Some((*width, *height)))
        } else {
            Ok(None)
        }
    }

    /// Returns the duration of the file in seconds if it's a video
    pub async fn duration(&mut self) -> Result<Option<u64>> {
        let metadata = self.metadata().await?;

        Ok(metadata.duration.clone())
    }

    /// Returns the number of frames of the file if it's a video
    pub async fn num_frames(&mut self) -> Result<Option<u64>> {
        let metadata = self.metadata().await?;

        Ok(metadata.num_frames.clone())
    }

    /// Returns if the file has audio
    pub async fn has_audio(&mut self) -> Result<bool> {
        let metadata = self.metadata().await?;

        Ok(metadata.has_audio.unwrap_or(false))
    }

    /// Returns if the file is currently in the inbox
    pub async fn in_inbox(&mut self) -> Result<bool> {
        let metadata = self.metadata().await?;

        Ok(metadata.is_inbox)
    }

    /// Returns if the file is stored locally
    pub async fn stored_locally(&mut self) -> Result<bool> {
        let metadata = self.metadata().await?;

        Ok(metadata.is_local)
    }

    /// Returns if the file has been moved to the trash
    pub async fn moved_to_trashed(&mut self) -> Result<bool> {
        let metadata = self.metadata().await?;

        Ok(metadata.is_trashed)
    }

    /// Associates the file with a list of urls
    pub async fn associate_urls(&mut self, urls: Vec<String>) -> Result<()> {
        let hash = self.hash().await?;
        self.client.associate_urls(urls, vec![hash]).await
    }

    /// Disassociates the file from a list of urls
    pub async fn disassociate_urls(&mut self, urls: Vec<String>) -> Result<()> {
        let hash = self.hash().await?;
        self.client.disassociate_urls(urls, vec![hash]).await
    }

    /// Returns a map of services to the list of tags assigned to the file on them
    pub async fn services_with_tags(&mut self) -> Result<HashMap<ServiceName, Vec<Tag>>> {
        let metadata = self.metadata().await?;
        let mut tag_mappings = HashMap::new();

        for (service, status_tags) in &metadata.service_names_to_statuses_to_tags {
            let mut tag_list = Vec::new();

            for (_, tags) in status_tags {
                tag_list.append(&mut tags.into_iter().map(|t| t.into()).collect())
            }
            tag_mappings.insert(ServiceName(service.clone()), tag_list);
        }

        Ok(tag_mappings)
    }

    /// Returns a list of all tags assigned to the file
    pub async fn tags(&mut self) -> Result<Vec<Tag>> {
        let mut tag_list = Vec::new();
        let tag_mappings = self.services_with_tags().await?;

        for (_, mut tags) in tag_mappings {
            tag_list.append(&mut tags);
        }

        Ok(tag_list)
    }

    /// Adds tags for a specific service to the file
    pub async fn add_tags(&mut self, service: ServiceName, tags: Vec<Tag>) -> Result<()> {
        let hash = self.hash().await?;
        let request = AddTagsRequestBuilder::default()
            .add_hash(hash)
            .add_tags(service.0, tag_list_to_string_list(tags))
            .build();

        self.client.add_tags(request).await
    }

    /// Allows modification of tags by using the defined tag actions
    pub async fn modify_tags(
        &mut self,
        service: ServiceName,
        action: TagAction,
        tags: Vec<Tag>,
    ) -> Result<()> {
        let hash = self.hash().await?;
        let mut reqwest = AddTagsRequestBuilder::default().add_hash(hash);

        for tag in tags {
            reqwest =
                reqwest.add_tag_with_action(service.0.clone(), tag.to_string(), action.clone());
        }

        self.client.add_tags(reqwest.build()).await
    }

    /// Retrieves the file record bytes
    pub async fn retrieve(&self) -> Result<FileRecord> {
        self.client.get_file(self.id.clone()).await
    }

    /// Returns the metadata for the given file,
    /// reusing the internally stored metadata if it is already known
    async fn metadata(&mut self) -> Result<&FileMetadataInfo> {
        if self.metadata.is_none() {
            let metadata = self
                .client
                .get_file_metadata_by_identifier(self.id.clone())
                .await?;
            self.status = if metadata.is_trashed {
                FileStatus::Deleted
            } else {
                FileStatus::InDatabase
            };
            self.metadata = Some(metadata);
        }

        Ok(self.metadata.as_ref().unwrap())
    }
}
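A sketch of working with the HydrusFile accessors above; the first accessor call fetches the metadata once and later calls reuse the cached value. The hash is a placeholder and is assumed to exist on the connected instance.

use hydrus_api::api_core::common::FileIdentifier;
use hydrus_api::wrapper::hydrus::Hydrus;
use hydrus_api::wrapper::service::ServiceName;
use hydrus_api::Client;

async fn file_example(client: Client) {
    let hydrus = Hydrus::new(client);
    let mut file = hydrus
        .file(FileIdentifier::hash(
            "0000000000000000000000000000000000000000000000000000000000000000", // placeholder hash
        ))
        .await
        .unwrap();

    // The first accessor fetches and caches the metadata; the second call reuses it.
    let mime = file.mime().await.unwrap();
    let size = file.size().await.unwrap();
    println!("{} bytes, {}", size.unwrap_or(0), mime);

    // Tag the file on the local tag service.
    file.add_tags(ServiceName::my_tags(), vec!["summer".into()])
        .await
        .unwrap();
}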
@ -0,0 +1,8 @@
pub mod builders;
pub mod hydrus;
pub mod hydrus_file;
pub mod page;
pub mod service;
pub mod tag;
pub mod url;
pub mod version;
@ -0,0 +1,20 @@
#[derive(Clone)]
pub struct HydrusPage {
    pub id: PageIdentifier,
}

#[derive(Clone)]
pub enum PageIdentifier {
    Name(String),
    Key(String),
}

impl PageIdentifier {
    pub fn name<S: ToString>(name: S) -> Self {
        Self::Name(name.to_string())
    }

    pub fn key<S: ToString>(key: S) -> Self {
        Self::Key(key.to_string())
    }
}
@ -0,0 +1,145 @@
use crate::api_core::access_management::GetServicesResponse;
use crate::api_core::access_management::{
    SERVICE_TYPE_ALL_KNOWN_FILES, SERVICE_TYPE_ALL_KNOWN_TAGS, SERVICE_TYPE_ALL_LOCAL_FILES,
    SERVICE_TYPE_FILE_REPOSITORIES, SERVICE_TYPE_LOCAL_FILES, SERVICE_TYPE_LOCAL_TAGS,
    SERVICE_TYPE_TAG_REPOSITORIES, SERVICE_TYPE_TRASH,
};
use crate::error::Error;
use crate::Client;
use std::collections::HashMap;
use std::convert::TryFrom;

#[derive(Clone, PartialOrd, PartialEq, Hash)]
pub enum ServiceType {
    LocalTags,
    TagRepositories,
    LocalFiles,
    FileRepositories,
    AllLocalFiles,
    AllKnownFiles,
    AllKnownTags,
    Trash,
}

impl Eq for ServiceType {}

impl TryFrom<String> for ServiceType {
    type Error = Error;

    fn try_from(value: String) -> Result<Self, Self::Error> {
        match value.as_str() {
            s if s == SERVICE_TYPE_LOCAL_TAGS => Ok(Self::LocalTags),
            s if s == SERVICE_TYPE_TAG_REPOSITORIES => Ok(Self::TagRepositories),
            s if s == SERVICE_TYPE_LOCAL_FILES => Ok(Self::LocalFiles),
            s if s == SERVICE_TYPE_FILE_REPOSITORIES => Ok(Self::FileRepositories),
            s if s == SERVICE_TYPE_ALL_LOCAL_FILES => Ok(Self::AllLocalFiles),
            s if s == SERVICE_TYPE_ALL_KNOWN_FILES => Ok(Self::AllKnownFiles),
            s if s == SERVICE_TYPE_ALL_KNOWN_TAGS => Ok(Self::AllKnownTags),
            s if s == SERVICE_TYPE_TRASH => Ok(Self::Trash),
            _ => Err(Error::InvalidServiceType(value)),
        }
    }
}

impl ToString for ServiceType {
    fn to_string(&self) -> String {
        match self {
            ServiceType::LocalTags => String::from(SERVICE_TYPE_LOCAL_TAGS),
            ServiceType::TagRepositories => String::from(SERVICE_TYPE_TAG_REPOSITORIES),
            ServiceType::LocalFiles => String::from(SERVICE_TYPE_LOCAL_FILES),
            ServiceType::FileRepositories => String::from(SERVICE_TYPE_FILE_REPOSITORIES),
            ServiceType::AllLocalFiles => String::from(SERVICE_TYPE_ALL_LOCAL_FILES),
            ServiceType::AllKnownFiles => String::from(SERVICE_TYPE_ALL_KNOWN_FILES),
            ServiceType::AllKnownTags => String::from(SERVICE_TYPE_ALL_KNOWN_TAGS),
            ServiceType::Trash => String::from(SERVICE_TYPE_TRASH),
        }
    }
}

#[derive(Clone, PartialOrd, PartialEq, Hash)]
pub struct ServiceName(pub String);

impl Eq for ServiceName {}

impl ServiceName {
    pub fn my_tags() -> Self {
        Self(String::from("my tags"))
    }

    pub fn my_files() -> Self {
        Self(String::from("my files"))
    }

    pub fn public_tag_repository() -> Self {
        Self(String::from("public tag repository"))
    }

    pub fn all_local_files() -> Self {
        Self(String::from("all local files"))
    }

    pub fn all_known_tags() -> Self {
        Self(String::from("all known tags"))
    }

    pub fn all_known_files() -> Self {
        Self(String::from("all known files"))
    }
}

#[derive(Clone)]
pub struct Service {
    client: Client,
    pub name: ServiceName,
    pub key: String,
    pub service_type: ServiceType,
}

#[derive(Clone)]
pub struct Services {
    inner: HashMap<ServiceType, Vec<Service>>,
}

impl Services {
    /// Creates the services list from a given hydrus response
    pub fn from_response(client: Client, response: GetServicesResponse) -> Self {
        let mut response = response.0;
        let mut mapped_types = HashMap::with_capacity(response.keys().len());
        let keys = response.keys().cloned().collect::<Vec<String>>().clone();

        for service_type in &keys {
            if let Ok(mapped_type) = ServiceType::try_from(service_type.clone()) {
                let basic_services = response.remove(service_type).unwrap();
                let mut service_list = Vec::new();

                for basic_service in basic_services {
                    service_list.push(Service {
                        service_type: mapped_type.clone(),
                        name: ServiceName(basic_service.name),
                        key: basic_service.service_key,
                        client: client.clone(),
                    })
                }

                mapped_types.insert(mapped_type, service_list);
            }
        }

        Self {
            inner: mapped_types,
        }
    }

    /// Returns a list of all services of the given type
    pub fn get_services(&self, service_type: ServiceType) -> Vec<&Service> {
        if let Some(services) = self.inner.get(&service_type) {
            let mut borrowed_services = Vec::with_capacity(services.len());
            for service in services {
                borrowed_services.push(service)
            }
            borrowed_services
        } else {
            Vec::with_capacity(0)
        }
    }
}
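A short sketch of reading the parsed service list; get_services returns borrowed Service entries for a single ServiceType. It assumes a configured Client and uses the Hydrus wrapper from this pull request.

use hydrus_api::wrapper::hydrus::Hydrus;
use hydrus_api::wrapper::service::ServiceType;
use hydrus_api::Client;

async fn list_local_tag_services(client: Client) {
    let services = Hydrus::new(client).services().await.unwrap();

    // Name and service key of every local tag service known to the client.
    for service in services.get_services(ServiceType::LocalTags) {
        println!("{} ({})", service.name.0, service.key);
    }
}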
@ -0,0 +1,35 @@
#[derive(Clone, Debug)]
pub struct Tag {
    pub name: String,
    pub namespace: Option<String>,
}

impl<S> From<S> for Tag
where
    S: AsRef<str>,
{
    fn from(value: S) -> Self {
        let value = value.as_ref();
        if let Some((namespace, tag)) = value.split_once(":") {
            Self {
                namespace: Some(namespace.to_string()),
                name: tag.to_string(),
            }
        } else {
            Self {
                name: value.to_string(),
                namespace: None,
            }
        }
    }
}

impl ToString for Tag {
    fn to_string(&self) -> String {
        if let Some(namespace) = &self.namespace {
            format!("{}:{}", namespace, self.name)
        } else {
            self.name.clone()
        }
    }
}
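The From conversion above splits on the first ':' into namespace and name, and to_string reassembles the tag; a quick sketch of that round trip:

use hydrus_api::wrapper::tag::Tag;

fn tag_example() {
    // Namespaced tag: everything before the first ':' becomes the namespace.
    let tag = Tag::from("character:megumin");
    assert_eq!(tag.namespace.as_deref(), Some("character"));
    assert_eq!(tag.name, "megumin");
    assert_eq!(tag.to_string(), "character:megumin");

    // Unnamespaced tag: no ':' means no namespace.
    let plain = Tag::from("summer");
    assert!(plain.namespace.is_none());
    assert_eq!(plain.to_string(), "summer");
}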
@ -0,0 +1,75 @@
use crate::api_core::adding_urls::{
    URL_TYPE_FILE, URL_TYPE_GALLERY, URL_TYPE_POST, URL_TYPE_WATCHABLE,
};
use crate::error::Result;
use crate::wrapper::builders::import_builder::UrlImportBuilder;
use crate::wrapper::hydrus_file::HydrusFile;
use crate::Client;

#[derive(Clone, Debug, PartialOrd, PartialEq)]
pub enum UrlType {
    Post,
    File,
    Gallery,
    Watchable,
    Unknown,
}

impl Eq for UrlType {}

impl From<u8> for UrlType {
    fn from(value: u8) -> Self {
        match value {
            v if v == URL_TYPE_POST => Self::Post,
            v if v == URL_TYPE_FILE => Self::File,
            v if v == URL_TYPE_GALLERY => Self::Gallery,
            v if v == URL_TYPE_WATCHABLE => Self::Watchable,
            _ => Self::Unknown,
        }
    }
}

#[derive(Clone)]
pub struct Url {
    pub url: String,
    pub(crate) client: Client,
    pub normalised_url: String,
    pub url_type: UrlType,
    pub match_name: String,
    pub can_parse: bool,
}

impl Url {
    /// Returns a list of files associated with the url
    pub async fn files(&mut self) -> Result<Vec<HydrusFile>> {
        let response = self.client.get_url_files(&self.url).await?;
        let files = response
            .url_file_statuses
            .into_iter()
            .map(|file| {
                HydrusFile::from_raw_status_and_hash(self.client.clone(), file.status, file.hash)
            })
            .collect();

        Ok(files)
    }

    /// Creates an import builder for the url
    pub fn import(&mut self) -> UrlImportBuilder {
        UrlImportBuilder::new(self.client.clone(), &self.url)
    }

    /// Associates the url with a list of file hashes
    pub async fn associate(&mut self, hashes: Vec<String>) -> Result<()> {
        self.client
            .associate_urls(vec![self.url.clone()], hashes)
            .await
    }

    /// Disassociates the url from a list of file hashes
    pub async fn disassociate(&mut self, hashes: Vec<String>) -> Result<()> {
        self.client
            .disassociate_urls(vec![self.url.clone()], hashes)
            .await
    }
}
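A sketch of the Url wrapper above: look a url up through Hydrus::url, check how hydrus classified it, and import it; the url is a placeholder and a configured Client is assumed.

use hydrus_api::wrapper::hydrus::Hydrus;
use hydrus_api::wrapper::url::UrlType;
use hydrus_api::Client;

async fn url_example(client: Client) {
    let hydrus = Hydrus::new(client);
    let mut url = hydrus
        .url("https://example.com/post/123") // placeholder url
        .await
        .unwrap();

    // Only import post urls that hydrus can actually parse; Unknown means no matching parser.
    if url.url_type == UrlType::Post && url.can_parse {
        let files = url.files().await.unwrap();
        println!("{} file(s) already known for this url", files.len());
        url.import().run().await.unwrap();
    }
}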
@ -0,0 +1,4 @@
pub struct Version {
    pub api: u32,
    pub hydrus: u32,
}
@ -0,0 +1,5 @@
mod test_access_management;
mod test_adding_files;
mod test_adding_tags;
mod test_adding_urls;
mod test_searching_and_fetching_files;
@ -0,0 +1,3 @@
mod client;
mod common;
mod wrapper;
@ -0,0 +1,4 @@
mod test_files;
mod test_hydrus;
mod test_import;
mod test_url;
@ -0,0 +1,90 @@
use super::super::common;
use hydrus_api::api_core::adding_tags::TagAction;
use hydrus_api::api_core::common::FileIdentifier;
use hydrus_api::wrapper::hydrus_file::HydrusFile;
use hydrus_api::wrapper::service::ServiceName;

async fn get_file() -> HydrusFile {
    let hydrus = common::get_hydrus();
    hydrus
        .file(FileIdentifier::hash(
            "277a138cd1ee79fc1fdb2869c321b848d4861e45b82184487139ef66dd40b62d", // needs to exist
        ))
        .await
        .unwrap()
}

#[tokio::test]
async fn it_associates_with_urls() {
    let mut file = get_file().await;
    file.associate_urls(vec![
        "https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium".to_string(),
    ])
    .await
    .unwrap();
}

#[tokio::test]
async fn it_disassociates_with_urls() {
    let mut file = get_file().await;
    file.disassociate_urls(vec![
        "https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium".to_string(),
    ])
    .await
    .unwrap();
}

#[tokio::test]
async fn it_has_tags_with_services() {
    let mut file = get_file().await;
    let tags = file.services_with_tags().await.unwrap();

    assert!(tags.keys().len() > 0)
}

#[tokio::test]
async fn it_has_tags() {
    let mut file = get_file().await;
    let tags = file.tags().await.unwrap();

    assert!(tags.len() > 0) // test data needs to be prepared this way
}

#[tokio::test]
async fn it_adds_tags() {
    let mut file = get_file().await;
    file.add_tags(
        ServiceName::public_tag_repository(),
        vec!["character:megumin".into(), "ark mage".into()],
    )
    .await
    .unwrap();
}

#[tokio::test]
async fn it_modifies_tags() {
    let mut file = get_file().await;
    file.modify_tags(
        ServiceName::public_tag_repository(),
        TagAction::RescindPendFromRepository,
        vec!["ark mage".into()],
    )
    .await
    .unwrap();
}

#[tokio::test]
async fn it_retrieves_content() {
    let file = get_file().await;
    let file = file.retrieve().await.unwrap();

    assert!(file.bytes.len() > 0) // assuming it exists
}

#[tokio::test]
async fn it_retrieves_metadata() {
    let mut file = get_file().await;
    assert!(file.dimensions().await.unwrap().is_some());
    assert!(file.stored_locally().await.unwrap());
    assert!(file.duration().await.unwrap().is_none());
}
@ -0,0 +1,62 @@
use super::super::common;
use hydrus_api::api_core::adding_tags::TagAction;
use hydrus_api::api_core::searching_and_fetching_files::FileSearchLocation;
use hydrus_api::wrapper::service::{ServiceName, ServiceType};
use hydrus_api::wrapper::url::UrlType;

#[tokio::test]
async fn it_retrieves_version_info() {
    let hydrus = common::get_hydrus();
    let version = hydrus.version().await.unwrap();
    assert!(version.hydrus > 0);
    assert!(version.api > 0);
}

#[tokio::test]
async fn it_retrieves_services() {
    let hydrus = common::get_hydrus();
    let services = hydrus.services().await.unwrap();

    // assuming hydrus is configured correctly
    assert!(services.get_services(ServiceType::AllKnownFiles).len() > 0);
    assert!(services.get_services(ServiceType::AllKnownTags).len() > 0);
}

#[tokio::test]
async fn it_retrieves_url_information() {
    let hydrus = common::get_hydrus();
    let url = hydrus
        .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
        .await
        .unwrap();

    assert_eq!(url.url_type, UrlType::Post)
}

#[tokio::test]
async fn it_searches() {
    let hydrus = common::get_hydrus();
    hydrus
        .search(
            FileSearchLocation::Archive,
            vec!["character:megumin".into()],
        )
        .await
        .unwrap();
}

#[tokio::test]
async fn it_adds_tags() {
    let hydrus = common::get_hydrus();
    hydrus
        .tagging()
        .add_tag(
            ServiceName::my_tags(),
            TagAction::AddToLocalService,
            "summer".into(),
        )
        .add_file("0000000000000000000000000000000000000000000000000000000000000000")
        .run()
        .await
        .unwrap();
}
@ -0,0 +1,50 @@
use super::super::common;
use hydrus_api::wrapper::builders::import_builder::FileImport;
use hydrus_api::wrapper::page::PageIdentifier;
use hydrus_api::wrapper::service::ServiceName;
use hydrus_api::wrapper::tag::Tag;
use hydrus_api::wrapper::url::UrlType;

#[tokio::test]
async fn it_imports_file_paths() {
    let hydrus = common::get_hydrus();
    let result = hydrus
        .import()
        .file(FileImport::path("/does/not/exist/sadly"))
        .run()
        .await;

    assert!(result.is_err()) // file does not exist
}

#[tokio::test]
async fn it_imports_binary_files() {
    let hydrus = common::get_hydrus();
    let bytes = [0u8, 0u8, 0u8, 0u8];
    let result = hydrus
        .import()
        .file(FileImport::binary(&mut &bytes[..]))
        .run()
        .await;

    assert!(result.is_err()) // return status should be 4
}

#[tokio::test]
async fn it_imports_urls() {
    let hydrus = common::get_hydrus();

    let result = hydrus
        .import()
        .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
        .page(PageIdentifier::name("Rusty Import"))
        .show_page(true)
        .add_additional_tag(ServiceName::my_tags(), Tag::from("ark mage"))
        .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin"))
        .run()
        .await
        .unwrap();

    assert!(result.normalised_url.len() > 0);
    assert_eq!(result.url_type, UrlType::Post)
}
@ -0,0 +1,47 @@
use super::super::common;
use hydrus_api::wrapper::page::PageIdentifier;
use hydrus_api::wrapper::service::ServiceName;
use hydrus_api::wrapper::tag::Tag;
use hydrus_api::wrapper::url::Url;

async fn get_url() -> Url {
    let hydrus = common::get_hydrus();
    hydrus
        .url("https://www.pixiv.net/member_illust.php?illust_id=83406361&mode=medium")
        .await
        .unwrap()
}

#[tokio::test]
async fn it_imports() {
    let mut url = get_url().await;

    url.import()
        .page(PageIdentifier::name("Rusty Import"))
        .add_additional_tag(ServiceName::my_tags(), Tag::from("character:megumin"))
        .run()
        .await
        .unwrap();
}

#[tokio::test]
async fn it_associates() {
    let mut url = get_url().await;

    url.associate(vec![
        "0000000000000000000000000000000000000000000000000000000000000000".to_string(),
    ])
    .await
    .unwrap();
}

#[tokio::test]
async fn it_disassociates() {
    let mut url = get_url().await;

    url.disassociate(vec![
        "0000000000000000000000000000000000000000000000000000000000000000".to_string(),
    ])
    .await
    .unwrap();
}