diff --git a/.idea/discord.xml b/.idea/discord.xml
index 30bab2a..d8e9561 100644
--- a/.idea/discord.xml
+++ b/.idea/discord.xml
diff --git a/src/client.rs b/src/client.rs
index 17e6de6..0c92713 100644
--- a/src/client.rs
+++ b/src/client.rs
@@ -29,21 +29,24 @@ impl Client {
 
     /// Returns the options of the PTR
     #[tracing::instrument(skip(self), level = "debug")]
-    pub async fn options(&self) -> Result<OptionsResponse> {
+    pub async fn get_options(&self) -> Result<OptionsResponse> {
         self.get::<OptionsEndpoint, _>(&()).await
     }
 
     /// Returns information about all available updates since the given ID
     /// and when the next check for updates should be made
    #[tracing::instrument(skip(self), level = "debug")]
-    pub async fn metadata(&self, since: u64) -> Result<MetadataResponse> {
+    pub async fn get_metadata(&self, since: u64) -> Result<MetadataResponse> {
         self.get::<MetadataEndpoint, _>(&[("since", since)]).await
     }
 
     /// Returns the parsed update file identified by the given hash.
     /// The hash can be retrieved by fetching the metadata with [Client::metadata]
     #[tracing::instrument(skip(self), level = "debug")]
-    pub async fn update<S: AsRef<str> + Debug>(&self, update_hash: S) -> Result<UpdateResponse> {
+    pub async fn get_update<S: AsRef<str> + Debug>(
+        &self,
+        update_hash: S,
+    ) -> Result<UpdateResponse> {
         self.get::<UpdateEndpoint, _>(&[("update_hash", update_hash.as_ref())])
             .await
     }
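// Illustrative sketch, not part of the diff: how the renamed client methods chain
// together after this change. The `use` paths, the `Client` re-export and the
// `update_hashes()` accessor on the metadata response are placeholders inferred from
// the surrounding code, not API confirmed by this patch.
use crate::{Client, Result, UpdateResponse};

async fn fetch_updates_since(client: &Client, since: u64) -> Result<Vec<UpdateResponse>> {
    // get_metadata (formerly `metadata`) lists the updates published after `since`
    let metadata = client.get_metadata(since).await?;

    let mut updates = Vec::new();
    // hypothetical accessor: however the metadata response exposes its update hashes
    for hash in metadata.update_hashes() {
        // get_update (formerly `update`) accepts anything that is AsRef<str> + Debug
        updates.push(client.get_update(hash).await?);
    }

    Ok(updates)
}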
diff --git a/src/constants.rs b/src/constants.rs
index fe5a3f2..bbf0fae 100644
--- a/src/constants.rs
+++ b/src/constants.rs
@@ -1,5 +1,6 @@
 #![allow(unused)]
 
+// serializable
 pub const HYDRUS_TYPE_BASE: u64 = 0;
 pub const HYDRUS_TYPE_BASE_NAMED: u64 = 1;
 pub const HYDRUS_TYPE_SHORTCUT_SET: u64 = 2;
@@ -109,3 +110,27 @@ pub const HYDRUS_TYPE_GUI_SESSION_PAGE_DATA: u64 = 105;
 pub const HYDRUS_TYPE_GUI_SESSION_CONTAINER_PAGE_NOTEBOOK: u64 = 106;
 pub const HYDRUS_TYPE_GUI_SESSION_CONTAINER_PAGE_SINGLE: u64 = 107;
 pub const HYDRUS_TYPE_PRESENTATION_IMPORT_OPTIONS: u64 = 108;
+
+// content types
+pub const CONTENT_TYPE_MAPPINGS: u64 = 0;
+pub const CONTENT_TYPE_TAG_SIBLINGS: u64 = 1;
+pub const CONTENT_TYPE_TAG_PARENTS: u64 = 2;
+pub const CONTENT_TYPE_FILES: u64 = 3;
+pub const CONTENT_TYPE_RATINGS: u64 = 4;
+pub const CONTENT_TYPE_MAPPING: u64 = 5;
+pub const CONTENT_TYPE_DIRECTORIES: u64 = 6;
+pub const CONTENT_TYPE_URLS: u64 = 7;
+pub const CONTENT_TYPE_VETO: u64 = 8;
+pub const CONTENT_TYPE_ACCOUNTS: u64 = 9;
+pub const CONTENT_TYPE_OPTIONS: u64 = 10;
+pub const CONTENT_TYPE_SERVICES: u64 = 11;
+pub const CONTENT_TYPE_UNKNOWN: u64 = 12;
+pub const CONTENT_TYPE_ACCOUNT_TYPES: u64 = 13;
+pub const CONTENT_TYPE_VARIABLE: u64 = 14;
+pub const CONTENT_TYPE_HASH: u64 = 15;
+pub const CONTENT_TYPE_TIMESTAMP: u64 = 16;
+pub const CONTENT_TYPE_TITLE: u64 = 17;
+pub const CONTENT_TYPE_NOTES: u64 = 18;
+pub const CONTENT_TYPE_FILE_VIEWING_STATS: u64 = 19;
+pub const CONTENT_TYPE_TAG: u64 = 20;
+pub const CONTENT_TYPE_DEFINITIONS: u64 = 21;
diff --git a/src/endpoints/update.rs b/src/endpoints/update.rs
index 485ca3b..376f052 100644
--- a/src/endpoints/update.rs
+++ b/src/endpoints/update.rs
@@ -1,7 +1,12 @@
+use crate::hydrus_serializable::content_update::{
+    ContentUpdatesAndAction, HydrusContentUpdate, MappingsUpdateEntry, TagParentsUpdateEntry,
+    TagSiblingsUpdateEntry,
+};
 use crate::hydrus_serializable::definitions_update::{
     HashDefinition, HydrusDefinitionsUpdate, TagDefinition,
 };
 use crate::hydrus_serializable::wrapper::GenericHydrusSerWrapper;
+use crate::Error::Malformed;
 use crate::Result;
 use crate::{Endpoint, Error, FromJson, GetEndpoint};
 use serde_json::Value;
@@ -22,12 +27,7 @@ impl GetEndpoint for UpdateEndpoint {
 #[derive(Clone, Debug)]
 pub enum UpdateResponse {
     Definitions(DefinitionsUpdateResponse),
-}
-
-#[derive(Clone, Debug)]
-pub struct DefinitionsUpdateResponse {
-    pub hashes: HashMap<u64, String>,
-    pub tags: HashMap<u64, String>,
+    Content(ContentUpdateResponse),
 }
 
 impl FromJson for UpdateResponse {
@@ -37,6 +37,11 @@
     {
         let wrapper = serde_json::from_value::<GenericHydrusSerWrapper>(value)?;
         match wrapper.type_id {
+            34 => {
+                let content_update = ContentUpdateResponse::from_wrapper(wrapper)?;
+
+                Ok(Self::Content(content_update))
+            }
             36 => {
                 let definitions_update = DefinitionsUpdateResponse::from_wrapper(wrapper)?;
@@ -47,20 +52,147 @@
     }
 }
 
-impl DefinitionsUpdateResponse {
+trait FromWrapper {
+    fn from_wrapper(wrapper: GenericHydrusSerWrapper) -> Result<Self>
+    where
+        Self: Sized;
+}
+
+#[derive(Clone, Debug)]
+pub struct DefinitionsUpdateResponse {
+    pub hashes: HashMap<u64, String>,
+    pub tags: HashMap<u64, String>,
+}
+
+impl FromWrapper for DefinitionsUpdateResponse {
     fn from_wrapper(wrapper: GenericHydrusSerWrapper) -> Result<Self> {
         let mut definitions_update = wrapper.into_inner::<HydrusDefinitionsUpdate>()?;
 
         let hashes = definitions_update
             .take::<HashDefinition>()?
             .map(|h| h.into_iter().map(|h| (h.id, h.hash)).collect())
-            .unwrap_or_else(|| HashMap::new());
+            .unwrap_or_default();
 
         let tags = definitions_update
             .take::<TagDefinition>()?
             .map(|t| t.into_iter().map(|t| (t.id, t.tag)).collect())
-            .unwrap_or_else(|| HashMap::new());
+            .unwrap_or_default();
 
         Ok(Self { hashes, tags })
     }
 }
+
+#[derive(Clone, Debug)]
+pub struct ContentUpdateResponse {
+    pub mappings: HashMap<ContentUpdateAction, HashMap<u64, Vec<u64>>>,
+    pub tag_parents: HashMap<ContentUpdateAction, HashMap<u64, u64>>,
+    pub tag_siblings: HashMap<ContentUpdateAction, HashMap<u64, u64>>,
+}
+
+impl FromWrapper for ContentUpdateResponse {
+    fn from_wrapper(wrapper: GenericHydrusSerWrapper) -> Result<Self> {
+        let mut content_update = wrapper.into_inner::<HydrusContentUpdate>()?;
+
+        let mappings = content_update
+            .take::<MappingsUpdateEntry>()?
+            .map(Self::map_mappings_update)
+            .unwrap_or_default();
+
+        let tag_parents = content_update
+            .take::<TagParentsUpdateEntry>()?
+            .map(Self::map_tag_parents_update)
+            .unwrap_or_default();
+
+        let tag_siblings = content_update
+            .take::<TagSiblingsUpdateEntry>()?
+            .map(Self::map_tag_siblings_update)
+            .unwrap_or_default();
+
+        Ok(Self {
+            mappings,
+            tag_parents,
+            tag_siblings,
+        })
+    }
+}
+
+impl ContentUpdateResponse {
+    fn map_mappings_update(
+        update: Vec<ContentUpdatesAndAction<MappingsUpdateEntry>>,
+    ) -> HashMap<ContentUpdateAction, HashMap<u64, Vec<u64>>> {
+        update
+            .into_iter()
+            .filter_map(Self::map_update_and_action)
+            .map(|(action, entries)| {
+                (
+                    action,
+                    entries
+                        .into_iter()
+                        .map(|e| (e.tag_id, e.hash_ids))
+                        .collect::<HashMap<u64, Vec<u64>>>(),
+                )
+            })
+            .collect()
+    }
+
+    fn map_tag_parents_update(
+        update: Vec<ContentUpdatesAndAction<TagParentsUpdateEntry>>,
+    ) -> HashMap<ContentUpdateAction, HashMap<u64, u64>> {
+        update
+            .into_iter()
+            .filter_map(Self::map_update_and_action::<TagParentsUpdateEntry>)
+            .map(|(action, entries)| {
+                (
+                    action,
+                    entries
+                        .into_iter()
+                        .map(|e| (e.child_id, e.parent_id))
+                        .collect::<HashMap<u64, u64>>(),
+                )
+            })
+            .collect()
+    }
+
+    fn map_tag_siblings_update(
+        update: Vec<ContentUpdatesAndAction<TagSiblingsUpdateEntry>>,
+    ) -> HashMap<ContentUpdateAction, HashMap<u64, u64>> {
+        update
+            .into_iter()
+            .filter_map(Self::map_update_and_action::<TagSiblingsUpdateEntry>)
+            .map(|(a, entries)| {
+                (
+                    a,
+                    entries
+                        .into_iter()
+                        .map(|e| (e.tag_id, e.sibling_id))
+                        .collect::<HashMap<u64, u64>>(),
+                )
+            })
+            .collect()
+    }
+
+    fn map_update_and_action<T>(
+        entry: ContentUpdatesAndAction<T>,
+    ) -> Option<(ContentUpdateAction, Vec<T>)> {
+        Some((
+            ContentUpdateAction::from_number(entry.action).ok()?,
+            entry.updates,
+        ))
+    }
+}
+
+#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub enum ContentUpdateAction {
+    Add = 0,
+    Delete = 1,
+}
+
+impl ContentUpdateAction {
+    pub fn from_number(num: u64) -> Result<Self> {
+        match num {
+            0 => Ok(Self::Add),
+            1 => Ok(Self::Delete),
+            _ => Err(Malformed),
+        }
+    }
+}
diff --git a/src/hydrus_serializable/content_update.rs b/src/hydrus_serializable/content_update.rs
new file mode 100644
index 0000000..3d7609e
--- /dev/null
+++ b/src/hydrus_serializable/content_update.rs
@@ -0,0 +1,84 @@
+use crate::constants::{
+    CONTENT_TYPE_MAPPINGS, CONTENT_TYPE_TAG_PARENTS, CONTENT_TYPE_TAG_SIBLINGS,
+    HYDRUS_TYPE_CONTENT_UPDATE,
+};
+use crate::hydrus_serializable::HydrusSerializable;
+use crate::{Error, Result};
+use serde::de::DeserializeOwned;
+use serde::Deserialize;
+use serde_json::Value;
+
+#[derive(Clone, Debug, Deserialize)]
+pub struct HydrusContentUpdate(Vec<ContentUpdateEntries>);
+
+impl HydrusSerializable for HydrusContentUpdate {
+    fn type_id() -> u64 {
+        HYDRUS_TYPE_CONTENT_UPDATE
+    }
+}
+
+impl HydrusContentUpdate {
+    pub fn take<U: ContentUpdateTrait>(
+        &mut self,
+    ) -> Result<Option<Vec<ContentUpdatesAndAction<U>>>> {
+        if let Some(index) = self.0.iter().position(|e| e.content_type == U::type_id()) {
+            let entry = self.0.swap_remove(index);
+
+            serde_json::from_value(entry.entries).map_err(Error::from)
+        } else {
+            Ok(None)
+        }
+    }
+}
+
+#[derive(Clone, Debug, Deserialize)]
+pub struct ContentUpdateEntries {
+    pub content_type: u64,
+    pub entries: Value,
+}
+
+#[derive(Clone, Debug, Deserialize)]
+pub struct ContentUpdatesAndAction<T> {
+    pub action: u64,
+    pub updates: Vec<T>,
+}
+
+pub trait ContentUpdateTrait: DeserializeOwned {
+    fn type_id() -> u64;
+}
+
+#[derive(Clone, Debug, Deserialize)]
+pub struct MappingsUpdateEntry {
+    pub tag_id: u64,
+    pub hash_ids: Vec<u64>,
+}
+
+impl ContentUpdateTrait for MappingsUpdateEntry {
+    fn type_id() -> u64 {
+        CONTENT_TYPE_MAPPINGS
+    }
+}
+
+#[derive(Clone, Debug, Deserialize)]
+pub struct TagParentsUpdateEntry {
+    pub child_id: u64,
+    pub parent_id: u64,
+}
+
+impl ContentUpdateTrait for TagParentsUpdateEntry {
+    fn type_id() -> u64 {
+        CONTENT_TYPE_TAG_PARENTS
+    }
+}
+
+#[derive(Clone, Debug, Deserialize)]
+pub struct TagSiblingsUpdateEntry {
+    pub tag_id: u64,
+    pub sibling_id: u64,
+}
+
+impl ContentUpdateTrait for TagSiblingsUpdateEntry {
+    fn type_id() -> u64 {
+        CONTENT_TYPE_TAG_SIBLINGS
+    }
+}
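// Illustrative sketch, not part of the diff: exercising HydrusContentUpdate::take
// against hand-written JSON. The object-keyed sample below is made-up data that merely
// satisfies the derived Deserialize impls above; it is not the actual PTR wire format.
use crate::hydrus_serializable::content_update::{HydrusContentUpdate, MappingsUpdateEntry};
use crate::Result;

fn take_mappings_example() -> Result<()> {
    let value = serde_json::json!([
        {
            "content_type": 0, // CONTENT_TYPE_MAPPINGS
            "entries": [
                { "action": 0, "updates": [{ "tag_id": 7, "hash_ids": [1, 2, 3] }] }
            ]
        }
    ]);
    let mut update: HydrusContentUpdate = serde_json::from_value(value)?;

    // take() removes and deserializes the block whose content_type matches
    // MappingsUpdateEntry::type_id(); asking for the same type again yields Ok(None)
    let mappings = update.take::<MappingsUpdateEntry>()?;
    assert!(mappings.is_some());
    assert!(update.take::<MappingsUpdateEntry>()?.is_none());

    Ok(())
}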
diff --git a/src/hydrus_serializable/mod.rs b/src/hydrus_serializable/mod.rs
index f0fbfba..a457e92 100644
--- a/src/hydrus_serializable/mod.rs
+++ b/src/hydrus_serializable/mod.rs
@@ -6,6 +6,7 @@ use serde_json::Value;
 use std::fmt::Formatter;
 use std::marker::PhantomData;
 
+pub mod content_update;
 pub mod definitions_update;
 pub mod dictionary;
 pub mod metadata;
diff --git a/tests/endpoints.rs b/tests/endpoints.rs
index 83ac4cd..59798e1 100644
--- a/tests/endpoints.rs
+++ b/tests/endpoints.rs
@@ -3,20 +3,24 @@ mod common;
 #[tokio::test]
 async fn test_options() {
     let client = common::get_client();
-    client.options().await.unwrap();
+    client.get_options().await.unwrap();
 }
 
 #[tokio::test]
 async fn test_metadata() {
     let client = common::get_client();
-    client.metadata(0).await.unwrap();
+    client.get_metadata(0).await.unwrap();
 }
 
+const DEFINITIONS_UPDATE_HASH: &str =
+    "4a4d13c1fcdf0cf734927ec4c9637fdac6144512ad7dc919e0f222e7b0e71586";
+const CONTENT_UPDATE_HASH: &str =
+    "cd1418ffeba0b8fe46aefa51a7adf1210356523ead658b182762ff61b73ebae5";
+
 #[tokio::test]
 async fn test_update() {
     let client = common::get_client();
-    client
-        .update("4a4d13c1fcdf0cf734927ec4c9637fdac6144512ad7dc919e0f222e7b0e71586")
-        .await
-        .unwrap();
+
+    client.get_update(DEFINITIONS_UPDATE_HASH).await.unwrap();
+    client.get_update(CONTENT_UPDATE_HASH).await.unwrap();
 }
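// Illustrative sketch, not part of the diff: distinguishing the two UpdateResponse
// variants that test_update now fetches. Field types follow the structs added in
// src/endpoints/update.rs; the `crate::endpoints::update` path is assumed from the
// file location, and the printed summary is arbitrary.
use std::collections::HashMap;

use crate::endpoints::update::{ContentUpdateAction, UpdateResponse};

fn summarize(update: &UpdateResponse) {
    match update {
        UpdateResponse::Definitions(def) => {
            // definitions updates map numeric ids to hashes and tag strings
            println!("definitions: {} hashes, {} tags", def.hashes.len(), def.tags.len());
        }
        UpdateResponse::Content(content) => {
            // content updates are keyed by ContentUpdateAction (Add / Delete)
            let added_mappings: usize = content
                .mappings
                .get(&ContentUpdateAction::Add)
                .map(|by_tag| by_tag.values().map(Vec::len).sum())
                .unwrap_or(0);
            println!(
                "content: {} added mappings, {} parent pairs, {} sibling pairs",
                added_mappings,
                content.tag_parents.values().map(HashMap::len).sum::<usize>(),
                content.tag_siblings.values().map(HashMap::len).sum::<usize>(),
            );
        }
    }
}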