Add content update response

Signed-off-by: trivernis <trivernis@protonmail.com>
main
trivernis 2 years ago
parent 11a2807116
commit f0b669991d
Signed by: Trivernis
GPG Key ID: DFFFCC2C7A02DB45

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <!-- Discord rich-presence plugin settings: show activity for project files only -->
  <component name="DiscordProjectSettings">
    <option name="show" value="PROJECT_FILES" />
    <option name="description" value="" />
  </component>
</project>

@ -29,21 +29,24 @@ impl Client {
/// Returns the options of the PTR
#[tracing::instrument(skip(self), level = "debug")]
pub async fn options(&self) -> Result<OptionsResponse> {
pub async fn get_options(&self) -> Result<OptionsResponse> {
self.get::<OptionsEndpoint, ()>(&()).await
}
/// Returns information about all available updates since the given ID
/// and when the next check for updates should be made
#[tracing::instrument(skip(self), level = "debug")]
pub async fn metadata(&self, since: u64) -> Result<MetadataResponse> {
pub async fn get_metadata(&self, since: u64) -> Result<MetadataResponse> {
self.get::<MetadataEndpoint, _>(&[("since", since)]).await
}
/// Returns the parsed update file identified by the given hash.
/// The hash can be retrieved by fetching the metadata with [Client::metadata]
#[tracing::instrument(skip(self), level = "debug")]
pub async fn update<S: AsRef<str> + Debug>(&self, update_hash: S) -> Result<UpdateResponse> {
pub async fn get_update<S: AsRef<str> + Debug>(
&self,
update_hash: S,
) -> Result<UpdateResponse> {
self.get::<UpdateEndpoint, _>(&[("update_hash", update_hash.as_ref())])
.await
}

@ -1,5 +1,6 @@
// The constant tables in this file mirror hydrus' own numeric ids; most of
// them are not referenced by this crate, hence the blanket allow.
#![allow(unused)]
// serializable object type ids
pub const HYDRUS_TYPE_BASE: u64 = 0;
pub const HYDRUS_TYPE_BASE_NAMED: u64 = 1;
pub const HYDRUS_TYPE_SHORTCUT_SET: u64 = 2;
@ -109,3 +110,27 @@ pub const HYDRUS_TYPE_GUI_SESSION_PAGE_DATA: u64 = 105;
pub const HYDRUS_TYPE_GUI_SESSION_CONTAINER_PAGE_NOTEBOOK: u64 = 106;
pub const HYDRUS_TYPE_GUI_SESSION_CONTAINER_PAGE_SINGLE: u64 = 107;
pub const HYDRUS_TYPE_PRESENTATION_IMPORT_OPTIONS: u64 = 108;
// content types: values of the `content_type` discriminator carried by
// content update entry groups (matched against ContentUpdateTrait::type_id)
pub const CONTENT_TYPE_MAPPINGS: u64 = 0;
pub const CONTENT_TYPE_TAG_SIBLINGS: u64 = 1;
pub const CONTENT_TYPE_TAG_PARENTS: u64 = 2;
pub const CONTENT_TYPE_FILES: u64 = 3;
pub const CONTENT_TYPE_RATINGS: u64 = 4;
pub const CONTENT_TYPE_MAPPING: u64 = 5;
pub const CONTENT_TYPE_DIRECTORIES: u64 = 6;
pub const CONTENT_TYPE_URLS: u64 = 7;
pub const CONTENT_TYPE_VETO: u64 = 8;
pub const CONTENT_TYPE_ACCOUNTS: u64 = 9;
pub const CONTENT_TYPE_OPTIONS: u64 = 10;
pub const CONTENT_TYPE_SERVICES: u64 = 11;
pub const CONTENT_TYPE_UNKNOWN: u64 = 12;
pub const CONTENT_TYPE_ACCOUNT_TYPES: u64 = 13;
pub const CONTENT_TYPE_VARIABLE: u64 = 14;
pub const CONTENT_TYPE_HASH: u64 = 15;
pub const CONTENT_TYPE_TIMESTAMP: u64 = 16;
pub const CONTENT_TYPE_TITLE: u64 = 17;
pub const CONTENT_TYPE_NOTES: u64 = 18;
pub const CONTENT_TYPE_FILE_VIEWING_STATS: u64 = 19;
pub const CONTENT_TYPE_TAG: u64 = 20;
pub const CONTENT_TYPE_DEFINITIONS: u64 = 21;

@ -1,7 +1,12 @@
use crate::hydrus_serializable::content_update::{
ContentUpdatesAndAction, HydrusContentUpdate, MappingsUpdateEntry, TagParentsUpdateEntry,
TagSiblingsUpdateEntry,
};
use crate::hydrus_serializable::definitions_update::{
HashDefinition, HydrusDefinitionsUpdate, TagDefinition,
};
use crate::hydrus_serializable::wrapper::GenericHydrusSerWrapper;
use crate::Error::Malformed;
use crate::Result;
use crate::{Endpoint, Error, FromJson, GetEndpoint};
use serde_json::Value;
@ -22,12 +27,7 @@ impl GetEndpoint for UpdateEndpoint {
#[derive(Clone, Debug)]
pub enum UpdateResponse {
Definitions(DefinitionsUpdateResponse),
}
#[derive(Clone, Debug)]
pub struct DefinitionsUpdateResponse {
pub hashes: HashMap<u64, String>,
pub tags: HashMap<u64, String>,
Content(ContentUpdateResponse),
}
impl FromJson for UpdateResponse {
@ -37,6 +37,11 @@ impl FromJson for UpdateResponse {
{
let wrapper = serde_json::from_value::<GenericHydrusSerWrapper>(value)?;
match wrapper.type_id {
34 => {
let content_update = ContentUpdateResponse::from_wrapper(wrapper)?;
Ok(Self::Content(content_update))
}
36 => {
let definitions_update = DefinitionsUpdateResponse::from_wrapper(wrapper)?;
@ -47,20 +52,147 @@ impl FromJson for UpdateResponse {
}
}
impl DefinitionsUpdateResponse {
/// Conversion from a generic hydrus serializable wrapper into a typed
/// response; implementors parse the wrapper's inner payload.
trait FromWrapper {
    /// Parses the wrapped payload, failing if it cannot be deserialized.
    fn from_wrapper(wrapper: GenericHydrusSerWrapper) -> Result<Self>
    where
        Self: Sized;
}
/// A parsed definitions update mapping numeric ids to the values they define.
#[derive(Clone, Debug)]
pub struct DefinitionsUpdateResponse {
    /// hash id -> hex hash string
    pub hashes: HashMap<u64, String>,
    /// tag id -> tag string
    pub tags: HashMap<u64, String>,
}
impl FromWrapper for DefinitionsUpdateResponse {
fn from_wrapper(wrapper: GenericHydrusSerWrapper) -> Result<Self> {
let mut definitions_update = wrapper.into_inner::<HydrusDefinitionsUpdate>()?;
let hashes = definitions_update
.take::<HashDefinition>()?
.map(|h| h.into_iter().map(|h| (h.id, h.hash)).collect())
.unwrap_or_else(|| HashMap::new());
.unwrap_or_default();
let tags = definitions_update
.take::<TagDefinition>()?
.map(|t| t.into_iter().map(|t| (t.id, t.tag)).collect())
.unwrap_or_else(|| HashMap::new());
.unwrap_or_default();
Ok(Self { hashes, tags })
}
}
/// A parsed content update with each content category grouped by its
/// add/delete action.
#[derive(Clone, Debug)]
pub struct ContentUpdateResponse {
    /// action -> tag id -> ids of the hashes mapped to that tag
    pub mappings: HashMap<ContentUpdateAction, HashMap<u64, Vec<u64>>>,
    /// action -> child tag id -> parent tag id
    pub tag_parents: HashMap<ContentUpdateAction, HashMap<u64, u64>>,
    /// action -> tag id -> sibling tag id
    pub tag_siblings: HashMap<ContentUpdateAction, HashMap<u64, u64>>,
}
impl FromWrapper for ContentUpdateResponse {
    /// Builds a content update response by draining the three known
    /// content categories out of the deserialized hydrus update.
    /// A category missing from the update yields an empty map.
    fn from_wrapper(wrapper: GenericHydrusSerWrapper) -> Result<Self> {
        let mut update = wrapper.into_inner::<HydrusContentUpdate>()?;

        Ok(Self {
            mappings: update
                .take::<MappingsUpdateEntry>()?
                .map(Self::map_mappings_update)
                .unwrap_or_default(),
            tag_parents: update
                .take::<TagParentsUpdateEntry>()?
                .map(Self::map_tag_parents_update)
                .unwrap_or_default(),
            tag_siblings: update
                .take::<TagSiblingsUpdateEntry>()?
                .map(Self::map_tag_siblings_update)
                .unwrap_or_default(),
        })
    }
}
impl ContentUpdateResponse {
    /// Groups mapping updates by action into `tag id -> hash ids` maps.
    fn map_mappings_update(
        update: Vec<ContentUpdatesAndAction<MappingsUpdateEntry>>,
    ) -> HashMap<ContentUpdateAction, HashMap<u64, Vec<u64>>> {
        Self::group_entries_by_action(update, |e| (e.tag_id, e.hash_ids))
    }

    /// Groups tag parent updates by action into `child id -> parent id` maps.
    fn map_tag_parents_update(
        update: Vec<ContentUpdatesAndAction<TagParentsUpdateEntry>>,
    ) -> HashMap<ContentUpdateAction, HashMap<u64, u64>> {
        Self::group_entries_by_action(update, |e| (e.child_id, e.parent_id))
    }

    /// Groups tag sibling updates by action into `tag id -> sibling id` maps.
    fn map_tag_siblings_update(
        update: Vec<ContentUpdatesAndAction<TagSiblingsUpdateEntry>>,
    ) -> HashMap<ContentUpdateAction, HashMap<u64, u64>> {
        Self::group_entries_by_action(update, |e| (e.tag_id, e.sibling_id))
    }

    /// Shared grouping logic for all three content categories: converts each
    /// action group into a map keyed by the id extracted via `to_key_value`.
    /// Groups with an unknown action number are silently skipped.
    fn group_entries_by_action<T, V>(
        update: Vec<ContentUpdatesAndAction<T>>,
        to_key_value: impl Fn(T) -> (u64, V),
    ) -> HashMap<ContentUpdateAction, HashMap<u64, V>> {
        update
            .into_iter()
            .filter_map(Self::map_update_and_action)
            .map(|(action, entries)| {
                (
                    action,
                    entries
                        .into_iter()
                        .map(|entry| to_key_value(entry))
                        .collect::<HashMap<u64, V>>(),
                )
            })
            .collect()
    }

    /// Converts the numeric action of a group into [ContentUpdateAction];
    /// returns `None` (so the group is dropped) for unknown action numbers.
    fn map_update_and_action<T>(
        entry: ContentUpdatesAndAction<T>,
    ) -> Option<(ContentUpdateAction, Vec<T>)> {
        Some((
            ContentUpdateAction::from_number(entry.action).ok()?,
            entry.updates,
        ))
    }
}
/// The action a group of content update entries applies.
/// `Eq` + `Hash` are derived so the action can key the response HashMaps.
#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub enum ContentUpdateAction {
    Add = 0,
    Delete = 1,
}
impl ContentUpdateAction {
    /// Maps the raw hydrus action number onto the action enum.
    ///
    /// Returns `Err(Malformed)` for every number other than 0 (add)
    /// or 1 (delete).
    pub fn from_number(num: u64) -> Result<Self> {
        if num == 0 {
            Ok(Self::Add)
        } else if num == 1 {
            Ok(Self::Delete)
        } else {
            Err(Malformed)
        }
    }
}

@ -0,0 +1,84 @@
use crate::constants::{
CONTENT_TYPE_MAPPINGS, CONTENT_TYPE_TAG_PARENTS, CONTENT_TYPE_TAG_SIBLINGS,
HYDRUS_TYPE_CONTENT_UPDATE,
};
use crate::hydrus_serializable::HydrusSerializable;
use crate::{Error, Result};
use serde::de::DeserializeOwned;
use serde::Deserialize;
use serde_json::Value;
/// Top-level content update payload: a list of entry groups, one per
/// content type.
#[derive(Clone, Debug, Deserialize)]
pub struct HydrusContentUpdate(Vec<ContentUpdateEntries>);

impl HydrusSerializable for HydrusContentUpdate {
    // numeric type id hydrus uses to tag this serializable
    fn type_id() -> u64 {
        HYDRUS_TYPE_CONTENT_UPDATE
    }
}
impl HydrusContentUpdate {
    /// Removes and parses the entry group whose content type matches `U`.
    ///
    /// Returns `Ok(None)` when no group of that content type is present and
    /// an error when the group exists but cannot be deserialized.
    pub fn take<U: ContentUpdateTrait>(
        &mut self,
    ) -> Result<Option<Vec<ContentUpdatesAndAction<U>>>> {
        match self.0.iter().position(|e| e.content_type == U::type_id()) {
            // swap_remove is fine here: the remaining group order is irrelevant
            Some(index) => {
                let entry = self.0.swap_remove(index);
                Ok(serde_json::from_value(entry.entries)?)
            }
            None => Ok(None),
        }
    }
}
/// One group of update entries for a single content type.
#[derive(Clone, Debug, Deserialize)]
pub struct ContentUpdateEntries {
    /// One of the CONTENT_TYPE_* constants; matched against
    /// `ContentUpdateTrait::type_id` when a group is taken.
    pub content_type: u64,
    /// Raw, still-untyped entries; parsed lazily by
    /// [`HydrusContentUpdate::take`].
    pub entries: Value,
}

/// A batch of updates of entry type `T` sharing one numeric action.
#[derive(Clone, Debug, Deserialize)]
pub struct ContentUpdatesAndAction<T> {
    /// raw action number (0 = add, 1 = delete per ContentUpdateAction)
    pub action: u64,
    pub updates: Vec<T>,
}

/// Marker trait tying a deserializable entry type to its content-type id.
pub trait ContentUpdateTrait: DeserializeOwned {
    fn type_id() -> u64;
}
/// A tag-to-hashes mapping entry of a content update.
#[derive(Clone, Debug, Deserialize)]
pub struct MappingsUpdateEntry {
    pub tag_id: u64,
    /// ids of the hashes the tag applies to
    pub hash_ids: Vec<u64>,
}

impl ContentUpdateTrait for MappingsUpdateEntry {
    fn type_id() -> u64 {
        CONTENT_TYPE_MAPPINGS
    }
}

/// A child-to-parent tag relationship entry of a content update.
#[derive(Clone, Debug, Deserialize)]
pub struct TagParentsUpdateEntry {
    pub child_id: u64,
    pub parent_id: u64,
}

impl ContentUpdateTrait for TagParentsUpdateEntry {
    fn type_id() -> u64 {
        CONTENT_TYPE_TAG_PARENTS
    }
}

/// A tag-to-sibling relationship entry of a content update.
#[derive(Clone, Debug, Deserialize)]
pub struct TagSiblingsUpdateEntry {
    pub tag_id: u64,
    pub sibling_id: u64,
}

impl ContentUpdateTrait for TagSiblingsUpdateEntry {
    fn type_id() -> u64 {
        CONTENT_TYPE_TAG_SIBLINGS
    }
}

@ -6,6 +6,7 @@ use serde_json::Value;
use std::fmt::Formatter;
use std::marker::PhantomData;
pub mod content_update;
pub mod definitions_update;
pub mod dictionary;
pub mod metadata;

@ -3,20 +3,24 @@ mod common;
// Smoke test: fetching the PTR options must succeed against the live endpoint.
#[tokio::test]
async fn test_options() {
    let client = common::get_client();
    client.get_options().await.unwrap();
}
// Smoke test: metadata since update id 0 returns the full update history.
#[tokio::test]
async fn test_metadata() {
    let client = common::get_client();
    client.get_metadata(0).await.unwrap();
}
// Known update-file hashes used as fixtures: one definitions update and one
// content update, so both UpdateResponse variants get exercised.
const DEFINITIONS_UPDATE_HASH: &str =
    "4a4d13c1fcdf0cf734927ec4c9637fdac6144512ad7dc919e0f222e7b0e71586";
const CONTENT_UPDATE_HASH: &str =
    "cd1418ffeba0b8fe46aefa51a7adf1210356523ead658b182762ff61b73ebae5";

#[tokio::test]
async fn test_update() {
    let client = common::get_client();
    client.get_update(DEFINITIONS_UPDATE_HASH).await.unwrap();
    client.get_update(CONTENT_UPDATE_HASH).await.unwrap();
}

Loading…
Cancel
Save