diff --git a/Cargo.lock b/Cargo.lock
index 9a87f48..0e26368 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -200,6 +200,7 @@ name = "destools"
 version = "0.1.0"
 dependencies = [
  "base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "cfb-mode 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "des 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "pbr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
diff --git a/Cargo.toml b/Cargo.toml
index 3f3962f..8e1d9bb 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -17,4 +17,5 @@ base64 = "0.11.0"
 rayon = "1.3.0"
 pbr = "1.0.2"
 spinners = "1.2.0"
-regex = "1.3.4"
\ No newline at end of file
+regex = "1.3.4"
+byteorder = "1.3.4"
diff --git a/src/lib/mod.rs b/src/lib/mod.rs
index 58298f6..ff53bc6 100644
--- a/src/lib/mod.rs
+++ b/src/lib/mod.rs
@@ -1,2 +1,3 @@
 pub mod crypt;
-pub mod hash;
\ No newline at end of file
+pub mod hash;
+pub mod rainbowutils;
diff --git a/src/lib/rainbowutils.rs b/src/lib/rainbowutils.rs
new file mode 100644
index 0000000..5b370b4
--- /dev/null
+++ b/src/lib/rainbowutils.rs
@@ -0,0 +1,120 @@
+use byteorder::{BigEndian, ByteOrder, ReadBytesExt};
+use rand::AsByteSliceMut;
+use std::collections::HashMap;
+use std::fs::File;
+use std::io::{BufReader, Read};
+use std::io::{Error, ErrorKind};
+
+pub const BDF_HDR: &[u8; 11] = b"BDF\x01RAINBOW";
+pub const NULL_BYTES: &[u8; 4] = &[0u8; 4];
+
+pub struct BinaryDictionaryFile {
+    name: String,
+    reader: BufReader<File>,
+    metadata: Option<MetaChunk>,
+}
+
+#[derive(Debug, Clone)]
+pub struct GenericChunk {
+    length: u32,
+    name: String,
+    data: Vec<u8>,
+    crc: u32,
+}
+
+#[derive(Debug, Clone)]
+pub struct MetaChunk {
+    chunk_count: u32,
+    entries_per_chunk: u32,
+    entry_count: u32,
+    compression_method: Option<String>,
+}
+
+#[derive(Debug, Clone)]
+pub struct HashEntry {
+    id: u32,
+    output_length: u32,
+    name: String,
+}
+
+#[derive(Debug, Clone)]
+pub struct DataEntry {
+    plain: String,
+    hashes: HashMap<String, Vec<u8>>,
+}
+
+impl BinaryDictionaryFile {
+    fn new(reader: BufReader<File>) -> Self {
+        Self {
+            name: "".to_string(),
+            metadata: None,
+            reader,
+        }
+    }
+
+    fn read_metadata(&mut self) -> Result<MetaChunk, Error> {
+        if !self.validate_header() {
+            return Err(Error::new(ErrorKind::InvalidData, "Invalid BDF Header!"));
+        }
+        let meta_chunk = self.next_chunk().as_meta_chunk();
+        self.metadata = Some(meta_chunk.clone());
+
+        Ok(meta_chunk)
+    }
+
+    fn validate_header(&mut self) -> bool {
+        let mut header = [0u8; 11];
+        let _ = self.reader.read(&mut header);
+
+        header == BDF_HDR.as_ref()
+    }
+
+    fn next_chunk(&mut self) -> GenericChunk {
+        let mut length_raw = [0u8; 4];
+        let _ = self.reader.read(&mut length_raw);
+        let length = BigEndian::read_u32(&mut length_raw);
+        let mut name_raw = [0u8; 4];
+        let _ = self.reader.read(&mut name_raw);
+        let name =
+            String::from_utf8(name_raw.to_vec()).expect("Failed to parse chunk name to string!");
+        let mut data = vec![0u8; length as usize];
+        let _ = self.reader.read(&mut data);
+        let mut crc_raw = [0u8; 4];
+        let _ = self.reader.read(&mut crc_raw);
+        let crc = BigEndian::read_u32(&mut crc_raw);
+
+        GenericChunk {
+            length,
+            name,
+            data,
+            crc,
+        }
+    }
+}
+
+impl GenericChunk {
+    fn as_meta_chunk(&self) -> MetaChunk {
+        let mut chunk_count_raw = self.data[0..4].to_vec();
+        let mut entries_per_chunk = self.data[4..8].to_vec();
+        let mut total_number_of_entries = self.data[8..12].to_vec();
+        let mut compression_method_raw = self.data[12..16].to_vec();
+        let chunk_count = BigEndian::read_u32(&mut chunk_count_raw);
+        let entries_per_chunk = BigEndian::read_u32(&mut entries_per_chunk);
+        let entry_count = BigEndian::read_u32(&mut total_number_of_entries);
+        let compression_method = if &compression_method_raw != NULL_BYTES {
+            Some(
+                String::from_utf8(compression_method_raw.to_vec())
+                    .expect("Failed to parse compression method from meta string"),
+            )
+        } else {
+            None
+        };
+
+        MetaChunk {
+            chunk_count,
+            entries_per_chunk,
+            entry_count,
+            compression_method,
+        }
+    }
+}
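
Not part of the diff: a minimal standalone sketch of how the 16-byte payload of a BDF
meta chunk decodes into the MetaChunk fields, mirroring GenericChunk::as_meta_chunk
above. The sample byte values are invented for illustration.

    use byteorder::{BigEndian, ByteOrder};

    const NULL_BYTES: &[u8; 4] = &[0u8; 4];

    fn main() {
        // Four fields, sliced and decoded exactly as as_meta_chunk does.
        let payload: [u8; 16] = [
            0x00, 0x00, 0x00, 0x02, // chunk_count        = 2
            0x00, 0x00, 0x03, 0xE8, // entries_per_chunk  = 1000
            0x00, 0x00, 0x05, 0xDC, // entry_count        = 1500
            0x00, 0x00, 0x00, 0x00, // compression method = NULL_BYTES -> None
        ];

        let chunk_count = BigEndian::read_u32(&payload[0..4]);
        let entries_per_chunk = BigEndian::read_u32(&payload[4..8]);
        let entry_count = BigEndian::read_u32(&payload[8..12]);
        let compression_method_raw = payload[12..16].to_vec();
        let compression_method = if &compression_method_raw != NULL_BYTES {
            Some(String::from_utf8(compression_method_raw).expect("invalid compression method"))
        } else {
            None
        };

        assert_eq!(chunk_count, 2);
        assert_eq!(entries_per_chunk, 1000);
        assert_eq!(entry_count, 1500);
        assert_eq!(compression_method, None);
    }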