Update Readme and add utility functions

master
Trivernis 5 years ago
parent 1d1b279e75
commit 83c776fb4f

@@ -1,6 +1,6 @@
 [package]
 name = "bdflib"
-version = "0.1.4"
+version = "0.1.5"
 authors = ["trivernis <trivernis@gmail.com>"]
 edition = "2018"
 license-file = "LICENSE"

@@ -2,6 +2,54 @@
This library provides methods to read and write Binary Dictionary Format files that can be used to represent rainbow tables.
## Usage
### Read
```rust
use bdf::io::BDFReader;
use std::fs::File;
use std::io::BufReader;

fn main() {
    let f = File::open("dictionary.bdf").unwrap();
    let buf_reader = BufReader::new(f);
    let mut bdf_reader = BDFReader::new(buf_reader);
    // the metadata chunk must be read before the lookup table
    bdf_reader.read_metadata().unwrap();
    // clone the table to release the borrow on the reader
    let lookup_table = bdf_reader.read_lookup_table().unwrap().clone();
    // iterate over the data chunks until a read fails (e.g. EOF)
    while let Ok(mut next_chunk) = bdf_reader.next_chunk() {
        if let Ok(entries) = next_chunk.data_entries(&lookup_table) {
            println!("{:?}", entries);
        }
    }
}
```
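
This commit also introduces `read_start()`, which performs `read_metadata()` and `read_lookup_table()` in one call. A minimal sketch of the same setup using it (the `lookup_table` field is an `Option`, hence the `unwrap`):

```rust
bdf_reader.read_start().unwrap();
let lookup_table = bdf_reader.lookup_table.clone().unwrap();
```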
### Write
```rust
use bdf::chunks::{DataEntry, HashEntry};
use bdf::io::BDFWriter;
use std::fs::File;
use std::io::BufWriter;

fn main() {
    let f = File::create("dictionary.bdf").unwrap();
    let buf_writer = BufWriter::new(f);
    // the total number of data entries that will be written
    let entry_count = 1;
    // `false` turns lzma compression of the data chunks off
    let mut bdf_writer = BDFWriter::new(buf_writer, entry_count, false);
    // register the hash function in the lookup table ("fakehash" produces 3-byte values)
    bdf_writer
        .add_lookup_entry(HashEntry::new("fakehash".into(), 3))
        .unwrap();
    let mut entry = DataEntry::new("foo".into());
    entry.add_hash_value("fakehash".into(), vec![0, 2, 3]);
    bdf_writer.add_data_entry(entry).unwrap();
    // write the buffered chunk data, then flush the underlying writer
    bdf_writer.flush().unwrap();
    bdf_writer.flush_writer().unwrap();
    println!("Finished writing!");
}
```
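
The `finish()` method added in this commit combines the two final flush calls, so the end of the example could equivalently read:

```rust
// flushes the buffered chunk data and the writer in one call
bdf_writer.finish().unwrap();
```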
## Binary Dictionary File Format (bdf)
```

@@ -1,14 +1,13 @@
 use super::chunks::*;
-use std::io::{Write, BufWriter, ErrorKind, BufReader, Read};
-use std::fs::File;
-use std::collections::HashMap;
-use std::io::Error;
 use byteorder::{BigEndian, ByteOrder};
+use std::collections::HashMap;
+use std::convert::TryInto;
+use std::fs::File;
+use std::io::Error;
+use std::io::{BufReader, BufWriter, ErrorKind, Read, Write};

 const ENTRIES_PER_CHUNK: u32 = 100_000;

 pub struct BDFReader {
     reader: BufReader<File>,
     pub metadata: Option<MetaChunk>,
@@ -16,7 +15,6 @@ pub struct BDFReader {
     compressed: bool,
 }

-
 pub struct BDFWriter {
     writer: BufWriter<File>,
     metadata: MetaChunk,
@@ -28,7 +26,14 @@ pub struct BDFWriter {
 }

 impl BDFWriter {
-
+    /// Creates a new BDFWriter.
+    /// The number for `entry_count` should be the total number of entries.
+    /// This is required since the META chunk containing the information is the
+    /// first chunk to be written.
+    /// The number of entries can be used in tools that provide a progress
+    /// bar for how many entries were read.
+    /// If the `compress` parameter is true, each data chunk will be compressed
+    /// using lzma with a default level of 1.
     pub fn new(writer: BufWriter<File>, entry_count: u64, compress: bool) -> Self {
         Self {
             metadata: MetaChunk::new(entry_count, ENTRIES_PER_CHUNK, compress),
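
As a usage sketch of the constructor documented above (the file name, entry count, and compression level here are illustrative, not from the source):

```rust
use bdf::io::BDFWriter;
use std::fs::File;
use std::io::BufWriter;

fn main() {
    // hypothetical values, for illustration only
    let f = File::create("compressed.bdf").unwrap();
    // `true` enables lzma compression of each data chunk (default level 1)
    let mut writer = BDFWriter::new(BufWriter::new(f), 100, true);
    // optionally raise the compression level
    writer.set_compression_level(6);
}
```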
@@ -70,7 +75,6 @@ impl BDFWriter {
     }

-
     /// Writes the data to the file
     pub fn flush(&mut self) -> Result<(), Error> {
         if !self.head_written {
             self.writer.write(BDF_HDR)?;
@@ -98,6 +102,15 @@ impl BDFWriter {
         self.writer.flush()
     }

+    /// Flushes the buffered chunk data and the writer
+    /// to finish the file.
+    pub fn finish(&mut self) -> Result<(), Error> {
+        self.flush()?;
+        self.flush_writer()?;
+
+        Ok(())
+    }
+
     /// Sets the compression level for lzma compression
     pub fn set_compression_level(&mut self, level: u32) {
         self.compression_level = level;
@@ -107,16 +120,19 @@ impl BDFWriter {
     /// Returns an error if the metadata has already been written.
     pub fn set_entries_per_chunk(&mut self, number: u32) -> Result<(), Error> {
         if self.head_written {
-            return Err(Error::new(ErrorKind::Other, "the head has already been written"))
+            return Err(Error::new(
+                ErrorKind::Other,
+                "the head has already been written",
+            ));
         }
         self.metadata.entries_per_chunk = number;
-        self.metadata.chunk_count = (self.metadata.entry_count as f64 / number as f64).ceil() as u32;
+        self.metadata.chunk_count =
+            (self.metadata.entry_count as f64 / number as f64).ceil() as u32;
         Ok(())
     }
 }

 impl BDFReader {
-
     /// Creates a new BDFReader
     pub fn new(reader: BufReader<File>) -> Self {
         Self {
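
A worked instance of the `chunk_count` computation above, with hypothetical numbers:

```rust
// 250_000 entries split into chunks of 100_000 entries each
let entry_count = 250_000u64;
let number = 100_000u32;
let chunk_count = (entry_count as f64 / number as f64).ceil() as u32;
assert_eq!(chunk_count, 3); // ceil(2.5) = 3
```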
@@ -127,6 +143,14 @@ impl BDFReader {
         }
     }

+    /// Reads the metadata and lookup table
+    pub fn read_start(&mut self) -> Result<(), Error> {
+        self.read_metadata()?;
+        self.read_lookup_table()?;
+
+        Ok(())
+    }
+
     /// Verifies the header of the file and reads and stores the metadata
     pub fn read_metadata(&mut self) -> Result<&MetaChunk, Error> {
         if !self.validate_header() {

@@ -2,10 +2,10 @@
 mod tests {
     use super::io::BDFWriter;
-    use std::io::{BufWriter, Error, BufReader};
-    use std::fs::{File, remove_file};
-    use crate::chunks::{HashEntry, DataEntry};
+    use crate::chunks::{DataEntry, HashEntry};
     use crate::io::BDFReader;
+    use std::fs::{remove_file, File};
+    use std::io::{BufReader, BufWriter, Error};

     const FOO: &str = "foo";
     const BAR: &str = "bar";
@@ -53,8 +53,7 @@ mod tests {
         entry_2.add_hash_value(FOO.to_string(), vec![4, 5, 2, 3]);
         writer.add_data_entry(entry_2)?;

-        writer.flush()?;
-        writer.flush_writer()?;
+        writer.finish()?;

         remove_file("tmp2.bdf")?;
@@ -65,8 +64,8 @@ mod tests {
     fn it_reads() -> Result<(), Error> {
         create_simple_file("tmp3.bdf", false)?;
         let mut reader = new_reader("tmp3.bdf")?;
-        reader.read_metadata()?;
-        let lookup_table = &reader.read_lookup_table()?.clone();
+        reader.read_start()?;
+        let lookup_table = &reader.lookup_table.clone().unwrap();
         let mut next_chunk = reader.next_chunk()?;
         let data_entries = next_chunk.data_entries(lookup_table)?;
         assert_eq!(data_entries[0].plain, "lol".to_string());
