From 886ac5ee49ada1282a983901b218414d660f293d Mon Sep 17 00:00:00 2001
From: Trivernis
Date: Sat, 14 Mar 2020 18:17:18 +0100
Subject: [PATCH] Change new of BDFWriter/Reader to accept a File

---
 Cargo.toml | 2 +-
 README.md  | 6 ++----
 src/io.rs  | 9 +++++----
 src/lib.rs | 8 +++-----
 4 files changed, 11 insertions(+), 14 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 7065a4e..9c3e298 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "bdflib"
-version = "0.1.5"
+version = "0.2.0"
 authors = ["trivernis "]
 edition = "2018"
 license-file = "LICENSE"
diff --git a/README.md b/README.md
index bfcf472..3994ca7 100644
--- a/README.md
+++ b/README.md
@@ -13,8 +13,7 @@ use std::io::BufReader;
 
 fn main() {
     let f = File::open("dictionary.bdf").unwrap();
-    let buf_reader = BufReader::new(f);
-    let mut bdf_reader = BDFReader::new(buf_reader);
+    let mut bdf_reader = BDFReader::new(f);
     bdf_reader.read_metadata().unwrap();
     let lookup_table = bdf_reader.read_lookup_table().unwrap();
     let lookup_table = lookup_table.clone();
@@ -37,9 +36,8 @@ use std::convert::Into;
 
 fn main() {
     let f = File::create("dictionary.bdf").unwrap();
-    let buf_writer = BufWriter::new(f);
     let entry_count = 1;
-    let mut bdf_writer = BDFWriter::new(buf_writer, entry_count, false);
+    let mut bdf_writer = BDFWriter::new(f, entry_count, false);
     bdf_writer.add_lookup_entry(HashEntry::new("fakehash".into(), 3)).unwrap();
     let mut entry = DataEntry::new("foo".into());
     entry.add_hash_value("fakehash".into(), vec![0, 2, 3]);
diff --git a/src/io.rs b/src/io.rs
index cef0bf3..e851d7b 100644
--- a/src/io.rs
+++ b/src/io.rs
@@ -34,12 +34,12 @@ impl BDFWriter {
     /// bar for how many entries were read.
     /// If the `compress` parameter is true, each data chunk will be compressed
     /// using lzma with a default level of 1.
-    pub fn new(writer: BufWriter<File>, entry_count: u64, compress: bool) -> Self {
+    pub fn new(inner: File, entry_count: u64, compress: bool) -> Self {
         Self {
             metadata: MetaChunk::new(entry_count, ENTRIES_PER_CHUNK, compress),
             lookup_table: HashLookupTable::new(HashMap::new()),
             data_entries: Vec::new(),
-            writer,
+            writer: BufWriter::new(inner),
             head_written: false,
             compressed: compress,
             compression_level: 1,
@@ -134,11 +134,11 @@ impl BDFWriter {
 
 impl BDFReader {
     /// Creates a new BDFReader
-    pub fn new(reader: BufReader<File>) -> Self {
+    pub fn new(inner: File) -> Self {
         Self {
             metadata: None,
             lookup_table: None,
-            reader,
+            reader: BufReader::new(inner),
             compressed: false,
         }
     }
@@ -226,6 +226,7 @@ impl BDFReader {
             data,
             crc,
         };
+
         if gen_chunk.name == DTBL_CHUNK_NAME.to_string() && self.compressed {
             gen_chunk.decompress()?;
         }
diff --git a/src/lib.rs b/src/lib.rs
index be857f5..c8eb1b2 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -5,7 +5,7 @@ mod tests {
     use crate::chunks::{DataEntry, HashEntry};
     use crate::io::BDFReader;
     use std::fs::{remove_file, File};
-    use std::io::{BufReader, BufWriter, Error};
+    use std::io::Error;
 
     const FOO: &str = "foo";
     const BAR: &str = "bar";
@@ -106,16 +106,14 @@
 
     fn new_reader(file_name: &str) -> Result<BDFReader, Error> {
         let file = File::open(file_name)?;
-        let f = BufReader::new(file);
-        Ok(BDFReader::new(f))
+        Ok(BDFReader::new(file))
     }
 
     fn new_writer(file_name: &str, entries: u64, compress: bool) -> Result<BDFWriter, Error> {
         let file = File::create(file_name)?;
-        let f = BufWriter::new(file);
-        Ok(BDFWriter::new(f, entries, compress))
+        Ok(BDFWriter::new(file, entries, compress))
     }
 }
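
For reference, a minimal round trip against the changed constructors. The external module paths `bdflib::io::{BDFReader, BDFWriter}` and `bdflib::chunks::{DataEntry, HashEntry}` are assumptions inferred from the crate-internal `use crate::io::...` / `use crate::chunks::...` imports above; the entry and lookup calls mirror the patched README examples, and steps not shown anywhere in this patch are left as comments rather than guessed.

use bdflib::chunks::{DataEntry, HashEntry};
use bdflib::io::{BDFReader, BDFWriter};
use std::fs::File;

fn main() -> std::io::Result<()> {
    // Writing: as of 0.2.0, BDFWriter::new takes the File itself and
    // wraps it in a BufWriter internally.
    let out = File::create("dictionary.bdf")?;
    let mut writer = BDFWriter::new(out, 1, false);
    writer
        .add_lookup_entry(HashEntry::new("fakehash".into(), 3))
        .unwrap();
    let mut entry = DataEntry::new("foo".into());
    entry.add_hash_value("fakehash".into(), vec![0, 2, 3]);
    // ... write the entry and finish the file as in the full README example ...

    // Reading: BDFReader::new likewise takes a plain File and constructs
    // the BufReader itself.
    let inp = File::open("dictionary.bdf")?;
    let mut reader = BDFReader::new(inp);
    reader.read_metadata().unwrap();
    let lookup_table = reader.read_lookup_table().unwrap();
    Ok(())
}

Wrapping internally means callers can no longer supply a BufWriter/BufReader with a custom capacity, but the common case drops one import and one wrapping step on the caller's side, which is what the README and test simplifications above reflect.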