Fix deadlock when serialising in parallel

Branch: master
Author: trivernis
Parent: 4494234a59
Commit: a6d2135e5e

@@ -1,6 +1,5 @@
 use byteorder::{BigEndian, ByteOrder};
 use crc::crc32;
-use rayon::prelude::*;
 use std::collections::HashMap;
 use std::convert::{TryFrom, TryInto};
 use std::fs::File;
@@ -300,12 +299,8 @@ impl GenericChunk {
         lookup_table: &HashLookupTable,
     ) -> GenericChunk {
         let mut serialized_data: Vec<u8> = Vec::new();
-        let serialized_entries: Vec<Vec<u8>> = entries
-            .par_iter()
-            .map(|entry: &DataEntry| entry.serialize(lookup_table.clone()))
-            .collect();
-        serialized_entries.iter().for_each(|entry| {
-            serialized_data.append(&mut entry.clone());
+        entries.iter().for_each(|entry| {
+            serialized_data.append(&mut entry.serialize(&lookup_table));
         });
         let crc_sum = crc32::checksum_ieee(serialized_data.as_slice());
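
A plausible reading of the bug, inferred from the diff rather than stated in the commit: `create_dictionary` pushes `DataEntry` values through a bounded `sync_channel` while already using rayon (`par_lines`), and the removed `par_iter` made chunk serialisation compete for the same global rayon pool. Once every worker thread blocks on the full channel, no thread is left to drain it. Below is a minimal sketch of the safe shape of that pattern, with the hazard noted in comments; everything except the std and rayon APIs is hypothetical:

    use rayon::prelude::*;
    use std::sync::mpsc::sync_channel;
    use std::thread;

    fn main() {
        // Small bound so backpressure actually kicks in.
        let (tx, rx) = sync_channel::<u64>(4);

        // Consumer on a dedicated OS thread: draining the channel never
        // depends on a rayon worker becoming free.
        let consumer = thread::spawn(move || rx.iter().sum::<u64>());

        // Producer on the rayon pool. If the consumer side also needed the
        // pool (as a nested par_iter during serialisation would), workers
        // blocked in send() could starve it -- the deadlock this commit
        // avoids by serialising entries sequentially.
        (0..1_000u64).into_par_iter().for_each_with(tx, |tx, v| {
            tx.send(v).unwrap(); // blocks while the queue is full
        });

        println!("sum = {}", consumer.join().unwrap());
    }
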
@@ -562,7 +557,7 @@ impl DataEntry {
     }
 
     /// Serializes the entry to a vector of bytes
-    pub fn serialize(&self, lookup_table: HashLookupTable) -> Vec<u8> {
+    pub fn serialize(&self, lookup_table: &HashLookupTable) -> Vec<u8> {
         let mut pw_plain_raw = self.plain.clone().into_bytes();
         let mut pw_length_raw = [0u8; 4];
         BigEndian::write_u32(&mut pw_length_raw, pw_plain_raw.len() as u32);
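
Note on the signature change above: taking `&HashLookupTable` lets every entry share one table, where the by-value signature forced callers holding a reference to `clone()` the whole table per entry (as the old `entry.serialize(lookup_table.clone())` did). A rough illustration with stand-in types that do not mirror the real struct layout:

    // Stand-in types, for illustration only.
    #[derive(Clone)]
    struct Table { width: u32 }
    struct Entry { plain: String }

    impl Entry {
        // Before: serialize(&self, table: Table) consumed its argument, so
        // each call from a &Table cost a full clone. Borrowing makes the
        // call copy-free regardless of how many entries are serialised.
        fn serialize(&self, table: &Table) -> Vec<u8> {
            let mut out = Vec::new();
            out.extend_from_slice(&table.width.to_be_bytes());
            out.extend_from_slice(self.plain.as_bytes());
            out
        }
    }
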

@@ -162,6 +162,7 @@ fn decrypt(_opts: &Opts, args: &Decrypt) {
 const SHA256: &str = "sha256";
 /// Creates a dictionary from an input file and writes it to the output file
 fn create_dictionary(_opts: &Opts, args: &CreateDictionary) {
+    let sp = spinner("Reading input file...");
     let input: String = (*args.input).parse().unwrap();
     // TODO: Some form of removing duplicates (without itertools)
     let fout = File::create(args.output.clone()).unwrap();
@@ -171,9 +172,10 @@ fn create_dictionary(_opts: &Opts, args: &CreateDictionary) {
     let content = fs::read_to_string(input).expect("Failed to read content");
     let lines = content.par_lines();
     let entry_count = lines.clone().count() as u64;
+    sp.stop();
     let mut pb = ProgressBar::new(entry_count);
     pb.set_max_refresh_rate(Some(Duration::from_millis(200)));
-    let (rx, tx) = sync_channel::<DataEntry>(100_000_000);
+    let (rx, tx) = sync_channel::<DataEntry>(100_000);
     let mut bdf_file = BDFWriter::new(writer, entry_count, args.compress);
     bdf_file
         .add_lookup_entry(HashEntry::new(SHA256.to_string(), 32))
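
On the channel bound: `sync_channel(n)` makes `send()` block once `n` messages are queued, so the old bound of 100_000_000 almost never blocked and let the producer run arbitrarily far ahead of the writer, while 100_000 still decouples the two sides but applies real backpressure. (Incidentally, `sync_channel` returns `(SyncSender, Receiver)`, so the `(rx, tx)` destructuring in the diff names the sender `rx`; the sketch below uses the conventional order.) A self-contained demonstration of the blocking behaviour, with a plain integer standing in for `DataEntry`:

    use std::sync::mpsc::sync_channel;
    use std::thread;

    fn main() {
        // Capacity 2: the third send() blocks until the receiver takes an
        // item -- the backpressure that the smaller bound buys.
        let (tx, rx) = sync_channel::<u32>(2);
        let producer = thread::spawn(move || {
            for i in 0..10 {
                tx.send(i).unwrap(); // blocks while the queue is full
            }
        });
        for v in rx {
            println!("got {}", v);
        }
        producer.join().unwrap();
    }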
