Remove C bindings to be able to publish it to crates.io

master
trivernis 4 years ago
parent 272bf6b725
commit d00a22516c

@ -4,14 +4,13 @@ version = "0.1.0"
authors = ["trivernis <trivernis@gmail.com>"]
edition = "2018"
license-file = "LICENSE"
license = "GPL-3.0"
readme = "Readme.md"
description = "A library to parse bdf files."
homepage = "https://github.com/parallel-programming-hwr/bdflib-rs"
[lib]
name = "bdf"
crate-type = ["staticlib", "cdylib", "lib"]
crate-type = ["lib"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@ -1,8 +1,8 @@
use byteorder::{BigEndian, ByteOrder};
use crc::crc32;
use std::collections::HashMap;
use std::convert::{TryFrom};
use std::io::{Read};
use std::convert::TryFrom;
use std::io::Read;
use std::io::{Error, ErrorKind};
use xz2::read::{XzDecoder, XzEncoder};
@ -14,7 +14,7 @@ pub const META_CHUNK_NAME: &str = "META";
pub const HTBL_CHUNK_NAME: &str = "HTBL";
pub const DTBL_CHUNK_NAME: &str = "DTBL";
#[repr(C)]
#[derive(Debug, Clone)]
pub struct GenericChunk {
pub length: u32,
@ -23,7 +23,7 @@ pub struct GenericChunk {
pub crc: u32,
}
#[repr(C)]
#[derive(Debug, Clone)]
pub struct MetaChunk {
pub chunk_count: u32,
@ -32,13 +32,13 @@ pub struct MetaChunk {
pub compression_method: Option<String>,
}
#[repr(C)]
#[derive(Debug, Clone)]
/// Lookup table of the hash functions used in a bdf file.
/// Maps a numeric id to its [`HashEntry`] (presumably the key equals
/// `HashEntry::id` — verify against the table-reading code).
pub struct HashLookupTable {
    pub entries: HashMap<u32, HashEntry>,
}
#[repr(C)]
#[derive(Debug, Clone)]
pub struct HashEntry {
pub(crate) id: u32,
@ -46,7 +46,7 @@ pub struct HashEntry {
name: String,
}
#[repr(C)]
#[derive(Debug, Clone)]
pub struct DataEntry {
pub plain: String,
@ -55,8 +55,7 @@ pub struct DataEntry {
impl GenericChunk {
/// Serializes the chunk to a vector of bytes
#[no_mangle]
pub extern fn serialize(&mut self) -> Vec<u8> {
pub fn serialize(&mut self) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();
let mut length_raw = [0u8; 4];
BigEndian::write_u32(&mut length_raw, self.length);
@ -72,8 +71,7 @@ impl GenericChunk {
}
/// Returns the data entries of the chunk
#[no_mangle]
pub extern fn data_entries(
pub fn data_entries(
&mut self,
lookup_table: &HashLookupTable,
) -> Result<Vec<DataEntry>, Error> {
@ -123,8 +121,7 @@ impl GenericChunk {
}
/// Constructs the chunk from a Vec of Data entries and a hash lookup table
#[no_mangle]
pub extern fn from_data_entries(
pub fn from_data_entries(
entries: &Vec<DataEntry>,
lookup_table: &HashLookupTable,
) -> GenericChunk {
@ -142,8 +139,8 @@ impl GenericChunk {
}
}
#[no_mangle]
pub extern fn compress(&mut self) -> Result<(), Error> {
/// Compresses the data of the chunk using lzma with a level of 6
pub fn compress(&mut self) -> Result<(), Error> {
let data = self.data.as_slice();
let mut compressor = XzEncoder::new(data, 6);
let mut compressed: Vec<u8> = Vec::new();
@ -154,8 +151,8 @@ impl GenericChunk {
Ok(())
}
#[no_mangle]
pub extern fn decompress(&mut self) -> Result<(), Error> {
/// Decompresses the data of the chunk with lzma
pub fn decompress(&mut self) -> Result<(), Error> {
let data = self.data.as_slice();
let mut decompressor = XzDecoder::new(data);
let mut decompressed: Vec<u8> = Vec::new();
@ -204,8 +201,7 @@ impl From<&HashLookupTable> for GenericChunk {
impl MetaChunk {
/// Creates a new meta chunk
#[no_mangle]
pub extern fn new(entry_count: u64, entries_per_chunk: u32, compress: bool) -> Self {
pub fn new(entry_count: u64, entries_per_chunk: u32, compress: bool) -> Self {
let compression_method = if compress {
Some(LZMA.to_string())
} else {
@ -222,8 +218,7 @@ impl MetaChunk {
}
/// Serializes the chunk into bytes
#[no_mangle]
pub extern fn serialize(&self) -> Vec<u8> {
pub fn serialize(&self) -> Vec<u8> {
let mut serialized_data: Vec<u8> = Vec::new();
let mut chunk_count_raw = [0u8; 4];
BigEndian::write_u32(&mut chunk_count_raw, self.chunk_count);
@ -248,7 +243,6 @@ impl MetaChunk {
impl TryFrom<GenericChunk> for MetaChunk {
type Error = Error;
#[no_mangle]
fn try_from(chunk: GenericChunk) -> Result<MetaChunk, Error> {
if &chunk.name != META_CHUNK_NAME {
return Err(Error::new(
@ -285,20 +279,19 @@ impl TryFrom<GenericChunk> for MetaChunk {
}
impl HashLookupTable {
#[no_mangle]
pub extern fn new(entries: HashMap<u32, HashEntry>) -> Self {
/// Creates a new hash lookup table
pub fn new(entries: HashMap<u32, HashEntry>) -> Self {
Self { entries }
}
/// Returns an entry by the name of the hash function
#[no_mangle]
pub extern fn get_entry(&self, name: &String) -> Option<(&u32, &HashEntry)> {
/// Returns the `(id, entry)` pair of the hash function with the given
/// name, or `None` if no entry carries that name.
///
/// Takes `&str` instead of `&String` (clippy `ptr_arg`); existing
/// `&String` call sites keep working through deref coercion.
pub fn get_entry(&self, name: &str) -> Option<(&u32, &HashEntry)> {
    self.entries.iter().find(|(_, entry)| entry.name == name)
}
/// Serializes the lookup table into a vector of bytes
#[no_mangle]
pub extern fn serialize(&self) -> Vec<u8> {
pub fn serialize(&self) -> Vec<u8> {
let mut serialized_full: Vec<u8> = Vec::new();
for (_, entry) in &self.entries {
serialized_full.append(entry.serialize().as_mut())
@ -311,7 +304,6 @@ impl HashLookupTable {
impl TryFrom<GenericChunk> for HashLookupTable {
type Error = Error;
#[no_mangle]
fn try_from(chunk: GenericChunk) -> Result<HashLookupTable, Error> {
if &chunk.name != HTBL_CHUNK_NAME {
return Err(Error::new(
@ -350,8 +342,9 @@ impl TryFrom<GenericChunk> for HashLookupTable {
}
impl HashEntry {
#[no_mangle]
pub extern fn new(name: String, output_length: u32) -> Self {
/// Creates a new hash entry
pub fn new(name: String, output_length: u32) -> Self {
Self {
id: 0,
name,
@ -360,8 +353,7 @@ impl HashEntry {
}
/// Serializes the entry to a vector of bytes
#[no_mangle]
pub extern fn serialize(&self) -> Vec<u8> {
pub fn serialize(&self) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();
let mut id_raw = [0u8; 4];
BigEndian::write_u32(&mut id_raw, self.id);
@ -380,8 +372,7 @@ impl HashEntry {
}
impl DataEntry {
#[no_mangle]
pub extern fn new(plain: String) -> Self {
pub fn new(plain: String) -> Self {
Self {
hashes: HashMap::new(),
plain,
@ -389,20 +380,17 @@ impl DataEntry {
}
/// Adds a hash to the hash values
#[no_mangle]
pub extern fn add_hash_value(&mut self, name: String, value: Vec<u8>) {
/// Stores `value` as the hash of this entry for the hash function
/// called `name`, overwriting any value previously stored for that name.
pub fn add_hash_value(&mut self, name: String, value: Vec<u8>) {
    self.hashes.insert(name, value);
}
/// Returns the hash value for a given name of a hash function
#[no_mangle]
pub extern fn get_hash_value(&self, name: String) -> Option<&Vec<u8>> {
/// Returns the stored hash value for the hash function called `name`,
/// or `None` if this entry has no value for it.
pub fn get_hash_value(&self, name: String) -> Option<&Vec<u8>> {
    self.hashes.get(&name)
}
/// Serializes the entry to a vector of bytes
#[no_mangle]
pub extern fn serialize(&self, lookup_table: &HashLookupTable) -> Vec<u8> {
pub fn serialize(&self, lookup_table: &HashLookupTable) -> Vec<u8> {
let mut pw_plain_raw = self.plain.clone().into_bytes();
let mut pw_length_raw = [0u8; 4];
BigEndian::write_u32(&mut pw_length_raw, pw_plain_raw.len() as u32);

@ -8,7 +8,7 @@ use std::convert::TryInto;
const ENTRIES_PER_CHUNK: u32 = 100_000;
#[repr(C)]
pub struct BDFReader {
reader: BufReader<File>,
pub metadata: Option<MetaChunk>,
@ -16,7 +16,7 @@ pub struct BDFReader {
compressed: bool,
}
#[repr(C)]
pub struct BDFWriter {
writer: BufWriter<File>,
metadata: MetaChunk,
@ -27,8 +27,8 @@ pub struct BDFWriter {
}
impl BDFWriter {
#[no_mangle]
pub extern fn new(writer: BufWriter<File>, entry_count: u64, compress: bool) -> Self {
pub fn new(writer: BufWriter<File>, entry_count: u64, compress: bool) -> Self {
Self {
metadata: MetaChunk::new(entry_count, ENTRIES_PER_CHUNK, compress),
lookup_table: HashLookupTable::new(HashMap::new()),
@ -41,8 +41,7 @@ impl BDFWriter {
/// Adds an entry to the hash lookup table
/// If the lookup table has already been written to the file, an error is returned
#[no_mangle]
pub extern fn add_lookup_entry(&mut self, mut entry: HashEntry) -> Result<u32, Error> {
pub fn add_lookup_entry(&mut self, mut entry: HashEntry) -> Result<u32, Error> {
if self.head_written {
return Err(Error::new(
ErrorKind::Other,
@ -59,7 +58,7 @@ impl BDFWriter {
/// Adds a data entry to the file.
/// If the number of entries per chunk is reached,
/// the data will be written to the file
pub extern fn add_data_entry(&mut self, data_entry: DataEntry) -> Result<(), Error> {
pub fn add_data_entry(&mut self, data_entry: DataEntry) -> Result<(), Error> {
self.data_entries.push(data_entry);
if self.data_entries.len() >= ENTRIES_PER_CHUNK as usize {
self.flush()?;
@ -69,8 +68,8 @@ impl BDFWriter {
}
/// Writes the data to the file
#[no_mangle]
pub extern fn flush(&mut self) -> Result<(), Error> {
pub fn flush(&mut self) -> Result<(), Error> {
if !self.head_written {
self.writer.write(BDF_HDR)?;
let mut generic_meta = GenericChunk::from(&self.metadata);
@ -91,15 +90,17 @@ impl BDFWriter {
Ok(())
}
#[no_mangle]
pub extern fn flush_writer(&mut self) -> Result<(), Error> {
/// Flushes the writer
/// This should be called when no more data is being written
pub fn flush_writer(&mut self) -> Result<(), Error> {
self.writer.flush()
}
}
impl BDFReader {
#[no_mangle]
pub extern fn new(reader: BufReader<File>) -> Self {
/// Creates a new BDFReader
pub fn new(reader: BufReader<File>) -> Self {
Self {
metadata: None,
lookup_table: None,
@ -109,8 +110,7 @@ impl BDFReader {
}
/// Verifies the header of the file and reads and stores the metadata
#[no_mangle]
pub extern fn read_metadata(&mut self) -> Result<&MetaChunk, Error> {
pub fn read_metadata(&mut self) -> Result<&MetaChunk, Error> {
if !self.validate_header() {
return Err(Error::new(ErrorKind::InvalidData, "invalid BDF Header"));
}
@ -139,8 +139,7 @@ impl BDFReader {
/// Reads the lookup table of the file.
/// This function should be called after the read_metadata function was called
#[no_mangle]
pub extern fn read_lookup_table(&mut self) -> Result<&HashLookupTable, Error> {
pub fn read_lookup_table(&mut self) -> Result<&HashLookupTable, Error> {
match &self.metadata {
None => self.read_metadata()?,
Some(t) => t,
@ -159,8 +158,7 @@ impl BDFReader {
}
/// Validates the header of the file
#[no_mangle]
extern fn validate_header(&mut self) -> bool {
fn validate_header(&mut self) -> bool {
let mut header = [0u8; 11];
let _ = self.reader.read(&mut header);
@ -168,8 +166,7 @@ impl BDFReader {
}
/// Returns the next chunk if one is available.
#[no_mangle]
pub extern fn next_chunk(&mut self) -> Result<GenericChunk, Error> {
pub fn next_chunk(&mut self) -> Result<GenericChunk, Error> {
let mut length_raw = [0u8; 4];
let _ = self.reader.read_exact(&mut length_raw)?;
let length = BigEndian::read_u32(&mut length_raw);

Loading…
Cancel
Save