Remove C bindings to be able to publish it to crates.io

master
trivernis 5 years ago
parent 272bf6b725
commit d00a22516c

@ -4,14 +4,13 @@ version = "0.1.0"
authors = ["trivernis <trivernis@gmail.com>"] authors = ["trivernis <trivernis@gmail.com>"]
edition = "2018" edition = "2018"
license-file = "LICENSE" license-file = "LICENSE"
license = "GPL-3"
readme = "Readme.md" readme = "Readme.md"
description = "A library to parse bdf files." description = "A library to parse bdf files."
homepage = "https://github.com/parallel-programming-hwr/bdflib-rs" homepage = "https://github.com/parallel-programming-hwr/bdflib-rs"
[lib] [lib]
name = "bdf" name = "bdf"
crate-type = ["staticlib", "cdylib", "lib"] crate-type = ["lib"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@ -1,8 +1,8 @@
use byteorder::{BigEndian, ByteOrder}; use byteorder::{BigEndian, ByteOrder};
use crc::crc32; use crc::crc32;
use std::collections::HashMap; use std::collections::HashMap;
use std::convert::{TryFrom}; use std::convert::TryFrom;
use std::io::{Read}; use std::io::Read;
use std::io::{Error, ErrorKind}; use std::io::{Error, ErrorKind};
use xz2::read::{XzDecoder, XzEncoder}; use xz2::read::{XzDecoder, XzEncoder};
@ -14,7 +14,7 @@ pub const META_CHUNK_NAME: &str = "META";
pub const HTBL_CHUNK_NAME: &str = "HTBL"; pub const HTBL_CHUNK_NAME: &str = "HTBL";
pub const DTBL_CHUNK_NAME: &str = "DTBL"; pub const DTBL_CHUNK_NAME: &str = "DTBL";
#[repr(C)]
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct GenericChunk { pub struct GenericChunk {
pub length: u32, pub length: u32,
@ -23,7 +23,7 @@ pub struct GenericChunk {
pub crc: u32, pub crc: u32,
} }
#[repr(C)]
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct MetaChunk { pub struct MetaChunk {
pub chunk_count: u32, pub chunk_count: u32,
@ -32,13 +32,13 @@ pub struct MetaChunk {
pub compression_method: Option<String>, pub compression_method: Option<String>,
} }
#[repr(C)]
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct HashLookupTable { pub struct HashLookupTable {
pub entries: HashMap<u32, HashEntry>, pub entries: HashMap<u32, HashEntry>,
} }
#[repr(C)]
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct HashEntry { pub struct HashEntry {
pub(crate) id: u32, pub(crate) id: u32,
@ -46,7 +46,7 @@ pub struct HashEntry {
name: String, name: String,
} }
#[repr(C)]
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct DataEntry { pub struct DataEntry {
pub plain: String, pub plain: String,
@ -55,8 +55,7 @@ pub struct DataEntry {
impl GenericChunk { impl GenericChunk {
/// Serializes the chunk to a vector of bytes /// Serializes the chunk to a vector of bytes
#[no_mangle] pub fn serialize(&mut self) -> Vec<u8> {
pub extern fn serialize(&mut self) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new(); let mut serialized: Vec<u8> = Vec::new();
let mut length_raw = [0u8; 4]; let mut length_raw = [0u8; 4];
BigEndian::write_u32(&mut length_raw, self.length); BigEndian::write_u32(&mut length_raw, self.length);
@ -72,8 +71,7 @@ impl GenericChunk {
} }
/// Returns the data entries of the chunk /// Returns the data entries of the chunk
#[no_mangle] pub fn data_entries(
pub extern fn data_entries(
&mut self, &mut self,
lookup_table: &HashLookupTable, lookup_table: &HashLookupTable,
) -> Result<Vec<DataEntry>, Error> { ) -> Result<Vec<DataEntry>, Error> {
@ -123,8 +121,7 @@ impl GenericChunk {
} }
/// Constructs the chunk from a Vec of Data entries and a hash lookup table /// Constructs the chunk from a Vec of Data entries and a hash lookup table
#[no_mangle] pub fn from_data_entries(
pub extern fn from_data_entries(
entries: &Vec<DataEntry>, entries: &Vec<DataEntry>,
lookup_table: &HashLookupTable, lookup_table: &HashLookupTable,
) -> GenericChunk { ) -> GenericChunk {
@ -142,8 +139,8 @@ impl GenericChunk {
} }
} }
#[no_mangle] /// Compresses the data of the chunk using lzma with a level of 6
pub extern fn compress(&mut self) -> Result<(), Error> { pub fn compress(&mut self) -> Result<(), Error> {
let data = self.data.as_slice(); let data = self.data.as_slice();
let mut compressor = XzEncoder::new(data, 6); let mut compressor = XzEncoder::new(data, 6);
let mut compressed: Vec<u8> = Vec::new(); let mut compressed: Vec<u8> = Vec::new();
@ -154,8 +151,8 @@ impl GenericChunk {
Ok(()) Ok(())
} }
#[no_mangle] /// Decompresses the data of the chunk with lzma
pub extern fn decompress(&mut self) -> Result<(), Error> { pub fn decompress(&mut self) -> Result<(), Error> {
let data = self.data.as_slice(); let data = self.data.as_slice();
let mut decompressor = XzDecoder::new(data); let mut decompressor = XzDecoder::new(data);
let mut decompressed: Vec<u8> = Vec::new(); let mut decompressed: Vec<u8> = Vec::new();
@ -204,8 +201,7 @@ impl From<&HashLookupTable> for GenericChunk {
impl MetaChunk { impl MetaChunk {
/// Creates a new meta chunk /// Creates a new meta chunk
#[no_mangle] pub fn new(entry_count: u64, entries_per_chunk: u32, compress: bool) -> Self {
pub extern fn new(entry_count: u64, entries_per_chunk: u32, compress: bool) -> Self {
let compression_method = if compress { let compression_method = if compress {
Some(LZMA.to_string()) Some(LZMA.to_string())
} else { } else {
@ -222,8 +218,7 @@ impl MetaChunk {
} }
/// Serializes the chunk into bytes /// Serializes the chunk into bytes
#[no_mangle] pub fn serialize(&self) -> Vec<u8> {
pub extern fn serialize(&self) -> Vec<u8> {
let mut serialized_data: Vec<u8> = Vec::new(); let mut serialized_data: Vec<u8> = Vec::new();
let mut chunk_count_raw = [0u8; 4]; let mut chunk_count_raw = [0u8; 4];
BigEndian::write_u32(&mut chunk_count_raw, self.chunk_count); BigEndian::write_u32(&mut chunk_count_raw, self.chunk_count);
@ -248,7 +243,6 @@ impl MetaChunk {
impl TryFrom<GenericChunk> for MetaChunk { impl TryFrom<GenericChunk> for MetaChunk {
type Error = Error; type Error = Error;
#[no_mangle]
fn try_from(chunk: GenericChunk) -> Result<MetaChunk, Error> { fn try_from(chunk: GenericChunk) -> Result<MetaChunk, Error> {
if &chunk.name != META_CHUNK_NAME { if &chunk.name != META_CHUNK_NAME {
return Err(Error::new( return Err(Error::new(
@ -285,20 +279,19 @@ impl TryFrom<GenericChunk> for MetaChunk {
} }
impl HashLookupTable { impl HashLookupTable {
#[no_mangle]
pub extern fn new(entries: HashMap<u32, HashEntry>) -> Self { /// Creates a new hash lookup table
pub fn new(entries: HashMap<u32, HashEntry>) -> Self {
Self { entries } Self { entries }
} }
/// Returns an entry by the name of the hash function /// Returns an entry by the name of the hash function
#[no_mangle] pub fn get_entry(&self, name: &String) -> Option<(&u32, &HashEntry)> {
pub extern fn get_entry(&self, name: &String) -> Option<(&u32, &HashEntry)> {
self.entries.iter().find(|(_, entry)| entry.name == *name) self.entries.iter().find(|(_, entry)| entry.name == *name)
} }
/// Serializes the lookup table into a vector of bytes /// Serializes the lookup table into a vector of bytes
#[no_mangle] pub fn serialize(&self) -> Vec<u8> {
pub extern fn serialize(&self) -> Vec<u8> {
let mut serialized_full: Vec<u8> = Vec::new(); let mut serialized_full: Vec<u8> = Vec::new();
for (_, entry) in &self.entries { for (_, entry) in &self.entries {
serialized_full.append(entry.serialize().as_mut()) serialized_full.append(entry.serialize().as_mut())
@ -311,7 +304,6 @@ impl HashLookupTable {
impl TryFrom<GenericChunk> for HashLookupTable { impl TryFrom<GenericChunk> for HashLookupTable {
type Error = Error; type Error = Error;
#[no_mangle]
fn try_from(chunk: GenericChunk) -> Result<HashLookupTable, Error> { fn try_from(chunk: GenericChunk) -> Result<HashLookupTable, Error> {
if &chunk.name != HTBL_CHUNK_NAME { if &chunk.name != HTBL_CHUNK_NAME {
return Err(Error::new( return Err(Error::new(
@ -350,8 +342,9 @@ impl TryFrom<GenericChunk> for HashLookupTable {
} }
impl HashEntry { impl HashEntry {
#[no_mangle]
pub extern fn new(name: String, output_length: u32) -> Self { /// Creates a new hash entry
pub fn new(name: String, output_length: u32) -> Self {
Self { Self {
id: 0, id: 0,
name, name,
@ -360,8 +353,7 @@ impl HashEntry {
} }
/// Serializes the entry to a vector of bytes /// Serializes the entry to a vector of bytes
#[no_mangle] pub fn serialize(&self) -> Vec<u8> {
pub extern fn serialize(&self) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new(); let mut serialized: Vec<u8> = Vec::new();
let mut id_raw = [0u8; 4]; let mut id_raw = [0u8; 4];
BigEndian::write_u32(&mut id_raw, self.id); BigEndian::write_u32(&mut id_raw, self.id);
@ -380,8 +372,7 @@ impl HashEntry {
} }
impl DataEntry { impl DataEntry {
#[no_mangle] pub fn new(plain: String) -> Self {
pub extern fn new(plain: String) -> Self {
Self { Self {
hashes: HashMap::new(), hashes: HashMap::new(),
plain, plain,
@ -389,20 +380,17 @@ impl DataEntry {
} }
/// Adds a hash to the hash values /// Adds a hash to the hash values
#[no_mangle] pub fn add_hash_value(&mut self, name: String, value: Vec<u8>) {
pub extern fn add_hash_value(&mut self, name: String, value: Vec<u8>) {
self.hashes.insert(name, value); self.hashes.insert(name, value);
} }
/// Returns the hash value for a given name of a hash function /// Returns the hash value for a given name of a hash function
#[no_mangle] pub fn get_hash_value(&self, name: String) -> Option<&Vec<u8>> {
pub extern fn get_hash_value(&self, name: String) -> Option<&Vec<u8>> {
self.hashes.get(&name) self.hashes.get(&name)
} }
/// Serializes the entry to a vector of bytes /// Serializes the entry to a vector of bytes
#[no_mangle] pub fn serialize(&self, lookup_table: &HashLookupTable) -> Vec<u8> {
pub extern fn serialize(&self, lookup_table: &HashLookupTable) -> Vec<u8> {
let mut pw_plain_raw = self.plain.clone().into_bytes(); let mut pw_plain_raw = self.plain.clone().into_bytes();
let mut pw_length_raw = [0u8; 4]; let mut pw_length_raw = [0u8; 4];
BigEndian::write_u32(&mut pw_length_raw, pw_plain_raw.len() as u32); BigEndian::write_u32(&mut pw_length_raw, pw_plain_raw.len() as u32);

@ -8,7 +8,7 @@ use std::convert::TryInto;
const ENTRIES_PER_CHUNK: u32 = 100_000; const ENTRIES_PER_CHUNK: u32 = 100_000;
#[repr(C)]
pub struct BDFReader { pub struct BDFReader {
reader: BufReader<File>, reader: BufReader<File>,
pub metadata: Option<MetaChunk>, pub metadata: Option<MetaChunk>,
@ -16,7 +16,7 @@ pub struct BDFReader {
compressed: bool, compressed: bool,
} }
#[repr(C)]
pub struct BDFWriter { pub struct BDFWriter {
writer: BufWriter<File>, writer: BufWriter<File>,
metadata: MetaChunk, metadata: MetaChunk,
@ -27,8 +27,8 @@ pub struct BDFWriter {
} }
impl BDFWriter { impl BDFWriter {
#[no_mangle]
pub extern fn new(writer: BufWriter<File>, entry_count: u64, compress: bool) -> Self { pub fn new(writer: BufWriter<File>, entry_count: u64, compress: bool) -> Self {
Self { Self {
metadata: MetaChunk::new(entry_count, ENTRIES_PER_CHUNK, compress), metadata: MetaChunk::new(entry_count, ENTRIES_PER_CHUNK, compress),
lookup_table: HashLookupTable::new(HashMap::new()), lookup_table: HashLookupTable::new(HashMap::new()),
@ -41,8 +41,7 @@ impl BDFWriter {
/// Adds an entry to the hash lookup table /// Adds an entry to the hash lookup table
/// If the lookup table has already been written to the file, an error is returned /// If the lookup table has already been written to the file, an error is returned
#[no_mangle] pub fn add_lookup_entry(&mut self, mut entry: HashEntry) -> Result<u32, Error> {
pub extern fn add_lookup_entry(&mut self, mut entry: HashEntry) -> Result<u32, Error> {
if self.head_written { if self.head_written {
return Err(Error::new( return Err(Error::new(
ErrorKind::Other, ErrorKind::Other,
@ -59,7 +58,7 @@ impl BDFWriter {
/// Adds a data entry to the file. /// Adds a data entry to the file.
/// If the number of entries per chunk is reached, /// If the number of entries per chunk is reached,
/// the data will be written to the file /// the data will be written to the file
pub extern fn add_data_entry(&mut self, data_entry: DataEntry) -> Result<(), Error> { pub fn add_data_entry(&mut self, data_entry: DataEntry) -> Result<(), Error> {
self.data_entries.push(data_entry); self.data_entries.push(data_entry);
if self.data_entries.len() >= ENTRIES_PER_CHUNK as usize { if self.data_entries.len() >= ENTRIES_PER_CHUNK as usize {
self.flush()?; self.flush()?;
@ -69,8 +68,8 @@ impl BDFWriter {
} }
/// Writes the data to the file /// Writes the data to the file
#[no_mangle]
pub extern fn flush(&mut self) -> Result<(), Error> { pub fn flush(&mut self) -> Result<(), Error> {
if !self.head_written { if !self.head_written {
self.writer.write(BDF_HDR)?; self.writer.write(BDF_HDR)?;
let mut generic_meta = GenericChunk::from(&self.metadata); let mut generic_meta = GenericChunk::from(&self.metadata);
@ -91,15 +90,17 @@ impl BDFWriter {
Ok(()) Ok(())
} }
#[no_mangle] /// Flushes the writer
pub extern fn flush_writer(&mut self) -> Result<(), Error> { /// This should be called when no more data is being written
pub fn flush_writer(&mut self) -> Result<(), Error> {
self.writer.flush() self.writer.flush()
} }
} }
impl BDFReader { impl BDFReader {
#[no_mangle]
pub extern fn new(reader: BufReader<File>) -> Self { /// Creates a new BDFReader
pub fn new(reader: BufReader<File>) -> Self {
Self { Self {
metadata: None, metadata: None,
lookup_table: None, lookup_table: None,
@ -109,8 +110,7 @@ impl BDFReader {
} }
/// Verifies the header of the file and reads and stores the metadata /// Verifies the header of the file and reads and stores the metadata
#[no_mangle] pub fn read_metadata(&mut self) -> Result<&MetaChunk, Error> {
pub extern fn read_metadata(&mut self) -> Result<&MetaChunk, Error> {
if !self.validate_header() { if !self.validate_header() {
return Err(Error::new(ErrorKind::InvalidData, "invalid BDF Header")); return Err(Error::new(ErrorKind::InvalidData, "invalid BDF Header"));
} }
@ -139,8 +139,7 @@ impl BDFReader {
/// Reads the lookup table of the file. /// Reads the lookup table of the file.
/// This function should be called after the read_metadata function was called /// This function should be called after the read_metadata function was called
#[no_mangle] pub fn read_lookup_table(&mut self) -> Result<&HashLookupTable, Error> {
pub extern fn read_lookup_table(&mut self) -> Result<&HashLookupTable, Error> {
match &self.metadata { match &self.metadata {
None => self.read_metadata()?, None => self.read_metadata()?,
Some(t) => t, Some(t) => t,
@ -159,8 +158,7 @@ impl BDFReader {
} }
/// Validates the header of the file /// Validates the header of the file
#[no_mangle] fn validate_header(&mut self) -> bool {
extern fn validate_header(&mut self) -> bool {
let mut header = [0u8; 11]; let mut header = [0u8; 11];
let _ = self.reader.read(&mut header); let _ = self.reader.read(&mut header);
@ -168,8 +166,7 @@ impl BDFReader {
} }
/// Returns the next chunk if one is available. /// Returns the next chunk if one is available.
#[no_mangle] pub fn next_chunk(&mut self) -> Result<GenericChunk, Error> {
pub extern fn next_chunk(&mut self) -> Result<GenericChunk, Error> {
let mut length_raw = [0u8; 4]; let mut length_raw = [0u8; 4];
let _ = self.reader.read_exact(&mut length_raw)?; let _ = self.reader.read_exact(&mut length_raw)?;
let length = BigEndian::read_u32(&mut length_raw); let length = BigEndian::read_u32(&mut length_raw);

Loading…
Cancel
Save