Add more NBT tags to be checked

Signed-off-by: trivernis <trivernis@protonmail.com>
main
trivernis 4 years ago
parent 3f6063b24a
commit f72ceb0ed2
Signed by: Trivernis
GPG Key ID: DFFFCC2C7A02DB45

2
Cargo.lock generated

@ -307,7 +307,7 @@ dependencies = [
[[package]] [[package]]
name = "minecraft-regions-tool" name = "minecraft-regions-tool"
version = "0.5.2" version = "0.5.3"
dependencies = [ dependencies = [
"byteorder", "byteorder",
"colored", "colored",

@ -1,6 +1,6 @@
[package] [package]
name = "minecraft-regions-tool" name = "minecraft-regions-tool"
version = "0.5.2" version = "0.5.3"
authors = ["trivernis <trivernis@protonmail.com>"] authors = ["trivernis <trivernis@protonmail.com>"]
edition = "2018" edition = "2018"
license = "GPL-3.0" license = "GPL-3.0"

@ -1,18 +1,14 @@
use crate::nbt::{NBTError, NBTReader, NBTValue}; use crate::nbt::{NBTError, NBTReader, NBTValue};
use byteorder::{BigEndian, ReadBytesExt}; use byteorder::{BigEndian, ReadBytesExt};
use crate::constants::tags::{LEVEL_TAGS, TAG_LEVEL};
use crate::region_file::BLOCK_SIZE; use crate::region_file::BLOCK_SIZE;
use flate2::read::ZlibDecoder; use flate2::read::{GzDecoder, ZlibDecoder};
use std::fmt::{Display, Formatter}; use std::fmt::{Display, Formatter};
use std::io::{self, BufReader, Error}; use std::io::{self, BufReader, Error};
type IOResult<T> = io::Result<T>; type IOResult<T> = io::Result<T>;
const TAG_LEVEL: &str = "Level";
const TAG_X_POS: &str = "xPos";
const TAG_Z_POS: &str = "zPos";
const TAG_SECTIONS: &str = "Sections";
#[derive(Debug)] #[derive(Debug)]
pub struct Chunk { pub struct Chunk {
pub length: u32, pub length: u32,
@ -22,7 +18,7 @@ pub struct Chunk {
impl Chunk { impl Chunk {
pub fn from_buf_reader<R: io::Read + io::Seek>(reader: &mut R) -> IOResult<Self> { pub fn from_buf_reader<R: io::Read + io::Seek>(reader: &mut R) -> IOResult<Self> {
let length = reader.read_u32::<BigEndian>()?; let length = reader.read_u32::<BigEndian>()?;
if length > 128 * BLOCK_SIZE as u32 { if length > 128 * BLOCK_SIZE as u32 || length == 0 {
return Err(io::Error::from(io::ErrorKind::InvalidData)); return Err(io::Error::from(io::ErrorKind::InvalidData));
} }
let compression_type = reader.read_u8()?; let compression_type = reader.read_u8()?;
@ -37,7 +33,10 @@ impl Chunk {
&mut self, &mut self,
reader: &mut R, reader: &mut R,
) -> Result<(), ChunkScanError> { ) -> Result<(), ChunkScanError> {
let data = if self.compression_type == 2 { let data = if self.compression_type == 1 {
let mut nbt_reader = NBTReader::new(BufReader::new(GzDecoder::new(reader)));
nbt_reader.parse()?
} else if self.compression_type == 2 {
let mut nbt_reader = NBTReader::new(BufReader::new(ZlibDecoder::new(reader))); let mut nbt_reader = NBTReader::new(BufReader::new(ZlibDecoder::new(reader)));
nbt_reader.parse()? nbt_reader.parse()?
} else { } else {
@ -51,20 +50,12 @@ impl Chunk {
let lvl_data = &data[TAG_LEVEL]; let lvl_data = &data[TAG_LEVEL];
if let NBTValue::Compound(lvl_data) = lvl_data { if let NBTValue::Compound(lvl_data) = lvl_data {
if !lvl_data.contains_key(TAG_X_POS) { for tag in LEVEL_TAGS {
Err(ChunkScanError::MissingTag(TAG_X_POS)) if !lvl_data.contains_key(*tag) {
} else if !lvl_data.contains_key(TAG_Z_POS) { return Err(ChunkScanError::MissingTag(tag));
Err(ChunkScanError::MissingTag(TAG_Z_POS))
} else if !lvl_data.contains_key(TAG_SECTIONS) {
Err(ChunkScanError::MissingTag(TAG_SECTIONS))
} else {
let sections = &lvl_data[TAG_SECTIONS];
if let NBTValue::List(_) = sections {
Ok(())
} else {
Err(ChunkScanError::InvalidFormat(TAG_SECTIONS))
} }
} }
Ok(())
} else { } else {
Err(ChunkScanError::InvalidFormat(TAG_LEVEL)) Err(ChunkScanError::InvalidFormat(TAG_LEVEL))
} }

@ -0,0 +1,37 @@
/// NBT tag name constants for Minecraft chunk data.
///
/// These are the string keys found inside a chunk's NBT compound; a subset
/// of them (see [`tags::LEVEL_TAGS`]) is treated as required when scanning
/// a chunk for validity.
pub mod tags {
    /// Root compound tag that wraps all per-chunk data.
    pub const TAG_LEVEL: &str = "Level";
    pub const TAG_X_POS: &str = "xPos";
    pub const TAG_Z_POS: &str = "zPos";
    pub const TAG_SECTIONS: &str = "Sections";
    pub const TAG_LAST_UPDATE: &str = "LastUpdate";
    pub const TAG_INHABITED_TIME: &str = "InhabitedTime";
    pub const TAG_HEIGHTMAPS: &str = "Heightmaps";
    // Tags below may be absent depending on world version / chunk state;
    // they are intentionally not listed in LEVEL_TAGS.
    pub const TAG_CARVING_MASKS: &str = "CarvingMasks";
    pub const TAG_ENTITIES: &str = "Entities";
    pub const TAG_TILE_ENTITIES: &str = "TileEntities";
    pub const TAG_TILE_TICKS: &str = "TileTicks";
    pub const TAG_LIQUID_TICKS: &str = "LiquidTicks";
    pub const TAG_LIGHTS: &str = "Lights";
    pub const TAG_LIQUIDS_TO_BE_TICKED: &str = "LiquidsToBeTicked";
    pub const TAG_TO_BE_TICKED: &str = "ToBeTicked";
    pub const TAG_POST_PROCESSING: &str = "PostProcessing";
    pub const TAG_STATUS: &str = "Status";
    pub const TAG_STRUCTURES: &str = "Structures";

    /// Tags required to be present inside the `Level` compound for a chunk
    /// to be considered well-formed by the scanner.
    // NOTE: `'static` is elided — in a `const` item the reference lifetime
    // is `'static` by default (clippy: redundant_static_lifetimes).
    pub const LEVEL_TAGS: &[&str] = &[
        TAG_X_POS,
        TAG_Z_POS,
        TAG_SECTIONS,
        TAG_LAST_UPDATE,
        TAG_INHABITED_TIME,
        TAG_HEIGHTMAPS,
        TAG_ENTITIES,
        TAG_TILE_ENTITIES,
        TAG_LIQUID_TICKS,
        TAG_POST_PROCESSING,
        TAG_STATUS,
        TAG_STRUCTURES,
    ];
}

@ -1,4 +1,5 @@
pub mod chunk; pub mod chunk;
pub mod constants;
pub mod nbt; pub mod nbt;
pub mod region_file; pub mod region_file;
pub mod scan; pub mod scan;

@ -66,6 +66,13 @@ impl RegionFile {
let reader_offset = offset as u64 * BLOCK_SIZE as u64; let reader_offset = offset as u64 * BLOCK_SIZE as u64;
self.reader.seek(SeekFrom::Start(reader_offset))?; self.reader.seek(SeekFrom::Start(reader_offset))?;
let offset_diff = offset - (previous_offset + previous_sections);
if offset_diff > 0 {
statistic.unused_space += (BLOCK_SIZE * offset_diff as usize) as u64;
if options.fix {
shift_operations.push((offset as usize, -(offset_diff as isize)));
}
}
match Chunk::from_buf_reader(&mut self.reader) { match Chunk::from_buf_reader(&mut self.reader) {
Ok(chunk) => { Ok(chunk) => {
let exists = let exists =
@ -77,19 +84,15 @@ impl RegionFile {
} }
Err(e) => { Err(e) => {
statistic.failed_to_read += 1; statistic.failed_to_read += 1;
log::error!("Failed to read chunk at {}: {}", offset, e);
if options.fix_delete { if options.fix_delete {
self.delete_chunk(index)?; self.delete_chunk(index)?;
shift_operations
.push((offset as usize + sections as usize, -(sections as isize)));
} }
log::error!("Failed to read chunk at {}: {}", offset, e);
}
}
let offset_diff = offset - (previous_offset + previous_sections);
if offset_diff > 0 {
statistic.unused_space += (BLOCK_SIZE * offset_diff as usize) as u64;
if options.fix {
shift_operations.push((offset as usize, -(offset_diff as isize)));
} }
} }
previous_offset = offset; previous_offset = offset;
previous_sections = sections as u32; previous_sections = sections as u32;
} }
@ -175,14 +178,10 @@ impl RegionFile {
/// Deletes a chunk and shifts all other chunks /// Deletes a chunk and shifts all other chunks
pub fn delete_chunk(&mut self, index: usize) -> Result<()> { pub fn delete_chunk(&mut self, index: usize) -> Result<()> {
let (offset, sections) = self.locations.get_chunk_entry_unchecked(index);
log::debug!( log::debug!(
"Shifting chunk entries starting from {} by {} to the left", "Deleting chunk at {}",
offset, self.locations.get_chunk_entry_unchecked(index).0
sections as u32
); );
self.locations.delete_chunk_entry_unchecked(index); self.locations.delete_chunk_entry_unchecked(index);
Ok(()) Ok(())
} }
@ -218,6 +217,11 @@ impl RegionFile {
Ok(()) Ok(())
} }
/// Closes the region file by flushing the writer
pub fn close(&mut self) -> Result<()> {
self.writer.flush()
}
} }
#[derive(Debug)] #[derive(Debug)]

Loading…
Cancel
Save