Add optional deletion of corrupted chunks and region files
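
Chunks whose NBT data cannot be read are now removed from their region
file by shifting the following chunk data to the left and updating the
location table. When scanning with --delete, region files that fail to
open are deleted entirely. Chunk validation additionally checks that the
Level tag contains a Sections list, and NBT byte arrays are now kept
zlib-compressed in memory by the new ByteArrayCache.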

Signed-off-by: trivernis <trivernis@protonmail.com>
Branch: main
Author: trivernis (4 years ago)
parent 687201d164
commit eb88a39b9a
Signed by: Trivernis
GPG Key ID: DFFFCC2C7A02DB45

Cargo.lock (generated)

@@ -307,7 +307,7 @@ dependencies = [
 [[package]]
 name = "minecraft-regions-tool"
-version = "0.4.1"
+version = "0.5.0"
 dependencies = [
  "byteorder",
  "colored",

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "minecraft-regions-tool"
-version = "0.4.1"
+version = "0.5.0"
 authors = ["trivernis <trivernis@protonmail.com>"]
 edition = "2018"
 license = "GPL-3.0"

src/chunk.rs

@@ -11,6 +11,7 @@ type IOResult<T> = io::Result<T>;
 const TAG_LEVEL: &str = "Level";
 const TAG_X_POS: &str = "xPos";
 const TAG_Z_POS: &str = "zPos";
+const TAG_SECTIONS: &str = "Sections";

 #[derive(Debug)]
 pub struct Chunk {

@@ -54,8 +55,15 @@ impl Chunk {
             Err(ChunkScanError::MissingTag(TAG_X_POS))
         } else if !lvl_data.contains_key(TAG_Z_POS) {
             Err(ChunkScanError::MissingTag(TAG_Z_POS))
+        } else if !lvl_data.contains_key(TAG_SECTIONS) {
+            Err(ChunkScanError::MissingTag(TAG_SECTIONS))
         } else {
-            Ok(())
+            let sections = &lvl_data[TAG_SECTIONS];
+            if let NBTValue::List(_) = sections {
+                Ok(())
+            } else {
+                Err(ChunkScanError::InvalidFormat(TAG_SECTIONS))
+            }
         }
     } else {
         Err(ChunkScanError::InvalidFormat(TAG_LEVEL))

src/lib.rs

@@ -2,4 +2,5 @@ pub mod chunk;
 pub mod nbt;
 pub mod region_file;
 pub mod scan;
+pub mod utils;
 pub mod world_folder;

@ -1,6 +1,7 @@
use colored::*; use colored::*;
use env_logger::Env; use env_logger::Env;
use log::Level; use log::Level;
use minecraft_regions_tool::scan::ScanOptions;
use minecraft_regions_tool::world_folder::WorldFolder; use minecraft_regions_tool::world_folder::WorldFolder;
use std::path::PathBuf; use std::path::PathBuf;
use structopt::StructOpt; use structopt::StructOpt;
@ -26,14 +27,19 @@ enum SubCommand {
Count, Count,
/// Scan for errors in the region files and optionally fix them /// Scan for errors in the region files and optionally fix them
Scan(ScanOptions), Scan(ScanArgs),
} }
#[derive(StructOpt, Debug)] #[derive(StructOpt, Debug)]
#[structopt()] #[structopt()]
struct ScanOptions { struct ScanArgs {
/// Fixes errors that can be fixed without problems
#[structopt(short, long)] #[structopt(short, long)]
fix: bool, fix: bool,
/// Deletes corrupted data
#[structopt(short, long)]
delete: bool,
} }
fn main() { fn main() {
@ -47,7 +53,12 @@ fn main() {
log::info!("Fixing fixable errors."); log::info!("Fixing fixable errors.");
} }
log::info!("Scanning Region files for errors..."); log::info!("Scanning Region files for errors...");
log::info!("Scan Results:\n{}", world.scan_files(opt.fix).unwrap()) log::info!(
"Scan Results:\n{}",
world
.scan_files(ScanOptions::new().fix(opt.fix).fix_delete(opt.delete))
.unwrap()
)
} }
} }
} }

@ -1,13 +1,14 @@
use crate::utils::ByteArrayCache;
use byteorder::{BigEndian, ReadBytesExt}; use byteorder::{BigEndian, ReadBytesExt};
use std::collections::HashMap; use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::fmt::{self, Display, Formatter}; use std::fmt::{self, Display, Formatter};
use std::io::{self, Read}; use std::io::{self};
const MAX_RECURSION: u64 = 100; const MAX_RECURSION: u64 = 100;
pub struct NBTReader<R> { pub struct NBTReader<R> {
inner: Box<R>, inner: R,
recursion: u64, recursion: u64,
} }
@ -19,7 +20,7 @@ where
{ {
pub fn new(inner: R) -> Self { pub fn new(inner: R) -> Self {
Self { Self {
inner: Box::new(inner), inner,
recursion: 0, recursion: 0,
} }
} }
@ -73,13 +74,14 @@ where
} }
/// Parses an array of bytes /// Parses an array of bytes
fn parse_byte_array(&mut self) -> NBTResult<Vec<u8>> { fn parse_byte_array(&mut self) -> NBTResult<ByteArrayCache> {
let length = self.inner.read_u32::<BigEndian>()?; let length = self.inner.read_u32::<BigEndian>()?;
for _ in 0..length { let mut cache = ByteArrayCache::new();
self.inner.read_u8()?; let mut buf = vec![0u8; length as usize];
} self.inner.read_exact(&mut buf)?;
cache.write(&buf[..])?;
Ok(Vec::with_capacity(0)) Ok(cache)
} }
/// Parses a string value /// Parses a string value
@ -155,7 +157,7 @@ pub enum NBTValue {
Long(i64), Long(i64),
Float(f32), Float(f32),
Double(f64), Double(f64),
ByteArray(Vec<u8>), ByteArray(ByteArrayCache),
String(String), String(String),
List(Vec<NBTValue>), List(Vec<NBTValue>),
Compound(HashMap<String, NBTValue>), Compound(HashMap<String, NBTValue>),

@ -1,9 +1,12 @@
use crate::chunk::{Chunk, ChunkScanError}; use crate::chunk::{Chunk, ChunkScanError};
use crate::scan::ScanOptions;
use crate::scan::ScanStatistics; use crate::scan::ScanStatistics;
use byteorder::{BigEndian, ByteOrder, WriteBytesExt}; use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use std::cmp::Ordering;
use std::fs::{File, OpenOptions}; use std::fs::{File, OpenOptions};
use std::io::{BufReader, BufWriter, Read, Result, Seek, SeekFrom, Write}; use std::io::{BufReader, BufWriter, Read, Result, Seek, SeekFrom, Write};
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc;
pub const BLOCK_SIZE: usize = 4096; pub const BLOCK_SIZE: usize = 4096;
@@ -37,61 +40,32 @@ impl RegionFile {
     /// Returns the number of chunks in the file
     pub fn count_chunks(&self) -> usize {
-        return self.locations.valid_entries().len();
+        return self.locations.valid_entries_enumerate().len();
     }

     /// Scans the chunk entries for possible errors
-    pub fn scan_chunks(&mut self, fix: bool) -> Result<ScanStatistics> {
+    pub fn scan_chunks(&mut self, options: &Arc<ScanOptions>) -> Result<ScanStatistics> {
         let mut statistic = ScanStatistics::new();
-        let entries = self.locations.valid_entries();
-        let mut corrected_entries = Vec::new();
+        let mut entries = self.locations.valid_entries_enumerate();
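+        // sort the entries in ascending order of their file offset so the
+        // chunks are scanned in the order they appear in the file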
+        entries.sort_by(|(_, (a, _)), (_, (b, _))| {
+            if a > b {
+                Ordering::Greater
+            } else if a < b {
+                Ordering::Less
+            } else {
+                Ordering::Equal
+            }
+        });
         statistic.total_chunks = entries.len() as u64;

-        for (offset, sections) in &entries {
-            let reader_offset = *offset as u64 * BLOCK_SIZE as u64;
+        for (index, (offset, sections)) in entries {
+            let reader_offset = offset as u64 * BLOCK_SIZE as u64;
             self.reader.seek(SeekFrom::Start(reader_offset))?;

             match Chunk::from_buf_reader(&mut self.reader) {
-                Ok(mut chunk) => {
-                    let chunk_sections = ((chunk.length + 4) as f64 / BLOCK_SIZE as f64).ceil();
-                    if chunk.compression_type > 3 {
-                        statistic.invalid_compression_method += 1;
-                        if fix {
-                            self.writer.seek(SeekFrom::Start(reader_offset + 4))?;
-                            self.writer.write_u8(1)?;
-                        }
-                    } else {
-                        self.reader.seek(SeekFrom::Start(reader_offset + 5))?;
-                        if let Err(e) = chunk.validate_nbt_data(&mut self.reader) {
-                            match e {
-                                ChunkScanError::IO(e) => {
-                                    log::debug!(
-                                        "Compression error when reading chunk {}: {}",
-                                        offset,
-                                        e
-                                    );
-                                    statistic.corrupted_compression += 1;
-                                }
-                                ChunkScanError::NBTError(e) => {
-                                    log::debug!("Corrupted nbt data for chunk {}: {}", offset, e);
-                                    statistic.corrupted_nbt += 1;
-                                }
-                                _ => {
-                                    log::debug!("Missing nbt data for chunk {}: {}", offset, e);
-                                    statistic.missing_nbt += 1;
-                                }
-                            }
-                        }
-                    }
-
-                    if *sections != chunk_sections as u8 || chunk.length >= 1_048_576 {
-                        statistic.invalid_length += 1;
-                        corrected_entries.push((*offset, chunk_sections as u8));
-                    } else {
-                        corrected_entries.push((*offset, *sections));
-                    }
+                Ok(chunk) => {
+                    self.scan_chunk(index, offset, sections, chunk, &mut statistic, options)?;
                 }
                 Err(e) => {
                     statistic.failed_to_read += 1;
@@ -99,9 +73,8 @@ impl RegionFile {
                 }
             }
         }
-        self.locations.set_entries(corrected_entries);

-        if fix {
+        if options.fix || options.fix_delete {
             self.writer.seek(SeekFrom::Start(0))?;
             self.writer
                 .write_all(self.locations.to_bytes().as_slice())?;
@@ -110,6 +83,82 @@ impl RegionFile {
         Ok(statistic)
     }

+    /// Scans a single chunk for errors
+    fn scan_chunk(
+        &mut self,
+        index: usize,
+        offset: u32,
+        sections: u8,
+        mut chunk: Chunk,
+        statistic: &mut ScanStatistics,
+        options: &Arc<ScanOptions>,
+    ) -> Result<()> {
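+        // number of 4 KiB blocks the chunk occupies, including its 4 header bytes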
+        let chunk_sections = ((chunk.length + 4) as f64 / BLOCK_SIZE as f64).ceil();
+        let reader_offset = offset as u64 * BLOCK_SIZE as u64;
+
+        if chunk.compression_type > 3 {
+            statistic.invalid_compression_method += 1;
+            if options.fix {
+                self.writer.seek(SeekFrom::Start(reader_offset + 4))?;
+                self.writer.write_u8(1)?;
+            }
+        } else {
+            self.reader.seek(SeekFrom::Start(reader_offset + 5))?;
+            if let Err(e) = chunk.validate_nbt_data(&mut self.reader) {
+                match e {
+                    ChunkScanError::IO(e) => {
+                        log::debug!("Compression error at chunk {}: {}", offset, e);
+                        statistic.corrupted_compression += 1;
+                    }
+                    ChunkScanError::NBTError(e) => {
+                        log::debug!("Corrupted nbt data for chunk {}: {}", offset, e);
+                        statistic.corrupted_nbt += 1;
+                    }
+                    _ => {
+                        log::debug!("Missing nbt data for chunk {}: {}", offset, e);
+                        statistic.missing_nbt += 1;
+                    }
+                }
+                self.delete_chunk(index)?;
+            }
+        }
+
+        if sections != chunk_sections as u8 || chunk.length >= 1_048_576 {
+            statistic.invalid_length += 1;
+            self.locations
+                .replace_entry_unchecked(index, (offset, chunk_sections as u8));
+        }
+
+        Ok(())
+    }
+
+    /// Deletes a chunk and shifts all other chunks
+    pub fn delete_chunk(&mut self, index: usize) -> Result<()> {
+        let (offset, sections) = self.locations.get_chunk_entry_unchecked(index);
+        self.reader.seek(SeekFrom::Start(
+            (offset as usize * BLOCK_SIZE + sections as usize * BLOCK_SIZE) as u64,
+        ))?;
+        self.writer
+            .seek(SeekFrom::Start((offset as usize * BLOCK_SIZE) as u64))?;
+        log::debug!(
+            "Shifting chunk entries starting from {} by {} to the left",
+            offset,
+            sections as u32
+        );
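+        // copy everything after the deleted chunk block by block to the start
+        // of the deleted chunk, shifting the remaining data to the left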
+        loop {
+            let mut buf = [0u8; BLOCK_SIZE];
+            let read = self.reader.read(&mut buf)?;
+            self.writer.write(&buf)?;
+
+            if read < BLOCK_SIZE {
+                break;
+            }
+        }
+        self.locations.delete_chunk_entry_unchecked(index);
+        self.locations.shift_entries(offset, -(sections as i32));
+
+        Ok(())
+    }
 }

 #[derive(Debug)]
@@ -158,16 +207,55 @@ impl Locations {
     }

     /// Returns chunk entry list
-    pub fn valid_entries(&self) -> Vec<(u32, u8)> {
+    pub fn valid_entries_enumerate(&self) -> Vec<(usize, (u32, u8))> {
         self.inner
             .iter()
-            .filter_map(|e| if (*e).0 >= 2 { Some(*e) } else { None })
+            .enumerate()
+            .filter_map(|e| {
+                if (*e.1).0 >= 2 {
+                    Some((e.0, *e.1))
+                } else {
+                    None
+                }
+            })
             .collect()
     }

-    /// Replaces the entry list with a new one
-    pub fn set_entries(&mut self, entries: Vec<(u32, u8)>) {
-        self.inner = entries;
+    /// Replaces an entry with a new one. Panics if the index doesn't exist.
+    pub fn replace_entry_unchecked(&mut self, index: usize, entry: (u32, u8)) {
+        self.inner[index] = entry;
+    }
+
+    /// Returns a chunk entry for an index. Panics if it doesn't exist.
+    pub fn get_chunk_entry_unchecked(&self, index: usize) -> (u32, u8) {
+        self.inner[index]
+    }
+
+    /// Sets a chunk entry to not generated. Panics if the index doesn't exist.
+    pub fn delete_chunk_entry_unchecked(&mut self, index: usize) {
+        self.inner[index] = (0, 0);
+    }
+
+    /// Shifts all entries starting from `start_offset` by `amount`
+    pub fn shift_entries(&mut self, start_offset: u32, amount: i32) {
+        log::debug!(
+            "Shifting location entries starting from {} by {}",
+            start_offset,
+            amount
+        );
+        self.inner = self
+            .inner
+            .iter()
+            .map(|e| {
+                let mut entry = *e;
+                if e.0 >= start_offset {
+                    entry.0 = (entry.0 as i32 + amount) as u32;
+                }
+                entry
+            })
+            .collect();
     }
 }

src/scan.rs

@@ -64,3 +64,30 @@ impl Display for ScanStatistics {
         )
     }
 }
+
+#[derive(Clone, Debug)]
+pub struct ScanOptions {
+    pub fix: bool,
+    pub fix_delete: bool,
+}
+
+impl ScanOptions {
+    pub fn new() -> Self {
+        ScanOptions {
+            fix: false,
+            fix_delete: false,
+        }
+    }
+
+    pub fn fix(mut self, fix: bool) -> Self {
+        self.fix = fix;
+        self
+    }
+
+    pub fn fix_delete(mut self, fix_delete: bool) -> Self {
+        self.fix_delete = fix_delete;
+        self
+    }
+}

src/utils.rs (new file)

@@ -0,0 +1,45 @@
+use flate2::read::ZlibEncoder;
+use flate2::Compression;
+use std::io::{Read, Result};
+
+#[derive(Clone, Debug)]
+pub struct ByteArrayCache {
+    inner: Vec<u8>,
+    position: usize,
+}
+
+impl ByteArrayCache {
+    /// Creates a new byte array cache
+    pub fn new() -> Self {
+        Self {
+            inner: Vec::new(),
+            position: 0,
+        }
+    }
+
+    /// Creates a new byte array cache with capacity
+    pub fn with_capacity(capacity: usize) -> Self {
+        Self {
+            inner: Vec::with_capacity(capacity),
+            position: 0,
+        }
+    }
+
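+    /// Compresses the data from the reader with zlib and appends it to the cache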
+    pub fn write<R: Read>(&mut self, reader: R) -> Result<()> {
+        let mut encoder = ZlibEncoder::new(reader, Compression::default());
+        let mut buffer = Vec::new();
+        encoder.read_to_end(&mut buffer)?;
+        self.inner.append(&mut buffer);
+
+        Ok(())
+    }
+}
+
+impl Read for ByteArrayCache {
+    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
+        let read = (&self.inner[self.position..]).read(buf)?;
+        self.position += read;
+        Ok(read)
+    }
+}

@ -1,4 +1,5 @@
use crate::region_file::RegionFile; use crate::region_file::RegionFile;
use crate::scan::ScanOptions;
use crate::scan::ScanStatistics; use crate::scan::ScanStatistics;
use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle}; use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle};
use log::LevelFilter; use log::LevelFilter;
@ -7,6 +8,7 @@ use std::fs;
use std::io; use std::io;
use std::ops::Add; use std::ops::Add;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc;
pub struct WorldFolder { pub struct WorldFolder {
path: PathBuf, path: PathBuf,
@ -29,9 +31,11 @@ impl WorldFolder {
Ok(count) Ok(count)
} }
pub fn scan_files(&self, fix: bool) -> io::Result<ScanStatistics> { /// Scans all region files for potential errors
pub fn scan_files(&self, options: ScanOptions) -> io::Result<ScanStatistics> {
let paths = self.region_file_paths(); let paths = self.region_file_paths();
let bar = ProgressBar::new(paths.len() as u64); let bar = ProgressBar::new(paths.len() as u64);
let options = Arc::new(options);
bar.set_style( bar.set_style(
ProgressStyle::default_bar().template("\r[{eta_precise}] {wide_bar} {pos}/{len} "), ProgressStyle::default_bar().template("\r[{eta_precise}] {wide_bar} {pos}/{len} "),
); );
@ -46,13 +50,18 @@ impl WorldFolder {
log::debug!("Opening and scanning region file {:?}", path); log::debug!("Opening and scanning region file {:?}", path);
let mut region_file = RegionFile::new(path) let mut region_file = RegionFile::new(path)
.map_err(|e| { .map_err(|e| {
log::error!("Failed to open region file {:?}: {}", path, e); log::error!("Failed to open region file {:?}: {}\n", path, e);
if options.fix_delete {
if let Err(e) = fs::remove_file(path) {
return e;
}
}
e e
}) })
.ok()?; .ok()?;
let result = region_file.scan_chunks(fix).ok()?; let result = region_file.scan_chunks(&options).ok()?;
bar.inc(1); bar.inc(1);
log::debug!("Statistics for {:?}:\n{}", path, result); log::debug!("Statistics for {:?}:\n{}", path, result);
