Fix shifting of chunks to occupy unused space

Signed-off-by: trivernis <trivernis@protonmail.com>
main
trivernis 4 years ago
parent eb88a39b9a
commit 4842b9f346
Signed by: Trivernis
GPG Key ID: DFFFCC2C7A02DB45

Cargo.lock (generated)

@@ -307,7 +307,7 @@ dependencies = [
 [[package]]
 name = "minecraft-regions-tool"
-version = "0.5.0"
+version = "0.5.1"
 dependencies = [
  "byteorder",
  "colored",

@@ -1,6 +1,6 @@
 [package]
 name = "minecraft-regions-tool"
-version = "0.5.0"
+version = "0.5.1"
 authors = ["trivernis <trivernis@protonmail.com>"]
 edition = "2018"
 license = "GPL-3.0"

@@ -46,6 +46,7 @@ impl RegionFile {
     /// Scans the chunk entries for possible errors
     pub fn scan_chunks(&mut self, options: &Arc<ScanOptions>) -> Result<ScanStatistics> {
         let mut statistic = ScanStatistics::new();
+        let mut shift_operations: Vec<(usize, isize)> = Vec::new();
         let mut entries = self.locations.valid_entries_enumerate();
         entries.sort_by(|(_, (a, _)), (_, (b, _))| {
@@ -58,6 +59,8 @@ impl RegionFile {
             }
         });
         statistic.total_chunks = entries.len() as u64;
+        let mut previous_offset = 2;
+        let mut previous_sections = 0;

         for (index, (offset, sections)) in entries {
             let reader_offset = offset as u64 * BLOCK_SIZE as u64;
@@ -65,16 +68,41 @@ impl RegionFile {
             match Chunk::from_buf_reader(&mut self.reader) {
                 Ok(chunk) => {
-                    self.scan_chunk(index, offset, sections, chunk, &mut statistic, options)?;
+                    let exists =
+                        self.scan_chunk(index, offset, sections, chunk, &mut statistic, options)?;
+                    if !exists && options.fix {
+                        shift_operations
+                            .push((offset as usize + sections as usize, -(sections as isize)))
+                    }
                 }
                 Err(e) => {
                     statistic.failed_to_read += 1;
+                    if options.fix_delete {
+                        self.delete_chunk(index)?;
+                    }
                     log::error!("Failed to read chunk at {}: {}", offset, e);
                 }
             }
+            let offset_diff = offset - (previous_offset + previous_sections);
+            if offset_diff > 0 {
+                statistic.unused_space += (BLOCK_SIZE * offset_diff as usize) as u64;
+                if options.fix {
+                    shift_operations.push((offset as usize, -(offset_diff as isize)));
+                }
+            }
+            previous_offset = offset;
+            previous_sections = sections as u32;
         }

         if options.fix || options.fix_delete {
+            let mut shifted = 0isize;
+            for (offset, amount) in shift_operations {
+                let offset = (offset as isize + shifted) as usize;
+                self.shift_right(offset, amount)?;
+                self.locations.shift_entries(offset as u32, amount as i32);
+                shifted += amount;
+            }
+            statistic.shrunk_size = self.locations.estimated_size();
             self.writer.seek(SeekFrom::Start(0))?;
             self.writer
                 .write_all(self.locations.to_bytes().as_slice())?;
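The hunk above queues every required move as a (block offset, signed amount) pair while scanning and only applies them once the scan is done. Because each applied shift moves all data behind it, the offsets recorded earlier have to be corrected by the running `shifted` total before use. A minimal standalone sketch of that bookkeeping, with a toy function and made-up offsets rather than the crate's API:

// Applies queued shift operations to a list of chunk offsets (in blocks).
// Every applied shift moves all later data, so a running `shifted`
// correction is added to each subsequent operation's recorded offset.
fn apply_shifts(offsets: &mut Vec<usize>, operations: &[(usize, isize)]) {
    let mut shifted: isize = 0;
    for &(offset, amount) in operations {
        // translate the recorded offset into the file's current layout
        let offset = (offset as isize + shifted) as usize;
        for o in offsets.iter_mut() {
            if *o >= offset {
                *o = (*o as isize + amount) as usize;
            }
        }
        shifted += amount;
    }
}

fn main() {
    // chunks at blocks 2, 10 and 20; two 3-block gaps in front of them get closed
    let mut offsets = vec![2, 10, 20];
    apply_shifts(&mut offsets, &[(10, -3), (20, -3)]);
    assert_eq!(offsets, vec![2, 7, 14]);
    println!("{:?}", offsets);
}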
@@ -93,7 +121,7 @@ impl RegionFile {
         mut chunk: Chunk,
         statistic: &mut ScanStatistics,
         options: &Arc<ScanOptions>,
-    ) -> Result<()> {
+    ) -> Result<bool> {
         let chunk_sections = ((chunk.length + 4) as f64 / BLOCK_SIZE as f64).ceil();
         let reader_offset = offset as u64 * BLOCK_SIZE as u64;
@@ -120,32 +148,45 @@ impl RegionFile {
                         statistic.missing_nbt += 1;
                     }
                 }
+                if options.fix_delete {
                     self.delete_chunk(index)?;
+                    return Ok(false);
+                }
             }
         }

         if sections != chunk_sections as u8 || chunk.length >= 1_048_576 {
             statistic.invalid_length += 1;
+            if options.fix {
                 self.locations
                     .replace_entry_unchecked(index, (offset, chunk_sections as u8));
             }
+        }

-        Ok(())
+        Ok(true)
     }

     /// Deletes a chunk and shifts all other chunks
     pub fn delete_chunk(&mut self, index: usize) -> Result<()> {
         let (offset, sections) = self.locations.get_chunk_entry_unchecked(index);
-        self.reader.seek(SeekFrom::Start(
-            (offset as usize * BLOCK_SIZE + sections as usize * BLOCK_SIZE) as u64,
-        ))?;
-        self.writer
-            .seek(SeekFrom::Start((offset as usize * BLOCK_SIZE) as u64))?;
         log::debug!(
             "Shifting chunk entries starting from {} by {} to the left",
             offset,
             sections as u32
         );
+        self.locations.delete_chunk_entry_unchecked(index);
+        Ok(())
+    }
+
+    /// Shifts the file from the `offset` position `amount` blocks to the right
+    pub fn shift_right(&mut self, offset: usize, amount: isize) -> Result<()> {
+        self.reader
+            .seek(SeekFrom::Start((offset * BLOCK_SIZE) as u64))?;
+        self.writer.seek(SeekFrom::Start(
+            ((offset as isize + amount) as usize * BLOCK_SIZE) as u64,
+        ))?;
         loop {
             let mut buf = [0u8; BLOCK_SIZE];
             let read = self.reader.read(&mut buf)?;
@@ -154,8 +195,6 @@ impl RegionFile {
                 break;
             }
         }
-        self.locations.delete_chunk_entry_unchecked(index);
-        self.locations.shift_entries(offset, -(sections as i32));

         Ok(())
     }
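The new shift_right seeks the reader to `offset` and the writer to `offset + amount`, then streams the rest of the file block by block; with a negative `amount` this copies everything to the left, and the now-stale tail of the file is only removed later when the caller truncates it. A toy in-memory version of the same block copy, using a made-up 4-byte block size for readability (the real code is assumed to use 4096-byte blocks and overwrites the file in place instead of truncating the buffer):

// Copies everything from `offset` blocks onward to `offset + amount` blocks;
// a negative `amount` closes a gap to the left. The toy version drops the
// tail immediately, which the real code leaves to a later set_len call.
fn shift_blocks(data: &mut Vec<u8>, offset: usize, amount: isize, block_size: usize) {
    let src = offset * block_size;
    let dst = ((offset as isize + amount) as usize) * block_size;
    let tail: Vec<u8> = data[src..].to_vec();
    data.truncate(dst);
    data.extend_from_slice(&tail);
}

fn main() {
    // blocks: [A][gap][B] -> shifting B one block left yields [A][B]
    let mut data = vec![1, 1, 1, 1, 0, 0, 0, 0, 2, 2, 2, 2];
    shift_blocks(&mut data, 2, -1, 4);
    assert_eq!(data, vec![1, 1, 1, 1, 2, 2, 2, 2]);
}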
@@ -221,6 +260,26 @@ impl Locations {
             .collect()
     }

+    /// Returns the estimated size of all chunks combined including the header
+    pub fn estimated_size(&self) -> u64 {
+        let largest = self
+            .inner
+            .iter()
+            .max_by(|(a, _), (b, _)| {
+                if a > b {
+                    Ordering::Greater
+                } else if a < b {
+                    Ordering::Less
+                } else {
+                    Ordering::Equal
+                }
+            })
+            .cloned()
+            .unwrap_or((2, 0));
+
+        (largest.0 as u64 + largest.1 as u64) * BLOCK_SIZE as u64
+    }
+
     /// Replaces an entry with a new one. Panics if the index doesn't exist
     pub fn replace_entry_unchecked(&mut self, index: usize, entry: (u32, u8)) {
         self.inner[index] = entry;
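estimated_size picks the location entry with the highest offset; after compaction the file ends where that chunk ends, so the size is (offset + sections) * BLOCK_SIZE, and the (2, 0) fallback keeps the two header blocks of an empty region. A compact standalone sketch of the same calculation, assuming the usual 4096-byte region blocks:

const BLOCK_SIZE: usize = 4096;

// Size of the region file once all gaps are closed: the end of the chunk
// with the highest offset, but never less than the 8 KiB header.
fn estimated_size(entries: &[(u32, u8)]) -> u64 {
    let (offset, sections) = entries
        .iter()
        .copied()
        .max_by_key(|&(offset, _)| offset)
        .unwrap_or((2, 0)); // empty region: only the two header blocks remain
    (offset as u64 + sections as u64) * BLOCK_SIZE as u64
}

fn main() {
    // e.g. the last chunk starts at block 40 and spans 2 blocks -> 42 * 4096 bytes
    assert_eq!(estimated_size(&[(2, 1), (40, 2)]), 42 * 4096_u64);
    assert_eq!(estimated_size(&[]), 2 * 4096_u64);
}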

@@ -10,6 +10,8 @@ pub struct ScanStatistics {
     pub corrupted_nbt: u64,
     pub failed_to_read: u64,
     pub corrupted_compression: u64,
+    pub shrunk_size: u64,
+    pub unused_space: u64,
 }

 impl ScanStatistics {
@@ -22,6 +24,8 @@ impl ScanStatistics {
             corrupted_nbt: 0,
             corrupted_compression: 0,
             failed_to_read: 0,
+            shrunk_size: 0,
+            unused_space: 0,
         }
     }
 }
@@ -37,6 +41,7 @@ impl Add for ScanStatistics {
         self.missing_nbt += rhs.missing_nbt;
         self.corrupted_compression += rhs.corrupted_compression;
         self.corrupted_nbt += rhs.corrupted_nbt;
+        self.unused_space += rhs.unused_space;

         self
     }
@@ -53,14 +58,16 @@ impl Display for ScanStatistics {
             Chunks with invalid compression method: {}
             Chunks with missing nbt data: {}
             Chunks with corrupted nbt data: {}
-            Chunks with corrupted compressed data {}",
+            Chunks with corrupted compressed data: {}
+            Unused space: {} KiB",
             self.total_chunks,
             self.failed_to_read,
             self.invalid_length,
             self.invalid_compression_method,
             self.missing_nbt,
             self.corrupted_nbt,
-            self.corrupted_compression
+            self.corrupted_compression,
+            self.unused_space / 1024,
         )
     }
 }

@@ -5,6 +5,7 @@ use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle};
 use log::LevelFilter;
 use rayon::prelude::*;
 use std::fs;
+use std::fs::OpenOptions;
 use std::io;
 use std::ops::Add;
 use std::path::PathBuf;
@@ -62,6 +63,10 @@ impl WorldFolder {
             .ok()?;
         let result = region_file.scan_chunks(&options).ok()?;

+        if options.fix && result.shrunk_size > 0 {
+            let f = OpenOptions::new().read(true).write(true).open(path).ok()?;
+            f.set_len(result.shrunk_size).ok()?;
+        }
         bar.inc(1);

        log::debug!("Statistics for {:?}:\n{}", path, result);
