improved error handling
mcroomp committed Nov 10, 2024
1 parent 9f226dc commit 425cebb
Showing 8 changed files with 259 additions and 110 deletions.
src/lib.rs: 14 changes (9 additions, 5 deletions)
@@ -21,8 +21,8 @@ mod huffman_encoding;
mod huffman_helper;
mod idat_parse;
mod preflate_constants;
pub mod preflate_container;
pub mod preflate_error;
mod preflate_container;
mod preflate_error;
mod preflate_input;
mod preflate_parameter_estimator;
mod preflate_parse_config;
@@ -34,8 +34,12 @@ mod statistical_codec;
mod token_predictor;
mod tree_predictor;

use preflate_container::{expand_zlib_chunks, recreated_zlib_chunks};
use preflate_error::PreflateError;
pub use preflate_container::{
compress_zstd, decompress_deflate_stream, decompress_zstd, expand_zlib_chunks,
recompress_deflate_stream, recreated_zlib_chunks,
};
pub use preflate_error::PreflateError;

use std::{io::Cursor, panic::catch_unwind};

/// C ABI interface for compressing Zip file, exposed from DLL.
@@ -84,7 +88,7 @@ pub unsafe extern "C" fn WrapperDecompressZip(
let output = std::slice::from_raw_parts_mut(output_buffer, output_buffer_size as usize);

let compressed_data =
zstd::bulk::decompress(input, 1024 * 1024 * 128).map_err(PreflateError::ZstdError)?;
zstd::bulk::decompress(input, 1024 * 1024 * 128).map_err(PreflateError::from)?;

let mut source = Cursor::new(&compressed_data);
let mut destination = Cursor::new(output);
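
src/preflate_error.rs is not among the files shown, but the call sites in this commit (`PreflateError::new(ExitCode::…, "…")`, `PreflateError::wrap(ExitCode::…, &e)`, `.map_err(PreflateError::from)`, and the bare `?` on `std::io` results from the zstd bulk API) suggest roughly the shape below. This is a sketch under those assumptions, not the real module: the `ExitCode` variants are the ones visible in this diff, while `OsError`, the field names, and the `exit_code()` accessor are invented for illustration.

```rust
// Hypothetical reconstruction of the error type implied by this commit.
// Only the ExitCode variants visible at call sites are taken from the diff;
// everything else (field names, OsError, exit_code()) is assumed.
use std::fmt;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExitCode {
    AnalyzeFailed,
    InvalidCompressedWrapper,
    InvalidPredictionData,
    RoundtripMismatch,
    OsError, // assumed catch-all for io/zstd failures
}

#[derive(Debug)]
pub struct PreflateError {
    exit_code: ExitCode,
    message: String,
}

impl PreflateError {
    /// Used for conditions detected locally, e.g. an unknown chunk type.
    pub fn new(exit_code: ExitCode, message: &str) -> Self {
        PreflateError {
            exit_code,
            message: message.to_owned(),
        }
    }

    /// Used to attach an exit code to an inner error, e.g.
    /// `.map_err(|e| PreflateError::wrap(ExitCode::AnalyzeFailed, &e))`.
    pub fn wrap(exit_code: ExitCode, e: &impl fmt::Display) -> Self {
        Self::new(exit_code, &e.to_string())
    }

    pub fn exit_code(&self) -> ExitCode {
        self.exit_code
    }
}

impl fmt::Display for PreflateError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}: {}", self.exit_code, self.message)
    }
}

impl std::error::Error for PreflateError {}

// A conversion like this is what lets `zstd::bulk::decompress(..)?` and
// `.map_err(PreflateError::from)` compile: the zstd bulk API returns
// `std::io::Result`, so io errors fold into PreflateError automatically.
impl From<std::io::Error> for PreflateError {
    fn from(e: std::io::Error) -> Self {
        Self::wrap(ExitCode::OsError, &e)
    }
}
```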
src/main.rs: 32 changes (1 addition, 31 deletions)
@@ -3,37 +3,7 @@ use std::{
path::{Path, PathBuf},
};

use preflate_container::{compress_zstd, decompress_zstd};

mod add_policy_estimator;
mod bit_helper;
mod bit_reader;
mod bit_writer;
mod cabac_codec;
mod complevel_estimator;
mod deflate_reader;
mod deflate_writer;
mod depth_estimator;
mod hash_algorithm;
mod hash_chain;
mod hash_chain_holder;
mod huffman_calc;
mod huffman_encoding;
mod huffman_helper;
mod idat_parse;
mod preflate_constants;
mod preflate_container;
pub mod preflate_error;
mod preflate_input;
mod preflate_parameter_estimator;
mod preflate_parse_config;
mod preflate_stream_info;
mod preflate_token;
mod process;
mod scan_deflate;
mod statistical_codec;
mod token_predictor;
mod tree_predictor;
use preflate_rs::{compress_zstd, decompress_zstd};

fn enumerate_directory_recursively(path: &Path) -> Result<Vec<PathBuf>, std::io::Error> {
let mut results = Vec::new();
src/preflate_container.rs: 51 changes (29 additions, 22 deletions)
@@ -5,7 +5,7 @@ use std::io::{Cursor, Read, Write};
use crate::{
cabac_codec::{PredictionDecoderCabac, PredictionEncoderCabac},
idat_parse::{recreate_idat, IdatContents},
preflate_error::PreflateError,
preflate_error::{ExitCode, PreflateError},
preflate_input::PreflateInput,
preflate_parameter_estimator::{estimate_preflate_parameters, PreflateParameters},
process::{decode_mispredictions, encode_mispredictions, parse_deflate},
@@ -153,12 +153,17 @@ fn read_chunk_block(

if let Some(idat) = idat {
recreate_idat(&idat, &recompressed[..], destination)
.map_err(|_e| PreflateError::InvalidCompressedWrapper)?;
.map_err(|e| PreflateError::wrap(ExitCode::InvalidCompressedWrapper, &e))?;
} else {
destination.write_all(&recompressed)?;
}
}
_ => return Err(PreflateError::InvalidCompressedWrapper),
_ => {
return Err(PreflateError::new(
ExitCode::InvalidCompressedWrapper,
"Invalid chunk",
))
}
}
Ok(true)
}
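
A hedged caller-side example of what the change buys: with the sketch above, the error that bubbles out of `decompress_zstd` now carries a message such as "Invalid chunk" rather than a bare `InvalidCompressedWrapper` variant. The `Display` output is an assumption taken from that sketch, not a documented format.

```rust
// Sketch of a caller logging the richer error message.
// decompress_zstd is re-exported from the crate root in this commit.
use preflate_rs::decompress_zstd;

fn restore_or_log(container: &[u8], capacity: usize) -> Option<Vec<u8>> {
    match decompress_zstd(container, capacity) {
        Ok(bytes) => Some(bytes),
        Err(e) => {
            // Before this commit the caller saw only a bare variant such as
            // InvalidCompressedWrapper; now the message names the failing
            // condition (e.g. "Invalid chunk").
            eprintln!("container recreation failed: {e}");
            None
        }
    }
}
```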
@@ -251,7 +256,10 @@ pub fn recreated_zlib_chunks(
) -> std::result::Result<(), PreflateError> {
let version = source.read_u8()?;
if version != COMPRESSED_WRAPPER_VERSION_1 {
return Err(PreflateError::InvalidCompressedWrapper);
return Err(PreflateError::new(
ExitCode::InvalidCompressedWrapper,
format!("Invalid version {version}").as_str(),
));
}

loop {
@@ -345,7 +353,7 @@ pub fn decompress_deflate_stream(
//process::write_file("c:\\temp\\lastop.bin", contents.plain_text.as_slice());

let params = estimate_preflate_parameters(&contents.plain_text, &contents.blocks)
.map_err(PreflateError::AnalyzeFailed)?;
.map_err(|e| PreflateError::wrap(ExitCode::AnalyzeFailed, &e))?;

if loglevel > 0 {
println!("params: {:?}", params);
@@ -361,7 +369,7 @@
PredictionDecoderCabac::new(VP8Reader::new(Cursor::new(&cabac_encoded[..])).unwrap());

let reread_params = PreflateParameters::read(&mut cabac_decoder)
.map_err(PreflateError::InvalidPredictionData)?;
.map_err(|e| PreflateError::wrap(ExitCode::InvalidPredictionData, &e))?;
assert_eq!(params, reread_params);

let (recompressed, _recreated_blocks) = decode_mispredictions(
Expand All @@ -371,9 +379,10 @@ pub fn decompress_deflate_stream(
)?;

if recompressed[..] != compressed_data[..contents.compressed_size] {
return Err(PreflateError::Mismatch(anyhow::anyhow!(
"recompressed data does not match original"
)));
return Err(PreflateError::new(
ExitCode::RoundtripMismatch,
"recompressed data does not match original",
));
}
}

@@ -394,7 +403,7 @@ pub fn recompress_deflate_stream(
PredictionDecoderCabac::new(VP8Reader::new(Cursor::new(prediction_corrections)).unwrap());

let params = PreflateParameters::read(&mut cabac_decoder)
.map_err(PreflateError::InvalidPredictionData)?;
.map_err(|e| PreflateError::wrap(ExitCode::InvalidPredictionData, &e))?;
let (recompressed, _recreated_blocks) =
decode_mispredictions(&params, PreflateInput::new(plain_text), &mut cabac_decoder)?;
Ok(recompressed)
@@ -417,7 +426,7 @@ pub fn decompress_deflate_stream_assert(
let contents = parse_deflate(compressed_data, 0)?;

let params = estimate_preflate_parameters(&contents.plain_text, &contents.blocks)
.map_err(PreflateError::AnalyzeFailed)?;
.map_err(|e| PreflateError::wrap(ExitCode::AnalyzeFailed, &e))?;

params.write(&mut cabac_encoder);
encode_mispredictions(&contents, &params, &mut cabac_encoder)?;
@@ -430,17 +439,18 @@
PredictionDecoderCabac::new(DebugReader::new(Cursor::new(&cabac_encoded)).unwrap());

let params = PreflateParameters::read(&mut cabac_decoder)
.map_err(PreflateError::InvalidPredictionData)?;
.map_err(|e| PreflateError::wrap(ExitCode::InvalidPredictionData, &e))?;
let (recompressed, _recreated_blocks) = decode_mispredictions(
&params,
PreflateInput::new(&contents.plain_text),
&mut cabac_decoder,
)?;

if recompressed[..] != compressed_data[..] {
return Err(PreflateError::Mismatch(anyhow::anyhow!(
"recompressed data does not match original"
)));
return Err(PreflateError::new(
ExitCode::RoundtripMismatch,
"recompressed data does not match original",
));
}
}

@@ -465,8 +475,7 @@ pub fn recompress_deflate_stream_assert(
DebugReader::new(Cursor::new(&prediction_corrections)).unwrap(),
);

let params = PreflateParameters::read(&mut cabac_decoder)
.map_err(PreflateError::InvalidPredictionData)?;
let params = PreflateParameters::read(&mut cabac_decoder)?;

let (recompressed, _recreated_blocks) =
decode_mispredictions(&params, PreflateInput::new(plain_text), &mut cabac_decoder)?;
@@ -520,16 +529,14 @@ fn verify_file(filename: &str) {
/// expands the Zlib compressed streams in the data and then recompresses the result
/// with Zstd with the maximum level.
pub fn compress_zstd(zlib_compressed_data: &[u8], loglevel: u32) -> Result<Vec<u8>, PreflateError> {
let plain_text = expand_zlib_chunks(zlib_compressed_data, loglevel)
.map_err(|_| PreflateError::InvalidCompressedWrapper)?;
zstd::bulk::compress(&plain_text, 9).map_err(PreflateError::ZstdError)
let plain_text = expand_zlib_chunks(zlib_compressed_data, loglevel)?;
Ok(zstd::bulk::compress(&plain_text, 9)?)
}

/// decompresses the Zstd compressed data and then recompresses the result back
/// to the original Zlib compressed streams.
pub fn decompress_zstd(compressed_data: &[u8], capacity: usize) -> Result<Vec<u8>, PreflateError> {
let compressed_data =
zstd::bulk::decompress(compressed_data, capacity).map_err(PreflateError::ZstdError)?;
let compressed_data = zstd::bulk::decompress(compressed_data, capacity)?;

let mut result = Vec::new();
recreated_zlib_chunks(&mut Cursor::new(compressed_data), &mut result)?;
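
With the new `pub use` re-exports in lib.rs, the container roundtrip in this file is reachable from outside the crate. A minimal usage sketch, based only on the signatures visible in this diff (crate name `preflate_rs` as used by src/main.rs; the 128 MB capacity mirrors the bound in the DLL wrapper above):

```rust
// Roundtrip sketch using the functions whose signatures appear in this diff:
// compress_zstd(&[u8], loglevel: u32) -> Result<Vec<u8>, PreflateError>
// decompress_zstd(&[u8], capacity: usize) -> Result<Vec<u8>, PreflateError>
use preflate_rs::{compress_zstd, decompress_zstd, PreflateError};

fn roundtrip(zlib_compressed: &[u8]) -> Result<(), PreflateError> {
    // Expand embedded deflate streams, then recompress the result with zstd.
    let container = compress_zstd(zlib_compressed, 0)?;

    // Undo it: decompress the zstd layer and re-deflate the original streams.
    // The capacity is an upper bound on the expanded size.
    let restored = decompress_zstd(&container, 128 * 1024 * 1024)?;

    // Bit-exact reconstruction is the crate's stated goal.
    assert_eq!(restored, zlib_compressed);
    Ok(())
}
```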
(The remaining 5 changed files are not shown.)
