From 807523bf229ba677e05efeeb1dfff1b38301a967 Mon Sep 17 00:00:00 2001
From: dece
Date: Mon, 20 Apr 2020 01:02:11 +0200
Subject: [PATCH] ironring: how do i &str

---
 src/bin/ironring.rs     | 38 +++++++++++++++-----
 src/lib.rs              |  1 +
 src/parsers/dcx.rs      | 80 ++++++++++++++---------------------------
 src/parsers/errors.rs   | 18 ++++++++++
 src/unpackers/bhd.rs    | 10 +++---
 src/unpackers/dcx.rs    | 26 ++++++++++++--
 src/unpackers/errors.rs | 18 ++++++++++
 7 files changed, 121 insertions(+), 70 deletions(-)
 create mode 100644 src/parsers/errors.rs
 create mode 100644 src/unpackers/errors.rs

diff --git a/src/bin/ironring.rs b/src/bin/ironring.rs
index 949c256..b6373e5 100644
--- a/src/bin/ironring.rs
+++ b/src/bin/ironring.rs
@@ -52,7 +52,10 @@ fn main() {
             .about("?TODO?")
             .arg(Arg::with_name("file")
                 .takes_value(true)
-                .required(true)))
+                .required(true))
+            .arg(Arg::with_name("output")
+                .takes_value(true)
+                .required(false)))
         .get_matches();
 
     process::exit(match matches.subcommand() {
@@ -108,18 +111,21 @@ fn cmd_bhds(args: &ArgMatches) -> i32 {
             continue
         }
         let path = entry.unwrap().path();
-        match path.extension() {
-            Some(e) => { if e == "bhd5" { bhd_paths.push(path); } }
-            _ => {}
+        if let Some(e) = path.extension() {
+            if e == "bhd5" {
+                bhd_paths.push(path);
+            }
         }
     }
     bhd_paths.sort();
 
     for bhd_path in bhd_paths {
         println!("Extracting {:?}", bhd_path);
-        match unpackers::bhd::extract_bhd(bhd_path.to_str().unwrap(), &names, output_path) {
-            Err(e) => { eprintln!("Failed to extract BHD: {:?}", e); return 1 }
-            _ => {}
+        if let Some(path_str) = bhd_path.to_str() {
+            if let Err(e) = unpackers::bhd::extract_bhd(path_str, &names, output_path) {
+                eprintln!("Failed to extract BHD: {:?}", e);
+                return 1
+            }
         }
     }
     return 0
@@ -133,7 +139,23 @@ fn cmd_hash(args: &ArgMatches) -> i32 {
 
 fn cmd_dcx(args: &ArgMatches) -> i32 {
     let file_path: &str = args.value_of("file").unwrap();
-    match unpackers::dcx::extract_dcx(file_path) {
+    let mut output_path: String = match args.value_of("output") {
+        Some(s) => { s.to_string() }
+        _ => { String::with_capacity(file_path.len()) }
+    };
+    if output_path.is_empty() {
+        let mut pb = path::PathBuf::from(&file_path);
+        pb.set_extension("");
+        if let Some(s) = pb.to_str() {
+            output_path.push_str(s);
+        } else {
+            eprintln!("Could not create an uncompressed path for {}. \
+                       Provide one as \"output\" argument.", file_path);
+            return 1
+        }
+    }
+
+    match unpackers::dcx::extract_dcx(file_path, &output_path) {
         Err(e) => { eprintln!("Failed to extract DCX: {:?}", e); return 1 }
         _ => { 0 }
     }
diff --git a/src/lib.rs b/src/lib.rs
index 210902d..6c39bf9 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -6,6 +6,7 @@ pub mod parsers {
 pub mod unpackers {
     pub mod bhd;
     pub mod dcx;
+    pub mod errors;
 }
 pub mod utils {
     pub mod fs;
diff --git a/src/parsers/dcx.rs b/src/parsers/dcx.rs
index a05c6b9..39d33ed 100644
--- a/src/parsers/dcx.rs
+++ b/src/parsers/dcx.rs
@@ -1,51 +1,6 @@
-//typedef struct {
-//    int unk00; Assert(unk00 == 0);
-//    int dataOffset;
-//    int dataLength;
-//    int unk0C; Assert(unk0C == 1);
-//} Block;
-//
-//typedef struct {
-//    char dcx[4]; Assert(dcx == "DCX\0");
-//    int unk04; Assert(unk04 == 0x10000 || unk04 == 0x11000);
-//    int unk08; Assert(unk08 == 0x18);
-//    int unk0C; Assert(unk0C == 0x24);
-//    int unk10; Assert(unk10 == 0x24 || unk10 == 0x44);
-//    int unk14; // In EDGE, size from 0x20 to end of block headers
-//    char dcs[4]; Assert(dcs == "DCS\0");
-//    uint uncompressedSize;
-//    uint compressedSize;
-//    char dcp[4]; Assert(dcp == "DCP\0");
-//    char format[4]; Assert(format == "DFLT" || format == "EDGE" || format == "KRAK");
-//    int unk2C; Assert(unk2C == 0x20);
-//    byte unk30; Assert(unk30 == 6 || unk30 == 8 || unk30 == 9);
-//    byte unk31; Assert(unk31 == 0);
-//    byte unk32; Assert(unk32 == 0);
-//    byte unk33; Assert(unk33 == 0);
-//    int unk34; Assert(unk34 == 0 || unk34 == 0x10000); // Block size for EDGE?
-//    int unk38; Assert(unk38 == 0);
-//    int unk3C; Assert(unk3C == 0);
-//    int unk40;
-//    char dca[4]; Assert(dca == "DCA\0");
-//    int dcaSize; // From before "DCA" to dca end
-//
-//    if (format == "EDGE") {
-//        char egdt[4]; Assert(egdt == "EgdT");
-//        int unk50; Assert(unk50 == 0x10100);
-//        int unk54; Assert(unk54 == 0x24);
-//        int unk58; Assert(unk58 == 0x10);
-//        int unk5C; Assert(unk5C == 0x10000);
-//        int lastBlockUncompressedSize;
-//        int egdtSize; // From before "EgdT" to dca end
-//        int blockCount;
-//        int unk6C; Assert(unk6C == 0x100000);
-//        Block blocks[blockCount];
-//    }
-//} Header;
-
 use nom::IResult;
+use nom::branch::alt;
 use nom::bytes::complete::tag;
-use nom::choice::alt;
 use nom::multi::count;
 use nom::number::complete::*;
 use nom::sequence::tuple;
@@ -95,7 +50,8 @@ pub struct DcxParams {
     pub unk1C: u32,
 }
 
-fn parse_params(i: &[u8]) {
+#[allow(non_snake_case)]
+fn parse_params(i: &[u8]) -> IResult<&[u8], DcxParams> {
     let (i, (magic, method, ofs_dca, flags, unk10, unk14, unk18, unk1C)) =
         tuple((
             tag(b"DCP\0"),
@@ -111,7 +67,7 @@
         i,
         DcxParams {
             magic: magic.to_vec(),
-            method,
+            method: method.to_vec(),
             ofs_dca,
             unk0C: flags[0],
             unk0D: flags[1],
@@ -125,17 +81,33 @@
     ))
 }
 
+#[derive(Debug)]
+pub struct DcxArchive {
+    pub magic: Vec<u8>,
+    pub ofs_data: u32,
+}
+
+fn parse_archive(i: &[u8]) -> IResult<&[u8], DcxArchive> {
+    let (i, (magic, ofs_data)) = tuple((tag(b"DCA\0"), be_u32))(i)?;
+    Ok((i, DcxArchive { magic: magic.to_vec(), ofs_data }))
+}
+
 #[derive(Debug)]
 pub struct Dcx {
     pub header: DcxHeader,
     pub sizes: DcxSizes,
     pub params: DcxParams,
+    pub archive: DcxArchive,
 }
 
-pub fn parse(i: &[u8]) -> IResult<&[u8], u8> {
-    let (i, header) = parse_header(i).unwrap();
-    println!("{:?}", header);
-    Ok((i, 0))
-
-    //Ok((i, Dcx { header: None, sizes: None, params: None }))
+pub fn parse(i: &[u8]) -> IResult<&[u8], Dcx> {
+    let full_file = i;
+    let (_, header) = parse_header(&full_file)?;
+    let pos_dcs = header.ofs_dcs as usize;
+    let (_, sizes) = parse_sizes(&full_file[pos_dcs..])?;
+    let pos_dcp = header.ofs_dcp as usize;
+    let (_, params) = parse_params(&full_file[pos_dcp..])?;
+    let pos_dca = pos_dcp + params.ofs_dca as usize;
+    let (_, archive) = parse_archive(&full_file[pos_dca..])?;
+    Ok((i, Dcx { header, sizes, params, archive }))
 }
diff --git a/src/parsers/errors.rs b/src/parsers/errors.rs
new file mode 100644
index 0000000..7db64c4
--- /dev/null
+++ b/src/parsers/errors.rs
@@ -0,0 +1,18 @@
+use nom::Err::{Error as NomError, Failure as NomFailure};
+
+pub enum ParseError {
+    Error(NomError),
+    Failure(NomFailure),
+}
+
+impl From<NomError> for ParseError {
+    fn from(e: NomError) -> Self {
+        ParseError::Error(e)
+    }
+}
+
+impl From<NomFailure> for ParseError {
+    fn from(e: NomFailure) -> Self {
+        ParseError::Failure(e)
+    }
+}
diff --git a/src/unpackers/bhd.rs b/src/unpackers/bhd.rs
index 8b2133c..6dcc6e2 100644
--- a/src/unpackers/bhd.rs
+++ b/src/unpackers/bhd.rs
@@ -7,6 +7,7 @@ use nom::Err::{Error as NomError, Failure as NomFailure};
 
 use crate::name_hashes;
 use crate::parsers::bhd;
+use crate::unpackers::errors::{self as unpackers_errors, UnpackError};
 use crate::utils::fs as fs_utils;
 
 /// Parse a BHD file and extract its content.
@@ -14,7 +15,7 @@ pub fn extract_bhd(
     bhd_path: &str,
     names: &HashMap<String, String>,
     output_path: &str
-) -> Result<(), io::Error> {
+) -> Result<(), UnpackError> {
     let mut bhd_file = fs::File::open(bhd_path)?;
     let file_len = bhd_file.metadata()?.len() as usize;
     let mut bhd_data = vec![0u8; file_len];
@@ -22,12 +23,11 @@ pub fn extract_bhd(
 
     let bhd = match bhd::parse(&bhd_data) {
         Ok((_, bhd)) => { bhd }
         Err(NomError(e)) | Err(NomFailure(e)) => {
-            let (_, kind) = e;
-            let reason = format!("{:?} {:?}", kind, kind.description());
-            eprintln!("BHD parsing failed: {}", reason); return Ok(())
+            let reason = unpackers_errors::get_nom_error_reason(e.1);
+            return Err(UnpackError::Parsing("BHD parsing failed: ".to_owned() + &reason))
         }
         e => {
-            eprintln!("Unknown error: {:?}", e); return Ok(())
+            return Err(UnpackError::Unknown(format!("Unknown error: {:?}", e)))
         }
     };
diff --git a/src/unpackers/dcx.rs b/src/unpackers/dcx.rs
index 05afb11..db69c98 100644
--- a/src/unpackers/dcx.rs
+++ b/src/unpackers/dcx.rs
@@ -1,13 +1,33 @@
 use std::fs;
-use std::io::{self, Read};
+use std::io::{Read};
+
+use nom::Err::{Error as NomError, Failure as NomFailure};
 
 use crate::parsers::dcx;
+use crate::unpackers::errors::{self as unpackers_errors, UnpackError};
 
-pub fn extract_dcx(dcx_path: &str) -> Result<(), io::Error> {
+pub fn extract_dcx(dcx_path: &str, output_path: &str) -> Result<(), UnpackError> {
     let mut dcx_file = fs::File::open(dcx_path)?;
     let file_len = dcx_file.metadata()?.len() as usize;
     let mut dcx_data = vec![0u8; file_len];
     dcx_file.read_exact(&mut dcx_data)?;
-    dcx::parse(&dcx_data);
+    let dcx = match dcx::parse(&dcx_data) {
+        Ok((_, dcx)) => { dcx }
+        Err(NomError(e)) | Err(NomFailure(e)) => {
+            let reason = unpackers_errors::get_nom_error_reason(e.1);
+            return Err(UnpackError::Parsing("DCX parsing failed: ".to_owned() + &reason))
+        }
+        e => {
+            return Err(UnpackError::Unknown(format!("Unknown error: {:?}", e)))
+        }
+    };
+
+
+    println!("{:?}", dcx);
     Ok(())
 }
+
+pub fn decompress_dcx(dcx: &dcx::Dcx) -> Vec<u8> {
+    let mut data = vec![0u8; dcx.sizes.uncompressed_size as usize];
+    data
+}
diff --git a/src/unpackers/errors.rs b/src/unpackers/errors.rs
new file mode 100644
index 0000000..95231a8
--- /dev/null
+++ b/src/unpackers/errors.rs
@@ -0,0 +1,18 @@
+use std::io;
+
+#[derive(Debug)]
+pub enum UnpackError {
+    Io(io::Error),
+    Parsing(String),
+    Unknown(String),
+}
+
+impl From<io::Error> for UnpackError {
+    fn from(e: io::Error) -> Self {
+        UnpackError::Io(e)
+    }
+}
+
+pub fn get_nom_error_reason(kind: nom::error::ErrorKind) -> String {
+    format!("{:?} {:?}", kind, kind.description())
+}
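
Note (not part of the patch above): decompress_dcx currently only allocates a
zeroed buffer of sizes.uncompressed_size bytes and returns it. For DCX files
whose params.method is "DFLT", the payload that follows the headers is a
zlib/DEFLATE stream, so one possible way to finish the function is sketched
below. This is an illustration only: it assumes the flate2 crate would be added
as a dependency, and the helper name inflate_dflt, its signature, and the idea
that the caller slices the compressed bytes out of the file using the parsed
offsets are hypothetical, not something this commit defines.

use std::io::Read;

use flate2::read::ZlibDecoder;

/// Inflate a zlib ("DFLT") payload. `compressed` must start at the first byte
/// of the zlib stream; `uncompressed_size` (from the DCS chunk) is only used
/// to pre-allocate the output buffer.
fn inflate_dflt(compressed: &[u8], uncompressed_size: usize) -> std::io::Result<Vec<u8>> {
    // &[u8] implements Read, so the slice can feed the decoder directly.
    let mut decoder = ZlibDecoder::new(compressed);
    let mut data = Vec::with_capacity(uncompressed_size);
    decoder.read_to_end(&mut data)?;
    Ok(data)
}

With something along these lines, extract_dcx could write the inflated bytes to
output_path (the new optional argument, which cmd_dcx defaults to the input
path with its extension stripped) instead of only printing the parsed Dcx.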