diff --git a/README.md b/README.md
index 7b43110..375df97 100644
--- a/README.md
+++ b/README.md
@@ -51,8 +51,9 @@ Features
 --------
 
 - BHD5 / BDT: extraction from disk to disk.
-- DCX: decompression from disk to disk.
-- BND (v3): extraction from disk/memory to disk/memory.
+- DCX: decompression from disk to disk/memory.
+- BND (v3): extraction from disk/memory to disk/memory, optionally decompressing
+  from DCX.
 - BHF (v3): extraction from disk/memory to disk/memory.
 
 Repacking is not supported, maybe one day. It is not that useful when using
diff --git a/src/bin/ironring.rs b/src/bin/ironring.rs
index 864aa16..c3bbd95 100644
--- a/src/bin/ironring.rs
+++ b/src/bin/ironring.rs
@@ -5,7 +5,7 @@ use std::process;
 
 use clap::{App, AppSettings, Arg, ArgMatches, SubCommand};
 
-use rir::{name_hashes, unpackers, utils};
+use rir::{name_hashes, unpackers};
 
 fn main() {
     let default_namefilepath: &str = &get_default_namefilepath();
@@ -14,80 +14,65 @@ fn main() {
         .subcommand(SubCommand::with_name("bhd")
             .about("Extracts BHD/BDT contents")
             .arg(Arg::with_name("file")
-                .takes_value(true)
-                .required(true))
+                .help("BHD file path, usually with bhd5 extension")
+                .takes_value(true).required(true))
             .arg(Arg::with_name("output")
-                .short("o")
-                .long("output")
-                .takes_value(true)
-                .required(true))
+                .help("Output directory")
+                .short("o").long("output").takes_value(true).required(true))
             .arg(Arg::with_name("namefile")
-                .short("n")
-                .long("names")
-                .takes_value(true)
-                .required(false)
+                .help("Namefile path, mapping hashes to file names")
+                .short("n").long("names").takes_value(true).required(false)
                 .default_value(default_namefilepath)))
         .subcommand(SubCommand::with_name("bhds")
             .about("Extracts all BHD/BDT content (alphabetically) in a folder")
             .arg(Arg::with_name("folder")
-                .takes_value(true)
-                .required(true))
+                .help("Path where BHD/BDT archives are stored")
+                .takes_value(true).required(true))
             .arg(Arg::with_name("output")
-                .short("o")
-                .long("output")
-                .takes_value(true)
-                .required(true))
+                .help("Output directory")
+                .short("o").long("output").takes_value(true).required(true))
            .arg(Arg::with_name("namefile")
-                .short("n")
-                .long("names")
-                .takes_value(true)
-                .required(false)
+                .help("Namefile path, mapping hashes to file names")
+                .short("n").long("names").takes_value(true).required(false)
                 .default_value(default_namefilepath)))
         .subcommand(SubCommand::with_name("hash")
            .about("Calculates hash for a string")
            .arg(Arg::with_name("value")
-                .takes_value(true)
-                .required(true)))
+                .help("Any string or path to hash")
+                .takes_value(true).required(true)))
        .subcommand(SubCommand::with_name("dcx")
            .about("Extracts and decompress DCX data")
            .arg(Arg::with_name("file")
-                .takes_value(true)
-                .required(true))
+                .help("DCX path")
+                .takes_value(true).required(true))
            .arg(Arg::with_name("output")
-                .short("o")
-                .long("output")
-                .takes_value(true)
-                .required(false)))
+                .help("Output directory")
+                .short("o").long("output").takes_value(true).required(false)))
        .subcommand(SubCommand::with_name("bnd")
            .about("Extracts BND contents")
            .arg(Arg::with_name("file")
-                .takes_value(true)
-                .required(true))
+                .help("BND (or BND/DCX) file path")
+                .takes_value(true).required(true))
            .arg(Arg::with_name("output")
-                .short("o")
-                .long("output")
-                .takes_value(true)
-                .required(true))
+                .help("Output directory")
+                .short("o").long("output").takes_value(true).required(true))
            .arg(Arg::with_name("overwrite")
-                .short("f")
-                .long("force")
-                .takes_value(false)
-                .required(false)))
+                .help("Overwrite existing files")
.short("f").long("force").takes_value(false).required(false)) + .arg(Arg::with_name("decompress") + .help("Decompress file first if BND is in DCX") + .long("decompress").takes_value(false).required(false))) .subcommand(SubCommand::with_name("bhf") .about("Extracts BHF/BDT contents") .arg(Arg::with_name("file") - .takes_value(true) - .required(true)) + .help("BHF file path") + .takes_value(true).required(true)) .arg(Arg::with_name("output") - .short("o") - .long("output") - .takes_value(true) - .required(false)) + .help("Output directory") + .short("o").long("output").takes_value(true).required(false)) .arg(Arg::with_name("overwrite") - .short("f") - .long("force") - .takes_value(false) - .required(false))) + .help("Overwrite existing files") + .short("f").long("force").takes_value(false).required(false))) .get_matches(); process::exit(match matches.subcommand() { @@ -119,7 +104,7 @@ fn cmd_bhd(args: &ArgMatches) -> i32 { Err(e) => { eprintln!("Failed to load namefile: {:?}", e); return 1 } }; - return match unpackers::bhd::extract_bhd(file_path, &names, output_path) { + match unpackers::bhd::extract_bhd(file_path, &names, output_path) { Err(e) => { eprintln!("Failed to extract BHD: {:?}", e); 1 } _ => { 0 } } @@ -173,49 +158,15 @@ fn cmd_hash(args: &ArgMatches) -> i32 { fn cmd_dcx(args: &ArgMatches) -> i32 { let file_path: &str = args.value_of("file").unwrap(); - let mut output_path_valid = false; - let mut output_path: String = match args.value_of("output") { - Some(s) => { output_path_valid = true; s.to_string() } - _ => { String::with_capacity(file_path.len()) } - }; - // If no output path is provided, try to strip the file extension. - if !output_path_valid { - if let Some(pb) = utils::fs::strip_extension(&path::PathBuf::from(&file_path)) { - if let Some(s) = pb.to_str() { - output_path.push_str(s); - output_path_valid = true; - } - } - } - if !output_path_valid { - eprintln!("Could not determine a valid output path."); - return 1 - } - // If the output path is a dir, try to strip extension and place the file there. 
-    if path::Path::new(&output_path).is_dir() {
-        output_path_valid = false;
-        let mut out_pb = path::PathBuf::from(&output_path);
-        if let Some(file_pb) = utils::fs::strip_extension(&path::PathBuf::from(&file_path)) {
-            if let Some(file_name) = file_pb.file_name() {
-                if let Some(file_name_str) = file_name.to_str() {
-                    out_pb.push(file_name_str);
-                    if let Some(s) = out_pb.as_path().to_str() {
-                        output_path.clear();
-                        output_path.push_str(s);
-                        output_path_valid = true;
-                    }
-                }
-            }
-        }
-    }
-    if !output_path_valid {
-        eprintln!("Could not determine a valid output path.");
-        return 1
-    }
+    let output_path: String =
+        match unpackers::dcx::get_decompressed_path(file_path, args.value_of("output")) {
+            Some(p) => p,
+            _ => { return 1 }
+        };
 
     match unpackers::dcx::extract_dcx(file_path, &output_path) {
-        Err(e) => { eprintln!("Failed to extract DCX: {:?}", e); return 1 }
-        _ => { 0 }
+        Err(e) => { eprintln!("Failed to extract DCX: {:?}", e); 1 }
+        _ => 0
     }
 }
 
@@ -223,9 +174,11 @@ fn cmd_bnd(args: &ArgMatches) -> i32 {
     let file_path: &str = args.value_of("file").unwrap();
     let output_path: &str = args.value_of("output").unwrap();
     let overwrite: bool = args.is_present("overwrite");
-    match unpackers::bnd::extract_bnd_file(file_path, output_path, overwrite) {
-        Err(e) => { eprintln!("Failed to extract BND: {:?}", e); return 1 }
-        _ => { 0 }
+    let decompress: bool = args.is_present("decompress");
+
+    match unpackers::bnd::extract_bnd_file(file_path, output_path, overwrite, decompress) {
+        Err(e) => { eprintln!("Failed to extract BND: {:?}", e); 1 }
+        _ => 0
     }
 }
 
@@ -234,7 +187,7 @@ fn cmd_bhf(args: &ArgMatches) -> i32 {
     let output_path: Option<&str> = args.value_of("output");
     let overwrite: bool = args.is_present("overwrite");
     match unpackers::bhf::extract_bhf_file(file_path, output_path, overwrite) {
-        Err(e) => { eprintln!("Failed to extract BHF: {:?}", e); return 1 }
-        _ => { 0 }
+        Err(e) => { eprintln!("Failed to extract BHF: {:?}", e); 1 }
+        _ => 0
     }
 }
diff --git a/src/unpackers/bnd.rs b/src/unpackers/bnd.rs
index 82e8948..8965935 100644
--- a/src/unpackers/bnd.rs
+++ b/src/unpackers/bnd.rs
@@ -5,6 +5,7 @@ use std::path;
 use nom::Err::{Error as NomError, Failure as NomFailure};
 
 use crate::parsers::bnd;
+use crate::unpackers::dcx::load_dcx;
 use crate::unpackers::errors::UnpackError;
 use crate::utils::fs as utils_fs;
 
@@ -14,10 +14,16 @@ use crate::utils::fs as utils_fs;
 pub fn extract_bnd_file(
     bnd_path: &str,
     output_dir: &str,
-    overwrite: bool
+    overwrite: bool,
+    decompress: bool,
 ) -> Result<(), UnpackError> {
-    let (bnd, data) = load_bnd_file(bnd_path)?;
-    extract_bnd(&bnd, &data, output_dir, overwrite)?;
+    let (bnd, bnd_data) = if decompress {
+        let (_, decomp_data) = load_dcx(bnd_path)?;
+        (load_bnd(&decomp_data)?, decomp_data)
+    } else {
+        load_bnd_file(bnd_path)?
+    };
+    extract_bnd(&bnd, &bnd_data, output_dir, overwrite)?;
     Ok(())
 }
 
diff --git a/src/unpackers/dcx.rs b/src/unpackers/dcx.rs
index bc0e8f1..f4bea38 100644
--- a/src/unpackers/dcx.rs
+++ b/src/unpackers/dcx.rs
@@ -50,3 +50,65 @@ fn decompress_dcx_dflt(dcx: &dcx::Dcx, comp_data: &[u8]) -> Result<Vec<u8>, Unpa
     deflater.read_exact(&mut data)?;
     Ok(data)
 }
+
+/// Get a decompressed output path for this DCX file.
+///
+/// If output_path is a valid file path (existing or not), use it.
+/// If output_path is None, try to strip the "dcx" extension from
+/// dcx_path and return that; if there is no extension, return None.
+/// If output_path is a valid dir path, strip the "dcx" extension from
+/// the file name and join it to that dir.
+pub fn get_decompressed_path(dcx_path: &str, output_path: Option<&str>) -> Option<String> {
+    let mut output_path_valid = false;
+    // If no output path is provided, try to strip the file extension.
+    let mut output_path: String = match output_path {
+        Some(s) => { output_path_valid = true; s.to_string() }
+        _ => { String::with_capacity(dcx_path.len()) }
+    };
+    if !output_path_valid {
+        if let Some(pb) = utils_fs::strip_extension(&path::PathBuf::from(&dcx_path)) {
+            if let Some(s) = pb.to_str() {
+                output_path.push_str(s);
+                output_path_valid = true;
+            }
+        }
+    }
+    if !output_path_valid {
+        eprintln!("Can't determine a valid output path: {}", dcx_path);
+        return None
+    }
+    // If the output path is a dir, try to strip extension and place the file there.
+    if path::Path::new(&output_path).is_dir() {
+        output_path_valid = false;
+        if let Some(file_pb) = utils_fs::strip_extension(&path::PathBuf::from(&dcx_path)) {
+            if let Some(file_name) = file_pb.file_name() {
+                if let Some(file_name_str) = file_name.to_str() {
+                    let mut out_pb = path::PathBuf::from(&output_path);
+                    out_pb.push(file_name_str);
+                    if let Some(s) = out_pb.as_path().to_str() {
+                        output_path.clear();
+                        output_path.push_str(s);
+                        output_path_valid = true;
+                    }
+                }
+            }
+        }
+    }
+    if !output_path_valid {
+        eprintln!("Can't determine a valid output path: {}", dcx_path);
+        return None
+    }
+    Some(output_path)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_get_decompressed_path() {
+        // Without an output path.
+        assert_eq!(get_decompressed_path("file.ext.dcx", None).unwrap(), "file.ext");
+        assert_eq!(get_decompressed_path("file.ext", None).unwrap(), "file");
+    }
+}
diff --git a/src/unpackers/errors.rs b/src/unpackers/errors.rs
index 57b0c2f..12dc11f 100644
--- a/src/unpackers/errors.rs
+++ b/src/unpackers/errors.rs
@@ -11,9 +11,8 @@ pub enum UnpackError {
 
 impl UnpackError {
     pub fn parsing_err(filetype: &str, kind: nom::error::ErrorKind) -> UnpackError {
-        let reason = format!("{:?} {:?}", kind, kind.description());
-        let message = format!("{} parsing failed: ", filetype);
-        UnpackError::Parsing(message + &reason)
+        let message = format!("{} parsing failed: {}", filetype, kind.description());
+        UnpackError::Parsing(message)
     }
 }
 
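Reviewer note (not part of the patch): a minimal sketch of how the two entry points
touched here would be called from library code, assuming the `rir::unpackers` paths
already used by src/bin/ironring.rs above. The file names "map.bnd.dcx" and "out_dir"
and the function name `demo` are placeholders for illustration only.

    use rir::unpackers;

    fn demo() -> i32 {
        // Same resolution `ironring dcx` now delegates to get_decompressed_path:
        // with no explicit output, "map.bnd.dcx" resolves to "map.bnd".
        match unpackers::dcx::get_decompressed_path("map.bnd.dcx", None) {
            Some(out) => println!("would decompress to {}", out),
            None => return 1,
        }

        // Same call `ironring bnd --decompress -f -o out_dir map.bnd.dcx` makes:
        // the DCX wrapper is inflated in memory, then the BND entries are
        // extracted to "out_dir", overwriting existing files.
        match unpackers::bnd::extract_bnd_file("map.bnd.dcx", "out_dir", true, true) {
            Err(e) => { eprintln!("Failed to extract BND: {:?}", e); 1 }
            _ => 0
        }
    }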