dat: repacking, untested

This commit is contained in:
dece 2020-05-24 20:24:01 +02:00
parent 6e7ce88396
commit b57c68bbe7
8 changed files with 219 additions and 37 deletions

View file

@ -1,11 +1,12 @@
Rusted Iron Ring Rusted Iron Ring
================ ================
Low-level library for exploring From Software games files. Currently only Low-level library for exploring From Software games files.
supports Dark Souls 1 (PTDE).
This project is mainly to play with the Rust language, Nom parser, FFI, etc; if This project is mainly to play with the Rust language, Nom parser, FFI, etc; if
you need an actually used and tested library, see [SoulsFormats][soulsformats]. you need an actually used and tested library, see [SoulsFormats][soulsformats].
The main target has been Dark Souls 1 PTDE, but check out the features section
below.
[soulsformats]: https://github.com/JKAnderson/SoulsFormats [soulsformats]: https://github.com/JKAnderson/SoulsFormats
@ -16,13 +17,9 @@ Usage
The project contains 2 artefacts: The project contains 2 artefacts:
- `ironring`, a library with all the parsing/unpacking features implemented. - `ironring`, a library with all the project's features implemented.
- `rir`, an executable to use main lib features from the CLI. - `rir`, an executable to use main lib features from the CLI.
The goal is to make the lib compatible with FFI tools such as Python's ctypes,
to ship a dynamic lib accessible for any language to easily script tasks and
ideas, but we're not there yet.
Ironring usage: Ironring usage:
``` ```
@ -41,6 +38,7 @@ SUBCOMMANDS:
bhf Extracts BHF/BDT contents bhf Extracts BHF/BDT contents
bnd Extracts BND contents bnd Extracts BND contents
dat Extracts King's Field IV DAT contents dat Extracts King's Field IV DAT contents
dat-pack Packs files in a King's Field IV DAT
dcx Extracts and decompress DCX data dcx Extracts and decompress DCX data
hash Calculates hash for a string hash Calculates hash for a string
help Prints this message or the help of the given subcommand(s) help Prints this message or the help of the given subcommand(s)
@ -53,28 +51,28 @@ SUBCOMMANDS:
Features Features
-------- --------
### Containers ### Format support
- BHD5 / BDT: extraction from disk to disk. | Type | Games | Features |
- DCX: decompression from disk to disk/memory. |----------|-------|------------------------------------------|
- BND (v3): extraction from disk/memory to disk/memory, optionally decompress | BHD5/BDT | DS1 | Load, extract |
from DCX. | DCX | DS1 | Load, extract |
- BHF (v3): extraction from disk/memory to disk/memory. | BND3 | DS1 | Load, extract |
- DAT (King's Field IV): extraction from disk/memory to disk/memory. | BHF3 | DS1 | Load, extract |
| DAT | KF4 | Load, extract, repack (untested) |
| PARAMDEF | DS1 | Pretty-print |
| PARAM | DS1 | Pretty-print, optionally with a PARAMDEF |
Repacking is not supported, maybe one day. It is not that useful when using Formats typically found within DCX files can usually be decompressed on the fly.
[UDSFM][udsfm] and [Yabber][yabber], but if you really need it you can check out
[SiegLib][sieglib]. Repacking is mostly not supported, maybe one day. It is not that useful when
using [UDSFM][udsfm] and [Yabber][yabber], but if you really need it you can
check out [SiegLib][sieglib].
[udsfm]: https://github.com/HotPocketRemix/UnpackDarkSoulsForModding [udsfm]: https://github.com/HotPocketRemix/UnpackDarkSoulsForModding
[yabber]: https://github.com/JKAnderson/Yabber [yabber]: https://github.com/JKAnderson/Yabber
[sieglib]: https://github.com/Dece/DarkSoulsDev/tree/master/Programs/SiegLib [sieglib]: https://github.com/Dece/DarkSoulsDev/tree/master/Programs/SiegLib
### Files
- PARAMDEF: parsing
- PARAM: parsing, optionally with a PARAMDEF
### Misc ### Misc
- Encrypted archive name hasher. - Encrypted archive name hasher.
@ -90,6 +88,6 @@ build.
Credits Credits
------- -------
All the fat cats involved in the scene and the [wiki][smwiki]. TKGP and all the fat cats involved in the scene and the [wiki][smwiki].
[smwiki]: http://soulsmodding.wikidot.com/ [smwiki]: http://soulsmodding.wikidot.com/

View file

@ -5,7 +5,7 @@ use std::process;
use clap::{App, AppSettings, Arg, ArgMatches, SubCommand}; use clap::{App, AppSettings, Arg, ArgMatches, SubCommand};
use ironring::{name_hashes, unpackers}; use ironring::{name_hashes, repackers, unpackers};
fn main() { fn main() {
let default_namefilepath: &str = &get_default_namefilepath(); let default_namefilepath: &str = &get_default_namefilepath();
@ -94,19 +94,28 @@ fn main() {
.arg(Arg::with_name("output") .arg(Arg::with_name("output")
.help("Output directory") .help("Output directory")
.short("o").long("output").takes_value(true).required(true))) .short("o").long("output").takes_value(true).required(true)))
.subcommand(SubCommand::with_name("dat-pack")
.about("Pack files in a King's Field IV DAT")
.arg(Arg::with_name("files")
.help("Directory containing files to pack")
.takes_value(true).required(true))
.arg(Arg::with_name("output")
.help("Output file")
.takes_value(true).required(true)))
.get_matches(); .get_matches();
process::exit(match matches.subcommand() { process::exit(match matches.subcommand() {
("bhd", Some(s)) => { cmd_bhd(s) } ("bhd", Some(s)) => cmd_bhd(s),
("bhds", Some(s)) => { cmd_bhds(s) } ("bhds", Some(s)) => cmd_bhds(s),
("hash", Some(s)) => { cmd_hash(s) } ("hash", Some(s)) => cmd_hash(s),
("dcx", Some(s)) => { cmd_dcx(s) } ("dcx", Some(s)) => cmd_dcx(s),
("bnd", Some(s)) => { cmd_bnd(s) } ("bnd", Some(s)) => cmd_bnd(s),
("bhf", Some(s)) => { cmd_bhf(s) } ("bhf", Some(s)) => cmd_bhf(s),
("paramdef", Some(s)) => { cmd_paramdef(s) } ("paramdef", Some(s)) => cmd_paramdef(s),
("param", Some(s)) => { cmd_param(s) } ("param", Some(s)) => cmd_param(s),
("dat", Some(s)) => { cmd_dat(s) } ("dat", Some(s)) => cmd_dat(s),
_ => { 0 } ("dat-pack", Some(s)) => cmd_dat_pack(s),
_ => 0,
}) })
} }
@ -257,3 +266,12 @@ fn cmd_dat(args: &ArgMatches) -> i32 {
_ => 0 _ => 0
} }
} }
fn cmd_dat_pack(args: &ArgMatches) -> i32 {
let files_path: &str = args.value_of("files").unwrap();
let output_path: &str = args.value_of("output").unwrap();
match repackers::dat::pack_dat(files_path, output_path) {
Err(e) => { eprintln!("Failed to pack DAT: {:?}", e); 1 }
_ => 0
}
}

View file

@ -1,9 +1,15 @@
use std::fmt; use std::fmt;
use std::io;
use encoding_rs::SHIFT_JIS; use encoding_rs::SHIFT_JIS;
use nom::IResult; use nom::IResult;
use nom::bytes::complete::take_while; use nom::bytes::complete::take_while;
/// Serialization of a parsed format back to its binary representation.
pub trait Pack {
    /// Write the entirety of `self` as bytes to the write buffer `f`.
    ///
    /// Returns the number of bytes written.
    fn write(&self, f: &mut dyn io::Write) -> io::Result<usize>;
}
/// Parse a zero-terminated string from the slice. /// Parse a zero-terminated string from the slice.
pub fn take_cstring(i: &[u8]) -> IResult<&[u8], &[u8]> { pub fn take_cstring(i: &[u8]) -> IResult<&[u8], &[u8]> {
take_while(|c| c != b'\0')(i) take_while(|c| c != b'\0')(i)

View file

@ -1,9 +1,16 @@
use std::io;
use nom::IResult; use nom::IResult;
use nom::multi::count; use nom::multi::count;
use nom::number::complete::*; use nom::number::complete::*;
use nom::sequence::tuple; use nom::sequence::tuple;
use crate::formats::common::take_cstring_from; use crate::formats::common::{Pack, take_cstring_from};
use crate::utils::bin as utils_bin;
/// Total size in bytes of the DAT header block (8 used bytes + HEADER_PAD).
pub const HEADER_SIZE: usize = 0x40;
/// Value found in the first u32 of the header.
pub const MAGIC: u32 = 0x1E048000; // Maybe it's 2 shorts and the 1st is padding?
pub const HEADER_PAD: usize = 0x38; // Padding after the header.
#[derive(Debug)] #[derive(Debug)]
pub struct DatHeader { pub struct DatHeader {
@ -11,11 +18,22 @@ pub struct DatHeader {
pub num_files: u32, pub num_files: u32,
} }
impl Pack for DatHeader {
    /// Serialize the 8 meaningful header bytes (magic + file count),
    /// little-endian. Padding up to HEADER_SIZE is the caller's job.
    fn write(&self, f: &mut dyn io::Write) -> io::Result<usize> {
        let mut bytes = [0u8; 8];
        bytes[..4].copy_from_slice(&self.unk00.to_le_bytes());
        bytes[4..].copy_from_slice(&self.num_files.to_le_bytes());
        f.write_all(&bytes)?;
        Ok(bytes.len())
    }
}
fn parse_header(i: &[u8]) -> IResult<&[u8], DatHeader> { fn parse_header(i: &[u8]) -> IResult<&[u8], DatHeader> {
let (i, (unk00, num_files)) = tuple((le_u32, le_u32))(i)?; let (i, (unk00, num_files)) = tuple((le_u32, le_u32))(i)?;
Ok((i, DatHeader { unk00, num_files })) Ok((i, DatHeader { unk00, num_files }))
} }
/// Size in bytes of one file entry (name field + 3 u32 fields).
pub const FILE_ENTRY_SIZE: usize = 0x40;
/// Size in bytes of the fixed, zero-padded name field in an entry.
pub const FILE_ENTRY_NAME_MAXLEN: usize = 0x34;
#[derive(Debug)] #[derive(Debug)]
pub struct DatFileEntry { pub struct DatFileEntry {
pub name: String, pub name: String,
@ -24,13 +42,28 @@ pub struct DatFileEntry {
pub ofs_data: u32, pub ofs_data: u32,
} }
impl Pack for DatFileEntry {
    /// Serialize one FILE_ENTRY_SIZE-byte entry: a fixed
    /// FILE_ENTRY_NAME_MAXLEN zero-padded name field followed by three
    /// little-endian u32s (size, padded size, data offset).
    fn write(&self, f: &mut dyn io::Write) -> io::Result<usize> {
        let name_bytes = self.name.as_bytes();
        // The name lives in a fixed-size field; refuse names that do not fit,
        // otherwise the entry table layout would be corrupted.
        if name_bytes.len() > FILE_ENTRY_NAME_MAXLEN {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                format!("Entry name too long: {}", self.name),
            ));
        }
        f.write_all(name_bytes)?;
        // Always zero-fill the remainder of the name field. The previous code
        // used utils_bin::pad(len, FILE_ENTRY_NAME_MAXLEN), which writes no
        // padding at all for an empty name and thus produced a short entry.
        f.write_all(&vec![0u8; FILE_ENTRY_NAME_MAXLEN - name_bytes.len()])?;
        f.write_all(&self.size.to_le_bytes())?;
        f.write_all(&self.padded_size.to_le_bytes())?;
        f.write_all(&self.ofs_data.to_le_bytes())?;
        Ok(FILE_ENTRY_SIZE)
    }
}
fn parse_file_entry(i: &[u8]) -> IResult<&[u8], DatFileEntry> { fn parse_file_entry(i: &[u8]) -> IResult<&[u8], DatFileEntry> {
let (i, name) = take_cstring_from(i, 0x34)?; let (i, name) = take_cstring_from(i, FILE_ENTRY_NAME_MAXLEN)?;
let name = String::from_utf8_lossy(name).to_string(); let name = String::from_utf8_lossy(name).to_string();
let (i, (size, padded_size, ofs_data)) = tuple((le_u32, le_u32, le_u32))(i)?; let (i, (size, padded_size, ofs_data)) = tuple((le_u32, le_u32, le_u32))(i)?;
Ok((i, DatFileEntry { name, size, padded_size, ofs_data })) Ok((i, DatFileEntry { name, size, padded_size, ofs_data }))
} }
/// Separator used in archive-internal file paths.
pub const INTERNAL_PATH_SEP: char = '/';
/// Alignment of file data blocks within the archive.
pub const DATA_ALIGN: usize = 0x8000;
#[derive(Debug)] #[derive(Debug)]
pub struct Dat { pub struct Dat {
pub header: DatHeader, pub header: DatHeader,
@ -41,7 +74,7 @@ pub struct Dat {
pub fn parse(i: &[u8]) -> IResult<&[u8], Dat> { pub fn parse(i: &[u8]) -> IResult<&[u8], Dat> {
let full_file = i; let full_file = i;
let (_, header) = parse_header(i)?; let (_, header) = parse_header(i)?;
let i = &full_file[0x40..]; let i = &full_file[HEADER_SIZE..];
let (_, files) = count(parse_file_entry, header.num_files as usize)(i)?; let (_, files) = count(parse_file_entry, header.num_files as usize)(i)?;
Ok((full_file, Dat { header, files })) Ok((full_file, Dat { header, files }))
} }

View file

@ -1,6 +1,9 @@
#![allow(non_snake_case)] #![allow(non_snake_case)]
pub mod name_hashes; pub mod name_hashes;
pub mod repackers {
pub mod dat;
}
pub mod formats { pub mod formats {
pub mod bhd; pub mod bhd;
pub mod bhf; pub mod bhf;

100
src/repackers/dat.rs Normal file
View file

@ -0,0 +1,100 @@
use std::fs;
use std::io::{self, Write};
use std::path;
use crate::formats::common::Pack;
use crate::formats::dat;
use crate::utils::bin as utils_bin;
use crate::utils::fs as utils_fs;
/// Pack a directory as a DAT archive.
///
/// Walks recursively in `files_path` to build all file entries.
/// For performance and laziness, the archive is built directly in RAM.
pub fn pack_dat(files_path: &str, output_path: &str) -> Result<(), io::Error> {
    // Gather every entry description and the concatenated, padded file bodies.
    let root = path::Path::new(files_path);
    let mut entries: Vec<dat::DatFileEntry> = Vec::new();
    let mut data: Vec<u8> = Vec::new();
    pack_dat_dir(root, "", &mut entries, &mut data)?;

    let mut out = fs::File::create(output_path)?;

    // Header: 8 meaningful bytes, then zero padding up to HEADER_SIZE.
    let header = dat::DatHeader { unk00: dat::MAGIC, num_files: entries.len() as u32 };
    header.write(&mut out)?;
    out.write_all(&vec![0u8; dat::HEADER_PAD])?;

    // The entry table follows the header; file data starts at the next
    // DATA_ALIGN boundary after the table. Entry offsets were recorded
    // relative to the data section, so shift them to absolute positions.
    let table_size = entries.len() * dat::FILE_ENTRY_SIZE;
    let table_pad = utils_bin::pad(dat::HEADER_SIZE + table_size, dat::DATA_ALIGN);
    let data_start = (dat::HEADER_SIZE + table_size + table_pad) as u32;
    for entry in entries.iter_mut() {
        entry.ofs_data += data_start;
        entry.write(&mut out)?;
    }
    out.write_all(&vec![0u8; table_pad])?;

    // Finally, the file bodies (already padded per entry).
    out.write_all(&data)?;
    Ok(())
}
/// Recursively walks in `dir` to create `DatFileEntry`s.
///
/// `prefix` is initially "" and will contain current relative dir with
/// separator suffixed during walks, e.g. "param/".
fn pack_dat_dir(
    dir: &path::Path,
    prefix: &str,
    entries: &mut Vec<dat::DatFileEntry>,
    files_data: &mut Vec<u8>,
) -> Result<(), io::Error> {
    // fs::read_dir yields entries in a platform-dependent order; sort the
    // paths so repacking the same tree always produces the same archive.
    let mut paths: Vec<path::PathBuf> = Vec::new();
    for dir_entry in fs::read_dir(dir)? {
        paths.push(dir_entry?.path());
    }
    paths.sort();

    for entry in paths {
        if entry.is_dir() {
            // Descend with the dir name appended to the internal prefix.
            if let Some(dir_name) = entry.file_name().and_then(|n| n.to_str()) {
                let mut sub_prefix = String::from(prefix);
                sub_prefix.push_str(dir_name);
                sub_prefix.push(dat::INTERNAL_PATH_SEP);
                pack_dat_dir(&entry, &sub_prefix, entries, files_data)?;
            }
        } else if entry.is_file() /* No symlink support. */ {
            // Entries with non-UTF-8 names or unreadable metadata are
            // silently skipped, as before.
            if let Some(name) = entry.file_name().and_then(|n| n.to_str()) {
                if let Ok(metadata) = entry.metadata() {
                    let mut entry_name = String::from(prefix);
                    entry_name.push_str(name);
                    pack_dat_entry(&entry, entry_name, &metadata, entries, files_data)?;
                }
            }
        }
    }
    Ok(())
}
/// Pack the file in `files_data` and update `entries` accordingly.
fn pack_dat_entry(
    file_entry: &path::PathBuf,
    internal_name: String,
    metadata: &fs::Metadata,
    entries: &mut Vec<dat::DatFileEntry>,
    files_data: &mut Vec<u8>,
) -> Result<(), io::Error> {
    // Entry sizes and offsets are u32 in the DAT format; the previous plain
    // `as` cast silently truncated huge files. Reject anything whose padded
    // size cannot fit in a u32.
    if metadata.len() > (std::u32::MAX as u64) - (dat::DATA_ALIGN as u64) {
        return Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            format!("File too large for DAT format: {:?}", file_entry),
        ));
    }
    let file_size = metadata.len() as u32;
    let padding = utils_bin::pad(file_size as usize, dat::DATA_ALIGN);
    entries.push(dat::DatFileEntry {
        name: internal_name,
        size: file_size,
        padded_size: file_size + padding as u32,
        ofs_data: files_data.len() as u32, // Data will be pushed at the current end of file.
    });
    let mut data = utils_fs::open_file_to_vec(file_entry)?;
    files_data.append(&mut data);
    // Zero-fill up to the next DATA_ALIGN boundary.
    let mut padding_data = vec![0u8; padding];
    files_data.append(&mut padding_data);
    Ok(())
}

View file

@ -8,11 +8,15 @@ use crate::formats::dat;
use crate::unpackers::errors::UnpackError; use crate::unpackers::errors::UnpackError;
use crate::utils::fs as utils_fs; use crate::utils::fs as utils_fs;
/// Extract DAT file contents to `output_path`.
///
/// Wraps around `extract_dat` to load the DAT from disk.
pub fn extract_dat_file(dat_path: &str, output_path: &str) -> Result<(), UnpackError> { pub fn extract_dat_file(dat_path: &str, output_path: &str) -> Result<(), UnpackError> {
let (dat, dat_data) = load_dat_file(dat_path)?; let (dat, dat_data) = load_dat_file(dat_path)?;
extract_dat(&dat, dat_data, output_path) extract_dat(&dat, dat_data, output_path)
} }
/// Extract DAT contents to `output_path`.
pub fn extract_dat( pub fn extract_dat(
dat: &dat::Dat, dat: &dat::Dat,
dat_data: Vec<u8>, dat_data: Vec<u8>,
@ -29,6 +33,7 @@ pub fn extract_dat(
Ok(()) Ok(())
} }
/// Extract one `DatFileEntry`, preserving internal dir structure.
fn extract_file( fn extract_file(
file_entry: &dat::DatFileEntry, file_entry: &dat::DatFileEntry,
data: &Vec<u8>, data: &Vec<u8>,
@ -54,11 +59,13 @@ fn extract_file(
Ok(()) Ok(())
} }
/// Load a DAT file from disk.
pub fn load_dat_file(dat_path: &str) -> Result<(dat::Dat, Vec<u8>), UnpackError> { pub fn load_dat_file(dat_path: &str) -> Result<(dat::Dat, Vec<u8>), UnpackError> {
let dat_data = utils_fs::open_file_to_vec(path::Path::new(dat_path))?; let dat_data = utils_fs::open_file_to_vec(path::Path::new(dat_path))?;
Ok((load_dat(&dat_data)?, dat_data)) Ok((load_dat(&dat_data)?, dat_data))
} }
/// Load a DAT file from a bytes slice.
pub fn load_dat(dat_data: &[u8]) -> Result<dat::Dat, UnpackError> { pub fn load_dat(dat_data: &[u8]) -> Result<dat::Dat, UnpackError> {
match dat::parse(&dat_data) { match dat::parse(&dat_data) {
Ok((_, dat)) => Ok(dat), Ok((_, dat)) => Ok(dat),

View file

@ -11,6 +11,11 @@ pub fn mask(bit_size: usize) -> usize {
(1 << bit_size) - 1 (1 << bit_size) - 1
} }
/// Return the number of padding bytes needed to bring `ofs` up to the next
/// multiple of `alignment` (0 when `ofs` is already aligned).
pub fn pad(ofs: usize, alignment: usize) -> usize {
    match ofs % alignment {
        0 => 0,
        remainder => alignment - remainder,
    }
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
@ -30,4 +35,16 @@ mod test {
assert_eq!(mask(8), 0b11111111); assert_eq!(mask(8), 0b11111111);
assert_eq!(mask(15), 0b01111111_11111111); assert_eq!(mask(15), 0b01111111_11111111);
} }
#[test]
fn test_pad() {
assert_eq!(pad(0, 4), 0);
assert_eq!(pad(1, 4), 3);
assert_eq!(pad(3, 4), 1);
assert_eq!(pad(4, 4), 0);
assert_eq!(pad(4, 16), 12);
assert_eq!(pad(15, 16), 1);
assert_eq!(pad(16, 16), 0);
assert_eq!(pad(17, 16), 15);
}
} }