dcx: repack from existing instance

This commit is contained in:
dece 2020-05-30 17:32:56 +02:00
parent 85e0e58981
commit 6e96361f9e
8 changed files with 154 additions and 27 deletions

View file

@ -4,11 +4,12 @@ Rusted Iron Ring
Low-level library for exploring From Software games files.
This project is mainly to play with the Rust language, Nom parser, FFI, etc; if
you need an actually used and tested library, see [SoulsFormats][soulsformats].
The main target has been Dark Souls 1 PTDE, but checkout the features section
below.
you need an actually used and tested library, see [SoulsFormats][soulsformats]
(C#) or [soulstruct][soulstruct] (Python). The main target has been Dark Souls 1
PTDE, but check out the features section below.
[soulsformats]: https://github.com/JKAnderson/SoulsFormats
[soulstruct]: https://github.com/Grimrukh/soulstruct
@ -54,7 +55,7 @@ Features
| Type | Games | Features |
|----------|-------|------------------------------------------|
| BHD5/BDT | DS1 | Load, extract |
| DCX | DS1 | Load, extract |
| DCX | DS1 | Load, extract, repack (untested) |
| BND3 | DS1 | Load, extract |
| BHF3 | DS1 | Load, extract |
| DAT | KF4 | Load, extract, repack |
@ -77,6 +78,7 @@ check out [SiegLib][sieglib].
- There is a demo Python binding for some `name_hashes` features in the
`bindings/python` dir, that uses [PyO3][pyo3] and thus requires nightly
rustc to build.
- There are a few scripts useful for some testing/modding tasks.
[pyo3]: https://pyo3.rs/

View file

@ -5,6 +5,7 @@ use encoding_rs::SHIFT_JIS;
use nom::IResult;
use nom::bytes::complete::take_while;
/// Trait for structs that are easy to pack to bytes.
pub trait Pack {
/// Write the entirety of `self` as bytes to the write buffer `f`.
fn write(&self, f: &mut dyn io::Write) -> io::Result<usize>;

View file

@ -18,6 +18,11 @@ pub struct DatHeader {
pub num_files: u32,
}
fn parse_header(i: &[u8]) -> IResult<&[u8], DatHeader> {
let (i, (unk00, num_files)) = tuple((le_u32, le_u32))(i)?;
Ok((i, DatHeader { unk00, num_files }))
}
impl Pack for DatHeader {
fn write(&self, f: &mut dyn io::Write) -> io::Result<usize> {
f.write_all(&self.unk00.to_le_bytes())?;
@ -26,11 +31,6 @@ impl Pack for DatHeader {
}
}
/// Parse a DAT archive header from raw bytes: an unknown u32 followed
/// by the number of file entries, both little-endian.
fn parse_header(i: &[u8]) -> IResult<&[u8], DatHeader> {
let (i, (unk00, num_files)) = tuple((le_u32, le_u32))(i)?;
Ok((i, DatHeader { unk00, num_files }))
}
pub const FILE_ENTRY_SIZE: usize = 0x40;
pub const FILE_ENTRY_NAME_MAXLEN: usize = 0x34;
@ -42,6 +42,13 @@ pub struct DatFileEntry {
pub ofs_data: u32,
}
fn parse_file_entry(i: &[u8]) -> IResult<&[u8], DatFileEntry> {
let (i, name) = take_cstring_from(i, FILE_ENTRY_NAME_MAXLEN)?;
let name = String::from_utf8_lossy(name).to_string();
let (i, (size, padded_size, ofs_data)) = tuple((le_u32, le_u32, le_u32))(i)?;
Ok((i, DatFileEntry { name, size, padded_size, ofs_data }))
}
impl Pack for DatFileEntry {
fn write(&self, f: &mut dyn io::Write) -> io::Result<usize> {
let name_bytes = self.name.as_bytes();
@ -54,13 +61,6 @@ impl Pack for DatFileEntry {
}
}
/// Parse one DAT file entry: a NUL-padded name (decoded lossily as
/// UTF-8), then size, padded size and data offset, all little-endian u32.
fn parse_file_entry(i: &[u8]) -> IResult<&[u8], DatFileEntry> {
let (i, name) = take_cstring_from(i, FILE_ENTRY_NAME_MAXLEN)?;
let name = String::from_utf8_lossy(name).to_string();
let (i, (size, padded_size, ofs_data)) = tuple((le_u32, le_u32, le_u32))(i)?;
Ok((i, DatFileEntry { name, size, padded_size, ofs_data }))
}
pub const INTERNAL_PATH_SEP: char = '/';
pub const DATA_ALIGN: usize = 0x8000;

View file

@ -1,3 +1,9 @@
//! DCX format.
//!
//! Supports the DFLT compression method only.
use std::io;
use nom::IResult;
use nom::branch::alt;
use nom::bytes::complete::tag;
@ -5,6 +11,11 @@ use nom::multi::count;
use nom::number::complete::*;
use nom::sequence::tuple;
use crate::formats::common::Pack;
pub const HEADER_MAGIC: &[u8] = b"DCX\0";
pub const HEADER_SIZE: usize = 0x18;
#[derive(Debug)]
pub struct DcxHeader {
pub magic: Vec<u8>,
@ -17,10 +28,25 @@ pub struct DcxHeader {
fn parse_header(i: &[u8]) -> IResult<&[u8], DcxHeader> {
let (i, (magic, unk04, ofs_dcs, ofs_dcp, unk10, unk14)) =
tuple((tag(b"DCX\0"), be_u32, be_u32, be_u32, be_u32, be_u32))(i)?;
tuple((tag(HEADER_MAGIC), be_u32, be_u32, be_u32, be_u32, be_u32))(i)?;
Ok((i, DcxHeader { magic: magic.to_vec(), unk04, ofs_dcs, ofs_dcp, unk10, unk14 }))
}
impl Pack for DcxHeader {
    /// Serialize the DCX header: the raw magic bytes followed by the
    /// five remaining fields as big-endian u32. Always reports
    /// HEADER_SIZE bytes written.
    fn write(&self, f: &mut dyn io::Write) -> io::Result<usize> {
        f.write_all(&self.magic)?;
        for field in [self.unk04, self.ofs_dcs, self.ofs_dcp, self.unk10, self.unk14].iter() {
            f.write_all(&field.to_be_bytes())?;
        }
        Ok(HEADER_SIZE)
    }
}
pub const SIZES_CHUNK_MAGIC: &[u8] = b"DCS\0";
pub const SIZES_CHUNK_SIZE: usize = 0xC;
#[derive(Debug)]
pub struct DcxSizes {
pub magic: Vec<u8>,
@ -30,10 +56,22 @@ pub struct DcxSizes {
fn parse_sizes(i: &[u8]) -> IResult<&[u8], DcxSizes> {
let (i, (magic, uncompressed_size, compressed_size)) =
tuple((tag(b"DCS\0"), be_u32, be_u32))(i)?;
tuple((tag(SIZES_CHUNK_MAGIC), be_u32, be_u32))(i)?;
Ok((i, DcxSizes { magic: magic.to_vec(), uncompressed_size, compressed_size }))
}
impl Pack for DcxSizes {
    /// Serialize the DCS chunk: the raw magic bytes, then the
    /// uncompressed and compressed sizes as big-endian u32.
    fn write(&self, f: &mut dyn io::Write) -> io::Result<usize> {
        f.write_all(&self.magic)?;
        for size in [self.uncompressed_size, self.compressed_size].iter() {
            f.write_all(&size.to_be_bytes())?;
        }
        Ok(SIZES_CHUNK_SIZE)
    }
}
pub const PARAMS_CHUNK_MAGIC: &[u8] = b"DCP\0";
pub const PARAMS_CHUNK_SIZE: usize = 0x32;
#[derive(Debug)]
pub struct DcxParams {
pub magic: Vec<u8>,
@ -52,7 +90,7 @@ pub struct DcxParams {
fn parse_params(i: &[u8]) -> IResult<&[u8], DcxParams> {
let (i, (magic, method, ofs_dca, flags, unk10, unk14, unk18, unk1C)) =
tuple((
tag(b"DCP\0"),
tag(PARAMS_CHUNK_MAGIC),
alt((tag(b"DFLT"), tag(b"EDGE"), tag(b"KRAK"))),
be_u32,
count(be_u8, 4),
@ -79,6 +117,26 @@ fn parse_params(i: &[u8]) -> IResult<&[u8], DcxParams> {
))
}
impl Pack for DcxParams {
/// Serialize the DCP chunk: magic and method as raw byte strings,
/// then the remaining fields in big-endian order.
///
/// NOTE(review): these write_all calls emit 0x20 bytes in total
/// (4 + 4 + 4 + 4x1 + 4x4, assuming unk0C-unk0F are single bytes as
/// the parser's count(be_u8, 4) suggests), yet PARAMS_CHUNK_SIZE is
/// 0x32 -- confirm which value is correct before trusting the
/// returned byte count.
fn write(&self, f: &mut dyn io::Write) -> io::Result<usize> {
f.write_all(&self.magic)?;
f.write_all(&self.method)?;
f.write_all(&self.ofs_dca.to_be_bytes())?;
f.write_all(&self.unk0C.to_be_bytes())?;
f.write_all(&self.unk0D.to_be_bytes())?;
f.write_all(&self.unk0E.to_be_bytes())?;
f.write_all(&self.unk0F.to_be_bytes())?;
f.write_all(&self.unk10.to_be_bytes())?;
f.write_all(&self.unk14.to_be_bytes())?;
f.write_all(&self.unk18.to_be_bytes())?;
f.write_all(&self.unk1C.to_be_bytes())?;
Ok(PARAMS_CHUNK_SIZE)
}
}
pub const ARCHIVE_CHUNK_MAGIC: &[u8] = b"DCA\0";
pub const ARCHIVE_CHUNK_SIZE: usize = 0x8;
#[derive(Debug)]
pub struct DcxArchive {
pub magic: Vec<u8>,
@ -86,10 +144,18 @@ pub struct DcxArchive {
}
fn parse_archive(i: &[u8]) -> IResult<&[u8], DcxArchive> {
let (i, (magic, ofs_data)) = tuple((tag(b"DCA\0"), be_u32))(i)?;
let (i, (magic, ofs_data)) = tuple((tag(ARCHIVE_CHUNK_MAGIC), be_u32))(i)?;
Ok((i, DcxArchive { magic: magic.to_vec(), ofs_data }))
}
impl Pack for DcxArchive {
    /// Serialize the DCA chunk: the raw magic bytes followed by the
    /// big-endian offset to the compressed data.
    fn write(&self, f: &mut dyn io::Write) -> io::Result<usize> {
        let ofs_bytes = self.ofs_data.to_be_bytes();
        f.write_all(&self.magic)?;
        f.write_all(&ofs_bytes)?;
        Ok(ARCHIVE_CHUNK_SIZE)
    }
}
#[derive(Debug)]
pub struct Dcx {
pub header: DcxHeader,

View file

@ -1,9 +1,6 @@
#![allow(non_snake_case)]
pub mod name_hashes;
pub mod repackers {
pub mod dat;
}
pub mod formats {
pub mod bhd;
pub mod bhf;
@ -14,6 +11,11 @@ pub mod formats {
pub mod param;
pub mod paramdef;
}
/// Modules for packing data back into game file formats.
pub mod repackers {
pub mod dat;
pub mod dcx;
pub mod errors;
}
pub mod unpackers {
pub mod bhd;
pub mod bhf;

45
src/repackers/dcx.rs Normal file
View file

@ -0,0 +1,45 @@
use std::fs;
use std::io::Write;
use flate2::Compression;
use flate2::write::ZlibEncoder;
use crate::formats::common::Pack;
use crate::formats::dcx;
use crate::repackers::errors::PackError;
/// Repack a previously unpacked DCX with this new data.
///
/// Fields that are not well understood are reused from the existing
/// instance; the size fields are recomputed from `data`.
pub fn pack_dcx(dcx: &mut dcx::Dcx, data: &[u8], output_path: &str) -> Result<(), PackError> {
    // Refresh the sizes chunk before anything is written out.
    dcx.sizes.uncompressed_size = data.len() as u32;
    let packed_data = compress(dcx, data)?;
    dcx.sizes.compressed_size = packed_data.len() as u32;

    // Emit the header and chunks in file order, then the compressed payload.
    let mut out = fs::File::create(output_path)?;
    dcx.header.write(&mut out)?;
    dcx.sizes.write(&mut out)?;
    dcx.params.write(&mut out)?;
    dcx.archive.write(&mut out)?;
    out.write_all(&packed_data)?;
    Ok(())
}
/// Compress data using the method named in the DCX params.
///
/// Only the DFLT (zlib) method is supported; any other method yields
/// a `PackError::Compression`.
pub fn compress(dcx: &dcx::Dcx, data: &[u8]) -> Result<Vec<u8>, PackError> {
    match dcx.params.method.as_slice() {
        b"DFLT" => compress_dflt(dcx, data),
        other => {
            let method_string = String::from_utf8_lossy(other).to_string();
            Err(PackError::Compression(format!("Method unknown: {}", method_string)))
        }
    }
}
fn compress_dflt(dcx: &dcx::Dcx, data: &[u8]) -> Result<Vec<u8>, PackError> {
let level = dcx.params.unk0C as u32; // Unsure if it really is compression level.
let half_size = data.len() / 2; // Quicker allocation.
let encoder = ZlibEncoder::new(Vec::with_capacity(half_size), Compression::new(level));
Ok(encoder.finish()?)
}

14
src/repackers/errors.rs Normal file
View file

@ -0,0 +1,14 @@
use std::io;
/// Errors that can occur while repacking game files.
#[derive(Debug)]
pub enum PackError {
    /// Underlying I/O failure.
    Io(io::Error),
    /// Compression failed or the requested method is unsupported.
    Compression(String),
    /// Catch-all for other failures.
    Unknown(String),
}

// Display and Error impls so PackError interoperates with the standard
// error ecosystem (`?` into Box<dyn Error>, error reporting, etc.).
impl std::fmt::Display for PackError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            PackError::Io(e) => write!(f, "I/O error: {}", e),
            PackError::Compression(s) => write!(f, "Compression error: {}", s),
            PackError::Unknown(s) => write!(f, "Unknown error: {}", s),
        }
    }
}

impl std::error::Error for PackError {
    /// Expose the wrapped io::Error as the underlying cause.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            PackError::Io(e) => Some(e),
            _ => None,
        }
    }
}

impl From<io::Error> for PackError {
    fn from(e: io::Error) -> Self {
        PackError::Io(e)
    }
}

View file

@ -36,10 +36,7 @@ fn decompress_dcx(dcx: &dcx::Dcx, comp_data: &[u8]) -> Result<Vec<u8>, UnpackErr
if method == b"DFLT" {
decompress_dcx_dflt(dcx, comp_data)
} else {
let method_string = match std::str::from_utf8(method) {
Ok(s) => { String::from(s) }
Err(_) => { format!("{:?}", method) }
};
let method_string = String::from_utf8_lossy(method).to_string();
Err(UnpackError::Compression(format!("Unknown method: {}", method_string)))
}
}