Compare commits


No commits in common. "86e6144d9ebd77481b91e841d75f9be87e17e17b" and "7f7e0b9f780bfa715f7e6cb3d4b899d55939296e" have entirely different histories.

14 changed files with 693 additions and 753 deletions

Cargo.lock (generated)

@@ -68,6 +68,12 @@ dependencies = [
  "windows-sys 0.59.0",
 ]

+[[package]]
+name = "anyhow"
+version = "1.0.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
+
 [[package]]
 name = "assert_cmd"
 version = "2.0.16"
@@ -223,6 +229,21 @@ dependencies = [
  "libc",
 ]

+[[package]]
+name = "crc"
+version = "3.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636"
+dependencies = [
+ "crc-catalog",
+]
+
+[[package]]
+name = "crc-catalog"
+version = "2.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
+
 [[package]]
 name = "crc32fast"
 version = "1.4.2"
@@ -395,6 +416,12 @@ version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"

+[[package]]
+name = "hex-literal"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46"
+
 [[package]]
 name = "icu_collections"
 version = "1.5.0"
@@ -632,12 +659,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"

 [[package]]
-name = "lz4_flex"
-version = "0.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5"
+name = "lzma-rs"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e"
 dependencies = [
- "twox-hash",
+ "byteorder",
+ "crc",
 ]

 [[package]]
@@ -661,6 +689,15 @@ version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"

+[[package]]
+name = "object"
+version = "0.36.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
+dependencies = [
+ "memchr",
+]
+
 [[package]]
 name = "once_cell"
 version = "1.20.2"
@@ -671,10 +708,15 @@ checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
 name = "oodle_loader"
 version = "0.2.2"
 dependencies = [
+ "anyhow",
  "hex",
+ "hex-literal",
+ "libc",
  "libloading",
- "sha2",
- "thiserror",
+ "lzma-rs",
+ "object",
+ "seq-macro",
+ "sha1",
  "ureq",
 ]
@@ -793,8 +835,6 @@ dependencies = [
  "base64",
  "byteorder",
  "flate2",
- "hex",
- "lz4_flex",
  "oodle_loader",
  "paste",
  "sha1",
@@ -901,6 +941,12 @@ dependencies = [
  "winapi-util",
 ]

+[[package]]
+name = "seq-macro"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4"
+
 [[package]]
 name = "serde"
 version = "1.0.217"
@@ -967,12 +1013,6 @@ version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"

-[[package]]
-name = "static_assertions"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
-
 [[package]]
 name = "strsim"
 version = "0.11.1"
@@ -1079,16 +1119,6 @@ dependencies = [
  "zerovec",
 ]

-[[package]]
-name = "twox-hash"
-version = "1.6.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
-dependencies = [
- "cfg-if",
- "static_assertions",
-]
-
 [[package]]
 name = "typenum"
 version = "1.17.0"

oodle_loader/Cargo.toml

@@ -6,9 +6,18 @@ license.workspace = true
 version.workspace = true
 edition.workspace = true

-[dependencies]
+[target.'cfg(windows)'.dependencies]
 libloading = "0.8"
+
+[target.'cfg(unix)'.dependencies]
+object = { version = "0.36.7", default-features = false, features = ["std", "read"] }
+libc = "0.2.169"
+seq-macro = "0.3.5"
+
+[dependencies]
+sha1 = { workspace = true }
 ureq = "2.12"
+hex-literal = "0.4"
 hex = { workspace = true }
-sha2 = "0.10.8"
-thiserror = "2.0.11"
+anyhow = "1.0.95"
+lzma-rs = "0.3.0"

oodle_loader/src/lib.rs

@@ -1,323 +1,405 @@
-use std::{io::Read, sync::OnceLock};
-
-type Result<T, E = Error> = std::result::Result<T, E>;
-
-pub use oodle_lz::{CompressionLevel, Compressor};
-
-mod oodle_lz {
-    #[derive(Debug, Clone, Copy)]
-    #[repr(i32)]
-    pub enum Compressor {
-        /// None = memcpy, pass through uncompressed bytes
-        None = 3,
-        /// Fast decompression and high compression ratios, amazing!
-        Kraken = 8,
-        /// Leviathan = Kraken's big brother with higher compression, slightly slower decompression.
-        Leviathan = 13,
-        /// Mermaid is between Kraken & Selkie - crazy fast, still decent compression.
-        Mermaid = 9,
-        /// Selkie is a super-fast relative of Mermaid. For maximum decode speed.
-        Selkie = 11,
-        /// Hydra, the many-headed beast = Leviathan, Kraken, Mermaid, or Selkie (see $OodleLZ_About_Hydra)
-        Hydra = 12,
-    }
-
-    #[derive(Debug, Clone, Copy)]
-    #[repr(i32)]
-    pub enum CompressionLevel {
-        /// don't compress, just copy raw bytes
-        None = 0,
-        /// super fast mode, lower compression ratio
-        SuperFast = 1,
-        /// fastest LZ mode with still decent compression ratio
-        VeryFast = 2,
-        /// fast - good for daily use
-        Fast = 3,
-        /// standard medium speed LZ mode
-        Normal = 4,
-        /// optimal parse level 1 (faster optimal encoder)
-        Optimal1 = 5,
-        /// optimal parse level 2 (recommended baseline optimal encoder)
-        Optimal2 = 6,
-        /// optimal parse level 3 (slower optimal encoder)
-        Optimal3 = 7,
-        /// optimal parse level 4 (very slow optimal encoder)
-        Optimal4 = 8,
-        /// optimal parse level 5 (don't care about encode speed, maximum compression)
-        Optimal5 = 9,
-        /// faster than SuperFast, less compression
-        HyperFast1 = -1,
-        /// faster than HyperFast1, less compression
-        HyperFast2 = -2,
-        /// faster than HyperFast2, less compression
-        HyperFast3 = -3,
-        /// fastest, less compression
-        HyperFast4 = -4,
-    }
-
-    pub type Compress = unsafe extern "system" fn(
-        compressor: Compressor,
-        rawBuf: *const u8,
-        rawLen: usize,
-        compBuf: *mut u8,
-        level: CompressionLevel,
-        pOptions: *const (),
-        dictionaryBase: *const (),
-        lrm: *const (),
-        scratchMem: *mut u8,
-        scratchSize: usize,
-    ) -> isize;
-
-    pub type Decompress = unsafe extern "system" fn(
-        compBuf: *const u8,
-        compBufSize: usize,
-        rawBuf: *mut u8,
-        rawLen: usize,
-        fuzzSafe: u32,
-        checkCRC: u32,
-        verbosity: u32,
-        decBufBase: u64,
-        decBufSize: usize,
-        fpCallback: u64,
-        callbackUserData: u64,
-        decoderMemory: *mut u8,
-        decoderMemorySize: usize,
-        threadPhase: u32,
-    ) -> isize;
-
-    pub type GetCompressedBufferSizeNeeded =
-        unsafe extern "system" fn(compressor: Compressor, rawSize: usize) -> usize;
-
-    pub type SetPrintf = unsafe extern "system" fn(printf: *const ());
-}
-
-static OODLE_VERSION: &str = "2.9.10";
-static OODLE_BASE_URL: &str = "https://github.com/WorkingRobot/OodleUE/raw/refs/heads/main/Engine/Source/Programs/Shared/EpicGames.Oodle/Sdk/";
-
-struct OodlePlatform {
-    path: &'static str,
-    name: &'static str,
-    hash: &'static str,
-}
-
-#[cfg(target_os = "linux")]
-static OODLE_PLATFORM: OodlePlatform = OodlePlatform {
-    path: "linux/lib",
-    name: "liboo2corelinux64.so.9",
-    hash: "ed7e98f70be1254a80644efd3ae442ff61f854a2fe9debb0b978b95289884e9c",
-};
-
-#[cfg(target_os = "macos")]
-static OODLE_PLATFORM: OodlePlatform = OodlePlatform {
-    path: "mac/lib",
-    name: "liboo2coremac64.2.9.10.dylib",
-    hash: "b09af35f6b84a61e2b6488495c7927e1cef789b969128fa1c845e51a475ec501",
-};
-
-#[cfg(windows)]
-static OODLE_PLATFORM: OodlePlatform = OodlePlatform {
-    path: "win/redist",
-    name: "oo2core_9_win64.dll",
-    hash: "6f5d41a7892ea6b2db420f2458dad2f84a63901c9a93ce9497337b16c195f457",
-};
-
-fn url() -> String {
-    format!(
-        "{OODLE_BASE_URL}/{}/{}/{}",
-        OODLE_VERSION, OODLE_PLATFORM.path, OODLE_PLATFORM.name
-    )
-}
-
-#[derive(thiserror::Error, Debug)]
-pub enum Error {
-    #[error("Oodle lib hash mismatch expected: {expected} got {found}")]
-    HashMismatch { expected: String, found: String },
-    #[error("Oodle compression failed")]
-    CompressionFailed,
-    #[error("Oodle initialization failed previously")]
-    InitializationFailed,
-    #[error("IO error {0:?}")]
-    Io(#[from] std::io::Error),
-    #[error("ureq error {0:?}")]
-    Ureq(Box<ureq::Error>),
-    #[error("Oodle libloading error {0:?}")]
-    LibLoading(#[from] libloading::Error),
-}
-
-impl From<ureq::Error> for Error {
-    fn from(value: ureq::Error) -> Self {
-        Self::Ureq(value.into())
-    }
-}
-
-fn check_hash(buffer: &[u8]) -> Result<()> {
-    use sha2::{Digest, Sha256};
-    let mut hasher = Sha256::new();
-    hasher.update(buffer);
-    let hash = hex::encode(hasher.finalize());
-    if hash != OODLE_PLATFORM.hash {
-        return Err(Error::HashMismatch {
-            expected: OODLE_PLATFORM.hash.into(),
-            found: hash,
-        });
-    }
-    Ok(())
-}
-
-fn fetch_oodle() -> Result<std::path::PathBuf> {
-    let oodle_path = std::env::current_exe()?.with_file_name(OODLE_PLATFORM.name);
-    if !oodle_path.exists() {
-        let mut buffer = vec![];
-        ureq::get(&url())
-            .call()?
-            .into_reader()
-            .read_to_end(&mut buffer)?;
-        check_hash(&buffer)?;
-        std::fs::write(&oodle_path, buffer)?;
-    }
-    // don't check existing file to allow user to substitute other versions
-    // check_hash(&std::fs::read(&oodle_path)?)?;
-    Ok(oodle_path)
-}
-
-pub struct Oodle {
-    _library: libloading::Library,
-    compress: oodle_lz::Compress,
-    decompress: oodle_lz::Decompress,
-    get_compressed_buffer_size_needed: oodle_lz::GetCompressedBufferSizeNeeded,
-    set_printf: oodle_lz::SetPrintf,
-}
-
-impl Oodle {
-    fn new(lib: libloading::Library) -> Result<Self> {
-        unsafe {
-            let res = Oodle {
-                compress: *lib.get(b"OodleLZ_Compress")?,
-                decompress: *lib.get(b"OodleLZ_Decompress")?,
-                get_compressed_buffer_size_needed: *lib
-                    .get(b"OodleLZ_GetCompressedBufferSizeNeeded")?,
-                set_printf: *lib.get(b"OodleCore_Plugins_SetPrintf")?,
-                _library: lib,
-            };
-            (res.set_printf)(std::ptr::null()); // silence oodle logging
-            Ok(res)
-        }
-    }
-
-    pub fn compress(
-        &self,
-        input: &[u8],
-        compressor: Compressor,
-        compression_level: CompressionLevel,
-    ) -> Result<Vec<u8>> {
-        unsafe {
-            let buffer_size = self.get_compressed_buffer_size_needed(compressor, input.len());
-            let mut buffer = vec![0; buffer_size];
-            let len = (self.compress)(
-                compressor,
-                input.as_ptr(),
-                input.len(),
-                buffer.as_mut_ptr(),
-                compression_level,
-                std::ptr::null(),
-                std::ptr::null(),
-                std::ptr::null(),
-                std::ptr::null_mut(),
-                0,
-            );
-            if len == -1 {
-                return Err(Error::CompressionFailed);
-            }
-            buffer.truncate(len as usize);
-            Ok(buffer)
-        }
-    }
-
-    pub fn decompress(&self, input: &[u8], output: &mut [u8]) -> isize {
-        unsafe {
-            (self.decompress)(
-                input.as_ptr(),
-                input.len(),
-                output.as_mut_ptr(),
-                output.len(),
-                1,
-                1,
-                0,
-                0,
-                0,
-                0,
-                0,
-                std::ptr::null_mut(),
-                0,
-                3,
-            )
-        }
-    }
-
-    fn get_compressed_buffer_size_needed(
-        &self,
-        compressor: oodle_lz::Compressor,
-        raw_buffer: usize,
-    ) -> usize {
-        unsafe { (self.get_compressed_buffer_size_needed)(compressor, raw_buffer) }
-    }
-}
-
-static OODLE: OnceLock<Option<Oodle>> = OnceLock::new();
-
-fn load_oodle() -> Result<Oodle> {
-    let path = fetch_oodle()?;
-    unsafe {
-        let library = libloading::Library::new(path)?;
-        Oodle::new(library)
-    }
-}
-
-pub fn oodle() -> Result<&'static Oodle> {
-    let mut result = None;
-    let oodle = OODLE.get_or_init(|| match load_oodle() {
-        Err(err) => {
-            result = Some(Err(err));
-            None
-        }
-        Ok(oodle) => Some(oodle),
-    });
-    match (result, oodle) {
-        // oodle initialized so return
-        (_, Some(oodle)) => Ok(oodle),
-        // error during initialization
-        (Some(result), _) => result?,
-        // no error because initialization was tried and failed before
-        _ => Err(Error::InitializationFailed),
-    }
-}
-
-#[cfg(test)]
-mod test {
-    use super::*;
-
-    #[test]
-    fn test_oodle() {
-        let oodle = oodle().unwrap();
-
-        let data = b"In tools and when compressing large inputs in one call, consider using
-$OodleXLZ_Compress_AsyncAndWait (in the Oodle2 Ext lib) instead to get parallelism. Alternatively,
-chop the data into small fixed size chunks (we recommend at least 256KiB, i.e. 262144 bytes) and
-call compress on each of them, which decreases compression ratio but makes for trivial parallel
-compression and decompression.";
-
-        let buffer = oodle
-            .compress(data, Compressor::Mermaid, CompressionLevel::Optimal5)
-            .unwrap();
-        dbg!((data.len(), buffer.len()));
-
-        let mut uncomp = vec![0; data.len()];
-        oodle.decompress(&buffer, &mut uncomp);
-
-        assert_eq!(data[..], uncomp[..]);
-    }
-}
+use anyhow::{anyhow, Context, Result};
+use std::sync::OnceLock;
+
+type OodleDecompress = fn(comp_buf: &[u8], raw_buf: &mut [u8]) -> i32;
+
+#[allow(non_camel_case_types)]
+type OodleLZ_Decompress = unsafe extern "win64" fn(
+    compBuf: *const u8,
+    compBufSize: usize,
+    rawBuf: *mut u8,
+    rawLen: usize,
+    fuzzSafe: u32,
+    checkCRC: u32,
+    verbosity: u32,
+    decBufBase: u64,
+    decBufSize: usize,
+    fpCallback: u64,
+    callbackUserData: u64,
+    decoderMemory: *mut u8,
+    decoderMemorySize: usize,
+    threadPhase: u32,
+) -> i32;
+
+pub fn decompress() -> Result<OodleDecompress, Box<dyn std::error::Error>> {
+    #[cfg(windows)]
+    return Ok(windows_oodle::decompress_wrapper_windows);
+    #[cfg(unix)]
+    return Ok(linux_oodle::oodle_loader_linux());
+}
+
+fn call_decompress(comp_buf: &[u8], raw_buf: &mut [u8], decompress: OodleLZ_Decompress) -> i32 {
+    unsafe {
+        decompress(
+            comp_buf.as_ptr(),
+            comp_buf.len(),
+            raw_buf.as_mut_ptr(),
+            raw_buf.len(),
+            1,
+            1,
+            0,
+            0,
+            0,
+            0,
+            0,
+            std::ptr::null_mut(),
+            0,
+            3,
+        )
+    }
+}
+
+static OODLE_HASH: [u8; 20] = hex_literal::hex!("4bcc73614cb8fd2b0bce8d0f91ee5f3202d9d624");
+static OODLE_DLL_NAME: &str = "oo2core_9_win64.dll";
+
+fn fetch_oodle() -> Result<std::path::PathBuf> {
+    use sha1::{Digest, Sha1};
+
+    let oodle_path = std::env::current_exe()?.with_file_name(OODLE_DLL_NAME);
+    if !oodle_path.exists() {
+        let mut compressed = vec![];
+        ureq::get("https://origin.warframe.com/origin/50F7040A/index.txt.lzma")
+            .call()?
+            .into_reader()
+            .read_to_end(&mut compressed)?;
+
+        let mut decompressed = vec![];
+        lzma_rs::lzma_decompress(&mut std::io::Cursor::new(compressed), &mut decompressed).unwrap();
+        let index = String::from_utf8(decompressed)?;
+        let line = index
+            .lines()
+            .find(|l| l.contains(OODLE_DLL_NAME))
+            .with_context(|| format!("{OODLE_DLL_NAME} not found in index"))?;
+        let path = line.split_once(',').context("failed to parse index")?.0;
+
+        let mut compressed = vec![];
+        ureq::get(&format!("https://content.warframe.com{path}"))
+            .call()?
+            .into_reader()
+            .read_to_end(&mut compressed)?;
+
+        let mut decompressed = vec![];
+        lzma_rs::lzma_decompress(&mut std::io::Cursor::new(compressed), &mut decompressed).unwrap();
+
+        std::fs::write(&oodle_path, decompressed)?;
+    }
+
+    let mut hasher = Sha1::new();
+    hasher.update(std::fs::read(&oodle_path)?);
+    let hash = hasher.finalize();
+    (hash[..] == OODLE_HASH).then_some(()).ok_or_else(|| {
+        anyhow!(
+            "oodle hash mismatch expected: {} got: {} ",
+            hex::encode(OODLE_HASH),
+            hex::encode(hash)
+        )
+    })?;
+
+    Ok(oodle_path)
+}
+
+#[cfg(windows)]
+mod windows_oodle {
+    use super::*;
+
+    static DECOMPRESS: OnceLock<(OodleLZ_Decompress, libloading::Library)> = OnceLock::new();
+
+    pub fn decompress_wrapper_windows(comp_buf: &[u8], raw_buf: &mut [u8]) -> i32 {
+        let decompress = DECOMPRESS.get_or_init(|| {
+            let path = fetch_oodle().context("failed to fetch oodle").unwrap();
+            let lib = unsafe { libloading::Library::new(path) }
+                .context("failed to load oodle")
+                .unwrap();
+            (*unsafe { lib.get(b"OodleLZ_Decompress") }.unwrap(), lib)
+        });
+        call_decompress(comp_buf, raw_buf, decompress.0)
+    }
+}
+
+#[cfg(unix)]
+mod linux_oodle {
+    use super::*;
+    use object::pe::{
+        ImageNtHeaders64, IMAGE_REL_BASED_DIR64, IMAGE_SCN_MEM_EXECUTE, IMAGE_SCN_MEM_READ,
+        IMAGE_SCN_MEM_WRITE,
+    };
+    use object::read::pe::{ImageOptionalHeader, ImageThunkData, PeFile64};
+    use object::{LittleEndian as LE, Object, ObjectSection};
+    use std::collections::HashMap;
+    use std::ffi::{c_void, CStr};
+
+    #[repr(C)]
+    struct ThreadInformationBlock {
+        exception_list: *const c_void,
+        stack_base: *const c_void,
+        stack_limit: *const c_void,
+        sub_system_tib: *const c_void,
+        fiber_data: *const c_void,
+        arbitrary_user_pointer: *const c_void,
+        teb: *const c_void,
+    }
+
+    const TIB: ThreadInformationBlock = ThreadInformationBlock {
+        exception_list: std::ptr::null(),
+        stack_base: std::ptr::null(),
+        stack_limit: std::ptr::null(),
+        sub_system_tib: std::ptr::null(),
+        fiber_data: std::ptr::null(),
+        arbitrary_user_pointer: std::ptr::null(),
+        teb: std::ptr::null(),
+    };
+
+    static DECOMPRESS: OnceLock<OodleLZ_Decompress> = OnceLock::new();
+
+    fn decompress_wrapper(comp_buf: &[u8], raw_buf: &mut [u8]) -> i32 {
+        unsafe {
+            // Set GS register in calling thread
+            const ARCH_SET_GS: i32 = 0x1001;
+            libc::syscall(libc::SYS_arch_prctl, ARCH_SET_GS, &TIB);
+
+            // Call actual decompress function
+            call_decompress(comp_buf, raw_buf, *DECOMPRESS.get().unwrap())
+        }
+    }
+
+    #[allow(non_snake_case)]
+    mod imports {
+        use super::*;
+
+        pub unsafe extern "win64" fn OutputDebugStringA(string: *const std::ffi::c_char) {
+            print!("[OODLE] {}", CStr::from_ptr(string).to_string_lossy());
+        }
+        pub unsafe extern "win64" fn GetProcessHeap() -> *const c_void {
+            0x12345678 as *const c_void
+        }
+        pub unsafe extern "win64" fn HeapAlloc(
+            _heap: *const c_void,
+            flags: i32,
+            size: usize,
+        ) -> *const c_void {
+            assert_eq!(0, flags);
+            libc::malloc(size)
+        }
+        pub unsafe extern "win64" fn HeapFree(
+            _heap: *const c_void,
+            _flags: i32,
+            ptr: *mut c_void,
+        ) -> bool {
+            libc::free(ptr);
+            true
+        }
+        pub unsafe extern "win64" fn memset(
+            ptr: *mut c_void,
+            value: i32,
+            num: usize,
+        ) -> *const c_void {
+            libc::memset(ptr, value, num)
+        }
+        pub unsafe extern "win64" fn memmove(
+            destination: *mut c_void,
+            source: *const c_void,
+            num: usize,
+        ) -> *const c_void {
+            libc::memmove(destination, source, num)
+        }
+        pub unsafe extern "win64" fn memcpy(
+            destination: *mut c_void,
+            source: *const c_void,
+            num: usize,
+        ) -> *const c_void {
+            libc::memcpy(destination, source, num)
+        }
+    }
+
+    // Create some unique function pointers to use for unimplemented imports
+    const DEBUG_FNS: [*const fn(); 100] = gen_debug_fns();
+    static mut DEBUG_NAMES: [&str; 100] = [""; 100];
+    const fn gen_debug_fns() -> [*const fn(); 100] {
+        fn log<const I: usize>() {
+            unimplemented!("import {:?}", unsafe { DEBUG_NAMES[I] });
+        }
+        let mut array = [std::ptr::null(); 100];
+        seq_macro::seq!(N in 0..100 {
+            array[N] = log::<N> as *const fn();
+        });
+        array
+    }
+
+    pub fn oodle_loader_linux() -> OodleDecompress {
+        DECOMPRESS.get_or_init(|| get_decompress_inner().unwrap());
+        decompress_wrapper
+    }
+
+    fn get_decompress_inner() -> Result<OodleLZ_Decompress> {
+        fetch_oodle()?;
+        let oodle = std::env::current_exe()
+            .unwrap()
+            .with_file_name(OODLE_DLL_NAME);
+        let dll = std::fs::read(oodle)?;
+
+        let obj_file = PeFile64::parse(&*dll)?;
+        let size = obj_file.nt_headers().optional_header.size_of_image() as usize;
+        let header_size = obj_file.nt_headers().optional_header.size_of_headers() as usize;
+        let image_base = obj_file.relative_address_base() as usize;
+
+        // Create map
+        let mmap = unsafe {
+            std::slice::from_raw_parts_mut(
+                libc::mmap(
+                    std::ptr::null_mut(),
+                    size,
+                    libc::PROT_READ | libc::PROT_WRITE,
+                    libc::MAP_PRIVATE | libc::MAP_ANONYMOUS,
+                    -1,
+                    0,
+                ) as *mut u8,
+                size,
+            )
+        };
+        let map_base = mmap.as_ptr();
+
+        // Copy header to map
+        mmap[0..header_size].copy_from_slice(&dll[0..header_size]);
+        unsafe {
+            assert_eq!(
+                0,
+                libc::mprotect(
+                    mmap.as_mut_ptr() as *mut c_void,
+                    header_size,
+                    libc::PROT_READ
+                )
+            );
+        }
+
+        // Copy section data to map
+        for section in obj_file.sections() {
+            let address = section.address() as usize;
+            let data = section.data()?;
+            mmap[(address - image_base)..(address - image_base + data.len())]
+                .copy_from_slice(section.data()?);
+        }
+
+        // Apply relocations
+        let sections = obj_file.section_table();
+        let mut blocks = obj_file
+            .data_directories()
+            .relocation_blocks(&*dll, &sections)?
+            .unwrap();
+        while let Some(block) = blocks.next()? {
+            let block_address = block.virtual_address();
+            let block_data = sections.pe_data_at(&*dll, block_address).map(object::Bytes);
+            for reloc in block {
+                let offset = (reloc.virtual_address - block_address) as usize;
+                match reloc.typ {
+                    IMAGE_REL_BASED_DIR64 => {
+                        let addend = block_data
+                            .and_then(|data| data.read_at::<object::U64Bytes<LE>>(offset).ok())
+                            .map(|addend| addend.get(LE));
+                        if let Some(addend) = addend {
+                            mmap[reloc.virtual_address as usize
+                                ..reloc.virtual_address as usize + 8]
+                                .copy_from_slice(&u64::to_le_bytes(
+                                    addend - image_base as u64 + map_base as u64,
+                                ));
+                        }
+                    }
+                    _ => unimplemented!(),
+                }
+            }
+        }
+
+        // Fix up imports
+        let import_table = obj_file.import_table()?.unwrap();
+        let mut import_descs = import_table.descriptors()?;
+        let mut i = 0;
+        while let Some(import_desc) = import_descs.next()? {
+            let mut thunks = import_table.thunks(import_desc.original_first_thunk.get(LE))?;
+            let mut address = import_desc.first_thunk.get(LE) as usize;
+            while let Some(thunk) = thunks.next::<ImageNtHeaders64>()? {
+                let (_hint, name) = import_table.hint_name(thunk.address())?;
+                let name = String::from_utf8_lossy(name).to_string();
+
+                use imports::*;
+                let fn_addr = match name.as_str() {
+                    "OutputDebugStringA" => OutputDebugStringA as usize,
+                    "GetProcessHeap" => GetProcessHeap as usize,
+                    "HeapAlloc" => HeapAlloc as usize,
+                    "HeapFree" => HeapFree as usize,
+                    "memset" => memset as usize,
+                    "memcpy" => memcpy as usize,
+                    "memmove" => memmove as usize,
+                    _ => {
+                        unsafe { DEBUG_NAMES[i] = name.leak() }
+                        let a = DEBUG_FNS[i] as usize;
+                        i += 1;
+                        a
+                    }
+                };
+                mmap[address..address + 8].copy_from_slice(&usize::to_le_bytes(fn_addr));
+                address += 8;
+            }
+        }
+
+        // Build export table
+        let mut exports = HashMap::new();
+        for export in obj_file.exports()? {
+            let name = String::from_utf8_lossy(export.name());
+            let address = export.address() - image_base as u64 + map_base as u64;
+            exports.insert(name, address as *const c_void);
+        }
+
+        // Fix section permissions
+        for section in obj_file.sections() {
+            let address = section.address() as usize;
+            let data = section.data()?;
+            let size = data.len();
+
+            let mut permissions = 0;
+            let flags = match section.flags() {
+                object::SectionFlags::Coff { characteristics } => characteristics,
+                _ => unreachable!(),
+            };
+            if 0 != flags & IMAGE_SCN_MEM_READ {
+                permissions |= libc::PROT_READ;
+            }
+            if 0 != flags & IMAGE_SCN_MEM_WRITE {
+                permissions |= libc::PROT_WRITE;
+            }
+            if 0 != flags & IMAGE_SCN_MEM_EXECUTE {
+                permissions |= libc::PROT_EXEC;
+            }
+
+            unsafe {
+                assert_eq!(
+                    0,
+                    libc::mprotect(
+                        mmap.as_mut_ptr().add(address - image_base) as *mut c_void,
+                        size,
+                        permissions
+                    )
+                );
+            }
+        }
+
+        Ok(unsafe {
+            std::mem::transmute::<*const c_void, OodleLZ_Decompress>(exports["OodleLZ_Decompress"])
+        })
+    }
+}
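Note on the new loader API: the rewritten crate exposes decompression only. A minimal sketch of a caller, assuming only what the diff shows (`example` is a hypothetical helper; the uncompressed size must come from pak entry metadata):

fn example(comp: &[u8], uncompressed_size: usize) -> Option<Vec<u8>> {
    // decompress() downloads/loads the DLL on first use and returns a
    // plain `fn(comp_buf: &[u8], raw_buf: &mut [u8]) -> i32`.
    let decompress = oodle_loader::decompress().ok()?;
    let mut raw = vec![0u8; uncompressed_size];
    let written = decompress(comp, &mut raw);
    // repak treats a return of 0 as failure; otherwise it is the number
    // of bytes written to the output buffer.
    (written as usize == uncompressed_size).then_some(raw)
}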

repak/Cargo.toml

@@ -9,8 +9,11 @@ keywords.workspace = true

 [features]
 default = ["compression", "encryption"]
-compression = ["dep:flate2", "dep:zstd", "dep:lz4_flex"]
-oodle = ["dep:oodle_loader", "compression"]
+compression = ["dep:flate2", "dep:zstd"]
+oodle = []
+oodle_loader = ["dep:oodle_loader"]
+oodle_explicit = ["oodle"]
+oodle_implicit_dynamic = ["dep:oodle_loader", "oodle"]
 encryption = ["dep:aes"]
 wuthering-waves = []
@@ -19,12 +22,10 @@ byteorder = "1.5"
 aes = { workspace = true, optional = true }
 flate2 = { version = "1.0", optional = true }
 zstd = { version = "0.13", optional = true }
-lz4_flex = { version = "0.11.3", optional = true }
 oodle_loader = { path = "../oodle_loader", optional = true}
 thiserror = "2.0"
 sha1 = { workspace = true }
 strum = { workspace = true }
-hex.workspace = true

 [dev-dependencies]
 base64 = { workspace = true }
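Note: the single `oodle` feature is split into three. `oodle` is the core flag, `oodle_explicit` exposes `PakBuilder::oodle()` for a caller-supplied decompressor, and `oodle_implicit_dynamic` wires in `oodle_loader::decompress` automatically. A downstream manifest would pick one mode, e.g. (hypothetical snippet):

[dependencies]
# download and load the Oodle DLL at runtime:
repak = { path = "../repak", features = ["oodle_implicit_dynamic"] }
# or: features = ["oodle_explicit"], then pass a getter to PakBuilder::oodle()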

repak/src/data.rs (deleted)

@@ -1,222 +0,0 @@
-use std::io::Write;
-
-use crate::{
-    entry::{Block, Entry},
-    Compression, Error, Hash, Version, VersionMajor,
-};
-
-type Result<T, E = Error> = std::result::Result<T, E>;
-
-pub struct PartialEntry<D: AsRef<[u8]>> {
-    compression: Option<Compression>,
-    compressed_size: u64,
-    uncompressed_size: u64,
-    compression_block_size: u32,
-    data: PartialEntryData<D>,
-    hash: Hash,
-}
-pub(crate) struct PartialBlock {
-    uncompressed_size: usize,
-    data: Vec<u8>,
-}
-pub(crate) enum PartialEntryData<D> {
-    Slice(D),
-    Blocks(Vec<PartialBlock>),
-}
-
-#[cfg(feature = "compression")]
-fn get_compression_slot(
-    version: Version,
-    compression_slots: &mut Vec<Option<Compression>>,
-    compression: Compression,
-) -> Result<u32> {
-    let slot = compression_slots
-        .iter()
-        .enumerate()
-        .find(|(_, s)| **s == Some(compression));
-    Ok(if let Some((i, _)) = slot {
-        // existing found
-        i
-    } else {
-        if version.version_major() < VersionMajor::FNameBasedCompression {
-            return Err(Error::Other(format!(
-                "cannot use {compression:?} prior to FNameBasedCompression (pak version 8)"
-            )));
-        }
-        // find empty slot
-        if let Some((i, empty_slot)) = compression_slots
-            .iter_mut()
-            .enumerate()
-            .find(|(_, s)| s.is_none())
-        {
-            // empty found, set it to used compression type
-            *empty_slot = Some(compression);
-            i
-        } else {
-            // no empty slot found, add a new one
-            compression_slots.push(Some(compression));
-            compression_slots.len() - 1
-        }
-    } as u32)
-}
-
-impl<D: AsRef<[u8]>> PartialEntry<D> {
-    pub(crate) fn build_entry(
-        &self,
-        version: Version,
-        #[allow(unused)] compression_slots: &mut Vec<Option<Compression>>,
-        file_offset: u64,
-    ) -> Result<Entry> {
-        #[cfg(feature = "compression")]
-        let compression_slot = self
-            .compression
-            .map(|c| get_compression_slot(version, compression_slots, c))
-            .transpose()?;
-        #[cfg(not(feature = "compression"))]
-        let compression_slot = None;
-
-        let blocks = match &self.data {
-            PartialEntryData::Slice(_) => None,
-            PartialEntryData::Blocks(blocks) => {
-                let entry_size =
-                    Entry::get_serialized_size(version, compression_slot, blocks.len() as u32);
-
-                let mut offset = entry_size;
-                if version.version_major() < VersionMajor::RelativeChunkOffsets {
-                    offset += file_offset;
-                };
-
-                Some(
-                    blocks
-                        .iter()
-                        .map(|block| {
-                            let start = offset;
-                            offset += block.data.len() as u64;
-                            let end = offset;
-                            Block { start, end }
-                        })
-                        .collect(),
-                )
-            }
-        };
-
-        Ok(Entry {
-            offset: file_offset,
-            compressed: self.compressed_size,
-            uncompressed: self.uncompressed_size,
-            compression_slot,
-            timestamp: None,
-            hash: Some(self.hash),
-            blocks,
-            flags: 0,
-            compression_block_size: self.compression_block_size,
-        })
-    }
-    pub(crate) fn write_data<S: Write>(&self, stream: &mut S) -> Result<()> {
-        match &self.data {
-            PartialEntryData::Slice(data) => {
-                stream.write_all(data.as_ref())?;
-            }
-            PartialEntryData::Blocks(blocks) => {
-                for block in blocks {
-                    stream.write_all(&block.data)?;
-                }
-            }
-        }
-        Ok(())
-    }
-}
-
-pub(crate) fn build_partial_entry<D>(
-    allowed_compression: &[Compression],
-    data: D,
-) -> Result<PartialEntry<D>>
-where
-    D: AsRef<[u8]>,
-{
-    // TODO hash needs to be post-compression/encryption
-    use sha1::{Digest, Sha1};
-    let mut hasher = Sha1::new();
-
-    // TODO possibly select best compression based on some criteria instead of picking first
-    let compression = allowed_compression.first().cloned();
-    let uncompressed_size = data.as_ref().len() as u64;
-    let compression_block_size;
-
-    let (data, compressed_size) = match compression {
-        #[cfg(not(feature = "compression"))]
-        Some(_) => {
-            unreachable!("should not be able to reach this point without compression feature")
-        }
-        #[cfg(feature = "compression")]
-        Some(compression) => {
-            // https://github.com/EpicGames/UnrealEngine/commit/3aad0ff7976be1073005dca2c1282af548b45d89
-            // Block size must fit into flags field or it may cause unreadable paks for earlier Unreal Engine versions
-            compression_block_size = 0x3e << 11; // max possible block size
-
-            let mut compressed_size = 0;
-            let mut blocks = vec![];
-            for chunk in data.as_ref().chunks(compression_block_size as usize) {
-                let data = compress(compression, chunk)?;
-                compressed_size += data.len() as u64;
-                hasher.update(&data);
-                blocks.push(PartialBlock {
-                    uncompressed_size: chunk.len(),
-                    data,
-                })
-            }
-
-            (PartialEntryData::Blocks(blocks), compressed_size)
-        }
-        None => {
-            compression_block_size = 0;
-            hasher.update(data.as_ref());
-            (PartialEntryData::Slice(data), uncompressed_size)
-        }
-    };
-
-    Ok(PartialEntry {
-        compression,
-        compressed_size,
-        uncompressed_size,
-        compression_block_size,
-        data,
-        hash: Hash(hasher.finalize().into()),
-    })
-}
-
-#[cfg(feature = "compression")]
-fn compress(compression: Compression, data: &[u8]) -> Result<Vec<u8>> {
-    use std::io::Write;
-
-    let compressed = match compression {
-        Compression::Zlib => {
-            let mut compress =
-                flate2::write::ZlibEncoder::new(Vec::new(), flate2::Compression::fast());
-            compress.write_all(data.as_ref())?;
-            compress.finish()?
-        }
-        Compression::Gzip => {
-            let mut compress =
-                flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::fast());
-            compress.write_all(data.as_ref())?;
-            compress.finish()?
-        }
-        Compression::Zstd => zstd::stream::encode_all(data, 0)?,
-        Compression::LZ4 => lz4_flex::block::compress(data),
-        Compression::Oodle => {
-            #[cfg(not(feature = "oodle"))]
-            return Err(super::Error::Oodle);
-            #[cfg(feature = "oodle")]
-            {
-                oodle_loader::oodle().unwrap().compress(
-                    data.as_ref(),
-                    oodle_loader::Compressor::Mermaid,
-                    oodle_loader::CompressionLevel::Normal,
-                )?
-            }
-        }
-    };
-    Ok(compressed)
-}

repak/src/entry.rs

@@ -2,7 +2,7 @@ use std::io;
 use byteorder::{LE, ReadBytesExt, WriteBytesExt};

-use crate::{data::build_partial_entry, reader, writer, Error, Hash};
+use crate::{Error, reader, writer};

 use super::{Compression, ext::BoolExt, ext::ReadExt, Version, VersionMajor};
@@ -12,7 +12,7 @@ pub(crate) enum EntryLocation {
     Index,
 }

-#[derive(Debug, Default, Clone)]
+#[derive(Debug)]
 pub(crate) struct Block {
     pub start: u64,
     pub end: u64,
@@ -57,7 +57,7 @@ pub(crate) struct Entry {
     pub uncompressed: u64,
     pub compression_slot: Option<u32>,
     pub timestamp: Option<u64>,
-    pub hash: Option<Hash>,
+    pub hash: Option<[u8; 20]>,
     pub blocks: Option<Vec<Block>>,
     pub flags: u8,
     pub compression_block_size: u32,
@@ -105,13 +105,127 @@ impl Entry {
         version: Version,
         compression_slots: &mut Vec<Option<Compression>>,
         allowed_compression: &[Compression],
-        data: &[u8],
-    ) -> Result<Self, Error> {
-        let partial_entry = build_partial_entry(allowed_compression, data)?;
-        let stream_position = writer.stream_position()?;
-        let entry = partial_entry.build_entry(version, compression_slots, stream_position)?;
-        entry.write(writer, version, crate::entry::EntryLocation::Data)?;
-        partial_entry.write_data(writer)?;
+        data: impl AsRef<[u8]>,
+    ) -> Result<Self, super::Error> {
+        // TODO hash needs to be post-compression
+        use sha1::{Digest, Sha1};
+        let mut hasher = Sha1::new();
+        hasher.update(&data);
+
+        let offset = writer.stream_position()?;
+        let len = data.as_ref().len() as u64;
+
+        // TODO possibly select best compression based on some criteria instead of picking first
+        let compression = allowed_compression.first().cloned();
+        let compression_slot = if let Some(compression) = compression {
+            // find existing
+            let slot = compression_slots
+                .iter()
+                .enumerate()
+                .find(|(_, s)| **s == Some(compression));
+            Some(if let Some((i, _)) = slot {
+                // existing found
+                i
+            } else {
+                if version.version_major() < VersionMajor::FNameBasedCompression {
+                    return Err(Error::Other(format!(
+                        "cannot use {compression:?} prior to FNameBasedCompression (pak version 8)"
+                    )));
+                }
+                // find empty slot
+                if let Some((i, empty_slot)) = compression_slots
+                    .iter_mut()
+                    .enumerate()
+                    .find(|(_, s)| s.is_none())
+                {
+                    // empty found, set it to used compression type
+                    *empty_slot = Some(compression);
+                    i
+                } else {
+                    // no empty slot found, add a new one
+                    compression_slots.push(Some(compression));
+                    compression_slots.len() - 1
+                }
+            } as u32)
+        } else {
+            None
+        };
+
+        let (blocks, compressed) = match compression {
+            #[cfg(not(feature = "compression"))]
+            Some(_) => {
+                unreachable!("should not be able to reach this point without compression feature")
+            }
+            #[cfg(feature = "compression")]
+            Some(compression) => {
+                use std::io::Write;
+
+                let entry_size = Entry::get_serialized_size(version, compression_slot, 1);
+                let data_offset = offset + entry_size;
+
+                let compressed = match compression {
+                    Compression::Zlib => {
+                        let mut compress = flate2::write::ZlibEncoder::new(
+                            Vec::new(),
+                            flate2::Compression::fast(),
+                        );
+                        compress.write_all(data.as_ref())?;
+                        compress.finish()?
+                    }
+                    Compression::Gzip => {
+                        let mut compress =
+                            flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::fast());
+                        compress.write_all(data.as_ref())?;
+                        compress.finish()?
+                    }
+                    Compression::Zstd => zstd::stream::encode_all(data.as_ref(), 0)?,
+                    Compression::Oodle => {
+                        return Err(Error::Other("writing Oodle compression unsupported".into()))
+                    }
+                };
+
+                let compute_offset = |index: usize| -> u64 {
+                    match version.version_major() >= VersionMajor::RelativeChunkOffsets {
+                        true => index as u64 + (data_offset - offset),
+                        false => index as u64 + data_offset,
+                    }
+                };
+
+                let blocks = vec![Block {
+                    start: compute_offset(0),
+                    end: compute_offset(compressed.len()),
+                }];
+
+                (Some(blocks), Some(compressed))
+            }
+            None => (None, None),
+        };
+
+        let entry = super::entry::Entry {
+            offset,
+            compressed: compressed
+                .as_ref()
+                .map(|c: &Vec<u8>| c.len() as u64)
+                .unwrap_or(len),
+            uncompressed: len,
+            compression_slot,
+            timestamp: None,
+            hash: Some(hasher.finalize().into()),
+            blocks,
+            flags: 0,
+            compression_block_size: compressed.as_ref().map(|_| len as u32).unwrap_or_default(),
+        };
+
+        entry.write(writer, version, EntryLocation::Data)?;
+
+        if let Some(compressed) = compressed {
+            writer.write_all(&compressed)?;
+        } else {
+            writer.write_all(data.as_ref())?;
+        }
+
         Ok(entry)
     }
@@ -131,7 +245,7 @@ impl Entry {
             n => Some(n - 1),
         };
         let timestamp = (ver == VersionMajor::Initial).then_try(|| reader.read_u64::<LE>())?;
-        let hash = Some(Hash(reader.read_guid()?));
+        let hash = Some(reader.read_guid()?);
         let blocks = (ver >= VersionMajor::CompressionEncryption && compression.is_some())
             .then_try(|| reader.read_array(Block::read))?;
         let flags = (ver >= VersionMajor::CompressionEncryption)
@@ -175,7 +289,7 @@ impl Entry {
             writer.write_u64::<LE>(self.timestamp.unwrap_or_default())?;
         }
         if let Some(hash) = self.hash {
-            writer.write_all(&hash.0)?;
+            writer.write_all(&hash)?;
         } else {
             panic!("hash missing");
         }
@@ -277,11 +391,6 @@ impl Entry {
         let is_uncompressed_size_32_bit_safe = self.uncompressed <= u32::MAX as u64;
         let is_offset_32_bit_safe = self.offset <= u32::MAX as u64;

-        assert!(
-            compression_blocks_count < 0x10_000,
-            "compression blocks count fits in 16 bits"
-        );
-
         let flags = (compression_block_size)
             | (compression_blocks_count << 6)
             | ((self.is_encrypted() as u32) << 22)
@@ -327,6 +436,7 @@ impl Entry {
         version: Version,
         compression: &[Option<Compression>],
         #[allow(unused)] key: &super::Key,
+        #[allow(unused)] oodle: &super::Oodle,
         buf: &mut W,
     ) -> Result<(), super::Error> {
         reader.seek(io::SeekFrom::Start(self.offset))?;
@@ -354,7 +464,7 @@ impl Entry {
             }
         }

-        #[cfg(feature = "compression")]
+        #[cfg(any(feature = "compression", feature = "oodle"))]
         let ranges = {
             let offset = |index: u64| -> usize {
                 (match version.version_major() >= VersionMajor::RelativeChunkOffsets {
@@ -384,52 +494,56 @@ impl Entry {
         match self.compression_slot.and_then(|c| compression[c as usize]) {
             None => buf.write_all(&data)?,
-            #[cfg(not(feature = "compression"))]
-            _ => return Err(super::Error::Compression),
             #[cfg(feature = "compression")]
-            Some(comp) => {
-                let chunk_size = if ranges.len() == 1 {
-                    self.uncompressed as usize
-                } else {
-                    self.compression_block_size as usize
-                };
-
-                match comp {
-                    Compression::Zlib => decompress!(flate2::read::ZlibDecoder<&[u8]>),
-                    Compression::Gzip => decompress!(flate2::read::GzDecoder<&[u8]>),
-                    Compression::Zstd => {
-                        for range in ranges {
-                            io::copy(&mut zstd::stream::read::Decoder::new(&data[range])?, buf)?;
-                        }
-                    }
-                    Compression::LZ4 => {
-                        let mut decompressed = vec![0; self.uncompressed as usize];
-                        for (decomp_chunk, comp_range) in
-                            decompressed.chunks_mut(chunk_size).zip(ranges)
-                        {
-                            lz4_flex::block::decompress_into(&data[comp_range], decomp_chunk)
-                                .map_err(|_| Error::DecompressionFailed(Compression::LZ4))?;
-                        }
-                        buf.write_all(&decompressed)?;
-                    }
-                    #[cfg(feature = "oodle")]
-                    Compression::Oodle => {
-                        let mut decompressed = vec![0; self.uncompressed as usize];
-                        for (decomp_chunk, comp_range) in
-                            decompressed.chunks_mut(chunk_size).zip(ranges)
-                        {
-                            let out =
-                                oodle_loader::oodle()?.decompress(&data[comp_range], decomp_chunk);
-                            if out == 0 {
-                                return Err(Error::DecompressionFailed(Compression::Oodle));
-                            }
-                        }
-                        buf.write_all(&decompressed)?;
-                    }
-                    #[cfg(not(feature = "oodle"))]
-                    Compression::Oodle => return Err(super::Error::Oodle),
-                }
-            }
+            Some(Compression::Zlib) => decompress!(flate2::read::ZlibDecoder<&[u8]>),
+            #[cfg(feature = "compression")]
+            Some(Compression::Gzip) => decompress!(flate2::read::GzDecoder<&[u8]>),
+            #[cfg(feature = "compression")]
+            Some(Compression::Zstd) => {
+                for range in ranges {
+                    io::copy(&mut zstd::stream::read::Decoder::new(&data[range])?, buf)?;
+                }
+            }
+            #[cfg(feature = "oodle")]
+            Some(Compression::Oodle) => {
+                let oodle = match oodle {
+                    crate::Oodle::Some(getter) => getter().map_err(|_| super::Error::OodleFailed),
+                    crate::Oodle::None => Err(super::Error::OodleFailed),
+                }?;
+                let mut decompressed = vec![0; self.uncompressed as usize];
+
+                let mut compress_offset = 0;
+                let mut decompress_offset = 0;
+                let block_count = ranges.len();
+                for range in ranges {
+                    let decomp = if block_count == 1 {
+                        self.uncompressed as usize
+                    } else {
+                        (self.compression_block_size as usize)
+                            .min(self.uncompressed as usize - compress_offset)
+                    };
+                    let buffer = &mut data[range];
+                    let out = oodle(
+                        buffer,
+                        &mut decompressed[decompress_offset..decompress_offset + decomp],
+                    );
+                    if out == 0 {
+                        return Err(super::Error::DecompressionFailed(Compression::Oodle));
+                    }
+                    compress_offset += self.compression_block_size as usize;
+                    decompress_offset += out as usize;
+                }
+                debug_assert_eq!(
+                    decompress_offset, self.uncompressed as usize,
+                    "Oodle decompression length mismatch"
+                );
+                buf.write_all(&decompressed)?;
+            }
+            #[cfg(not(feature = "oodle"))]
+            Some(Compression::Oodle) => return Err(super::Error::Oodle),
+            #[cfg(not(feature = "compression"))]
+            _ => return Err(super::Error::Compression),
         }

         buf.flush()?;
         Ok(())
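Note on the new Oodle branch: every block decompresses to `compression_block_size` bytes except the last, which gets the remainder of `uncompressed`; the `min` above implements exactly that. A small sketch of the arithmetic (hypothetical helper, not part of the diff):

fn block_sizes(uncompressed: usize, block_size: usize) -> impl Iterator<Item = usize> {
    // full-size blocks, then whatever remains for the final block
    (0..uncompressed)
        .step_by(block_size.max(1))
        .map(move |offset| block_size.min(uncompressed - offset))
}

// e.g. 10_000 bytes in 4_096-byte blocks -> [4096, 4096, 1808]
assert_eq!(
    block_sizes(10_000, 4_096).collect::<Vec<_>>(),
    vec![4_096, 4_096, 1_808]
);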

repak/src/error.rs

@@ -42,9 +42,8 @@ pub enum Error {
     #[error("found magic of {:#x} instead of {:#x}", .0, super::MAGIC)]
     Magic(u32),

-    #[cfg(feature = "oodle")]
-    #[error("Oodle loader error: {0}")]
-    OodleFailed(#[from] oodle_loader::Error),
+    #[error("pointer to OodleLZ_Decompress was not provided")]
+    OodleFailed,

     #[error("No entry found at {0}")]
     MissingEntry(String),

repak/src/footer.rs

@@ -1,7 +1,4 @@
-use crate::{
-    ext::{BoolExt, WriteExt},
-    Hash,
-};
+use crate::ext::{BoolExt, WriteExt};

 use super::{ext::ReadExt, Compression, Version, VersionMajor};
 use byteorder::{ReadBytesExt, WriteBytesExt, LE};
@@ -16,7 +13,7 @@ pub struct Footer {
     pub version_major: VersionMajor,
     pub index_offset: u64,
     pub index_size: u64,
-    pub hash: Hash,
+    pub hash: [u8; 20],
     pub frozen: bool,
     pub compression: Vec<Option<Compression>>,
 }
@@ -32,7 +29,7 @@ impl Footer {
             VersionMajor::from_repr(reader.read_u32::<LE>()?).unwrap_or(version.version_major());
         let index_offset = reader.read_u64::<LE>()?;
         let index_size = reader.read_u64::<LE>()?;
-        let hash = Hash(reader.read_guid()?);
+        let hash = reader.read_guid()?;
         let frozen = version.version_major() == VersionMajor::FrozenIndex && reader.read_bool()?;
         let compression = {
             let mut compression = Vec::with_capacity(match version {
@@ -94,7 +91,7 @@ impl Footer {
         writer.write_u32::<LE>(self.version_major as u32)?;
         writer.write_u64::<LE>(self.index_offset)?;
         writer.write_u64::<LE>(self.index_size)?;
-        writer.write_all(&self.hash.0)?;
+        writer.write_all(&self.hash)?;
         if self.version_major == VersionMajor::FrozenIndex {
             writer.write_bool(self.frozen)?;
         }

repak/src/lib.rs

@@ -1,5 +1,4 @@
 #![allow(dead_code)]
-mod data;
 mod entry;
 mod error;
 mod ext;
@@ -8,10 +7,19 @@ mod pak;
 mod reader;
 mod writer;

-pub use {data::PartialEntry, error::*, pak::*};
+pub use {error::*, pak::*};

 pub const MAGIC: u32 = 0x5A6F12E1;

+#[cfg(feature = "oodle")]
+mod oodle {
+    pub type OodleGetter = fn() -> Result<OodleDecompress, Box<dyn std::error::Error>>;
+    pub type OodleDecompress = fn(comp_buf: &[u8], raw_buf: &mut [u8]) -> i32;
+}
+
+#[cfg(feature = "oodle_loader")]
+pub use oodle_loader;
+
 #[derive(
     Clone,
     Copy,
@@ -127,7 +135,6 @@ pub enum Compression {
     Gzip,
     Oodle,
     Zstd,
-    LZ4,
 }

 #[allow(clippy::large_enum_variant)]
@@ -145,3 +152,11 @@ impl From<aes::Aes256> for Key {
         Self::Some(value)
     }
 }
+
+#[derive(Debug, Default)]
+pub(crate) enum Oodle {
+    #[cfg(feature = "oodle")]
+    Some(oodle::OodleGetter),
+    #[default]
+    None,
+}
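Note: `Oodle` is the internal holder for the getter that `PakBuilder` threads down to `Entry::decompress`. Under the `oodle_explicit` feature a caller supplies the getter itself; a minimal sketch assuming only the signatures above (`my_oodle_getter` and its stub are hypothetical):

fn my_oodle_getter() -> Result<fn(&[u8], &mut [u8]) -> i32, Box<dyn std::error::Error>> {
    fn decompress(_comp_buf: &[u8], _raw_buf: &mut [u8]) -> i32 {
        0 // wire up a real OodleLZ_Decompress here; 0 signals failure to repak
    }
    Ok(decompress)
}

let pak_builder = repak::PakBuilder::new().oodle(my_oodle_getter);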

repak/src/pak.rs

@@ -1,6 +1,5 @@
-use crate::data::build_partial_entry;
 use crate::entry::Entry;
-use crate::{Compression, Error, PartialEntry};
+use crate::Compression;

 use super::ext::{ReadExt, WriteExt};
 use super::{Version, VersionMajor};
@@ -8,17 +7,10 @@ use byteorder::{ReadBytesExt, WriteBytesExt, LE};
 use std::collections::BTreeMap;
 use std::io::{self, Read, Seek, Write};

-#[derive(Default, Clone, Copy)]
-pub(crate) struct Hash(pub(crate) [u8; 20]);
-impl std::fmt::Debug for Hash {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "Hash({})", hex::encode(self.0))
-    }
-}
-
 #[derive(Debug)]
 pub struct PakBuilder {
     key: super::Key,
+    oodle: super::Oodle,
     allowed_compression: Vec<Compression>,
 }
@@ -32,6 +24,10 @@ impl PakBuilder {
     pub fn new() -> Self {
         Self {
             key: Default::default(),
+            #[cfg(not(feature = "oodle_implicit_dynamic"))]
+            oodle: super::Oodle::None,
+            #[cfg(feature = "oodle_implicit_dynamic")]
+            oodle: super::Oodle::Some(oodle_loader::decompress),
             allowed_compression: Default::default(),
         }
     }
@@ -40,20 +36,25 @@ impl PakBuilder {
         self.key = super::Key::Some(key);
         self
     }
+    #[cfg(feature = "oodle_explicit")]
+    pub fn oodle(mut self, oodle_getter: super::oodle::OodleGetter) -> Self {
+        self.oodle = super::Oodle::Some(oodle_getter);
+        self
+    }
     #[cfg(feature = "compression")]
     pub fn compression(mut self, compression: impl IntoIterator<Item = Compression>) -> Self {
         self.allowed_compression = compression.into_iter().collect();
         self
     }
     pub fn reader<R: Read + Seek>(self, reader: &mut R) -> Result<PakReader, super::Error> {
-        PakReader::new_any_inner(reader, self.key)
+        PakReader::new_any_inner(reader, self.key, self.oodle)
     }
     pub fn reader_with_version<R: Read + Seek>(
         self,
         reader: &mut R,
         version: super::Version,
     ) -> Result<PakReader, super::Error> {
-        PakReader::new_inner(reader, version, self.key)
+        PakReader::new_inner(reader, version, self.key, self.oodle)
     }
     pub fn writer<W: Write + Seek>(
         self,
@@ -77,6 +78,7 @@ impl PakBuilder {
 pub struct PakReader {
     pak: Pak,
     key: super::Key,
+    oodle: super::Oodle,
 }

 #[derive(Debug)]
@@ -142,8 +144,8 @@ impl Index {
         self.entries
     }

-    fn add_entry(&mut self, path: String, entry: super::entry::Entry) {
-        self.entries.insert(path, entry);
+    fn add_entry(&mut self, path: &str, entry: super::entry::Entry) {
+        self.entries.insert(path.to_string(), entry);
     }
 }
@@ -164,13 +166,14 @@ impl PakReader {
     fn new_any_inner<R: Read + Seek>(
         reader: &mut R,
         key: super::Key,
+        oodle: super::Oodle,
     ) -> Result<Self, super::Error> {
         use std::fmt::Write;
         let mut log = "\n".to_owned();
         for ver in Version::iter() {
             match Pak::read(&mut *reader, ver, &key) {
-                Ok(pak) => return Ok(Self { pak, key }),
+                Ok(pak) => return Ok(Self { pak, key, oodle }),
                 Err(err) => writeln!(log, "trying version {} failed: {}", ver, err)?,
             }
         }
@@ -181,8 +184,9 @@ impl PakReader {
         reader: &mut R,
         version: super::Version,
         key: super::Key,
+        oodle: super::Oodle,
     ) -> Result<Self, super::Error> {
-        Pak::read(reader, version, &key).map(|pak| Self { pak, key })
+        Pak::read(reader, version, &key).map(|pak| Self { pak, key, oodle })
     }

     pub fn version(&self) -> super::Version {
@@ -223,6 +227,7 @@ impl PakReader {
                 self.pak.version,
                 &self.pak.compression,
                 &self.key,
+                &self.oodle,
                 writer,
             ),
             None => Err(super::Error::MissingEntry(path.to_owned())),
@@ -268,91 +273,27 @@ impl<W: Write + Seek> PakWriter<W> {
         self.writer
     }

-    pub fn write_file(
-        &mut self,
-        path: &str,
-        allow_compress: bool,
-        data: impl AsRef<[u8]>,
-    ) -> Result<(), super::Error> {
+    pub fn write_file(&mut self, path: &str, data: impl AsRef<[u8]>) -> Result<(), super::Error> {
         self.pak.index.add_entry(
-            path.to_string(),
+            path,
             Entry::write_file(
                 &mut self.writer,
                 self.pak.version,
                 &mut self.pak.compression,
-                if allow_compress {
-                    &self.allowed_compression
-                } else {
-                    &[]
-                },
-                data.as_ref(),
+                &self.allowed_compression,
+                data,
             )?,
         );

         Ok(())
     }

-    pub fn entry_builder(&self) -> EntryBuilder {
-        EntryBuilder {
-            allowed_compression: self.allowed_compression.clone(),
-        }
-    }
-
-    pub fn write_entry<D: AsRef<[u8]>>(
-        &mut self,
-        path: String,
-        partial_entry: PartialEntry<D>,
-    ) -> Result<(), Error> {
-        let stream_position = self.writer.stream_position()?;
-        let entry = partial_entry.build_entry(
-            self.pak.version,
-            &mut self.pak.compression,
-            stream_position,
-        )?;
-        entry.write(
-            &mut self.writer,
-            self.pak.version,
-            crate::entry::EntryLocation::Data,
-        )?;
-        self.pak.index.add_entry(path, entry);
-        partial_entry.write_data(&mut self.writer)?;
-        Ok(())
-    }
-
     pub fn write_index(mut self) -> Result<W, super::Error> {
         self.pak.write(&mut self.writer, &self.key)?;
         Ok(self.writer)
     }
 }

-struct Data<'d>(Box<dyn AsRef<[u8]> + Send + Sync + 'd>);
-impl AsRef<[u8]> for Data<'_> {
-    fn as_ref(&self) -> &[u8] {
-        self.0.as_ref().as_ref()
-    }
-}
-
-#[derive(Clone)]
-pub struct EntryBuilder {
-    allowed_compression: Vec<Compression>,
-}
-
-impl EntryBuilder {
-    /// Builds an entry in memory (compressed if requested) which must be written out later
-    pub fn build_entry<D: AsRef<[u8]> + Send + Sync>(
-        &self,
-        compress: bool,
-        data: D,
-    ) -> Result<PartialEntry<D>, Error> {
-        let compression = compress
-            .then_some(self.allowed_compression.as_slice())
-            .unwrap_or_default();
-        build_partial_entry(compression, data)
-    }
-}
-
 impl Pak {
     fn read<R: Read + Seek>(
         reader: &mut R,
@@ -600,12 +541,12 @@ impl Pak {
         index_writer.write_u32::<LE>(1)?; // we have path hash index
         index_writer.write_u64::<LE>(path_hash_index_offset)?;
         index_writer.write_u64::<LE>(phi_buf.len() as u64)?; // path hash index size
-        index_writer.write_all(&hash(&phi_buf).0)?;
+        index_writer.write_all(&hash(&phi_buf))?;

         index_writer.write_u32::<LE>(1)?; // we have full directory index
         index_writer.write_u64::<LE>(full_directory_index_offset)?;
         index_writer.write_u64::<LE>(fdi_buf.len() as u64)?; // path hash index size
-        index_writer.write_all(&hash(&fdi_buf).0)?;
+        index_writer.write_all(&hash(&fdi_buf))?;

         index_writer.write_u32::<LE>(encoded_entries.len() as u32)?;
         index_writer.write_all(&encoded_entries)?;
@@ -643,11 +584,11 @@ impl Pak {
     }
 }

-fn hash(data: &[u8]) -> Hash {
+fn hash(data: &[u8]) -> [u8; 20] {
     use sha1::{Digest, Sha1};
     let mut hasher = Sha1::new();
     hasher.update(data);
-    Hash(hasher.finalize().into())
+    hasher.finalize().into()
 }

 fn generate_path_hash_index<W: Write>(
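Note: `write_file` loses the per-call `allow_compress` toggle; whether a file is compressed is now decided once by `PakBuilder::compression(...)`. Sketch of the call-site change (assuming a `pak_writer` built from such a builder):

// before: pak_writer.write_file("a/file.txt", false, b"data")?;
pak_writer.write_file("a/file.txt", b"data")?;
let out = pak_writer.write_index()?;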

repak/src/writer.rs

@@ -11,12 +11,8 @@ pub(crate) fn flag_writer<W: io::Write>(writer: &mut W,
     writer.write_u32::<LE>(flags)?;
     #[cfg(feature = "wuthering-waves")]
     if version == Version::V12 {
-        let tmp =
-            ((flags & 0x3f) << 16) |
-            ((flags >> 6) & 0xFFFF) |
-            ((flags << 6) & (1 << 28)) | // (flags & (1 << 22)) << 6
-            ((flags >> 1) & 0x0FC00000) | // (flags & 0x1F800000) >> 1
-            flags & 0xE0000000;
+        let tmp = ((flags & 0x3f) << 16) | ((flags >> 6) & 0xFFFF) |
+            ((flags << 6) & (1 << 28)) | ((flags >> 1) & 0x0FC00000) | flags & 0xE0000000;
         writer.write_u32::<LE>(tmp)?;
         writer.write_u8(0)?;
     } else {

repak/tests/test.rs

@@ -185,7 +185,7 @@ fn test_write(_version: repak::Version, _file_name: &str, bytes: &[u8]) {
     for path in pak_reader.files() {
         let data = pak_reader.get(&path, &mut reader).unwrap();
-        pak_writer.write_file(&path, false, data).unwrap();
+        pak_writer.write_file(&path, data).unwrap();
     }

     assert!(pak_writer.write_index().unwrap().into_inner() == reader.into_inner());

repak_cli/Cargo.toml

@@ -19,7 +19,7 @@ path = "src/main.rs"

 [features]
 default = ["oodle"]
-oodle = ["repak/oodle"]
+oodle = ["repak/oodle_implicit_dynamic"]

 [dependencies]
 repak = { path = "../repak" }

repak_cli/src/main.rs

@@ -487,7 +487,7 @@ fn pack(args: ActionPack) -> Result<(), repak::Error> {
     use indicatif::ProgressIterator;

     let iter = paths.iter();
-    let (log, iter) = if !args.quiet {
+    let (log, mut iter) = if !args.quiet {
         let iter =
             iter.progress_with_style(indicatif::ProgressStyle::with_template(STYLE).unwrap());
         (
@@ -498,39 +498,17 @@ fn pack(args: ActionPack) -> Result<(), repak::Error> {
         (Output::Stdout, itertools::Either::Right(iter))
     };
     let log = log.clone();

-    let mut result = None;
-    let result_ref = &mut result;
-    rayon::in_place_scope(|scope| -> Result<(), repak::Error> {
-        let (tx, rx) = std::sync::mpsc::sync_channel(0);
-        let entry_builder = pak.entry_builder();
-
-        scope.spawn(move |_| {
-            *result_ref = Some(
-                iter.par_bridge()
-                    .try_for_each(|p| -> Result<(), repak::Error> {
-                        let rel = &p
-                            .strip_prefix(input_path)
-                            .expect("file not in input directory")
-                            .to_slash()
-                            .expect("failed to convert to slash path");
-                        if args.verbose {
-                            log.println(format!("packing {}", &rel));
-                        }
-                        let entry = entry_builder.build_entry(true, std::fs::read(p)?)?;
-                        tx.send((rel.to_string(), entry)).unwrap();
-                        Ok(())
-                    }),
-            );
-        });
-
-        for (path, entry) in rx {
-            pak.write_entry(path, entry)?;
-        }
-        Ok(())
-    })?;
-    result.unwrap()?;
+    iter.try_for_each(|p| {
+        let rel = &p
+            .strip_prefix(input_path)
+            .expect("file not in input directory")
+            .to_slash()
+            .expect("failed to convert to slash path");
+        if args.verbose {
+            log.println(format!("packing {}", &rel));
+        }
+        pak.write_file(rel, std::fs::read(p)?)
+    })?;

     pak.write_index()?;