conflicts-solved

xavo95 2025-02-08 04:04:03 +01:00
commit ac663db7c9
13 changed files with 739 additions and 683 deletions

Cargo.lock generated

@ -68,12 +68,6 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "anyhow"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
[[package]]
name = "assert_cmd"
version = "2.0.16"
@ -229,21 +223,6 @@ dependencies = [
"libc",
]
[[package]]
name = "crc"
version = "3.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636"
dependencies = [
"crc-catalog",
]
[[package]]
name = "crc-catalog"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
[[package]]
name = "crc32fast"
version = "1.4.2"
@ -416,12 +395,6 @@ version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "hex-literal"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46"
[[package]]
name = "icu_collections"
version = "1.5.0"
@ -659,13 +632,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
[[package]]
name = "lzma-rs"
version = "0.3.0"
name = "lz4_flex"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e"
checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5"
dependencies = [
"byteorder",
"crc",
"twox-hash",
]
[[package]]
@ -689,15 +661,6 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
[[package]]
name = "object"
version = "0.36.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
dependencies = [
"memchr",
]
[[package]]
name = "once_cell"
version = "1.20.2"
@ -708,15 +671,10 @@ checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
name = "oodle_loader"
version = "0.2.2"
dependencies = [
"anyhow",
"hex",
"hex-literal",
"libc",
"libloading",
"lzma-rs",
"object",
"seq-macro",
"sha1",
"sha2",
"thiserror",
"ureq",
]
@ -835,6 +793,8 @@ dependencies = [
"base64",
"byteorder",
"flate2",
"hex",
"lz4_flex",
"oodle_loader",
"paste",
"sha1",
@ -941,12 +901,6 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "seq-macro"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4"
[[package]]
name = "serde"
version = "1.0.217"
@ -1013,6 +967,12 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "strsim"
version = "0.11.1"
@ -1119,6 +1079,16 @@ dependencies = [
"zerovec",
]
[[package]]
name = "twox-hash"
version = "1.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
dependencies = [
"cfg-if",
"static_assertions",
]
[[package]]
name = "typenum"
version = "1.17.0"

oodle_loader/Cargo.toml

@ -6,18 +6,9 @@ license.workspace = true
version.workspace = true
edition.workspace = true
[target.'cfg(windows)'.dependencies]
libloading = "0.8"
[target.'cfg(unix)'.dependencies]
object = { version = "0.36.7", default-features = false, features = ["std", "read"] }
libc = "0.2.169"
seq-macro = "0.3.5"
[dependencies]
sha1 = { workspace = true }
libloading = "0.8"
ureq = "2.12"
hex-literal = "0.4"
hex = { workspace = true }
anyhow = "1.0.95"
lzma-rs = "0.3.0"
sha2 = "0.10.8"
thiserror = "2.0.11"

oodle_loader/src/lib.rs

@ -1,11 +1,77 @@
use anyhow::{anyhow, Context, Result};
use std::{io::Read, sync::OnceLock};
use std::sync::OnceLock;
type Result<T, E = Error> = std::result::Result<T, E>;
type OodleDecompress = fn(comp_buf: &[u8], raw_buf: &mut [u8]) -> i32;
pub use oodle_lz::{CompressionLevel, Compressor};
#[allow(non_camel_case_types)]
type OodleLZ_Decompress = unsafe extern "win64" fn(
mod oodle_lz {
#[derive(Debug, Clone, Copy)]
#[repr(i32)]
pub enum Compressor {
/// None = memcpy, pass through uncompressed bytes
None = 3,
/// Fast decompression and high compression ratios, amazing!
Kraken = 8,
/// Leviathan = Kraken's big brother with higher compression, slightly slower decompression.
Leviathan = 13,
/// Mermaid is between Kraken & Selkie - crazy fast, still decent compression.
Mermaid = 9,
/// Selkie is a super-fast relative of Mermaid. For maximum decode speed.
Selkie = 11,
/// Hydra, the many-headed beast = Leviathan, Kraken, Mermaid, or Selkie (see $OodleLZ_About_Hydra)
Hydra = 12,
}
#[derive(Debug, Clone, Copy)]
#[repr(i32)]
pub enum CompressionLevel {
/// don't compress, just copy raw bytes
None = 0,
/// super fast mode, lower compression ratio
SuperFast = 1,
/// fastest LZ mode with still decent compression ratio
VeryFast = 2,
/// fast - good for daily use
Fast = 3,
/// standard medium speed LZ mode
Normal = 4,
/// optimal parse level 1 (faster optimal encoder)
Optimal1 = 5,
/// optimal parse level 2 (recommended baseline optimal encoder)
Optimal2 = 6,
/// optimal parse level 3 (slower optimal encoder)
Optimal3 = 7,
/// optimal parse level 4 (very slow optimal encoder)
Optimal4 = 8,
/// optimal parse level 5 (don't care about encode speed, maximum compression)
Optimal5 = 9,
/// faster than SuperFast, less compression
HyperFast1 = -1,
/// faster than HyperFast1, less compression
HyperFast2 = -2,
/// faster than HyperFast2, less compression
HyperFast3 = -3,
/// fastest, less compression
HyperFast4 = -4,
}
pub type Compress = unsafe extern "system" fn(
compressor: Compressor,
rawBuf: *const u8,
rawLen: usize,
compBuf: *mut u8,
level: CompressionLevel,
pOptions: *const (),
dictionaryBase: *const (),
lrm: *const (),
scratchMem: *mut u8,
scratchSize: usize,
) -> isize;
pub type Decompress = unsafe extern "system" fn(
compBuf: *const u8,
compBufSize: usize,
rawBuf: *mut u8,
@ -20,22 +86,164 @@ type OodleLZ_Decompress = unsafe extern "win64" fn(
decoderMemory: *mut u8,
decoderMemorySize: usize,
threadPhase: u32,
) -> i32;
) -> isize;
pub fn decompress() -> Result<OodleDecompress, Box<dyn std::error::Error>> {
#[cfg(windows)]
return Ok(windows_oodle::decompress_wrapper_windows);
#[cfg(unix)]
return Ok(linux_oodle::oodle_loader_linux());
pub type GetCompressedBufferSizeNeeded =
unsafe extern "system" fn(compressor: Compressor, rawSize: usize) -> usize;
pub type SetPrintf = unsafe extern "system" fn(printf: *const ());
}
fn call_decompress(comp_buf: &[u8], raw_buf: &mut [u8], decompress: OodleLZ_Decompress) -> i32 {
static OODLE_VERSION: &str = "2.9.10";
static OODLE_BASE_URL: &str = "https://github.com/WorkingRobot/OodleUE/raw/refs/heads/main/Engine/Source/Programs/Shared/EpicGames.Oodle/Sdk/";
struct OodlePlatform {
path: &'static str,
name: &'static str,
hash: &'static str,
}
#[cfg(target_os = "linux")]
static OODLE_PLATFORM: OodlePlatform = OodlePlatform {
path: "linux/lib",
name: "liboo2corelinux64.so.9",
hash: "ed7e98f70be1254a80644efd3ae442ff61f854a2fe9debb0b978b95289884e9c",
};
#[cfg(target_os = "macos")]
static OODLE_PLATFORM: OodlePlatform = OodlePlatform {
path: "mac/lib",
name: "liboo2coremac64.2.9.10.dylib",
hash: "b09af35f6b84a61e2b6488495c7927e1cef789b969128fa1c845e51a475ec501",
};
#[cfg(windows)]
static OODLE_PLATFORM: OodlePlatform = OodlePlatform {
path: "win/redist",
name: "oo2core_9_win64.dll",
hash: "6f5d41a7892ea6b2db420f2458dad2f84a63901c9a93ce9497337b16c195f457",
};
fn url() -> String {
format!(
"{OODLE_BASE_URL}/{}/{}/{}",
OODLE_VERSION, OODLE_PLATFORM.path, OODLE_PLATFORM.name
)
}
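For reference, OODLE_BASE_URL already ends in a slash, so the extra `/` in the format string yields a doubled slash that GitHub's raw endpoint tolerates. A hypothetical check (not in the commit) of the resolved URL on Linux:

#[cfg(all(test, target_os = "linux"))]
#[test]
fn url_points_at_pinned_sdk() {
    // Note the "Sdk//2.9.10" — the doubled slash comes from the format string.
    assert_eq!(
        url(),
        "https://github.com/WorkingRobot/OodleUE/raw/refs/heads/main/Engine/Source/Programs/Shared/EpicGames.Oodle/Sdk//2.9.10/linux/lib/liboo2corelinux64.so.9"
    );
}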
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Oodle lib hash mismatch expected: {expected} got {found}")]
HashMismatch { expected: String, found: String },
#[error("Oodle compression failed")]
CompressionFailed,
#[error("Oodle initialization failed previously")]
InitializationFailed,
#[error("IO error {0:?}")]
Io(#[from] std::io::Error),
#[error("ureq error {0:?}")]
Ureq(Box<ureq::Error>),
#[error("Oodle libloading error {0:?}")]
LibLoading(#[from] libloading::Error),
}
impl From<ureq::Error> for Error {
fn from(value: ureq::Error) -> Self {
Self::Ureq(value.into())
}
}
fn check_hash(buffer: &[u8]) -> Result<()> {
use sha2::{Digest, Sha256};
let mut hasher = Sha256::new();
hasher.update(buffer);
let hash = hex::encode(hasher.finalize());
if hash != OODLE_PLATFORM.hash {
return Err(Error::HashMismatch {
expected: OODLE_PLATFORM.hash.into(),
found: hash,
});
}
Ok(())
}
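check_hash only runs on freshly downloaded bytes (fetch_oodle below deliberately skips re-checking a file already on disk). A minimal hypothetical test of the failure path:

#[test]
fn check_hash_rejects_foreign_bytes() {
    // Anything that is not the pinned Oodle build must fail verification.
    assert!(matches!(
        check_hash(b"definitely not the oodle library"),
        Err(Error::HashMismatch { .. })
    ));
}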
fn fetch_oodle() -> Result<std::path::PathBuf> {
let oodle_path = std::env::current_exe()?.with_file_name(OODLE_PLATFORM.name);
if !oodle_path.exists() {
let mut buffer = vec![];
ureq::get(&url())
.call()?
.into_reader()
.read_to_end(&mut buffer)?;
check_hash(&buffer)?;
std::fs::write(&oodle_path, buffer)?;
}
// don't check existing file to allow user to substitute other versions
// check_hash(&std::fs::read(&oodle_path)?)?;
Ok(oodle_path)
}
pub struct Oodle {
_library: libloading::Library,
compress: oodle_lz::Compress,
decompress: oodle_lz::Decompress,
get_compressed_buffer_size_needed: oodle_lz::GetCompressedBufferSizeNeeded,
set_printf: oodle_lz::SetPrintf,
}
impl Oodle {
fn new(lib: libloading::Library) -> Result<Self> {
unsafe {
decompress(
comp_buf.as_ptr(),
comp_buf.len(),
raw_buf.as_mut_ptr(),
raw_buf.len(),
let res = Oodle {
compress: *lib.get(b"OodleLZ_Compress")?,
decompress: *lib.get(b"OodleLZ_Decompress")?,
get_compressed_buffer_size_needed: *lib
.get(b"OodleLZ_GetCompressedBufferSizeNeeded")?,
set_printf: *lib.get(b"OodleCore_Plugins_SetPrintf")?,
_library: lib,
};
(res.set_printf)(std::ptr::null()); // silence oodle logging
Ok(res)
}
}
pub fn compress(
&self,
input: &[u8],
compressor: Compressor,
compression_level: CompressionLevel,
) -> Result<Vec<u8>> {
unsafe {
let buffer_size = self.get_compressed_buffer_size_needed(compressor, input.len());
let mut buffer = vec![0; buffer_size];
let len = (self.compress)(
compressor,
input.as_ptr(),
input.len(),
buffer.as_mut_ptr(),
compression_level,
std::ptr::null(),
std::ptr::null(),
std::ptr::null(),
std::ptr::null_mut(),
0,
);
if len == -1 {
return Err(Error::CompressionFailed);
}
buffer.truncate(len as usize);
Ok(buffer)
}
}
pub fn decompress(&self, input: &[u8], output: &mut [u8]) -> isize {
unsafe {
(self.decompress)(
input.as_ptr(),
input.len(),
output.as_mut_ptr(),
output.len(),
1,
1,
0,
@ -49,357 +257,67 @@ fn call_decompress(comp_buf: &[u8], raw_buf: &mut [u8], decompress: OodleLZ_Deco
)
}
}
static OODLE_HASH: [u8; 20] = hex_literal::hex!("4bcc73614cb8fd2b0bce8d0f91ee5f3202d9d624");
static OODLE_DLL_NAME: &str = "oo2core_9_win64.dll";
fn fetch_oodle() -> Result<std::path::PathBuf> {
use sha1::{Digest, Sha1};
let oodle_path = std::env::current_exe()?.with_file_name(OODLE_DLL_NAME);
if !oodle_path.exists() {
let mut compressed = vec![];
ureq::get("https://origin.warframe.com/origin/50F7040A/index.txt.lzma")
.call()?
.into_reader()
.read_to_end(&mut compressed)?;
let mut decompressed = vec![];
lzma_rs::lzma_decompress(&mut std::io::Cursor::new(compressed), &mut decompressed).unwrap();
let index = String::from_utf8(decompressed)?;
let line = index
.lines()
.find(|l| l.contains(OODLE_DLL_NAME))
.with_context(|| format!("{OODLE_DLL_NAME} not found in index"))?;
let path = line.split_once(',').context("failed to parse index")?.0;
let mut compressed = vec![];
ureq::get(&format!("https://content.warframe.com{path}"))
.call()?
.into_reader()
.read_to_end(&mut compressed)?;
let mut decompressed = vec![];
lzma_rs::lzma_decompress(&mut std::io::Cursor::new(compressed), &mut decompressed).unwrap();
std::fs::write(&oodle_path, decompressed)?;
fn get_compressed_buffer_size_needed(
&self,
compressor: oodle_lz::Compressor,
raw_buffer: usize,
) -> usize {
unsafe { (self.get_compressed_buffer_size_needed)(compressor, raw_buffer) }
}
}
let mut hasher = Sha1::new();
hasher.update(std::fs::read(&oodle_path)?);
let hash = hasher.finalize();
(hash[..] == OODLE_HASH).then_some(()).ok_or_else(|| {
anyhow!(
"oodle hash mismatch expected: {} got: {} ",
hex::encode(OODLE_HASH),
hex::encode(hash)
)
})?;
static OODLE: OnceLock<Option<Oodle>> = OnceLock::new();
Ok(oodle_path)
fn load_oodle() -> Result<Oodle> {
let path = fetch_oodle()?;
unsafe {
let library = libloading::Library::new(path)?;
Oodle::new(library)
}
}
#[cfg(windows)]
mod windows_oodle {
pub fn oodle() -> Result<&'static Oodle> {
let mut result = None;
let oodle = OODLE.get_or_init(|| match load_oodle() {
Err(err) => {
result = Some(Err(err));
None
}
Ok(oodle) => Some(oodle),
});
match (result, oodle) {
// oodle initialized so return
(_, Some(oodle)) => Ok(oodle),
// error during initialization
(Some(result), _) => result?,
// no error because initialization was tried and failed before
_ => Err(Error::InitializationFailed),
}
}
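The OnceLock<Option<Oodle>> shape exists because stable OnceLock has no fallible get_or_try_init: the closure stores None on failure, the captured local smuggles the error out of the first call, and every later call maps the cached None to InitializationFailed so the download is never retried. The same pattern distilled, with illustrative names (not commit code):

use std::sync::OnceLock;

fn try_init_once<T>(
    cell: &'static OnceLock<Option<T>>,
    load: impl FnOnce() -> Result<T, String>,
) -> Result<&'static T, String> {
    let mut first_error = None;
    let slot = cell.get_or_init(|| match load() {
        Ok(value) => Some(value),
        Err(err) => {
            first_error = Some(err); // only the call that ran `load` sees the error
            None
        }
    });
    match (slot, first_error) {
        (Some(value), _) => Ok(value),  // initialized, now or on an earlier call
        (None, Some(err)) => Err(err),  // this call performed the failed init
        (None, None) => Err("initialization failed previously".to_string()),
    }
}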
#[cfg(test)]
mod test {
use super::*;
static DECOMPRESS: OnceLock<(OodleLZ_Decompress, libloading::Library)> = OnceLock::new();
#[test]
fn test_oodle() {
let oodle = oodle().unwrap();
pub fn decompress_wrapper_windows(comp_buf: &[u8], raw_buf: &mut [u8]) -> i32 {
let decompress = DECOMPRESS.get_or_init(|| {
let path = fetch_oodle().context("failed to fetch oodle").unwrap();
let data = b"In tools and when compressing large inputs in one call, consider using
$OodleXLZ_Compress_AsyncAndWait (in the Oodle2 Ext lib) instead to get parallelism. Alternatively,
chop the data into small fixed size chunks (we recommend at least 256KiB, i.e. 262144 bytes) and
call compress on each of them, which decreases compression ratio but makes for trivial parallel
compression and decompression.";
let lib = unsafe { libloading::Library::new(path) }
.context("failed to load oodle")
let buffer = oodle
.compress(data, Compressor::Mermaid, CompressionLevel::Optimal5)
.unwrap();
(*unsafe { lib.get(b"OodleLZ_Decompress") }.unwrap(), lib)
});
call_decompress(comp_buf, raw_buf, decompress.0)
}
}
#[cfg(unix)]
mod linux_oodle {
use super::*;
use object::pe::{
ImageNtHeaders64, IMAGE_REL_BASED_DIR64, IMAGE_SCN_MEM_EXECUTE, IMAGE_SCN_MEM_READ,
IMAGE_SCN_MEM_WRITE,
};
use object::read::pe::{ImageOptionalHeader, ImageThunkData, PeFile64};
use object::{LittleEndian as LE, Object, ObjectSection};
use std::collections::HashMap;
use std::ffi::{c_void, CStr};
#[repr(C)]
struct ThreadInformationBlock {
exception_list: *const c_void,
stack_base: *const c_void,
stack_limit: *const c_void,
sub_system_tib: *const c_void,
fiber_data: *const c_void,
arbitrary_user_pointer: *const c_void,
teb: *const c_void,
}
const TIB: ThreadInformationBlock = ThreadInformationBlock {
exception_list: std::ptr::null(),
stack_base: std::ptr::null(),
stack_limit: std::ptr::null(),
sub_system_tib: std::ptr::null(),
fiber_data: std::ptr::null(),
arbitrary_user_pointer: std::ptr::null(),
teb: std::ptr::null(),
};
static DECOMPRESS: OnceLock<OodleLZ_Decompress> = OnceLock::new();
fn decompress_wrapper(comp_buf: &[u8], raw_buf: &mut [u8]) -> i32 {
unsafe {
// Set GS register in calling thread
const ARCH_SET_GS: i32 = 0x1001;
libc::syscall(libc::SYS_arch_prctl, ARCH_SET_GS, &TIB);
// Call actual decompress function
call_decompress(comp_buf, raw_buf, *DECOMPRESS.get().unwrap())
}
}
#[allow(non_snake_case)]
mod imports {
use super::*;
pub unsafe extern "win64" fn OutputDebugStringA(string: *const std::ffi::c_char) {
print!("[OODLE] {}", CStr::from_ptr(string).to_string_lossy());
}
pub unsafe extern "win64" fn GetProcessHeap() -> *const c_void {
0x12345678 as *const c_void
}
pub unsafe extern "win64" fn HeapAlloc(
_heap: *const c_void,
flags: i32,
size: usize,
) -> *const c_void {
assert_eq!(0, flags);
libc::malloc(size)
}
pub unsafe extern "win64" fn HeapFree(
_heap: *const c_void,
_flags: i32,
ptr: *mut c_void,
) -> bool {
libc::free(ptr);
true
}
pub unsafe extern "win64" fn memset(
ptr: *mut c_void,
value: i32,
num: usize,
) -> *const c_void {
libc::memset(ptr, value, num)
}
pub unsafe extern "win64" fn memmove(
destination: *mut c_void,
source: *const c_void,
num: usize,
) -> *const c_void {
libc::memmove(destination, source, num)
}
pub unsafe extern "win64" fn memcpy(
destination: *mut c_void,
source: *const c_void,
num: usize,
) -> *const c_void {
libc::memcpy(destination, source, num)
}
}
// Create some unique function pointers to use for unimplemented imports
const DEBUG_FNS: [*const fn(); 100] = gen_debug_fns();
static mut DEBUG_NAMES: [&str; 100] = [""; 100];
const fn gen_debug_fns() -> [*const fn(); 100] {
fn log<const I: usize>() {
unimplemented!("import {:?}", unsafe { DEBUG_NAMES[I] });
}
let mut array = [std::ptr::null(); 100];
seq_macro::seq!(N in 0..100 {
array[N] = log::<N> as *const fn();
});
array
}
pub fn oodle_loader_linux() -> OodleDecompress {
DECOMPRESS.get_or_init(|| get_decompress_inner().unwrap());
decompress_wrapper
}
fn get_decompress_inner() -> Result<OodleLZ_Decompress> {
fetch_oodle()?;
let oodle = std::env::current_exe()
.unwrap()
.with_file_name(OODLE_DLL_NAME);
let dll = std::fs::read(oodle)?;
let obj_file = PeFile64::parse(&*dll)?;
let size = obj_file.nt_headers().optional_header.size_of_image() as usize;
let header_size = obj_file.nt_headers().optional_header.size_of_headers() as usize;
let image_base = obj_file.relative_address_base() as usize;
// Create map
let mmap = unsafe {
std::slice::from_raw_parts_mut(
libc::mmap(
std::ptr::null_mut(),
size,
libc::PROT_READ | libc::PROT_WRITE,
libc::MAP_PRIVATE | libc::MAP_ANONYMOUS,
-1,
0,
) as *mut u8,
size,
)
};
let map_base = mmap.as_ptr();
// Copy header to map
mmap[0..header_size].copy_from_slice(&dll[0..header_size]);
unsafe {
assert_eq!(
0,
libc::mprotect(
mmap.as_mut_ptr() as *mut c_void,
header_size,
libc::PROT_READ
)
);
}
// Copy section data to map
for section in obj_file.sections() {
let address = section.address() as usize;
let data = section.data()?;
mmap[(address - image_base)..(address - image_base + data.len())]
.copy_from_slice(section.data()?);
}
// Apply relocations
let sections = obj_file.section_table();
let mut blocks = obj_file
.data_directories()
.relocation_blocks(&*dll, &sections)?
.unwrap();
while let Some(block) = blocks.next()? {
let block_address = block.virtual_address();
let block_data = sections.pe_data_at(&*dll, block_address).map(object::Bytes);
for reloc in block {
let offset = (reloc.virtual_address - block_address) as usize;
match reloc.typ {
IMAGE_REL_BASED_DIR64 => {
let addend = block_data
.and_then(|data| data.read_at::<object::U64Bytes<LE>>(offset).ok())
.map(|addend| addend.get(LE));
if let Some(addend) = addend {
mmap[reloc.virtual_address as usize
..reloc.virtual_address as usize + 8]
.copy_from_slice(&u64::to_le_bytes(
addend - image_base as u64 + map_base as u64,
));
}
}
_ => unimplemented!(),
}
}
}
// Fix up imports
let import_table = obj_file.import_table()?.unwrap();
let mut import_descs = import_table.descriptors()?;
let mut i = 0;
while let Some(import_desc) = import_descs.next()? {
let mut thunks = import_table.thunks(import_desc.original_first_thunk.get(LE))?;
let mut address = import_desc.first_thunk.get(LE) as usize;
while let Some(thunk) = thunks.next::<ImageNtHeaders64>()? {
let (_hint, name) = import_table.hint_name(thunk.address())?;
let name = String::from_utf8_lossy(name).to_string();
use imports::*;
let fn_addr = match name.as_str() {
"OutputDebugStringA" => OutputDebugStringA as usize,
"GetProcessHeap" => GetProcessHeap as usize,
"HeapAlloc" => HeapAlloc as usize,
"HeapFree" => HeapFree as usize,
"memset" => memset as usize,
"memcpy" => memcpy as usize,
"memmove" => memmove as usize,
_ => {
unsafe { DEBUG_NAMES[i] = name.leak() }
let a = DEBUG_FNS[i] as usize;
i += 1;
a
}
};
mmap[address..address + 8].copy_from_slice(&usize::to_le_bytes(fn_addr));
address += 8;
}
}
// Build export table
let mut exports = HashMap::new();
for export in obj_file.exports()? {
let name = String::from_utf8_lossy(export.name());
let address = export.address() - image_base as u64 + map_base as u64;
exports.insert(name, address as *const c_void);
}
// Fix section permissions
for section in obj_file.sections() {
let address = section.address() as usize;
let data = section.data()?;
let size = data.len();
let mut permissions = 0;
let flags = match section.flags() {
object::SectionFlags::Coff { characteristics } => characteristics,
_ => unreachable!(),
};
if 0 != flags & IMAGE_SCN_MEM_READ {
permissions |= libc::PROT_READ;
}
if 0 != flags & IMAGE_SCN_MEM_WRITE {
permissions |= libc::PROT_WRITE;
}
if 0 != flags & IMAGE_SCN_MEM_EXECUTE {
permissions |= libc::PROT_EXEC;
}
unsafe {
assert_eq!(
0,
libc::mprotect(
mmap.as_mut_ptr().add(address - image_base) as *mut c_void,
size,
permissions
)
);
}
}
Ok(unsafe {
std::mem::transmute::<*const c_void, OodleLZ_Decompress>(exports["OodleLZ_Decompress"])
})
dbg!((data.len(), buffer.len()));
let mut uncomp = vec![0; data.len()];
oodle.decompress(&buffer, &mut uncomp);
assert_eq!(data[..], uncomp[..]);
}
}
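Putting the new public surface together, a hedged round-trip sketch (error handling simplified; compressor and level choices mirror the test above):

use oodle_loader::{oodle, CompressionLevel, Compressor};

fn roundtrip(data: &[u8]) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
    let oodle = oodle()?; // first call downloads, hash-checks, and loads the library
    let compressed = oodle.compress(data, Compressor::Mermaid, CompressionLevel::Normal)?;
    let mut raw = vec![0u8; data.len()]; // caller must know the uncompressed size
    // decompress returns the number of bytes produced, 0 on failure
    if oodle.decompress(&compressed, &mut raw) != data.len() as isize {
        return Err("oodle decompression failed".into());
    }
    Ok(raw)
}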

repak/Cargo.toml

@ -9,11 +9,8 @@ keywords.workspace = true
[features]
default = ["compression", "encryption"]
compression = ["dep:flate2", "dep:zstd"]
oodle = []
oodle_loader = ["dep:oodle_loader"]
oodle_explicit = ["oodle"]
oodle_implicit_dynamic = ["dep:oodle_loader", "oodle"]
compression = ["dep:flate2", "dep:zstd", "dep:lz4_flex"]
oodle = ["dep:oodle_loader", "compression"]
encryption = ["dep:aes"]
wuthering-waves = []
@ -22,10 +19,12 @@ byteorder = "1.5"
aes = { workspace = true, optional = true }
flate2 = { version = "1.0", optional = true }
zstd = { version = "0.13", optional = true }
lz4_flex = { version = "0.11.3", optional = true }
oodle_loader = { path = "../oodle_loader", optional = true}
thiserror = "2.0"
sha1 = { workspace = true }
strum = { workspace = true }
hex.workspace = true
[dev-dependencies]
base64 = { workspace = true }

repak/src/data.rs Normal file

@ -0,0 +1,222 @@
use std::io::Write;
use crate::{
entry::{Block, Entry},
Compression, Error, Hash, Version, VersionMajor,
};
type Result<T, E = Error> = std::result::Result<T, E>;
pub struct PartialEntry<D: AsRef<[u8]>> {
compression: Option<Compression>,
compressed_size: u64,
uncompressed_size: u64,
compression_block_size: u32,
data: PartialEntryData<D>,
hash: Hash,
}
pub(crate) struct PartialBlock {
uncompressed_size: usize,
data: Vec<u8>,
}
pub(crate) enum PartialEntryData<D> {
Slice(D),
Blocks(Vec<PartialBlock>),
}
#[cfg(feature = "compression")]
fn get_compression_slot(
version: Version,
compression_slots: &mut Vec<Option<Compression>>,
compression: Compression,
) -> Result<u32> {
let slot = compression_slots
.iter()
.enumerate()
.find(|(_, s)| **s == Some(compression));
Ok(if let Some((i, _)) = slot {
// existing found
i
} else {
if version.version_major() < VersionMajor::FNameBasedCompression {
return Err(Error::Other(format!(
"cannot use {compression:?} prior to FNameBasedCompression (pak version 8)"
)));
}
// find empty slot
if let Some((i, empty_slot)) = compression_slots
.iter_mut()
.enumerate()
.find(|(_, s)| s.is_none())
{
// empty found, set it to used compression type
*empty_slot = Some(compression);
i
} else {
// no empty slot found, add a new one
compression_slots.push(Some(compression));
compression_slots.len() - 1
}
} as u32)
}
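The slot policy matches what Entry::write_file used to do inline: reuse an existing slot, else fill the first empty one, else append. A hypothetical test (assuming any pak version at or above FNameBasedCompression, e.g. Version::V11):

#[test]
fn compression_slots_reuse_fill_append() {
    let mut slots = vec![None, Some(Compression::Zlib)];
    let v = Version::V11;
    // an existing slot is reused
    assert_eq!(get_compression_slot(v, &mut slots, Compression::Zlib).unwrap(), 1);
    // an empty slot is filled before anything is appended
    assert_eq!(get_compression_slot(v, &mut slots, Compression::Zstd).unwrap(), 0);
    // with no empty slot left, a new one is appended
    assert_eq!(get_compression_slot(v, &mut slots, Compression::LZ4).unwrap(), 2);
}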
impl<D: AsRef<[u8]>> PartialEntry<D> {
pub(crate) fn build_entry(
&self,
version: Version,
#[allow(unused)] compression_slots: &mut Vec<Option<Compression>>,
file_offset: u64,
) -> Result<Entry> {
#[cfg(feature = "compression")]
let compression_slot = self
.compression
.map(|c| get_compression_slot(version, compression_slots, c))
.transpose()?;
#[cfg(not(feature = "compression"))]
let compression_slot = None;
let blocks = match &self.data {
PartialEntryData::Slice(_) => None,
PartialEntryData::Blocks(blocks) => {
let entry_size =
Entry::get_serialized_size(version, compression_slot, blocks.len() as u32);
let mut offset = entry_size;
if version.version_major() < VersionMajor::RelativeChunkOffsets {
offset += file_offset;
};
Some(
blocks
.iter()
.map(|block| {
let start = offset;
offset += block.data.len() as u64;
let end = offset;
Block { start, end }
})
.collect(),
)
}
};
Ok(Entry {
offset: file_offset,
compressed: self.compressed_size,
uncompressed: self.uncompressed_size,
compression_slot,
timestamp: None,
hash: Some(self.hash),
blocks,
flags: 0,
compression_block_size: self.compression_block_size,
})
}
pub(crate) fn write_data<S: Write>(&self, stream: &mut S) -> Result<()> {
match &self.data {
PartialEntryData::Slice(data) => {
stream.write_all(data.as_ref())?;
}
PartialEntryData::Blocks(blocks) => {
for block in blocks {
stream.write_all(&block.data)?;
}
}
}
Ok(())
}
}
pub(crate) fn build_partial_entry<D>(
allowed_compression: &[Compression],
data: D,
) -> Result<PartialEntry<D>>
where
D: AsRef<[u8]>,
{
// TODO hash needs to be post-compression/encryption
use sha1::{Digest, Sha1};
let mut hasher = Sha1::new();
// TODO possibly select best compression based on some criteria instead of picking first
let compression = allowed_compression.first().cloned();
let uncompressed_size = data.as_ref().len() as u64;
let compression_block_size;
let (data, compressed_size) = match compression {
#[cfg(not(feature = "compression"))]
Some(_) => {
unreachable!("should not be able to reach this point without compression feature")
}
#[cfg(feature = "compression")]
Some(compression) => {
// https://github.com/EpicGames/UnrealEngine/commit/3aad0ff7976be1073005dca2c1282af548b45d89
// Block size must fit into flags field or it may cause unreadable paks for earlier Unreal Engine versions
compression_block_size = 0x3e << 11; // max possible block size
let mut compressed_size = 0;
let mut blocks = vec![];
for chunk in data.as_ref().chunks(compression_block_size as usize) {
let data = compress(compression, chunk)?;
compressed_size += data.len() as u64;
hasher.update(&data);
blocks.push(PartialBlock {
uncompressed_size: chunk.len(),
data,
})
}
(PartialEntryData::Blocks(blocks), compressed_size)
}
None => {
compression_block_size = 0;
hasher.update(data.as_ref());
(PartialEntryData::Slice(data), uncompressed_size)
}
};
Ok(PartialEntry {
compression,
compressed_size,
uncompressed_size,
compression_block_size,
data,
hash: Hash(hasher.finalize().into()),
})
}
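Worth spelling out the constant: 0x3e << 11 is 62 × 2048 = 126,976 bytes (124 KiB), per the comment above the largest block size that still fits the 6-bit field in the legacy flags encoding (compare the 16-bit block-count assert added in entry.rs below). A sketch of the resulting chunking:

const BLOCK: usize = 0x3e << 11; // 62 * 2048 = 126_976 bytes (124 KiB)

fn block_sizes(len: usize) -> Vec<usize> {
    (0..len).step_by(BLOCK).map(|off| BLOCK.min(len - off)).collect()
}

#[test]
fn chunking_arithmetic() {
    // a 300 KiB input is split into two full blocks plus a remainder
    assert_eq!(block_sizes(300 * 1024), vec![126_976, 126_976, 53_248]);
}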
#[cfg(feature = "compression")]
fn compress(compression: Compression, data: &[u8]) -> Result<Vec<u8>> {
use std::io::Write;
let compressed = match compression {
Compression::Zlib => {
let mut compress =
flate2::write::ZlibEncoder::new(Vec::new(), flate2::Compression::fast());
compress.write_all(data.as_ref())?;
compress.finish()?
}
Compression::Gzip => {
let mut compress =
flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::fast());
compress.write_all(data.as_ref())?;
compress.finish()?
}
Compression::Zstd => zstd::stream::encode_all(data, 0)?,
Compression::LZ4 => lz4_flex::block::compress(data),
Compression::Oodle => {
#[cfg(not(feature = "oodle"))]
return Err(super::Error::Oodle);
#[cfg(feature = "oodle")]
{
oodle_loader::oodle().unwrap().compress(
data.as_ref(),
oodle_loader::Compressor::Mermaid,
oodle_loader::CompressionLevel::Normal,
)?
}
}
};
Ok(compressed)
}
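One detail worth noting: lz4_flex's block format (unlike its frame format) carries no size header, so the reader must derive each output chunk's length from compression_block_size and the total uncompressed size before calling decompress_into — exactly what the reworked decompression in entry.rs below does. A quick round-trip sketch:

#[test]
fn lz4_block_roundtrip() {
    // Block-mode LZ4: the caller supplies the uncompressed length.
    let raw = b"repak repak repak repak repak";
    let compressed = lz4_flex::block::compress(raw);
    let mut out = vec![0u8; raw.len()];
    let n = lz4_flex::block::decompress_into(&compressed, &mut out).unwrap();
    assert_eq!((n, &out[..]), (raw.len(), &raw[..]));
}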

repak/src/entry.rs

@ -2,7 +2,7 @@ use std::io;
use byteorder::{LE, ReadBytesExt, WriteBytesExt};
use crate::{Error, reader, writer};
use crate::{data::build_partial_entry, reader, writer, Error, Hash};
use super::{Compression, ext::BoolExt, ext::ReadExt, Version, VersionMajor};
@ -12,7 +12,7 @@ pub(crate) enum EntryLocation {
Index,
}
#[derive(Debug)]
#[derive(Debug, Default, Clone)]
pub(crate) struct Block {
pub start: u64,
pub end: u64,
@ -57,7 +57,7 @@ pub(crate) struct Entry {
pub uncompressed: u64,
pub compression_slot: Option<u32>,
pub timestamp: Option<u64>,
pub hash: Option<[u8; 20]>,
pub hash: Option<Hash>,
pub blocks: Option<Vec<Block>>,
pub flags: u8,
pub compression_block_size: u32,
@ -105,127 +105,13 @@ impl Entry {
version: Version,
compression_slots: &mut Vec<Option<Compression>>,
allowed_compression: &[Compression],
data: impl AsRef<[u8]>,
) -> Result<Self, super::Error> {
// TODO hash needs to be post-compression
use sha1::{Digest, Sha1};
let mut hasher = Sha1::new();
hasher.update(&data);
let offset = writer.stream_position()?;
let len = data.as_ref().len() as u64;
// TODO possibly select best compression based on some criteria instead of picking first
let compression = allowed_compression.first().cloned();
let compression_slot = if let Some(compression) = compression {
// find existing
let slot = compression_slots
.iter()
.enumerate()
.find(|(_, s)| **s == Some(compression));
Some(if let Some((i, _)) = slot {
// existing found
i
} else {
if version.version_major() < VersionMajor::FNameBasedCompression {
return Err(Error::Other(format!(
"cannot use {compression:?} prior to FNameBasedCompression (pak version 8)"
)));
}
// find empty slot
if let Some((i, empty_slot)) = compression_slots
.iter_mut()
.enumerate()
.find(|(_, s)| s.is_none())
{
// empty found, set it to used compression type
*empty_slot = Some(compression);
i
} else {
// no empty slot found, add a new one
compression_slots.push(Some(compression));
compression_slots.len() - 1
}
} as u32)
} else {
None
};
let (blocks, compressed) = match compression {
#[cfg(not(feature = "compression"))]
Some(_) => {
unreachable!("should not be able to reach this point without compression feature")
}
#[cfg(feature = "compression")]
Some(compression) => {
use std::io::Write;
let entry_size = Entry::get_serialized_size(version, compression_slot, 1);
let data_offset = offset + entry_size;
let compressed = match compression {
Compression::Zlib => {
let mut compress = flate2::write::ZlibEncoder::new(
Vec::new(),
flate2::Compression::fast(),
);
compress.write_all(data.as_ref())?;
compress.finish()?
}
Compression::Gzip => {
let mut compress =
flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::fast());
compress.write_all(data.as_ref())?;
compress.finish()?
}
Compression::Zstd => zstd::stream::encode_all(data.as_ref(), 0)?,
Compression::Oodle => {
return Err(Error::Other("writing Oodle compression unsupported".into()))
}
};
let compute_offset = |index: usize| -> u64 {
match version.version_major() >= VersionMajor::RelativeChunkOffsets {
true => index as u64 + (data_offset - offset),
false => index as u64 + data_offset,
}
};
let blocks = vec![Block {
start: compute_offset(0),
end: compute_offset(compressed.len()),
}];
(Some(blocks), Some(compressed))
}
None => (None, None),
};
let entry = super::entry::Entry {
offset,
compressed: compressed
.as_ref()
.map(|c: &Vec<u8>| c.len() as u64)
.unwrap_or(len),
uncompressed: len,
compression_slot,
timestamp: None,
hash: Some(hasher.finalize().into()),
blocks,
flags: 0,
compression_block_size: compressed.as_ref().map(|_| len as u32).unwrap_or_default(),
};
entry.write(writer, version, EntryLocation::Data)?;
if let Some(compressed) = compressed {
writer.write_all(&compressed)?;
} else {
writer.write_all(data.as_ref())?;
}
data: &[u8],
) -> Result<Self, Error> {
let partial_entry = build_partial_entry(allowed_compression, data)?;
let stream_position = writer.stream_position()?;
let entry = partial_entry.build_entry(version, compression_slots, stream_position)?;
entry.write(writer, version, crate::entry::EntryLocation::Data)?;
partial_entry.write_data(writer)?;
Ok(entry)
}
@ -245,7 +131,7 @@ impl Entry {
n => Some(n - 1),
};
let timestamp = (ver == VersionMajor::Initial).then_try(|| reader.read_u64::<LE>())?;
let hash = Some(reader.read_guid()?);
let hash = Some(Hash(reader.read_guid()?));
let blocks = (ver >= VersionMajor::CompressionEncryption && compression.is_some())
.then_try(|| reader.read_array(Block::read))?;
let flags = (ver >= VersionMajor::CompressionEncryption)
@ -289,7 +175,7 @@ impl Entry {
writer.write_u64::<LE>(self.timestamp.unwrap_or_default())?;
}
if let Some(hash) = self.hash {
writer.write_all(&hash)?;
writer.write_all(&hash.0)?;
} else {
panic!("hash missing");
}
@ -391,6 +277,11 @@ impl Entry {
let is_uncompressed_size_32_bit_safe = self.uncompressed <= u32::MAX as u64;
let is_offset_32_bit_safe = self.offset <= u32::MAX as u64;
assert!(
compression_blocks_count < 0x10_000,
"compression blocks count fits in 16 bits"
);
let flags = (compression_block_size)
| (compression_blocks_count << 6)
| ((self.is_encrypted() as u32) << 22)
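The hunk above ends mid-expression, but the packing the new assert guards is: block size in the low 6 bits, a 16-bit block count from bit 6, and the encryption flag at bit 22 (higher fields fall outside this hunk and are not asserted here). A hypothetical standalone check that the bound keeps the fields from overlapping:

fn pack_flags(block_size_field: u32, block_count: u32, encrypted: bool) -> u32 {
    assert!(block_count < 0x10_000, "block count must fit in 16 bits");
    block_size_field | (block_count << 6) | ((encrypted as u32) << 22)
}

#[test]
fn flag_fields_do_not_overlap() {
    let f = pack_flags(0x3f, 0xffff, true);
    assert_eq!(f & 0x3f, 0x3f);            // bits 0..6: compression block size
    assert_eq!((f >> 6) & 0xffff, 0xffff); // bits 6..22: compression block count
    assert_eq!((f >> 22) & 1, 1);          // bit 22: is_encrypted
}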
@ -436,7 +327,6 @@ impl Entry {
version: Version,
compression: &[Option<Compression>],
#[allow(unused)] key: &super::Key,
#[allow(unused)] oodle: &super::Oodle,
buf: &mut W,
) -> Result<(), super::Error> {
reader.seek(io::SeekFrom::Start(self.offset))?;
@ -464,7 +354,7 @@ impl Entry {
}
}
#[cfg(any(feature = "compression", feature = "oodle"))]
#[cfg(feature = "compression")]
let ranges = {
let offset = |index: u64| -> usize {
(match version.version_major() >= VersionMajor::RelativeChunkOffsets {
@ -494,56 +384,52 @@ impl Entry {
match self.compression_slot.and_then(|c| compression[c as usize]) {
None => buf.write_all(&data)?,
#[cfg(not(feature = "compression"))]
_ => return Err(super::Error::Compression),
#[cfg(feature = "compression")]
Some(Compression::Zlib) => decompress!(flate2::read::ZlibDecoder<&[u8]>),
#[cfg(feature = "compression")]
Some(Compression::Gzip) => decompress!(flate2::read::GzDecoder<&[u8]>),
#[cfg(feature = "compression")]
Some(Compression::Zstd) => {
Some(comp) => {
let chunk_size = if ranges.len() == 1 {
self.uncompressed as usize
} else {
self.compression_block_size as usize
};
match comp {
Compression::Zlib => decompress!(flate2::read::ZlibDecoder<&[u8]>),
Compression::Gzip => decompress!(flate2::read::GzDecoder<&[u8]>),
Compression::Zstd => {
for range in ranges {
io::copy(&mut zstd::stream::read::Decoder::new(&data[range])?, buf)?;
}
}
#[cfg(feature = "oodle")]
Some(Compression::Oodle) => {
let oodle = match oodle {
crate::Oodle::Some(getter) => getter().map_err(|_| super::Error::OodleFailed),
crate::Oodle::None => Err(super::Error::OodleFailed),
}?;
Compression::LZ4 => {
let mut decompressed = vec![0; self.uncompressed as usize];
let mut compress_offset = 0;
let mut decompress_offset = 0;
let block_count = ranges.len();
for range in ranges {
let decomp = if block_count == 1 {
self.uncompressed as usize
} else {
(self.compression_block_size as usize)
.min(self.uncompressed as usize - compress_offset)
};
let buffer = &mut data[range];
let out = oodle(
buffer,
&mut decompressed[decompress_offset..decompress_offset + decomp],
);
for (decomp_chunk, comp_range) in
decompressed.chunks_mut(chunk_size).zip(ranges)
{
lz4_flex::block::decompress_into(&data[comp_range], decomp_chunk)
.map_err(|_| Error::DecompressionFailed(Compression::LZ4))?;
}
buf.write_all(&decompressed)?;
}
#[cfg(feature = "oodle")]
Compression::Oodle => {
let mut decompressed = vec![0; self.uncompressed as usize];
for (decomp_chunk, comp_range) in
decompressed.chunks_mut(chunk_size).zip(ranges)
{
let out =
oodle_loader::oodle()?.decompress(&data[comp_range], decomp_chunk);
if out == 0 {
return Err(super::Error::DecompressionFailed(Compression::Oodle));
return Err(Error::DecompressionFailed(Compression::Oodle));
}
compress_offset += self.compression_block_size as usize;
decompress_offset += out as usize;
}
debug_assert_eq!(
decompress_offset, self.uncompressed as usize,
"Oodle decompression length mismatch"
);
buf.write_all(&decompressed)?;
}
#[cfg(not(feature = "oodle"))]
Some(Compression::Oodle) => return Err(super::Error::Oodle),
#[cfg(not(feature = "compression"))]
_ => return Err(super::Error::Compression),
Compression::Oodle => return Err(super::Error::Oodle),
}
}
}
buf.flush()?;
Ok(())
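The unified chunk_size logic replaces the old per-codec offset bookkeeping: a single-block entry decompresses straight into the full uncompressed buffer, while multi-block entries let chunks_mut pair one fixed-size output chunk with each stored block range, the final chunk naturally coming up short. A sketch of that pairing:

#[test]
fn output_chunking_matches_block_count() {
    let mut out = vec![0u8; 300_000];
    let lens: Vec<usize> = out.chunks_mut(126_976).map(|c| c.len()).collect();
    assert_eq!(lens, [126_976, 126_976, 46_048]); // last chunk is the remainder
}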

repak/src/error.rs

@ -42,8 +42,9 @@ pub enum Error {
#[error("found magic of {:#x} instead of {:#x}", .0, super::MAGIC)]
Magic(u32),
#[error("pointer to OodleLZ_Decompress was not provided")]
OodleFailed,
#[cfg(feature = "oodle")]
#[error("Oodle loader error: {0}")]
OodleFailed(#[from] oodle_loader::Error),
#[error("No entry found at {0}")]
MissingEntry(String),

repak/src/footer.rs

@ -1,4 +1,7 @@
use crate::ext::{BoolExt, WriteExt};
use crate::{
ext::{BoolExt, WriteExt},
Hash,
};
use super::{ext::ReadExt, Compression, Version, VersionMajor};
use byteorder::{ReadBytesExt, WriteBytesExt, LE};
@ -13,7 +16,7 @@ pub struct Footer {
pub version_major: VersionMajor,
pub index_offset: u64,
pub index_size: u64,
pub hash: [u8; 20],
pub hash: Hash,
pub frozen: bool,
pub compression: Vec<Option<Compression>>,
}
@ -29,7 +32,7 @@ impl Footer {
VersionMajor::from_repr(reader.read_u32::<LE>()?).unwrap_or(version.version_major());
let index_offset = reader.read_u64::<LE>()?;
let index_size = reader.read_u64::<LE>()?;
let hash = reader.read_guid()?;
let hash = Hash(reader.read_guid()?);
let frozen = version.version_major() == VersionMajor::FrozenIndex && reader.read_bool()?;
let compression = {
let mut compression = Vec::with_capacity(match version {
@ -91,7 +94,7 @@ impl Footer {
writer.write_u32::<LE>(self.version_major as u32)?;
writer.write_u64::<LE>(self.index_offset)?;
writer.write_u64::<LE>(self.index_size)?;
writer.write_all(&self.hash)?;
writer.write_all(&self.hash.0)?;
if self.version_major == VersionMajor::FrozenIndex {
writer.write_bool(self.frozen)?;
}

repak/src/lib.rs

@ -1,4 +1,5 @@
#![allow(dead_code)]
mod data;
mod entry;
mod error;
mod ext;
@ -7,19 +8,10 @@ mod pak;
mod reader;
mod writer;
pub use {error::*, pak::*};
pub use {data::PartialEntry, error::*, pak::*};
pub const MAGIC: u32 = 0x5A6F12E1;
#[cfg(feature = "oodle")]
mod oodle {
pub type OodleGetter = fn() -> Result<OodleDecompress, Box<dyn std::error::Error>>;
pub type OodleDecompress = fn(comp_buf: &[u8], raw_buf: &mut [u8]) -> i32;
}
#[cfg(feature = "oodle_loader")]
pub use oodle_loader;
#[derive(
Clone,
Copy,
@ -135,6 +127,7 @@ pub enum Compression {
Gzip,
Oodle,
Zstd,
LZ4,
}
#[allow(clippy::large_enum_variant)]
@ -152,11 +145,3 @@ impl From<aes::Aes256> for Key {
Self::Some(value)
}
}
#[derive(Debug, Default)]
pub(crate) enum Oodle {
#[cfg(feature = "oodle")]
Some(oodle::OodleGetter),
#[default]
None,
}

repak/src/pak.rs

@ -1,5 +1,6 @@
use crate::data::build_partial_entry;
use crate::entry::Entry;
use crate::Compression;
use crate::{Compression, Error, PartialEntry};
use super::ext::{ReadExt, WriteExt};
use super::{Version, VersionMajor};
@ -7,10 +8,17 @@ use byteorder::{ReadBytesExt, WriteBytesExt, LE};
use std::collections::BTreeMap;
use std::io::{self, Read, Seek, Write};
#[derive(Default, Clone, Copy)]
pub(crate) struct Hash(pub(crate) [u8; 20]);
impl std::fmt::Debug for Hash {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Hash({})", hex::encode(self.0))
}
}
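The newtype exists so entry and footer hashes debug-print as hex instead of a 20-element byte array. A quick hypothetical check of the rendering:

#[test]
fn hash_debug_prints_hex() {
    assert_eq!(
        format!("{:?}", Hash([0xab; 20])),
        format!("Hash({})", "ab".repeat(20)),
    );
}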
#[derive(Debug)]
pub struct PakBuilder {
key: super::Key,
oodle: super::Oodle,
allowed_compression: Vec<Compression>,
}
@ -24,10 +32,6 @@ impl PakBuilder {
pub fn new() -> Self {
Self {
key: Default::default(),
#[cfg(not(feature = "oodle_implicit_dynamic"))]
oodle: super::Oodle::None,
#[cfg(feature = "oodle_implicit_dynamic")]
oodle: super::Oodle::Some(oodle_loader::decompress),
allowed_compression: Default::default(),
}
}
@ -36,25 +40,20 @@ impl PakBuilder {
self.key = super::Key::Some(key);
self
}
#[cfg(feature = "oodle_explicit")]
pub fn oodle(mut self, oodle_getter: super::oodle::OodleGetter) -> Self {
self.oodle = super::Oodle::Some(oodle_getter);
self
}
#[cfg(feature = "compression")]
pub fn compression(mut self, compression: impl IntoIterator<Item = Compression>) -> Self {
self.allowed_compression = compression.into_iter().collect();
self
}
pub fn reader<R: Read + Seek>(self, reader: &mut R) -> Result<PakReader, super::Error> {
PakReader::new_any_inner(reader, self.key, self.oodle)
PakReader::new_any_inner(reader, self.key)
}
pub fn reader_with_version<R: Read + Seek>(
self,
reader: &mut R,
version: super::Version,
) -> Result<PakReader, super::Error> {
PakReader::new_inner(reader, version, self.key, self.oodle)
PakReader::new_inner(reader, version, self.key)
}
pub fn writer<W: Write + Seek>(
self,
@ -78,7 +77,6 @@ impl PakBuilder {
pub struct PakReader {
pak: Pak,
key: super::Key,
oodle: super::Oodle,
}
#[derive(Debug)]
@ -144,8 +142,8 @@ impl Index {
self.entries
}
fn add_entry(&mut self, path: &str, entry: super::entry::Entry) {
self.entries.insert(path.to_string(), entry);
fn add_entry(&mut self, path: String, entry: super::entry::Entry) {
self.entries.insert(path, entry);
}
}
@ -166,14 +164,13 @@ impl PakReader {
fn new_any_inner<R: Read + Seek>(
reader: &mut R,
key: super::Key,
oodle: super::Oodle,
) -> Result<Self, super::Error> {
use std::fmt::Write;
let mut log = "\n".to_owned();
for ver in Version::iter() {
match Pak::read(&mut *reader, ver, &key) {
Ok(pak) => return Ok(Self { pak, key, oodle }),
Ok(pak) => return Ok(Self { pak, key }),
Err(err) => writeln!(log, "trying version {} failed: {}", ver, err)?,
}
}
@ -184,9 +181,8 @@ impl PakReader {
reader: &mut R,
version: super::Version,
key: super::Key,
oodle: super::Oodle,
) -> Result<Self, super::Error> {
Pak::read(reader, version, &key).map(|pak| Self { pak, key, oodle })
Pak::read(reader, version, &key).map(|pak| Self { pak, key })
}
pub fn version(&self) -> super::Version {
@ -227,7 +223,6 @@ impl PakReader {
self.pak.version,
&self.pak.compression,
&self.key,
&self.oodle,
writer,
),
None => Err(super::Error::MissingEntry(path.to_owned())),
@ -273,27 +268,91 @@ impl<W: Write + Seek> PakWriter<W> {
self.writer
}
pub fn write_file(&mut self, path: &str, data: impl AsRef<[u8]>) -> Result<(), super::Error> {
pub fn write_file(
&mut self,
path: &str,
allow_compress: bool,
data: impl AsRef<[u8]>,
) -> Result<(), super::Error> {
self.pak.index.add_entry(
path,
path.to_string(),
Entry::write_file(
&mut self.writer,
self.pak.version,
&mut self.pak.compression,
&self.allowed_compression,
data,
if allow_compress {
&self.allowed_compression
} else {
&[]
},
data.as_ref(),
)?,
);
Ok(())
}
pub fn entry_builder(&self) -> EntryBuilder {
EntryBuilder {
allowed_compression: self.allowed_compression.clone(),
}
}
pub fn write_entry<D: AsRef<[u8]>>(
&mut self,
path: String,
partial_entry: PartialEntry<D>,
) -> Result<(), Error> {
let stream_position = self.writer.stream_position()?;
let entry = partial_entry.build_entry(
self.pak.version,
&mut self.pak.compression,
stream_position,
)?;
entry.write(
&mut self.writer,
self.pak.version,
crate::entry::EntryLocation::Data,
)?;
self.pak.index.add_entry(path, entry);
partial_entry.write_data(&mut self.writer)?;
Ok(())
}
pub fn write_index(mut self) -> Result<W, super::Error> {
self.pak.write(&mut self.writer, &self.key)?;
Ok(self.writer)
}
}
struct Data<'d>(Box<dyn AsRef<[u8]> + Send + Sync + 'd>);
impl AsRef<[u8]> for Data<'_> {
fn as_ref(&self) -> &[u8] {
self.0.as_ref().as_ref()
}
}
#[derive(Clone)]
pub struct EntryBuilder {
allowed_compression: Vec<Compression>,
}
impl EntryBuilder {
/// Builds an entry in memory (compressed if requested) which must be written out later
pub fn build_entry<D: AsRef<[u8]> + Send + Sync>(
&self,
compress: bool,
data: D,
) -> Result<PartialEntry<D>, Error> {
let compression = compress
.then_some(self.allowed_compression.as_slice())
.unwrap_or_default();
build_partial_entry(compression, data)
}
}
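Because EntryBuilder is Clone and independent of the writer, compression can run on worker threads while a single thread owns the PakWriter. A hedged usage sketch (this version collects everything in memory; the CLI change below bounds memory with a rendezvous channel instead):

use rayon::prelude::*;
use std::io::{Seek, Write};

fn pack_all<W: Write + Seek>(
    mut pak: repak::PakWriter<W>,
    files: Vec<(String, Vec<u8>)>,
) -> Result<W, repak::Error> {
    let builder = pak.entry_builder();
    let entries = files
        .into_par_iter() // compression happens here, in parallel
        .map(|(path, data)| Ok((path, builder.build_entry(true, data)?)))
        .collect::<Result<Vec<_>, repak::Error>>()?;
    for (path, entry) in entries {
        pak.write_entry(path, entry)?; // serialized: one owner of the writer
    }
    pak.write_index()
}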
impl Pak {
fn read<R: Read + Seek>(
reader: &mut R,
@ -541,12 +600,12 @@ impl Pak {
index_writer.write_u32::<LE>(1)?; // we have path hash index
index_writer.write_u64::<LE>(path_hash_index_offset)?;
index_writer.write_u64::<LE>(phi_buf.len() as u64)?; // path hash index size
index_writer.write_all(&hash(&phi_buf))?;
index_writer.write_all(&hash(&phi_buf).0)?;
index_writer.write_u32::<LE>(1)?; // we have full directory index
index_writer.write_u64::<LE>(full_directory_index_offset)?;
index_writer.write_u64::<LE>(fdi_buf.len() as u64)?; // full directory index size
index_writer.write_all(&hash(&fdi_buf))?;
index_writer.write_all(&hash(&fdi_buf).0)?;
index_writer.write_u32::<LE>(encoded_entries.len() as u32)?;
index_writer.write_all(&encoded_entries)?;
@ -584,11 +643,11 @@ impl Pak {
}
}
fn hash(data: &[u8]) -> [u8; 20] {
fn hash(data: &[u8]) -> Hash {
use sha1::{Digest, Sha1};
let mut hasher = Sha1::new();
hasher.update(data);
hasher.finalize().into()
Hash(hasher.finalize().into())
}
fn generate_path_hash_index<W: Write>(

repak/tests/test.rs

@ -185,7 +185,7 @@ fn test_write(_version: repak::Version, _file_name: &str, bytes: &[u8]) {
for path in pak_reader.files() {
let data = pak_reader.get(&path, &mut reader).unwrap();
pak_writer.write_file(&path, data).unwrap();
pak_writer.write_file(&path, false, data).unwrap();
}
assert!(pak_writer.write_index().unwrap().into_inner() == reader.into_inner());

repak_cli/Cargo.toml

@ -19,7 +19,7 @@ path = "src/main.rs"
[features]
default = ["oodle"]
oodle = ["repak/oodle_implicit_dynamic"]
oodle = ["repak/oodle"]
[dependencies]
repak = { path = "../repak" }

repak_cli/src/main.rs

@ -487,7 +487,7 @@ fn pack(args: ActionPack) -> Result<(), repak::Error> {
use indicatif::ProgressIterator;
let iter = paths.iter();
let (log, mut iter) = if !args.quiet {
let (log, iter) = if !args.quiet {
let iter =
iter.progress_with_style(indicatif::ProgressStyle::with_template(STYLE).unwrap());
(
@ -498,7 +498,17 @@ fn pack(args: ActionPack) -> Result<(), repak::Error> {
(Output::Stdout, itertools::Either::Right(iter))
};
let log = log.clone();
iter.try_for_each(|p| {
let mut result = None;
let result_ref = &mut result;
rayon::in_place_scope(|scope| -> Result<(), repak::Error> {
let (tx, rx) = std::sync::mpsc::sync_channel(0);
let entry_builder = pak.entry_builder();
scope.spawn(move |_| {
*result_ref = Some(
iter.par_bridge()
.try_for_each(|p| -> Result<(), repak::Error> {
let rel = &p
.strip_prefix(input_path)
.expect("file not in input directory")
@ -507,8 +517,20 @@ fn pack(args: ActionPack) -> Result<(), repak::Error> {
if args.verbose {
log.println(format!("packing {}", &rel));
}
pak.write_file(rel, std::fs::read(p)?)
let entry = entry_builder.build_entry(true, std::fs::read(p)?)?;
tx.send((rel.to_string(), entry)).unwrap();
Ok(())
}),
);
});
for (path, entry) in rx {
pak.write_entry(path, entry)?;
}
Ok(())
})?;
result.unwrap()?;
pak.write_index()?;
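A note on the channel choice: sync_channel(0) is a rendezvous channel, so each rayon worker blocks in send() until the writer side has taken its entry, bounding in-flight compressed data by the worker count rather than the file count. A minimal demonstration of the hand-off:

fn main() {
    // sync_channel(0): send() blocks until a matching recv() is ready.
    let (tx, rx) = std::sync::mpsc::sync_channel::<&str>(0);
    let worker = std::thread::spawn(move || tx.send("compressed entry").unwrap());
    assert_eq!(rx.recv().unwrap(), "compressed entry"); // this recv unblocks the sender
    worker.join().unwrap();
}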