Allow disabling compression per file

Truman Kilen 2025-01-18 20:32:20 -06:00
parent d8b3d2f089
commit 3b78c00527
3 changed files with 23 additions and 10 deletions
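The change threads a per-file boolean through both write paths: when it is false, an empty allowed-compression list is handed to the entry writer, so that file is stored uncompressed even if the pak has compression methods configured. A minimal usage sketch of the new synchronous signature, assuming a PakWriter named pak_writer already exists with some allowed compression methods (the paths and data below are invented; only the write_file signature comes from this commit):

    // `pak_writer` is assumed to be an already-configured repak::PakWriter.
    // Only the (path, allow_compress, data) signature is taken from this commit.
    pak_writer.write_file("config/settings.json", true, std::fs::read("settings.json")?)?;  // may be compressed
    pak_writer.write_file("movies/intro.mp4", false, std::fs::read("intro.mp4")?)?;         // always stored raw
    pak_writer.write_index()?;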


@@ -89,7 +89,7 @@ pub struct PakWriter<W: Write + Seek> {
 }
 
 pub struct ParallelPakWriter {
-    tx: std::sync::mpsc::SyncSender<(String, Arc<Vec<u8>>)>,
+    tx: std::sync::mpsc::SyncSender<(String, bool, Arc<Vec<u8>>)>,
 }
 
 #[derive(Debug)]
@@ -273,14 +273,23 @@ impl<W: Write + Seek> PakWriter<W> {
         self.writer
     }
 
-    pub fn write_file(&mut self, path: &str, data: impl AsRef<[u8]>) -> Result<(), super::Error> {
+    pub fn write_file(
+        &mut self,
+        path: &str,
+        allow_compress: bool,
+        data: impl AsRef<[u8]>,
+    ) -> Result<(), super::Error> {
         self.pak.index.add_entry(
             path,
             Entry::write_file(
                 &mut self.writer,
                 self.pak.version,
                 &mut self.pak.compression,
-                &self.allowed_compression,
+                if allow_compress {
+                    &self.allowed_compression
+                } else {
+                    &[]
+                },
                 data.as_ref(),
             )?,
         );
@@ -308,9 +317,13 @@ impl<W: Write + Seek> PakWriter<W> {
                 .into_iter()
                 .parallel_map_scoped(
                     scope,
-                    |(path, data): (String, Arc<Vec<u8>>)| -> Result<_, Error> {
-                        let partial_entry =
-                            build_partial_entry(&self.allowed_compression, &data)?;
+                    |(path, allow_compress, data): (String, bool, Arc<Vec<u8>>)| -> Result<_, Error> {
+                        let allowed_compression = if allow_compress {
+                            self.allowed_compression.as_slice()
+                        } else {
+                            &[]
+                        };
+                        let partial_entry = build_partial_entry(allowed_compression, &data)?;
                         let data = partial_entry.blocks.is_empty().then(|| Arc::new(data));
                         Ok((path, data, partial_entry))
                     },
@@ -363,8 +376,8 @@ impl<W: Write + Seek> PakWriter<W> {
 }
 
 impl ParallelPakWriter {
-    pub fn write_file(&mut self, path: String, data: Vec<u8>) -> Result<(), Error> {
-        self.tx.send((path, Arc::new(data))).unwrap();
+    pub fn write_file(&mut self, path: String, compress: bool, data: Vec<u8>) -> Result<(), Error> {
+        self.tx.send((path, compress, Arc::new(data))).unwrap();
         Ok(())
     }
 }
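The parallel path carries the same flag through the channel as a (path, compress, data) tuple. A sketch of how a caller might use it, assuming the ParallelPakWriter is handed out through a closure-based method (called parallel here; that name and its signature are assumptions, and the buffers are invented, only the three-argument write_file is from this commit):

    // Hypothetical driver; `parallel` and its closure shape are assumed, not shown in this diff.
    // `text_bytes` and `movie_bytes` are plain Vec<u8> buffers prepared by the caller.
    pak_writer.parallel(|par| {
        // Text-heavy asset: let the configured compression methods apply.
        par.write_file("data/readme.txt".to_string(), true, text_bytes)?;
        // Already-compressed media: skip recompression, it costs CPU for little size gain.
        par.write_file("movies/intro.mp4".to_string(), false, movie_bytes)?;
        Ok(())
    })?;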


@@ -185,7 +185,7 @@ fn test_write(_version: repak::Version, _file_name: &str, bytes: &[u8]) {
 
     for path in pak_reader.files() {
         let data = pak_reader.get(&path, &mut reader).unwrap();
-        pak_writer.write_file(&path, data).unwrap();
+        pak_writer.write_file(&path, false, data).unwrap();
     }
 
     assert!(pak_writer.write_index().unwrap().into_inner() == reader.into_inner());


@@ -508,7 +508,7 @@ fn pack(args: ActionPack) -> Result<(), repak::Error> {
             if args.verbose {
                 log.println(format!("packing {}", &rel));
             }
-            writer.write_file(rel.to_string(), std::fs::read(p)?)?;
+            writer.write_file(rel.to_string(), true, std::fs::read(p)?)?;
         }
         Ok(())
     })?;
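The CLI keeps passing true, so packing still compresses everything the configured methods cover; only library callers exercise the new switch for now. A hypothetical refinement, not part of this commit, could decide per file from the extension:

    // Hypothetical, not in this commit: store already-compressed formats raw.
    let already_compressed = matches!(
        p.extension().and_then(|e| e.to_str()),
        Some("mp4" | "ogg" | "png" | "jpg")
    );
    writer.write_file(rel.to_string(), !already_compressed, std::fs::read(p)?)?;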