Mirror of https://github.com/xavo95/repak.git, synced 2025-01-18 19:04:07 +00:00
fix bad compression and make example multithreaded
This commit is contained in: commit 7dda23967d (parent 0959394241)
4 changed files with 23 additions and 11 deletions
Binary file not shown.
@@ -6,12 +6,22 @@ pub fn unpack(path: String, key: String) -> Result<(), unpak::Error> {
             .unwrap_or_default(),
     );
     let mut pak = super::load_pak(path.clone(), key)?;
+    std::thread::scope(|scope| -> Result<(), unpak::Error> {
         for file in pak.files() {
-            std::fs::create_dir_all(folder.join(&file).parent().expect("will be a file"))?;
             match pak.get(&file).expect("file should be in pak") {
-                Ok(data) => std::fs::write(folder.join(&file), data)?,
+                Ok(data) => {
+                    scope.spawn(move || -> Result<(), unpak::Error> {
+                        std::fs::create_dir_all(
+                            folder.join(&file).parent().expect("will be a file"),
+                        )?;
+                        println!("{file}");
+                        std::fs::write(folder.join(&file), data)?;
+                        Ok(())
+                    });
+                }
                 Err(e) => eprintln!("{e}"),
             }
         }
         Ok(())
+    })
 }
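The unpack example now hands each extracted file to its own scoped thread instead of writing files sequentially. Below is a minimal standalone sketch of the same pattern; the entries list and the out folder are made-up stand-ins for pak.files() / pak.get(&file), not part of this commit.

use std::path::Path;

fn main() -> std::io::Result<()> {
    // Hypothetical stand-ins for pak.files() / pak.get(&file):
    // (path inside the archive, decompressed bytes).
    let entries = vec![
        ("sub/dir/a.txt".to_string(), b"hello".to_vec()),
        ("b.txt".to_string(), b"world".to_vec()),
    ];
    let folder = Path::new("out");

    // std::thread::scope joins every spawned thread before it returns,
    // which is why the workers may borrow from the enclosing function.
    std::thread::scope(|scope| {
        for (file, data) in entries {
            scope.spawn(move || -> std::io::Result<()> {
                let dest = folder.join(&file);
                std::fs::create_dir_all(dest.parent().expect("will be a file"))?;
                println!("{file}");
                std::fs::write(dest, data)
            });
        }
    });
    Ok(())
}

As in the diff above, the handle returned by scope.spawn is dropped without being joined, so a Result returned from a worker thread is discarded; only panics are propagated when the scope ends.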
@@ -7,14 +7,13 @@ fn main() {
         help()
     };
     // can't map key to &[u8] because refers to owned data
-    match match args.next().unwrap_or_default().as_str() {
+    if let Err(e) = match args.next().unwrap_or_default().as_str() {
         "version" => subcommands::version(path, args.next().unwrap_or_default()),
         "list" => subcommands::list(path, args.next().unwrap_or_default()),
         "unpack" | "" => subcommands::unpack(path, args.next().unwrap_or_default()),
         "help" | _ => help(),
     } {
-        Ok(_) => println!("success!"),
-        Err(e) => eprintln!("{e}"),
+        eprintln!("{e}")
     }
 }
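In main, the nested match match expression collapses into if let Err(e) = match ..., since only the error arm did anything worth keeping (the "success!" print goes away). A tiny self-contained sketch of the same refactor, with an invented run helper and subcommand name:

fn run(cmd: &str) -> Result<(), String> {
    match cmd {
        "ok" => Ok(()),
        other => Err(format!("unknown subcommand: {other}")),
    }
}

fn main() {
    // Before: match run("nope") { Ok(_) => println!("success!"), Err(e) => eprintln!("{e}") }
    // After: only the error case is handled.
    if let Err(e) = run("nope") {
        eprintln!("{e}");
    }
}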
@@ -98,7 +98,10 @@ impl Entry {
             io::copy(
                 &mut <$decompressor>::new(
                     &data[match version >= Version::RelativeChunkOffsets {
-                        true => block.start as usize..block.end as usize,
+                        true => {
+                            (block.start - (data_offset - self.offset)) as usize
+                                ..(block.end - (data_offset - self.offset)) as usize
+                        }
                         false => {
                             (block.start - data_offset) as usize
                                 ..(block.end - data_offset) as usize
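This hunk carries the "bad compression" part of the commit message: for paks at or above Version::RelativeChunkOffsets, the block bounds stored in the entry are not indices into the buffered data, so slicing data[block.start..block.end] picked the wrong slice of the buffer. The new arm translates the bounds into buffer indices by subtracting data_offset - self.offset, i.e. it reads them as relative to the entry's own offset, mirroring the unchanged false arm that subtracts the absolute data_offset. A small worked example of the index arithmetic, using made-up offsets:

fn main() {
    // Made-up numbers purely to illustrate the index arithmetic.
    let self_offset: u64 = 1000; // where the entry starts in the pak file
    let data_offset: u64 = 1053; // where the buffered payload starts
    let (start, end): (u64, u64) = (53, 117); // block bounds relative to the entry

    // RelativeChunkOffsets and newer: translate entry-relative bounds
    // into indices of the buffer that begins at data_offset.
    let relative = (start - (data_offset - self_offset)) as usize
        ..(end - (data_offset - self_offset)) as usize;
    assert_eq!(relative, 0..64);

    // Older versions store absolute file offsets, so only data_offset
    // has to be subtracted (the `false` arm above).
    let (abs_start, abs_end): (u64, u64) = (1053, 1117);
    let absolute = (abs_start - data_offset) as usize..(abs_end - data_offset) as usize;
    assert_eq!(absolute, 0..64);

    println!("{relative:?} {absolute:?}");
}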