Mirror of https://github.com/xavo95/repak.git, synced 2025-01-18 19:04:07 +00:00
Clean up entry writing
This commit is contained in:
parent fc055d4e40
commit 0853cf7875

2 changed files with 93 additions and 101 deletions
@@ -135,7 +135,118 @@ impl Entry {
        version: super::Version,
        location: EntryLocation,
    ) -> Result<(), super::Error> {
        if version >= super::Version::V10 && location == EntryLocation::Index {
            writer.write_u64::<LE>(match location {
                EntryLocation::Data => 0,
                EntryLocation::Index => self.offset,
            })?;
            writer.write_u64::<LE>(self.compressed)?;
            writer.write_u64::<LE>(self.uncompressed)?;
            let compression = self.compression.map_or(0, |n| n + 1);
            match version {
                Version::V8A => writer.write_u8(compression.try_into().unwrap())?,
                _ => writer.write_u32::<LE>(compression)?,
            }

            if version.version_major() == VersionMajor::Initial {
                writer.write_u64::<LE>(self.timestamp.unwrap_or_default())?;
            }
            if let Some(hash) = self.hash {
                writer.write_all(&hash)?;
            } else {
                panic!("hash missing");
            }
            if version.version_major() >= VersionMajor::CompressionEncryption {
                if let Some(blocks) = &self.blocks {
                    writer.write_u32::<LE>(blocks.len() as u32)?;
                    for block in blocks {
                        block.write(writer)?;
                    }
                }
                writer.write_u8(self.flags)?;
                writer.write_u32::<LE>(self.compression_block_size)?;
            }

            Ok(())
        }

    pub fn read_encoded<R: io::Read>(
        reader: &mut R,
        version: super::Version,
    ) -> Result<Self, super::Error> {
        let bits = reader.read_u32::<LE>()?;
        let compression = match (bits >> 23) & 0x3f {
            0 => None,
            n => Some(n - 1),
        };

        let encrypted = (bits & (1 << 22)) != 0;
        let compression_block_count: u32 = (bits >> 6) & 0xffff;
        let mut compression_block_size = bits & 0x3f;

        if compression_block_size == 0x3f {
            compression_block_size = reader.read_u32::<LE>()?;
        } else {
            compression_block_size <<= 11;
        }

        let mut var_int = |bit: u32| -> Result<_, super::Error> {
            Ok(if (bits & (1 << bit)) != 0 {
                reader.read_u32::<LE>()? as u64
            } else {
                reader.read_u64::<LE>()?
            })
        };

        let offset = var_int(31)?;
        let uncompressed = var_int(30)?;
        let compressed = match compression {
            None => uncompressed,
            _ => var_int(29)?,
        };

        let offset_base = Entry::get_serialized_size(version, compression, compression_block_count);

        let blocks = if compression_block_count == 1 && !encrypted {
            Some(vec![Block {
                start: offset_base,
                end: offset_base + compressed,
            }])
        } else if compression_block_count > 0 {
            let mut index = offset_base;
            Some(
                (0..compression_block_count)
                    .map(|_| {
                        let mut block_size = reader.read_u32::<LE>()? as u64;
                        let block = Block {
                            start: index,
                            end: index + block_size,
                        };
                        if encrypted {
                            block_size = align(block_size);
                        }
                        index += block_size;
                        Ok(block)
                    })
                    .collect::<Result<Vec<_>, super::Error>>()?,
            )
        } else {
            None
        };

        Ok(Entry {
            offset,
            compressed,
            uncompressed,
            timestamp: None,
            compression,
            hash: None,
            blocks,
            flags: encrypted as u8,
            compression_block_size,
        })
    }

    pub fn write_encoded<W: io::Write>(&self, writer: &mut W) -> Result<(), super::Error> {
        let mut compression_block_size = (self.compression_block_size >> 11) & 0x3f;
        if (compression_block_size << 11) != self.compression_block_size {
            compression_block_size = 0x3f;
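For reference, the 32-bit word that read_encoded pulls apart packs several fields into one value. The sketch below is not part of the commit, and its struct and function names are invented for illustration; it unpacks the word with the same shifts and masks as the code above. Bits 23-28 hold the compression slot plus one (zero meaning uncompressed), bit 22 is the encryption flag, bits 6-21 carry the compression block count, and bits 0-5 are a block-size hint, where 0x3f means the real size follows as a full u32. Bits 29-31 are the flags the var_int closure checks to decide whether compressed, uncompressed, and offset were stored as u32 or u64.

struct EncodedHeader {
    compression: Option<u32>, // bits 23..29: compression slot + 1; 0 = none
    encrypted: bool,          // bit 22
    block_count: u32,         // bits 6..22
    block_size_hint: u32,     // bits 0..6: size >> 11; 0x3f = explicit u32 follows
}

fn unpack_header(bits: u32) -> EncodedHeader {
    EncodedHeader {
        compression: match (bits >> 23) & 0x3f {
            0 => None,
            n => Some(n - 1),
        },
        encrypted: (bits & (1 << 22)) != 0,
        block_count: (bits >> 6) & 0xffff,
        block_size_hint: bits & 0x3f,
        // Bits 29..32 (checked by the var_int closure above) are not stored here;
        // they only select between u32 and u64 encodings of the size/offset fields.
    }
}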
@@ -193,121 +304,6 @@ impl Entry {
            }

            Ok(())
        } else {
            writer.write_u64::<LE>(match location {
                EntryLocation::Data => 0,
                EntryLocation::Index => self.offset,
            })?;
            writer.write_u64::<LE>(self.compressed)?;
            writer.write_u64::<LE>(self.uncompressed)?;
            let compression = self.compression.map_or(0, |n| n + 1);
            match version {
                Version::V8A => writer.write_u8(compression.try_into().unwrap())?,
                _ => writer.write_u32::<LE>(compression)?,
            }

            if version.version_major() == VersionMajor::Initial {
                writer.write_u64::<LE>(self.timestamp.unwrap_or_default())?;
            }
            if let Some(hash) = self.hash {
                writer.write_all(&hash)?;
            } else {
                panic!("hash missing");
            }
            if version.version_major() >= VersionMajor::CompressionEncryption {
                if let Some(blocks) = &self.blocks {
                    writer.write_u32::<LE>(blocks.len() as u32)?;
                    for block in blocks {
                        block.write(writer)?;
                    }
                }
                writer.write_u8(self.flags)?;
                writer.write_u32::<LE>(self.compression_block_size)?;
            }

            Ok(())
        }
    }

    pub fn read_encoded<R: io::Read>(
        reader: &mut R,
        version: super::Version,
    ) -> Result<Self, super::Error> {
        let bits = reader.read_u32::<LE>()?;
        let compression = match (bits >> 23) & 0x3f {
            0 => None,
            n => Some(n - 1),
        };

        let encrypted = (bits & (1 << 22)) != 0;
        let compression_block_count: u32 = (bits >> 6) & 0xffff;
        let mut compression_block_size = bits & 0x3f;

        if compression_block_size == 0x3f {
            compression_block_size = reader.read_u32::<LE>()?;
        } else {
            compression_block_size <<= 11;
        }

        let mut var_int = |bit: u32| -> Result<_, super::Error> {
            Ok(if (bits & (1 << bit)) != 0 {
                reader.read_u32::<LE>()? as u64
            } else {
                reader.read_u64::<LE>()?
            })
        };

        let offset = var_int(31)?;
        let uncompressed = var_int(30)?;
        let compressed = match compression {
            None => uncompressed,
            _ => var_int(29)?,
        };

        let offset_base =
            match version.version_major() >= VersionMajor::RelativeChunkOffsets {
                true => 0,
                false => offset,
            } + Entry::get_serialized_size(version, compression, compression_block_count);

        let blocks = if compression_block_count == 1 && !encrypted {
            Some(vec![Block {
                start: offset_base,
                end: offset_base + compressed,
            }])
        } else if compression_block_count > 0 {
            let mut index = offset_base;
            Some(
                (0..compression_block_count)
                    .map(|_| {
                        let mut block_size = reader.read_u32::<LE>()? as u64;
                        let block = Block {
                            start: index,
                            end: index + block_size,
                        };
                        if encrypted {
                            block_size = align(block_size);
                        }
                        index += block_size;
                        Ok(block)
                    })
                    .collect::<Result<Vec<_>, super::Error>>()?,
            )
        } else {
            None
        };

        Ok(Entry {
            offset,
            compressed,
            uncompressed,
            timestamp: None,
            compression,
            hash: None,
            blocks,
            flags: encrypted as u8,
            compression_block_size,
        })
    }

    pub fn read_file<R: io::Read + io::Seek, W: io::Write>(
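Both copies of read_encoded rebuild the block table the same way, so that accumulation is easy to look at in isolation. The standalone sketch below is a hypothetical helper, not code from the repository: each block's extent is its stored compressed size, but when the entry is encrypted the on-disk data is padded, so the running offset advances by the aligned size instead. It assumes align rounds up to the 16-byte AES block boundary, which is what that padding exists for.

struct Block {
    start: u64,
    end: u64,
}

// Assumed behaviour of the crate's align helper: round up to the AES block size.
fn align(size: u64) -> u64 {
    (size + 15) & !15
}

fn build_blocks(sizes: &[u64], offset_base: u64, encrypted: bool) -> Vec<Block> {
    let mut index = offset_base;
    sizes
        .iter()
        .map(|&size| {
            // The block itself covers exactly its compressed size...
            let block = Block { start: index, end: index + size };
            // ...but the next block starts after any encryption padding.
            index += if encrypted { align(size) } else { size };
            block
        })
        .collect()
}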
@@ -371,11 +371,7 @@ impl Pak {
            let mut encoded_entries = io::Cursor::new(vec![]);
            for entry in self.index.entries.values() {
                offsets.push(encoded_entries.get_ref().len() as u32);
                entry.write(
                    &mut encoded_entries,
                    self.version,
                    super::entry::EntryLocation::Index,
                )?;
                entry.write_encoded(&mut encoded_entries)?;
            }
            (encoded_entries.into_inner(), offsets)
        };
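The pak.rs hunk keeps the surrounding pattern intact: each entry's byte offset within the encoded-entry buffer is recorded before the entry is serialized into it, and the buffer and offset list are returned together. A minimal, generic version of that pattern (stand-in names, raw byte records instead of Entry) could look like this:

use std::io::{self, Cursor, Write};

fn encode_records(records: &[&[u8]]) -> io::Result<(Vec<u8>, Vec<u32>)> {
    let mut offsets = Vec::with_capacity(records.len());
    let mut encoded = Cursor::new(Vec::new());
    for record in records {
        // Capture where this record starts before writing it.
        offsets.push(encoded.get_ref().len() as u32);
        encoded.write_all(record)?;
    }
    Ok((encoded.into_inner(), offsets))
}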