Moved tools from private repo to public

This commit is contained in:
xavo95 2025-01-02 23:52:10 +01:00
commit 8565eba0f3
Signed by: xavo95
GPG key ID: CBF8ADED6DEBB783
15 changed files with 1034 additions and 0 deletions

3
.gitignore vendored Normal file
View file

@ -0,0 +1,3 @@
/target
.idea
*.exe

254
Cargo.lock generated Normal file
View file

@ -0,0 +1,254 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aes-key-finder"
version = "0.1.0"
dependencies = [
"goblin",
"offset-finder",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
dependencies = [
"crossbeam-epoch",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-epoch"
version = "0.9.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
[[package]]
name = "cursor"
version = "0.1.0"
dependencies = [
"thiserror 2.0.9",
"widestring",
]
[[package]]
name = "either"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
[[package]]
name = "goblin"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53ab3f32d1d77146981dea5d6b1e8fe31eedcb7013e5e00d6ccd1259a4b4d923"
dependencies = [
"log",
"plain",
"scroll",
]
[[package]]
name = "log"
version = "0.4.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
[[package]]
name = "offset-finder"
version = "0.1.0"
dependencies = [
"goblin",
"log",
"patternscanner",
"pe-utils",
"serde",
"thiserror 2.0.9",
]
[[package]]
name = "patternscanner"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "231e020a85ebd759abfe6da0220754bfee066eb5b6e4970a673c09a96da50033"
dependencies = [
"rayon",
"thiserror 1.0.69",
]
[[package]]
name = "pe-utils"
version = "0.1.0"
dependencies = [
"goblin",
"thiserror 2.0.9",
]
[[package]]
name = "plain"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
[[package]]
name = "proc-macro2"
version = "1.0.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rayon"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
dependencies = [
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
dependencies = [
"crossbeam-deque",
"crossbeam-utils",
]
[[package]]
name = "restorer"
version = "0.1.0"
dependencies = [
"goblin",
"log",
"pe-utils",
"thiserror 2.0.9",
]
[[package]]
name = "scroll"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ab8598aa408498679922eff7fa985c25d58a90771bd6be794434c5277eab1a6"
dependencies = [
"scroll_derive",
]
[[package]]
name = "scroll_derive"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f81c2fde025af7e69b1d1420531c8a8811ca898919db177141a85313b1cb932"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde"
version = "1.0.217"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.217"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "syn"
version = "2.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "987bc0be1cdea8b10216bd06e2ca407d40b9543468fafd3ddfb02f36e77f71f3"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thiserror"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
dependencies = [
"thiserror-impl 1.0.69",
]
[[package]]
name = "thiserror"
version = "2.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc"
dependencies = [
"thiserror-impl 2.0.9",
]
[[package]]
name = "thiserror-impl"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "thiserror-impl"
version = "2.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "unicode-ident"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
[[package]]
name = "widestring"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7219d36b6eac893fa81e84ebe06485e7dcbb616177469b142df14f1f4deb1311"

30
Cargo.toml Normal file
View file

@ -0,0 +1,30 @@
[workspace]
resolver = "2"
members = [
"aes-key-finder",
"cursor",
"offset-finder",
"pe-utils",
"restorer"
]
[workspace.package]
version = "0.1.0"
edition = "2021"
[workspace.dependencies]
goblin = "0.9.2"
log = "0.4.22"
patternscanner = "0.5.0"
serde = { version = "1.0.217", features = ["derive"] }
thiserror = "2.0.9"
widestring = "1.1.0"
offset-finder = { path = "offset-finder" }
pe-utils = { path = "pe-utils" }
[profile.release]
strip = true # Automatically strip symbols from the binary.
lto = true # Link-time optimization.
opt-level = 3 # Optimization level 3.
codegen-units = 1 # Maximum size reduction optimizations.

28
README.md Normal file
View file

@ -0,0 +1,28 @@
# Reverse Assembling Program Engineering(RAPE) Toolkit
This repository contains a subset of my private tools for Reverse Engineering, a new project(codename: Symphonic) will
make an appearance soon. As such, all required dependencies are made public ahead of time.
Also, in the past 6 months I have received questions such as: How do you find this? How do you get AES keys? How do
you fix the dump? I hope this helps people who want to learn.
## Quick tool summary
- AES Key Finder
- Should be self-explanatory but, basically after parsing a PE file you can pass image base, sections, and
data(raw binary) and the filter(this tool includes Restricted and Relax filters, but you can add more)
- To get the 3 first params, please refer to PE Utils down below
- Cursor
- An implementation for a cursor Read + Write. Rust already has one, but I needed something like string and
wide string parsing, so I created my own
- Offset Finder
- This library allows you to find byte patterns in executables
- Allows to find either exact or partial matches by leveraging wildcards(??)
- Also has options for silent reporting(skip_print_offset) or allow multiple matches
- Leveraging PE Utils it returns both in file and RVA of the pattern found
- PE Utils
- Subset of functions to work with PE files. Not very valuable alone, but it allows to omit repetitive code for the
rest of projects
- Restorer
- Allows you to go from a memory dump (Frida and others, including private dumpers) to a file whose section table is
fixed, ready for further analysis with other tools

View file

@ -0,0 +1,9 @@
[package]
name = "aes-key-finder"
version.workspace = true
edition.workspace = true
[dependencies]
goblin.workspace = true
offset-finder.workspace = true

110
aes-key-finder/src/lib.rs Normal file
View file

@ -0,0 +1,110 @@
use std::collections::{HashMap, HashSet};
use std::sync::OnceLock;
use goblin::pe::section_table::SectionTable;
// TODO: Check for more false positives
/// 32-byte sequences the AES-key patterns are known to match that are NOT
/// real keys; any extracted candidate equal to one of these is discarded.
/// NOTE(review): the second entry is the SHA-256 initial hash constants in
/// little-endian (6a09e667, bb67ae85, ...), presumably matched because they
/// are loaded the same way as keys — confirm before editing this list.
const FALSE_POSITIVES: [[u8; 32]; 2] = [
    [0x6F, 0x16, 0x80, 0x73, 0xB9, 0xB2, 0x14, 0x49, 0xD7, 0x42, 0x24, 0x17, 0x00, 0x06, 0x8A, 0xDA, 0xBC, 0x30, 0x6F, 0xA9, 0xAA, 0x38, 0x31, 0x16, 0x4D, 0xEE, 0x8D, 0xE3, 0x4E, 0x0E, 0xFB, 0xB0],
    [0x67, 0xE6, 0x09, 0x6A, 0x85, 0xAE, 0x67, 0xBB, 0x72, 0xF3, 0x6E, 0x3C, 0x3A, 0xF5, 0x4F, 0xA5, 0x7F, 0x52, 0x0E, 0x51, 0x8C, 0x68, 0x05, 0x9B, 0xAB, 0xD9, 0x83, 0x1F, 0x19, 0xCD, 0xE0, 0x5B]
];
/// Pairs a pattern set (`locator`) with the per-pattern byte offsets at
/// which the key material sits inside each match.
struct Filter {
    // Key: index into `locator.partial_match`. Value: the eight offsets
    // (relative to the match start) of the 4-byte immediates that together
    // form a 32-byte key.
    offsets: HashMap<usize, &'static [u8; 8]>,
    locator: offset_finder::OffsetLocator<'static>,
}

// Lazily-initialised singleton filters, built on first use by
// `get_restricted_filter` / `get_relaxed_filter`.
static RESTRICTED_FILTER: OnceLock<Filter> = OnceLock::new();
static RELAXED_FILTER: OnceLock<Filter> = OnceLock::new();
/// Returns the lazily-built "restricted" filter: patterns with explicit
/// `c7`-immediate opcodes and few wildcards, minimising false positives.
fn get_restricted_filter() -> &'static Filter {
    RESTRICTED_FILTER.get_or_init(|| {
        // Pattern index -> offsets of the eight 32-bit key fragments.
        let offsets: HashMap<usize, &'static [u8; 8]> = HashMap::from([
            (0, &[2, 9, 16, 23, 30, 37, 44, 51]),
            (1, &[3, 10, 17, 24, 35, 42, 49, 56]),
            (2, &[3, 14, 25, 32, 44, 51, 58, 65]),
            (3, &[3, 10, 21, 28, 35, 42, 49, 56]),
            (4, &[3, 10, 21, 28, 35, 42, 49, 56]),
        ]);
        Filter {
            offsets,
            locator: offset_finder::OffsetLocator {
                name: "AES",
                partial_match: vec![
                    "c7 01 ?? ?? ?? ?? c7 41 04 ?? ?? ?? ?? c7 41 08 ?? ?? ?? ?? c7 41 0c ?? ?? ?? ?? c7 41 10 ?? ?? ?? ?? c7 41 14 ?? ?? ?? ?? c7 41 18 ?? ?? ?? ?? c7 41 1c ?? ?? ?? ?? c3",
                    "c7 45 d0 ?? ?? ?? ?? c7 45 d4 ?? ?? ?? ?? c7 45 d8 ?? ?? ?? ?? c7 45 dc ?? ?? ?? ?? 0f ?? ?? ?? c7 45 e0 ?? ?? ?? ?? c7 45 e4 ?? ?? ?? ?? c7 45 e8 ?? ?? ?? ?? c7 45 ec ?? ?? ?? ?? 0f",
                    "c7 45 d0 ?? ?? ?? ?? ?? ?? ?? ?? c7 45 d4 ?? ?? ?? ?? ?? ?? ?? ?? c7 45 d8 ?? ?? ?? ?? c7 45 dc ?? ?? ?? ?? ?? ?? ?? ?? ?? c7 45 e0 ?? ?? ?? ?? c7 45 e4 ?? ?? ?? ?? c7 45 e8 ?? ?? ?? ?? c7 45 ec ?? ?? ?? ??",
                    "c7 45 d0 ?? ?? ?? ?? c7 45 d4 ?? ?? ?? ?? ?? ?? ?? ?? c7 45 d8 ?? ?? ?? ?? c7 45 dc ?? ?? ?? ?? c7 45 e0 ?? ?? ?? ?? c7 45 e4 ?? ?? ?? ?? c7 45 e8 ?? ?? ?? ?? c7 45 ec ?? ?? ?? ??",
                    "c7 45 ?? ?? ?? ?? ?? c7 45 ?? ?? ?? ?? ?? ?? ?? ?? ?? c7 45 ?? ?? ?? ?? ?? c7 45 ?? ?? ?? ?? ?? c7 45 ?? ?? ?? ?? ?? c7 45 ?? ?? ?? ?? ?? c7 45 ?? ?? ?? ?? ?? c7 45 ?? ?? ?? ?? ??",
                ],
                full_match: "",
                skip_offset_print: false,
                allow_multiple_matches: true,
            },
        }
    })
}
/// Returns the lazily-built "relaxed" filter: heavily wildcarded patterns
/// that match more candidates at the cost of more false positives.
fn get_relaxed_filter() -> &'static Filter {
    RELAXED_FILTER.get_or_init(|| {
        // Pattern index -> offsets of the eight 32-bit key fragments.
        let offsets: HashMap<usize, &'static [u8; 8]> = HashMap::from([
            (0, &[3, 10, 17, 24, 35, 42, 49, 56]),
            (1, &[2, 9, 16, 23, 30, 37, 44, 51]),
            (2, &[3, 10, 21, 28, 35, 42, 49, 56]),
        ]);
        Filter {
            offsets,
            locator: offset_finder::OffsetLocator {
                name: "AES",
                partial_match: vec![
                    "c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ??",
                    "c7 ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ??",
                    "c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ?? c7 ?? ?? ?? ?? ?? ??",
                ],
                full_match: "",
                skip_offset_print: false,
                allow_multiple_matches: true,
            },
        }
    })
}
/// Scans `data` with the restricted (low-false-positive) pattern set and
/// returns every distinct 32-byte AES key candidate found.
///
/// * `image_base` - base address added when file offsets are resolved to
///   virtual addresses (see `pe_utils::resolve_symbol`).
/// * `sections` - parsed PE section table for `data`.
/// * `data` - raw bytes of the PE file.
pub fn dump_aes_key_restricted(image_base: usize,
                               sections: &[SectionTable],
                               data: &[u8]) -> Result<HashSet<Vec<u8>>, offset_finder::Error> {
    dump_aes_key_internal(image_base, sections, data, get_restricted_filter())
}
/// Scans `data` with the relaxed (wildcard-heavy) pattern set and returns
/// every distinct 32-byte AES key candidate found. Expect more false
/// positives than `dump_aes_key_restricted`.
///
/// Parameters are the same as `dump_aes_key_restricted`.
pub fn dump_aes_key(image_base: usize,
                    sections: &[SectionTable],
                    data: &[u8]) -> Result<HashSet<Vec<u8>>, offset_finder::Error> {
    dump_aes_key_internal(image_base, sections, data, get_relaxed_filter())
}
/// Shared implementation: runs the filter's patterns over `data`, then for
/// every match reassembles the 32-byte key from the eight 4-byte immediates
/// located at the filter's per-pattern offsets.
fn dump_aes_key_internal(image_base: usize,
                         sections: &[SectionTable],
                         data: &[u8],
                         filter: &Filter) -> Result<HashSet<Vec<u8>>, offset_finder::Error> {
    // Map of pattern index -> list of (file offset, resolved address) hits.
    let results = filter.locator.find_all_partial_only(image_base, sections, data)?;
    // Probabilistic allocation, 50% or more will be false positives, so preallocate (n / 2) + 1
    let mut output: HashSet<Vec<u8>> = HashSet::with_capacity((results.len() / 2) + 1);
    for outer in results {
        // NOTE(review): panics if a pattern index has no `offsets` entry; the
        // filter constructors above define one entry per pattern, so this
        // holds as long as the two stay in sync.
        let offset = *filter.offsets.get(&outer.0).unwrap();
        for inner in outer.1 {
            // Reassemble the key: eight 4-byte fragments, 32 bytes total.
            let mut key = Vec::with_capacity(32);
            for tmp in offset {
                // `inner.0` is the match's file offset; `tmp` is the fragment
                // offset within the match.
                let tmp = inner.0 + (*tmp as usize);
                // NOTE(review): slicing panics if a match lies within 4 bytes
                // of the end of `data` — TODO confirm that cannot happen.
                key.extend_from_slice(&data[tmp..tmp + 4]);
            }
            // Drop candidates that are known false positives.
            let mut should_add = true;
            for false_positive in FALSE_POSITIVES {
                if false_positive.eq(&key[0..32]) {
                    should_add = false;
                    break;
                }
            }
            if should_add {
                output.insert(key);
            }
        }
    }
    Ok(output)
}

8
cursor/Cargo.toml Normal file
View file

@ -0,0 +1,8 @@
[package]
name = "cursor"
version.workspace = true
edition.workspace = true
[dependencies]
thiserror.workspace = true
widestring.workspace = true

141
cursor/src/lib.rs Normal file
View file

@ -0,0 +1,141 @@
use std::io::{Read, Seek, Write};
/// Unified error type for cursor reads and writes: plain IO failures plus
/// the various narrow/wide string conversion failures.
#[derive(thiserror::Error, Debug)]
pub enum Error {
    #[error("IO Error: {0}")]
    Io(#[from] std::io::Error),
    /// Wide string contained no NUL where one was required.
    #[error("Nul error: {0}")]
    WideStringNul(#[from] widestring::error::NulError<u16>),
    /// Wide string contained an interior NUL.
    #[error("ContainsNul error: {0}")]
    ContainsNul(#[from] widestring::error::ContainsNul<u16>),
    /// Bytes were not valid UTF-16.
    #[error("Utf16 conversion error: {0}")]
    Utf16(#[from] widestring::error::Utf16Error),
    /// Byte buffer was not a valid NUL-terminated C string.
    #[error("FromVecWithNul conversion error: {0}")]
    FromVecWithNul(#[from] std::ffi::FromVecWithNulError),
    /// C string was not valid UTF-8.
    #[error("IntoString conversion error: {0}")]
    IntoString(#[from] std::ffi::IntoStringError),
    /// Input string contained an interior NUL byte.
    #[error("Nul error: {0}")]
    FfiNul(#[from] std::ffi::NulError),
}
/// Little-endian binary reading helpers layered on top of a byte stream.
pub trait Reader {
    /// Reads a NUL-terminated UTF-16LE string (terminator consumed).
    fn read_wide_string(&mut self) -> Result<String, Error>;
    /// Reads a NUL-terminated byte string (terminator consumed).
    fn read_string(&mut self) -> Result<String, Error>;
    fn read_u8(&mut self) -> Result<u8, Error>;
    fn read_u16_le(&mut self) -> Result<u16, Error>;
    fn read_u32_le(&mut self) -> Result<u32, Error>;
    fn read_u64_le(&mut self) -> Result<u64, Error>;
    /// Fills `buf` completely, or fails with an IO error.
    fn read_exact(&mut self, buf: &mut [u8]) -> Result<(), Error>;
}
/// Little-endian binary writing helpers layered on top of a byte stream.
///
/// NOTE(review): the returned `usize` is the stream position after the write
/// in the `Cursor` implementation below — confirm other implementors keep
/// that contract before relying on it.
pub trait Writer {
    /// Writes `value` as NUL-terminated UTF-16LE.
    fn write_wide_string<T: AsRef<str>>(&mut self, value: T) -> Result<usize, Error>;
    /// Writes `value` as a NUL-terminated byte string.
    fn write_string<T: AsRef<str>>(&mut self, value: T) -> Result<usize, Error>;
    fn write_u8(&mut self, value: u8) -> Result<usize, Error>;
    fn write_u16_le(&mut self, value: u16) -> Result<usize, Error>;
    fn write_u32_le(&mut self, value: u32) -> Result<usize, Error>;
    fn write_u64_le(&mut self, value: u64) -> Result<usize, Error>;
    /// Writes the whole buffer.
    fn write_all(&mut self, buf: &[u8]) -> Result<usize, Error>;
}
/// Thin wrapper that adds string-aware binary reading/writing (the `Reader`
/// and `Writer` traits) on top of any inner stream.
pub struct Cursor<T> {
    inner: T,
}

impl<T> Cursor<T> {
    /// Wraps `inner`. Available functionality depends on which of
    /// `Read` / `Write + Seek` the inner type implements.
    pub fn new(inner: T) -> Self {
        Self { inner }
    }
}
impl<T: Read> Reader for Cursor<T> {
    /// Reads little-endian u16 code units up to and including the NUL
    /// terminator, then decodes the whole buffer as UTF-16.
    fn read_wide_string(&mut self) -> Result<String, Error> {
        let mut units: Vec<u16> = Vec::with_capacity(1024);
        loop {
            let unit = self.read_u16_le()?;
            units.push(unit);
            if unit == 0 {
                break;
            }
        }
        Ok(widestring::U16CStr::from_slice(&units)?.to_string()?)
    }

    /// Reads bytes up to and including the NUL terminator, then decodes the
    /// buffer as a C string.
    fn read_string(&mut self) -> Result<String, Error> {
        let mut bytes: Vec<u8> = Vec::with_capacity(1024);
        loop {
            let byte = self.read_u8()?;
            bytes.push(byte);
            if byte == 0 {
                break;
            }
        }
        Ok(std::ffi::CString::from_vec_with_nul(bytes)?.into_string()?)
    }

    fn read_u8(&mut self) -> Result<u8, Error> {
        let mut buf = [0u8; 1];
        self.inner.read_exact(&mut buf)?;
        Ok(buf[0])
    }

    fn read_u16_le(&mut self) -> Result<u16, Error> {
        let mut buf = [0u8; 2];
        self.inner.read_exact(&mut buf)?;
        Ok(u16::from_le_bytes(buf))
    }

    fn read_u32_le(&mut self) -> Result<u32, Error> {
        let mut buf = [0u8; 4];
        self.inner.read_exact(&mut buf)?;
        Ok(u32::from_le_bytes(buf))
    }

    fn read_u64_le(&mut self) -> Result<u64, Error> {
        let mut buf = [0u8; 8];
        self.inner.read_exact(&mut buf)?;
        Ok(u64::from_le_bytes(buf))
    }

    fn read_exact(&mut self, buf: &mut [u8]) -> Result<(), Error> {
        self.inner.read_exact(buf)?;
        Ok(())
    }
}
impl<T: Write + Seek> Writer for Cursor<T> {
    /// Encodes `value` as UTF-16LE (including the NUL terminator) and writes
    /// the encoded bytes.
    fn write_wide_string<S: AsRef<str>>(&mut self, value: S) -> Result<usize, Error> {
        let wide = widestring::U16CString::from_str(value.as_ref())?;
        let encoded: Vec<u8> = wide
            .into_vec_with_nul()
            .into_iter()
            .flat_map(u16::to_le_bytes)
            .collect();
        self.write_all(&encoded)
    }

    /// Writes `value` as a NUL-terminated byte string.
    fn write_string<S: AsRef<str>>(&mut self, value: S) -> Result<usize, Error> {
        let c_string = std::ffi::CString::new(value.as_ref())?;
        self.write_all(c_string.as_bytes_with_nul())
    }

    fn write_u8(&mut self, value: u8) -> Result<usize, Error> {
        self.write_all(&[value])
    }

    fn write_u16_le(&mut self, value: u16) -> Result<usize, Error> {
        self.write_all(&value.to_le_bytes())
    }

    fn write_u32_le(&mut self, value: u32) -> Result<usize, Error> {
        self.write_all(&value.to_le_bytes())
    }

    fn write_u64_le(&mut self, value: u64) -> Result<usize, Error> {
        self.write_all(&value.to_le_bytes())
    }

    /// Writes the whole buffer and returns the stream position afterwards.
    fn write_all(&mut self, buf: &[u8]) -> Result<usize, Error> {
        self.inner.write_all(buf)?;
        let position = self.inner.stream_position()?;
        Ok(position as usize)
    }
}

16
offset-finder/Cargo.toml Normal file
View file

@ -0,0 +1,16 @@
[package]
name = "offset-finder"
version.workspace = true
edition.workspace = true
[features]
json_input = ["dep:serde"]
[dependencies]
goblin.workspace = true
log.workspace = true
patternscanner.workspace = true
serde = { workspace = true, optional = true }
thiserror.workspace = true
pe-utils.workspace = true

View file

@ -0,0 +1,32 @@
#![cfg(feature = "json_input")]
use serde::{Deserialize, Serialize};
use crate::OffsetLocator;
/// JSON-(de)serialisable mirror of [`OffsetLocator`], using owned `String`s
/// instead of borrowed `&str`s so it can be loaded from a file.
#[derive(Serialize, Deserialize)]
pub struct OffsetLocatorJson {
    pub name: String,
    /// Fallback patterns ("??" acts as a single-byte wildcard).
    pub partial_match: Vec<String>,
    pub full_match: String,
    /// Defaults to `false` when absent from the JSON.
    #[serde(default)]
    pub skip_offset_print: bool,
    /// Defaults to `false` when absent from the JSON.
    #[serde(default)]
    pub allow_multiple_matches: bool,
}
/// Borrows an [`OffsetLocatorJson`] as an [`OffsetLocator`] without cloning
/// the pattern strings.
///
/// Implemented as `From` rather than `Into` (clippy `from_over_into`): the
/// standard blanket impl still gives callers
/// `Into<OffsetLocator<'a>> for &'a OffsetLocatorJson` for free, so existing
/// `.into()` call sites keep working.
impl<'a> From<&'a OffsetLocatorJson> for OffsetLocator<'a> {
    fn from(json: &'a OffsetLocatorJson) -> Self {
        OffsetLocator {
            name: &json.name,
            // Re-borrow each owned pattern as &str for the locator's lifetime.
            partial_match: json.partial_match.iter().map(String::as_str).collect(),
            full_match: &json.full_match,
            skip_offset_print: json.skip_offset_print,
            allow_multiple_matches: json.allow_multiple_matches,
        }
    }
}

155
offset-finder/src/lib.rs Normal file
View file

@ -0,0 +1,155 @@
use std::collections::HashMap;
use goblin::pe::section_table::SectionTable;
use log::{debug, warn};
use patternscanner::PatternScannerBuilder;
#[cfg(feature = "json_input")]
pub mod json;
/// Describes how to locate one named offset inside an executable: an exact
/// pattern plus fallback partial patterns ("??" is a single-byte wildcard).
pub struct OffsetLocator<'a> {
    /// Human-readable name used in log messages and errors.
    pub name: &'a str,
    /// Fallback patterns, tried in order when `full_match` is not found.
    pub partial_match: Vec<&'a str>,
    /// Exact pattern tried first (may be empty).
    pub full_match: &'a str,
    /// NOTE(review): not read anywhere in this file — presumably consumed by
    /// callers; confirm before removing.
    pub skip_offset_print: bool,
    /// When several locations match, accept the first instead of erroring.
    pub allow_multiple_matches: bool,
}
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Offset for: {0} not found")]
NotFound(String),
#[error("Too many matches found for: {0}")]
TooManyMatches(String),
#[error("PE Utils: {0}")]
PeUtils(#[from] pe_utils::Error),
#[error("Toml Error: {0}")]
PatternScanner(#[from] patternscanner::PatternScannerError),
}
/// Scans for a single pattern and expects exactly one hit.
///
/// With `allow_multiple_matches` the first hit is returned even when there
/// are several; otherwise every candidate is logged and
/// `Error::TooManyMatches` is raised.
#[inline]
fn find_pattern(image_base: usize,
                sections: &[SectionTable],
                data: &[u8],
                pattern: &str,
                name: &str,
                allow_multiple_matches: bool) -> Result<(usize, usize), Error> {
    let matches = find_all_pattern(image_base, sections, data, pattern, name)?;
    match matches.len() {
        // `find_all_pattern` errors on zero hits, so one hit is unambiguous.
        1 => Ok(matches[0]),
        _ if allow_multiple_matches => Ok(matches[0]),
        _ => {
            for candidate in &matches {
                warn!("Possible candidate for: {}, 0x{:02X?}", name, candidate.1);
            }
            Err(Error::TooManyMatches(name.to_string()))
        }
    }
}
/// Scans `data` for every occurrence of `pattern` and returns each hit as
/// `(file offset, resolved address)`.
///
/// Errors with `Error::NotFound` when there are no hits, and propagates any
/// failure while resolving a hit against the section table.
#[inline]
fn find_all_pattern(image_base: usize,
                    sections: &[SectionTable],
                    data: &[u8],
                    pattern: &str,
                    name: &str) -> Result<Vec<(usize, usize)>, Error> {
    let hits = PatternScannerBuilder::builder()
        .with_bytes(data)
        .build()
        .scan_all(pattern)?;
    if hits.is_empty() {
        return Err(Error::NotFound(name.to_string()));
    }
    // Pair each raw file offset with its resolved virtual address; the
    // collect short-circuits on the first resolution failure.
    hits.into_iter()
        .map(|hit| Ok((hit, pe_utils::resolve_symbol(image_base, sections, hit)?)))
        .collect()
}
#[inline]
fn find_patterns(image_base: usize,
sections: &[SectionTable],
data: &[u8],
patterns: &[&str],
name: &str,
allow_multiple_matches: bool) -> Result<(usize, usize), Error> {
for pattern in patterns {
let result = match find_pattern(image_base, sections, data, pattern, name, allow_multiple_matches) {
Ok(result) => Ok(result),
Err(Error::NotFound(_)) => continue,
Err(err) => Err(err)
}?;
debug!("Partial pattern match with: {}", pattern);
return Ok(result);
}
Err(Error::NotFound(name.to_string()))
}
#[inline]
fn find_all_patterns(image_base: usize,
sections: &[SectionTable],
data: &[u8],
patterns: &[&str],
name: &str) -> Result<HashMap<usize, Vec<(usize, usize)>>, Error> {
let mut output = HashMap::new();
for i in 0..patterns.len() {
let result = match find_all_pattern(image_base, sections, data, &patterns[i], name) {
Ok(result) => Ok(result),
Err(Error::NotFound(_)) => continue,
Err(err) => Err(err)
}?;
debug!("Partial pattern match with: {}", &patterns[i]);
output.insert(i, result);
}
match output.is_empty() {
true => Err(Error::NotFound(name.to_string())),
false => Ok(output)
}
}
impl<'a> OffsetLocator<'a> {
pub fn find_offset(&self,
image_base: usize,
sections: &[SectionTable],
executable: &[u8]) -> Result<(usize, usize, bool), Error> {
match find_pattern(
image_base,
sections,
executable,
self.full_match,
self.name,
self.allow_multiple_matches,
) {
Ok(result) => Ok((result.0, result.1, true)),
Err(Error::NotFound(_)) => {
let result = find_patterns(
image_base,
sections,
executable,
&self.partial_match,
self.name,
self.allow_multiple_matches,
)?;
Ok((result.0, result.1, false))
}
Err(err) => Err(err)
}
}
pub fn find_all_partial_only(&self,
image_base: usize,
sections: &[SectionTable],
executable: &[u8]) -> Result<HashMap<usize, Vec<(usize, usize)>>, Error> {
find_all_patterns(
image_base,
sections,
executable,
&self.partial_match,
self.name,
)
}
}

8
pe-utils/Cargo.toml Normal file
View file

@ -0,0 +1,8 @@
[package]
name = "pe-utils"
version.workspace = true
edition.workspace = true
[dependencies]
goblin.workspace = true
thiserror.workspace = true

130
pe-utils/src/lib.rs Normal file
View file

@ -0,0 +1,130 @@
use goblin::container;
use goblin::pe::{import, options};
use goblin::pe::header::Header;
use goblin::pe::import::ImportData;
use goblin::pe::optional_header::OptionalHeader;
use goblin::pe::section_table::SectionTable;
/// Errors produced by the PE helpers in this crate.
#[derive(thiserror::Error, Debug)]
pub enum Error {
    /// Anything goblin failed to parse.
    #[error("Goblin Error: {0}")]
    Goblin(#[from] goblin::error::Error),
    /// The PE header had no optional header.
    #[error("Optional header missing")]
    NoOptionalHeader,
    /// A file offset fell outside every section's raw-data range.
    #[error("Offset: {0} not found in any section")]
    NotInSection(usize),
}
/// Returns the first 16 bytes of `bytes` as a fixed-size array reference,
/// or `None` when the input is shorter than 16 bytes. Used to peek at the
/// file-type magic before full parsing.
pub fn take_hint_bytes(bytes: &[u8]) -> Option<&[u8; 16]> {
    let head = bytes.get(..16)?;
    head.try_into().ok()
}
/// Rounds an address up to the next multiple of an alignment.
pub trait MemAlignedAddress<T> {
    /// Returns `address` rounded up to the next multiple of `alignment`
    /// (unchanged when already aligned). Panics when `alignment` is zero.
    fn get_mem_aligned_address(address: T, alignment: T) -> T;
}

impl MemAlignedAddress<Self> for u32 {
    fn get_mem_aligned_address(address: Self, alignment: Self) -> Self {
        match address % alignment {
            0 => address,
            remainder => address + alignment - remainder,
        }
    }
}

impl MemAlignedAddress<Self> for u64 {
    fn get_mem_aligned_address(address: Self, alignment: Self) -> Self {
        match address % alignment {
            0 => address,
            remainder => address + alignment - remainder,
        }
    }
}
pub fn parse_headers(dump: &[u8]) -> Result<Header, Error> {
let result = if let Some(hint_bytes) = take_hint_bytes(dump) {
match goblin::peek_bytes(hint_bytes)? {
goblin::Hint::PE => Ok(Header::parse(dump)?),
_ => Err(goblin::error::Error::Malformed(
"We were expecting a PE and it's not a PE".to_string())
)
}
} else {
Err(goblin::error::Error::Malformed("Object is too small.".to_string()))
}?;
Ok(result)
}
pub fn get_optional_headers(header: &Header) -> Result<OptionalHeader, Error> {
match header.optional_header {
None => Err(Error::NoOptionalHeader),
Some(optional_header) => Ok(optional_header)
}
}
/// Parses the section table of the image in `dump`.
///
/// The section table sits immediately after the optional header, which
/// itself starts at `dos_header.pe_pointer` + PE magic + COFF header.
pub fn get_sections(header: &Header, dump: &[u8]) -> Result<Vec<SectionTable>, Error> {
    let optional_header_offset = header.dos_header.pe_pointer as usize
        + goblin::pe::header::SIZEOF_PE_MAGIC
        + goblin::pe::header::SIZEOF_COFF_HEADER;
    // goblin advances this offset in place while parsing the table.
    let offset =
        &mut (optional_header_offset + header.coff_header.size_of_optional_header as usize);
    Ok(header.coff_header.sections(dump, offset)?)
}
/// Translates a raw file offset (`addr`) into a loaded virtual address
/// (`image_base` + RVA) by finding the section whose raw-data range
/// contains `addr`.
///
/// Returns `Error::NotInSection` when no section's raw range covers `addr`.
pub fn resolve_symbol(image_base: usize,
                      sections: &[SectionTable],
                      addr: usize) -> Result<usize, Error> {
    for section in sections {
        let raw_start = section.pointer_to_raw_data as usize;
        // Compute in usize so the sum cannot wrap for sections near u32::MAX.
        let raw_end = raw_start + section.size_of_raw_data as usize;
        // `>=` so an address at the very first byte of a section resolves
        // too (the previous `>` comparison missed that boundary).
        if addr >= raw_start && addr < raw_end {
            // Rearranged as base + vaddr + delta-within-section so the math
            // cannot underflow when a section's raw pointer exceeds its
            // virtual address.
            return Ok(image_base + section.virtual_address as usize + (addr - raw_start));
        }
    }
    Err(Error::NotInSection(addr))
}
/// Parses the import directory of a PE image and returns the raw
/// `ImportData`, or `None` when the image has no import table.
///
/// NOTE(review): work in progress (see the `#[deprecated]` note) — the
/// `imports` and `libraries` values computed below are never returned.
#[deprecated(since = "0.1.0", note = "Actually not deprecated, but its not finished, so do not use")]
pub fn get_imports<'a>(bytes: &'a [u8], optional_header: &OptionalHeader, sections: &[SectionTable]) -> Result<Option<ImportData<'a>>, Error> {
    let opts = &options::ParseOptions::default();
    let file_alignment = optional_header.windows_fields.file_alignment;
    // "Big" container means a 64-bit (PE32+) image.
    let is_64 = optional_header.container()? == container::Container::Big;
    let mut imports = vec![];
    let mut import_data = None;
    if let Some(&import_table) = optional_header.data_directories.get_import_table() {
        // Parse the import directory with the pointer width of the image.
        let id = if is_64 {
            ImportData::parse_with_opts::<u64>(
                bytes,
                import_table,
                &sections,
                file_alignment,
                opts,
            )?
        } else {
            ImportData::parse_with_opts::<u32>(
                bytes,
                import_table,
                &sections,
                file_alignment,
                opts,
            )?
        };
        // Resolve the individual imported symbols (currently unused).
        if is_64 {
            imports = import::Import::parse::<u64>(bytes, &id, &sections)?
        } else {
            imports = import::Import::parse::<u32>(bytes, &id, &sections)?
        }
        // Unique, sorted list of imported library names (currently unused).
        let mut libraries = id
            .import_data
            .iter()
            .map(|data| data.name)
            .collect::<Vec<&'a str>>();
        libraries.sort();
        libraries.dedup();
        import_data = Some(id);
    }
    Ok(import_data)
}

11
restorer/Cargo.toml Normal file
View file

@ -0,0 +1,11 @@
[package]
name = "restorer"
version.workspace = true
edition.workspace = true
[dependencies]
goblin.workspace = true
log.workspace = true
thiserror.workspace = true
pe-utils.workspace = true

99
restorer/src/lib.rs Normal file
View file

@ -0,0 +1,99 @@
use std::io::Write;
use goblin::pe::optional_header::OptionalHeader;
use goblin::pe::section_table::SectionTable;
use log::{info, trace};
/// Errors produced while restoring a dumped PE image.
#[derive(thiserror::Error, Debug)]
pub enum Error {
    /// Header/section parsing failed in pe-utils.
    #[error("PE Utils Error: {0}")]
    PEUtils(#[from] pe_utils::Error),
    /// Writing the restored file failed.
    #[error("IO Error: {0}")]
    Io(#[from] std::io::Error),
}
/// Restores a PE image that is already mapped in the current process,
/// reading it straight out of memory at `module_base`.
///
/// NOTE(review): this function is not marked `unsafe`, yet it dereferences
/// `module_base` as a raw pointer. The caller must guarantee the address is
/// the base of a readable, fully-mapped PE module.
pub fn restore_from_ptr<A: AsRef<str>, B: AsRef<str>>(name: A,
                                                      module_base: usize,
                                                      restored_filename: Option<B>) -> Result<Vec<u8>, Error> {
    // SAFETY: assumes the first page (0x1000 bytes) at module_base is
    // readable and contains the DOS/PE headers — TODO confirm for images
    // whose headers exceed one page.
    let data = unsafe { std::slice::from_raw_parts(module_base as *const u8, 0x1000) };
    let header = pe_utils::parse_headers(data)?;
    trace!("{:#?}", header);
    let optional_headers = pe_utils::get_optional_headers(&header)?;
    let sections = pe_utils::get_sections(&header, data)?;
    // The mapped image extends to the end of the highest section.
    let mut vaddr_end: u32 = 0;
    for section in &sections {
        let virtual_end = section.virtual_address + section.size_of_raw_data;
        if virtual_end > vaddr_end {
            vaddr_end = virtual_end;
        }
    }
    // SAFETY: assumes the whole range [module_base, module_base + vaddr_end)
    // is mapped and readable.
    let data = unsafe {
        std::slice::from_raw_parts(module_base as *const u8, vaddr_end as usize)
    };
    restore_raw(name, data, optional_headers, &sections, restored_filename)
}
pub fn restore_from_dump<A: AsRef<str>, B: AsRef<str>>(name: A,
dump: &[u8],
restored_filename: Option<B>) -> Result<Vec<u8>, Error> {
let header = pe_utils::parse_headers(dump)?;
trace!("{:#?}", header);
let optional_headers = pe_utils::get_optional_headers(&header)?;
let sections = pe_utils::get_sections(&header, dump)?;
restore_raw(name, dump, optional_headers, &sections, restored_filename)
}
/// Rewrites a memory-layout dump back into file layout using the section
/// table: each section's bytes are copied from its virtual-address range to
/// its raw-data range. Optionally saves the result to `restored_filename`.
///
/// Returns the full restored buffer (same length as `dump`); the saved file
/// is truncated at the end of the last section's raw data.
///
/// NOTE(review): assumes `dump` is laid out by virtual addresses (headers at
/// offset 0, each section at its VA) and spans every section — the slice
/// copies below panic otherwise.
pub fn restore_raw<A: AsRef<str>, B: AsRef<str>>(name: A,
                                                 dump: &[u8],
                                                 optional_headers: OptionalHeader,
                                                 sections: &[SectionTable],
                                                 restored_filename: Option<B>) -> Result<Vec<u8>, Error> {
    let mut output = vec![0; dump.len()];
    // Headers occupy the same range in both layouts: copy them through.
    output[0..optional_headers.windows_fields.size_of_headers as usize]
        .copy_from_slice(&dump[0..optional_headers.windows_fields.size_of_headers as usize]);
    // Highest raw-data end seen so far — the restored file's effective size.
    let mut eof: u32 = 0;
    for section in sections {
        let phys_end = section.pointer_to_raw_data + section.size_of_raw_data;
        let virtual_end = section.virtual_address + section.size_of_raw_data;
        // Section-alignment-rounded virtual end; logged for diagnostics only.
        let virtual_end_aligned = <u32 as pe_utils::MemAlignedAddress<u32>>::get_mem_aligned_address(
            section.virtual_address + section.virtual_size,
            optional_headers.windows_fields.section_alignment,
        );
        trace!(
            "Section name: {}\nPhys ptr: 0x{:02X?}\nPhys size: 0x{:02X?}\nPhys End: 0x{:02X?}\n\
            Virtual ptr: 0x{:02X?}\nVirtual size: 0x{:02X?}\nVirtual End: 0x{:02X?}\n\
            Virtual End Aligned: 0x{:02X?}",
            String::from_utf8_lossy(&section.name),
            section.pointer_to_raw_data,
            section.size_of_raw_data,
            phys_end,
            section.virtual_address,
            section.virtual_size,
            virtual_end,
            virtual_end_aligned
        );
        // Move the section's bytes from their virtual location back to their
        // file location.
        output[section.pointer_to_raw_data as usize..phys_end as usize]
            .copy_from_slice(&dump[section.virtual_address as usize..virtual_end as usize]);
        if phys_end > eof {
            eof = phys_end;
        }
    }
    match restored_filename {
        None => info!("Since no restored_filename was provided, the restored output will not be saved to a file"),
        Some(filename) => {
            // Only the file-layout portion (up to the last section's raw
            // end) is written to disk.
            let mut data_file = std::fs::File::create(filename.as_ref())?;
            data_file.write_all(&output[0..eof as usize])?;
            info!("Restored executable saved to: {}", filename.as_ref());
        }
    }
    info!("Executable {} restored successfully", name.as_ref());
    Ok(output)
}