resource exploding

This commit is contained in:
Jimmy-Z 2023-08-19 20:56:52 +08:00
parent 5e90bbc38d
commit a61543e808
3 changed files with 218 additions and 44 deletions

1
.gitignore vendored
View file

@ -1 +1,2 @@
/target
/out

View file

@ -1 +1,23 @@
WIP
Supported
===
* exploding rsc and nrsc archives
* including but not limited to: contents (pages), audio, and graphics
Note
===
currently requires my fork of monokakido.rs to work; see the links below
TODO
===
* exploding keystore archives
License
===
GPL
Links
===
https://github.com/golddranks/monokakido
https://github.com/stephenmk/monokakido
https://github.com/stephenmk/jitenbot
https://github.com/Jimmy-Z/monokakido

View file

@ -2,6 +2,7 @@ use std::{
collections::HashMap,
ffi::OsStr,
fs,
io::Write,
path::{Path, PathBuf},
};
@ -18,13 +19,30 @@ use utils::*;
struct Args {
#[command(subcommand)]
cmd: Cmds,
/// explode items from resource archives, dry-run if not specified (default)
#[arg(long, short)]
explode: bool,
/// do not enumerate items in resource archives, has no effect when exploding
#[arg(long, short)]
shallow: bool,
#[arg(long, short, default_value = "out")]
out_dir: String,
}
#[derive(Subcommand)]
enum Cmds {
scan_base { dir: String },
scan_dict { dir: String },
scan_contents { dir: String },
/// scan a base dir, contains dictionary sub dirs (more description below)
ScanBase { dir: String },
/// scan a dictionary dir, should contain a "Contents" sub dir
ScanDict { dir: String },
/// scan a content dir, should be a sub dir of the "Contents" dir mentioned above,
/// should contain sub dirs like "contents", "key", "audio"
ScanContents { dir: String },
}
#[derive(Deserialize, Debug)]
@ -46,14 +64,12 @@ struct MkdContent {
fn main() {
let args = Args::parse();
match args.cmd {
Cmds::scan_base { dir } => {
scan_base(&dir);
Cmds::ScanBase { dir } => {
scan_base(&dir, args.explode, args.shallow, &args.out_dir);
}
Cmds::scan_dict { dir } => {
scan_dict(&dir)
}
Cmds::scan_contents { dir } => {
scan_contents(&dir)
Cmds::ScanDict { dir } => scan_dict(&dir, args.explode, args.shallow, &args.out_dir),
Cmds::ScanContents { dir } => {
scan_contents(&dir, args.explode, args.shallow, &args.out_dir)
}
}
}
@ -62,18 +78,18 @@ fn main() {
// each sub-dir should have a "Contents" sub-dir
// for example:
// mac: "/Library/Application Support/AppStoreContent/jp.monokakido.Dictionaries/Products/"
fn scan_base<P: AsRef<Path>>(dir: P) {
fn scan_base<D: AsRef<Path>, O: AsRef<Path>>(dir: D, explode: bool, shallow: bool, out_dir: O) {
for e in fs::read_dir(dir).unwrap() {
let e = e.unwrap();
let p = e.path();
if !p.is_dir() {
continue;
}
scan_dict(p);
scan_dict(p, explode, shallow, &out_dir);
}
}
fn scan_dict<P: AsRef<Path>>(dir: P) {
fn scan_dict<D: AsRef<Path>, O: AsRef<Path>>(dir: D, explode: bool, shallow: bool, out_dir: O) {
let mut p: PathBuf = dir.as_ref().into();
let dir_name = p.file_name().unwrap().to_str().unwrap().to_string();
// main JSON
@ -90,23 +106,22 @@ fn scan_dict<P: AsRef<Path>>(dir: P) {
let json = json[0].to_str().unwrap();
// println!("{}", json);
let json: MkdProduct = serde_json::from_reader(fs::File::open(json).unwrap()).unwrap();
println!(
"{} [{}]",
fmt_ml_str(&json.title),
dir_name,
);
println!("{} [{}]", fmt_ml_str(&json.title), dir_name,);
let mut out_dir = out_dir.as_ref().to_path_buf();
for c in json.contents {
println!("\t{} [{}]", fmt_ml_str(&c.title), &c.dir);
p.push(c.dir);
scan_contents(&p);
p.push(&c.dir);
out_dir.push(&c.dir);
scan_contents(&p, explode, shallow, &out_dir);
p.pop();
out_dir.pop();
}
}
// dir: the content directory of a single dict
// should be a sub-dir of the "Contents" dir mentioned above
// should contain sub-dirs like "key"
fn scan_contents<P: AsRef<Path>>(dir: P) {
fn scan_contents<D: AsRef<Path>, O: AsRef<Path>>(dir: D, explode: bool, shallow: bool, out_dir: O) {
for d in fs::read_dir(dir).unwrap() {
let d = d.unwrap();
let dp = d.path();
@ -115,17 +130,24 @@ fn scan_contents<P: AsRef<Path>>(dir: P) {
}
let dn = d.file_name();
let dn = dn.to_str().unwrap();
// counters
let mut c_idx = 0;
let mut c_nidx = 0;
let mut c_keys = 0;
// lists and counters
let mut l_toc = HashMap::<String, Vec<String>>::new();
let mut c_not_file = 0;
let mut c_no_ext = 0;
let mut c_other = HashMap::<String, isize>::new();
// counter helper
let mut l_toc_add = |e: &str, n: &str| {
l_toc
.entry(e.to_string())
.or_insert(Vec::new())
.push(n.to_string());
};
let mut c_other_mod = |e: &str, m: isize| {
*c_other.entry(e.to_string()).or_insert(0) += m;
};
let mut out_dir = out_dir.as_ref().to_path_buf();
out_dir.push(dn);
for f in fs::read_dir(&dp).unwrap() {
let f = f.unwrap();
let fp = f.path();
@ -142,25 +164,11 @@ fn scan_contents<P: AsRef<Path>>(dir: P) {
continue;
}
};
match fext.to_str().unwrap() {
"idx" => {
let mp = fp.with_extension("map");
if mp.exists() && mp.is_file() {
println!("\t\t{}: {}|map", dn, fname);
// prevent the corresponding map file from showing up in c_other
c_other_mod("map", -1);
} else {
println!(
"\t\t{}: {} without corresponding map file, unexpected",
dn, fname
)
}
}
"nidx" => {
println!("\t\t{}: {}", dn, fname);
}
"keystore" => {
let fext = fext.to_str().unwrap();
match fext {
"map" | "idx" | "nidx" | "keystore" => {
println!("\t\t{}: {}", dn, fname);
l_toc_add(fext, fname);
}
e => {
c_other_mod(e, 1);
@ -183,5 +191,148 @@ fn scan_contents<P: AsRef<Path>>(dir: P) {
if r.len() > 0 {
println!("\t\t{}: {}", dn, r.join(", "));
}
// try rsc(map|idx)
let safe_get = |k: &str| match l_toc.get(k) {
Some(v) => &v[..],
None => &[],
};
if safe_get("map").len() == 1 {
match mkt::resource::Rsc::new(&dp, &dn) {
Ok(mut rsc) => {
println!("\t\t\tinitilized rsc(map|idx)");
rsc.explode(explode, shallow, &out_dir, derive_ext(dn));
}
Err(e) => {
eprintln!("failed to parse rsc(map|idx): {:?}", e);
}
}
}
if safe_get("nidx").len() == 1 {
match mkt::resource::Nrsc::new(&dp) {
Ok(mut nrsc) => {
println!("\t\t\tinitilized nrsc(nidx)");
nrsc.explode(explode, shallow, &out_dir, derive_ext(dn));
}
Err(e) => {
eprintln!("failed to parse nrsc(nidx): {:?}", e);
}
}
}
for k in safe_get("keystore").iter() {
let mut p = dp.clone();
p.push(k);
match mkt::Keys::new(p) {
Ok(keys) => {
println!("\t\t\tinitilized keystore from {}", k);
// TODO
// keys.explode(explode, out_dir);
}
Err(e) => {
eprintln!("failed to parse keystore from {}: {:?}", k, e);
}
}
}
}
}
/// Common interface for resource archives that can be enumerated and
/// extracted ("exploded") to individual files on disk.
trait Explode {
    /// Number of entries in the archive.
    fn len(&self) -> usize;
    /// Fetch the entry at `idx`, returning its id (used as the file name stem)
    /// and its raw bytes.
    fn get(&mut self, idx: usize) -> Result<(String, &[u8]), mkt::Error>;
    /// Enumerate every entry, optionally writing each one out.
    ///
    /// * `do_explode` — when true, write each entry to a file under `dir`;
    ///   when false, this is a dry run that only prints entry names.
    /// * `shallow` — when true (and not exploding), print only the entry
    ///   count and skip the per-entry listing.
    /// * `dir` — output directory, created on demand when exploding.
    /// * `ext` — extension appended to each entry id, if any.
    fn explode<P: AsRef<Path>>(
        &mut self,
        do_explode: bool,
        shallow: bool,
        dir: P,
        ext: Option<&str>,
    ) {
        // fixed typo in output: "entires" -> "entries"
        println!("\t\t\t\t{} entries", self.len());
        // dry run + shallow: the count above is all that was asked for
        if !do_explode && shallow {
            return;
        }
        if do_explode {
            if let Err(e) = fs::create_dir_all(&dir) {
                // Path::display() instead of to_str().unwrap(): the latter
                // panics on non-UTF-8 paths
                eprintln!("failed to create dir {}: {}", dir.as_ref().display(), e);
                return;
            }
        }
        let mut p = dir.as_ref().to_path_buf();
        for idx in 0..self.len() {
            let (id, asset) = match self.get(idx) {
                Ok(r) => r,
                Err(e) => {
                    // a bad entry shouldn't abort the rest of the archive
                    eprintln!("failed to get resource {}: {:?}", idx, e);
                    continue;
                }
            };
            let an = match ext {
                Some(ext) => format!("{}.{}", id, ext),
                None => id,
            };
            println!("\t\t\t\t{}", &an);
            if !do_explode {
                continue;
            }
            p.push(an);
            match fs::File::create(&p) {
                Ok(mut f) => {
                    if let Err(e) = f.write_all(asset) {
                        eprintln!("error writing file {}: {}", p.display(), e);
                    }
                }
                Err(e) => {
                    eprintln!("failed to create file {}: {}", p.display(), e);
                }
            }
            p.pop();
        }
    }
}
impl Explode for mkt::resource::Rsc {
    // Delegates to the archive's own entry count.
    fn len(&self) -> usize {
        self.len()
    }
    // Rsc ids are numeric; zero-pad to 10 digits so exploded files sort naturally.
    fn get(&mut self, idx: usize) -> Result<(String, &[u8]), mkt::Error> {
        self.get_by_idx(idx)
            .map(|(id, asset)| (format!("{:0>10}", id), asset))
    }
}
impl Explode for mkt::resource::Nrsc {
    // Delegates to the archive's own entry count.
    fn len(&self) -> usize {
        self.len()
    }
    // Nrsc ids are already strings; take an owned copy as-is.
    fn get(&mut self, idx: usize) -> Result<(String, &[u8]), mkt::Error> {
        self.get_by_idx(idx)
            .map(|(id, asset)| (id.to_string(), asset))
    }
}
/// Pick the file extension to append to exploded entries based on the
/// content sub-directory name ("audio", "graphics", "contents", ...).
/// Returns `None` when the entry id should be used as the file name as-is.
fn derive_ext(c_name: &str) -> Option<&'static str> {
    // graphics entries keep their bare ids; everything else gets a fixed extension
    if c_name == "graphics" {
        return None;
    }
    Some(match c_name {
        "audio" => "aac",
        "contents" => "xml",
        _ => "bin",
    })
}