resource exploding

Jimmy-Z 2023-08-19 20:56:52 +08:00
parent 5e90bbc38d
commit a61543e808
3 changed files with 218 additions and 44 deletions

.gitignore

@@ -1 +1,2 @@
 /target
+/out

README.md

@@ -1 +1,23 @@
-WIP
+Supported
+===
+* exploding rsc and nrsc archives
+  * including but not limited to: contents (pages), audio, graphics
+
+Note
+===
+currently needs my fork of monokakido.rs to work, link below
+
+TODO
+===
+* exploding keystore archives
+
+License
+===
+GPL
+
+Links
+===
+https://github.com/golddranks/monokakido
+https://github.com/stephenmk/monokakido
+https://github.com/stephenmk/jitenbot
+https://github.com/Jimmy-Z/monokakido
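
For a sense of usage, going by the clap definitions in the source diff below: clap kebab-cases the command names, so a dry run is just "scan-dict <dir>" (likewise scan-base / scan-contents), while adding --explode, plus optionally --out-dir (default "out"), actually writes the items out; the binary name itself is not shown in this commit.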

src/main.rs

@@ -2,6 +2,7 @@ use std::{
     collections::HashMap,
     ffi::OsStr,
     fs,
+    io::Write,
     path::{Path, PathBuf},
 };
@@ -18,13 +19,30 @@ use utils::*;
 struct Args {
     #[command(subcommand)]
     cmd: Cmds,
+    /// explode items from resource archives, dry-run if not specified (default)
+    #[arg(long, short)]
+    explode: bool,
+    /// do not enumerate items in resource archives, has no effect when exploding
+    #[arg(long, short)]
+    shallow: bool,
+    #[arg(long, short, default_value = "out")]
+    out_dir: String,
 }
 
 #[derive(Subcommand)]
 enum Cmds {
-    scan_base { dir: String },
-    scan_dict { dir: String },
-    scan_contents { dir: String },
+    /// scan a base dir, contains dictionary sub dirs (more description below)
+    ScanBase { dir: String },
+    /// scan a dictionary dir, should contain a "Contents" sub dir
+    ScanDict { dir: String },
+    /// scan a content dir, should be a sub dir of the "Contents" dir mentioned above,
+    /// should contain sub dirs like "contents", "key", "audio"
+    ScanContents { dir: String },
 }
 
 #[derive(Deserialize, Debug)]
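
As an aside on the clap derive API used above, a minimal self-contained sketch of how these definitions parse, trimmed to one flag and one subcommand ("mkd" is a placeholder argv[0]; the commit doesn't show the binary name). clap kebab-cases variant and field names, and the parent-level flags go before the subcommand; requires clap with the "derive" feature:

use clap::{Parser, Subcommand};

#[derive(Parser)]
struct Args {
    #[command(subcommand)]
    cmd: Cmds,
    /// explode items from resource archives, dry-run if not specified
    #[arg(long, short)]
    explode: bool,
    #[arg(long, short, default_value = "out")]
    out_dir: String,
}

#[derive(Subcommand)]
enum Cmds {
    // clap exposes this as the "scan-dict" subcommand
    ScanDict { dir: String },
}

fn main() {
    // equivalent to: mkd --explode --out-dir exploded scan-dict ./SOMEDICT
    let args = Args::parse_from([
        "mkd", "--explode", "--out-dir", "exploded", "scan-dict", "./SOMEDICT",
    ]);
    assert!(args.explode);
    assert_eq!(args.out_dir, "exploded");
    let Cmds::ScanDict { dir } = args.cmd;
    assert_eq!(dir, "./SOMEDICT");
}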
@@ -46,14 +64,12 @@ struct MkdContent {
 fn main() {
     let args = Args::parse();
     match args.cmd {
-        Cmds::scan_base { dir } => {
-            scan_base(&dir);
-        }
-        Cmds::scan_dict { dir } => {
-            scan_dict(&dir)
-        }
-        Cmds::scan_contents { dir } => {
-            scan_contents(&dir)
-        }
+        Cmds::ScanBase { dir } => {
+            scan_base(&dir, args.explode, args.shallow, &args.out_dir);
+        }
+        Cmds::ScanDict { dir } => scan_dict(&dir, args.explode, args.shallow, &args.out_dir),
+        Cmds::ScanContents { dir } => {
+            scan_contents(&dir, args.explode, args.shallow, &args.out_dir)
+        }
     }
 }
@@ -62,18 +78,18 @@ fn main() {
 // each sub-dir should have a "Contents" sub-dir
 // for example:
 // mac: "/Library/Application Support/AppStoreContent/jp.monokakido.Dictionaries/Products/"
-fn scan_base<P: AsRef<Path>>(dir: P) {
+fn scan_base<D: AsRef<Path>, O: AsRef<Path>>(dir: D, explode: bool, shallow: bool, out_dir: O) {
     for e in fs::read_dir(dir).unwrap() {
         let e = e.unwrap();
         let p = e.path();
         if !p.is_dir() {
             continue;
         }
-        scan_dict(p);
+        scan_dict(p, explode, shallow, &out_dir);
     }
 }
 
-fn scan_dict<P: AsRef<Path>>(dir: P) {
+fn scan_dict<D: AsRef<Path>, O: AsRef<Path>>(dir: D, explode: bool, shallow: bool, out_dir: O) {
     let mut p: PathBuf = dir.as_ref().into();
     let dir_name = p.file_name().unwrap().to_str().unwrap().to_string();
     // main JSON
@@ -90,23 +106,22 @@ fn scan_dict<P: AsRef<Path>>(dir: P) {
     let json = json[0].to_str().unwrap();
     // println!("{}", json);
     let json: MkdProduct = serde_json::from_reader(fs::File::open(json).unwrap()).unwrap();
-    println!(
-        "{} [{}]",
-        fmt_ml_str(&json.title),
-        dir_name,
-    );
+    println!("{} [{}]", fmt_ml_str(&json.title), dir_name);
+    let mut out_dir = out_dir.as_ref().to_path_buf();
     for c in json.contents {
         println!("\t{} [{}]", fmt_ml_str(&c.title), &c.dir);
-        p.push(c.dir);
-        scan_contents(&p);
+        p.push(&c.dir);
+        out_dir.push(&c.dir);
+        scan_contents(&p, explode, shallow, &out_dir);
         p.pop();
+        out_dir.pop();
     }
 }
 
 // dir: the content directory of a single dict
 // should be a sub-dir of the "Contents" dir mentioned above
 // should contain sub-dirs like "key"
-fn scan_contents<P: AsRef<Path>>(dir: P) {
+fn scan_contents<D: AsRef<Path>, O: AsRef<Path>>(dir: D, explode: bool, shallow: bool, out_dir: O) {
     for d in fs::read_dir(dir).unwrap() {
         let d = d.unwrap();
         let dp = d.path();
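
Taken together, the three scan levels walk a layout like this (SOMEDICT and SOMECONTENT are placeholders; real content dir names come from the product JSON's contents list, and the extensions are the ones derive_ext assigns further down):

Products/                    <- scan_base
└── SOMEDICT/                <- scan_dict, reads the product JSON here
    └── Contents/
        └── SOMECONTENT/     <- scan_contents
            ├── contents/    rsc: *.map + *.idx, exploded as .xml
            ├── audio/       nrsc: *.nidx, exploded as .aac
            └── key/         *.keystore (explode still TODO)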
@@ -115,17 +130,24 @@ fn scan_contents<P: AsRef<Path>>(dir: P) {
         }
         let dn = d.file_name();
         let dn = dn.to_str().unwrap();
-        // counters
-        let mut c_idx = 0;
-        let mut c_nidx = 0;
-        let mut c_keys = 0;
+        // lists and counters
+        let mut l_toc = HashMap::<String, Vec<String>>::new();
         let mut c_not_file = 0;
         let mut c_no_ext = 0;
         let mut c_other = HashMap::<String, isize>::new();
         // counter helper
+        let mut l_toc_add = |e: &str, n: &str| {
+            l_toc
+                .entry(e.to_string())
+                .or_insert(Vec::new())
+                .push(n.to_string());
+        };
         let mut c_other_mod = |e: &str, m: isize| {
             *c_other.entry(e.to_string()).or_insert(0) += m;
         };
+        let mut out_dir = out_dir.as_ref().to_path_buf();
+        out_dir.push(dn);
         for f in fs::read_dir(&dp).unwrap() {
             let f = f.unwrap();
             let fp = f.path();
@@ -142,25 +164,11 @@ fn scan_contents<P: AsRef<Path>>(dir: P) {
                     continue;
                 }
             };
-            match fext.to_str().unwrap() {
-                "idx" => {
-                    let mp = fp.with_extension("map");
-                    if mp.exists() && mp.is_file() {
-                        println!("\t\t{}: {}|map", dn, fname);
-                        // prevent the corresponding map file from showing up in c_other
-                        c_other_mod("map", -1);
-                    } else {
-                        println!(
-                            "\t\t{}: {} without corresponding map file, unexpected",
-                            dn, fname
-                        )
-                    }
-                }
-                "nidx" => {
-                    println!("\t\t{}: {}", dn, fname);
-                }
-                "keystore" => {
+            let fext = fext.to_str().unwrap();
+            match fext {
+                "map" | "idx" | "nidx" | "keystore" => {
                     println!("\t\t{}: {}", dn, fname);
+                    l_toc_add(fext, fname);
                 }
                 e => {
                     c_other_mod(e, 1);
@@ -183,5 +191,148 @@ fn scan_contents<P: AsRef<Path>>(dir: P) {
         if r.len() > 0 {
             println!("\t\t{}: {}", dn, r.join(", "));
         }
+        // try rsc(map|idx)
+        let safe_get = |k: &str| match l_toc.get(k) {
+            Some(v) => &v[..],
+            None => &[],
+        };
+        if safe_get("map").len() == 1 {
+            match mkt::resource::Rsc::new(&dp, &dn) {
+                Ok(mut rsc) => {
+                    println!("\t\t\tinitialized rsc(map|idx)");
+                    rsc.explode(explode, shallow, &out_dir, derive_ext(dn));
+                }
+                Err(e) => {
+                    eprintln!("failed to parse rsc(map|idx): {:?}", e);
+                }
+            }
+        }
+        if safe_get("nidx").len() == 1 {
+            match mkt::resource::Nrsc::new(&dp) {
+                Ok(mut nrsc) => {
+                    println!("\t\t\tinitialized nrsc(nidx)");
+                    nrsc.explode(explode, shallow, &out_dir, derive_ext(dn));
+                }
+                Err(e) => {
+                    eprintln!("failed to parse nrsc(nidx): {:?}", e);
+                }
+            }
+        }
+        for k in safe_get("keystore").iter() {
+            let mut p = dp.clone();
+            p.push(k);
+            match mkt::Keys::new(p) {
+                Ok(keys) => {
+                    println!("\t\t\tinitialized keystore from {}", k);
+                    // TODO
+                    // keys.explode(explode, out_dir);
+                }
+                Err(e) => {
+                    eprintln!("failed to parse keystore from {}: {:?}", k, e);
+                }
+            }
+        }
+    }
+}
+
+trait Explode {
+    fn len(&self) -> usize;
+    fn get(&mut self, idx: usize) -> Result<(String, &[u8]), mkt::Error>;
+    fn explode<P: AsRef<Path>>(
+        &mut self,
+        do_explode: bool,
+        shallow: bool,
+        dir: P,
+        ext: Option<&str>,
+    ) {
+        println!("\t\t\t\t{} entries", self.len());
+        if !do_explode && shallow {
+            return;
+        }
+        if do_explode {
+            if let Err(e) = fs::create_dir_all(&dir) {
+                eprintln!(
+                    "failed to create dir {}: {}",
+                    dir.as_ref().as_os_str().to_str().unwrap(),
+                    e
+                );
+                return;
+            }
+        }
+        let mut p = dir.as_ref().to_path_buf();
+        for idx in 0..self.len() {
+            let (id, asset) = match self.get(idx) {
+                Ok(r) => r,
+                Err(e) => {
+                    eprintln!("failed to get resource {}: {:?}", idx, e);
+                    continue;
+                }
+            };
+            let an = match ext {
+                Some(ext) => format!("{}.{}", id, ext),
+                None => id,
+            };
+            println!("\t\t\t\t{}", &an);
+            if !do_explode {
+                continue;
+            }
+            p.push(an);
+            match fs::File::create(&p) {
+                Ok(mut f) => match f.write_all(asset) {
+                    Ok(()) => {}
+                    Err(e) => {
+                        eprintln!(
+                            "error writing file {}: {}",
+                            &p.as_os_str().to_str().unwrap(),
+                            e
+                        );
+                    }
+                },
+                Err(e) => {
+                    eprintln!(
+                        "failed to create file {}: {}",
+                        &p.as_os_str().to_str().unwrap(),
+                        e
+                    );
+                }
+            }
+            p.pop();
+        }
+    }
+}
+
+impl Explode for mkt::resource::Rsc {
+    fn len(&self) -> usize {
+        self.len()
+    }
+    fn get(&mut self, idx: usize) -> Result<(String, &[u8]), mkt::Error> {
+        let (id, asset) = self.get_by_idx(idx)?;
+        Ok((format!("{:0>10}", id), asset))
+    }
+}
+
+impl Explode for mkt::resource::Nrsc {
+    fn len(&self) -> usize {
+        self.len()
+    }
+    fn get(&mut self, idx: usize) -> Result<(String, &[u8]), mkt::Error> {
+        let (id, asset) = self.get_by_idx(idx)?;
+        Ok((id.to_string(), asset))
+    }
+}
+
+fn derive_ext(c_name: &str) -> Option<&'static str> {
+    match c_name {
+        "audio" => Some("aac"),
+        "graphics" => None,
+        "contents" => Some("xml"),
+        _ => Some("bin"),
     }
 }
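
The Explode trait at the end of the diff is what lets Rsc and Nrsc share one extraction loop: each archive type supplies only len() and a get() yielding a (name, bytes) pair, and the provided explode() handles enumeration, dry runs, and file writing. Below is a minimal self-contained sketch of the same pattern, with the mkt types replaced by a hypothetical in-memory DummyArchive and the shallow flag dropped for brevity; everything named here is illustrative, not from the commit:

use std::{fs, io::Write, path::Path};

trait Explode {
    fn len(&self) -> usize;
    // every entry is a name plus its raw bytes; errors simplified to &str
    fn get(&mut self, idx: usize) -> Result<(String, &[u8]), &'static str>;
    // provided method: enumerate every entry, write it out when asked
    fn explode<P: AsRef<Path>>(&mut self, do_explode: bool, dir: P, ext: Option<&str>) {
        if do_explode {
            fs::create_dir_all(&dir).expect("failed to create output dir");
        }
        let mut p = dir.as_ref().to_path_buf();
        for idx in 0..self.len() {
            let (id, asset) = match self.get(idx) {
                Ok(r) => r,
                Err(e) => {
                    eprintln!("failed to get resource {}: {}", idx, e);
                    continue;
                }
            };
            // attach a derived extension if one applies
            let name = match ext {
                Some(ext) => format!("{}.{}", id, ext),
                None => id,
            };
            println!("{}", name);
            if do_explode {
                p.push(&name);
                fs::File::create(&p)
                    .and_then(|mut f| f.write_all(asset))
                    .unwrap_or_else(|e| eprintln!("failed to write {}: {}", name, e));
                p.pop();
            }
        }
    }
}

// stand-in for mkt::resource::Rsc / Nrsc: numbered blobs behind a get_by_idx-like API
struct DummyArchive(Vec<(u32, Vec<u8>)>);

impl Explode for DummyArchive {
    fn len(&self) -> usize {
        self.0.len()
    }
    fn get(&mut self, idx: usize) -> Result<(String, &[u8]), &'static str> {
        let (id, asset) = self.0.get(idx).ok_or("index out of range")?;
        // zero-pad numeric ids, as the Rsc impl above does with {:0>10}
        Ok((format!("{:0>10}", id), asset))
    }
}

fn main() {
    let mut a = DummyArchive(vec![(1, b"<entry/>".to_vec()), (2, b"<entry/>".to_vec())]);
    a.explode(false, "out/contents", Some("xml")); // dry run: list 0000000001.xml, ...
    a.explode(true, "out/contents", Some("xml")); // actually write the files
}

Keeping get() as the only fallible per-entry operation is what makes the dry-run and write paths identical up to the final File::create.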