#!/usr/bin/env python3
"""
batch_decode_msgpack.py
Decode MessagePack stored in "BinaryData" arrays across all JSON files in a directory.
Usage:
    python batch_decode_msgpack.py --input-dir ./assets
    python batch_decode_msgpack.py --input-dir ./assets --out-dir ./decoded --suffix Decoded
    python batch_decode_msgpack.py --input-dir ./assets --recursive

Requirements:
    pip install msgpack
"""
import argparse
import base64
import json
import os
import struct
import sys
from typing import Any, List, Optional
import msgpack
def find_binarydata(obj: Any) -> List[list]:
"""Recursively search JSON-like structure for a key named BinaryData (case-insensitive)."""
results = []
if isinstance(obj, dict):
for k, v in obj.items():
if isinstance(k, str) and k.lower() == "binarydata" and isinstance(v, list):
results.append(v)
else:
results.extend(find_binarydata(v))
elif isinstance(obj, list):
for item in obj:
results.extend(find_binarydata(item))
return results
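
# Example (hypothetical payload):
#   find_binarydata({"a": {"BinaryData": [1, 2]}, "b": [{"binarydata": [3]}]})
#   -> [[1, 2], [3]]   (matches collected depth-first, keys compared case-insensitively)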
def list_to_bytes(lst: List[int]) -> bytes:
"""Convert a list of ints (0-255) to bytes. Ignores invalid entries with a warning."""
raw = bytearray()
for i, x in enumerate(lst):
if isinstance(x, int) and 0 <= x <= 255:
raw.append(x)
else:
try:
xi = int(x)
if 0 <= xi <= 255:
raw.append(xi)
else:
print(f"warning: element {i}={xi!r} out of byte range, skipping", file=sys.stderr)
except Exception:
print(f"warning: element {i}={x!r} not an int, skipping", file=sys.stderr)
return bytes(raw)
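
# Example: list_to_bytes([129, 161, 97, 1]) == b"\x81\xa1a\x01", the MessagePack
# encoding of {"a": 1}; numeric strings such as "97" are coerced via int().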
def bytes_to_displayable(o: Any) -> Any:
"""Recursively convert bytes -> base64 strings so JSON is safe and readable."""
if isinstance(o, bytes):
return {"__bytes_base64": base64.b64encode(o).decode("ascii")}
    if isinstance(o, dict):
        # bytes keys become base64 strings directly: the wrapper dict used for
        # values is unhashable, and json.dump cannot serialize bytes keys
        return {
            (base64.b64encode(k).decode("ascii") if isinstance(k, bytes) else k): bytes_to_displayable(v)
            for k, v in o.items()
        }
if isinstance(o, list):
return [bytes_to_displayable(x) for x in o]
return o
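
# Example: bytes_to_displayable({"k": b"\x00\x01"}) -> {"k": {"__bytes_base64": "AAE="}}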
def try_unpack_stream(b: bytes) -> List[Any]:
    """Try to unpack using an Unpacker (handles concatenated msgpack objects)."""
unpacker = msgpack.Unpacker(raw=False, strict_map_key=False)
unpacker.feed(b)
results = []
try:
for item in unpacker:
results.append(item)
except Exception:
# if Unpacker trips on malformed tail, ignore remainder
pass
return results
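
# Example: two concatenated MessagePack objects decode as a stream:
#   try_unpack_stream(msgpack.packb(1) + msgpack.packb("hi")) -> [1, "hi"]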
def try_unpack_single(b: bytes) -> Optional[Any]:
"""Try to unpack a single top-level object using unpackb."""
try:
return msgpack.unpackb(b, raw=False, strict_map_key=False)
except Exception:
return None
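
# Unlike the streaming variant, unpackb rejects trailing bytes, so e.g.
#   try_unpack_single(msgpack.packb(1) + b"tail") -> None
# (msgpack raises ExtraData internally, which is swallowed above).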
def decode_msgpack_from_bytes(b: bytes) -> List[Any]:
"""
Attempt several strategies to decode MessagePack content from bytes.
Returns a list of decoded objects (may be empty).
"""
# 1) try streaming unpack (handles multiple concatenated objects)
objs = try_unpack_stream(b)
if objs:
return objs
# 2) try single unpack of whole buffer
single = try_unpack_single(b)
if single is not None:
return [single]
# 3) 4-byte little-endian length prefix heuristic at offset 0
L = len(b)
if L >= 4:
size0 = struct.unpack_from("<I", b, 0)[0]
if 0 < size0 <= L - 4:
candidate = b[4 : 4 + size0]
single = try_unpack_single(candidate)
if single is not None:
return [single]
# 4) scan a few starting offsets for embedded msgpack objects
for off in range(1, min(256, max(1, L - 4))):
candidate = b[off:]
items = try_unpack_stream(candidate)
if items:
return items
        # also try a 4-byte little-endian length prefix at this offset
if off + 4 <= L:
size = struct.unpack_from("<I", b, off)[0]
if 0 < size <= L - off - 4:
cand = b[off + 4 : off + 4 + size]
single = try_unpack_single(cand)
if single is not None:
return [single]
# nothing found
return []
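
# Strategy 3 targets buffers framed as struct.pack("<I", len(body)) + body,
# i.e. a 4-byte little-endian length followed by the payload. Hypothetical example:
#   body = msgpack.packb({"x": 1})
#   try_unpack_single((struct.pack("<I", len(body)) + body)[4:]) -> {"x": 1}
# Note that the streaming pass often consumes such buffers first, because small
# length bytes happen to decode as positive fixints; the prefix heuristics only
# fire when the stream attempt yields nothing.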
def process_json_file(path: str) -> Optional[List[Any]]:
"""Load JSON file, find BinaryData arrays, decode them, and return list of decoded objects."""
try:
with open(path, "r", encoding="utf-8") as f:
payload = json.load(f)
except Exception as e:
print(f"ERROR reading {path}: {e}", file=sys.stderr)
return None
lists = find_binarydata(payload)
    # If the top level is itself a list of byte-like values (ints or numeric strings), treat it as BinaryData
if not lists and isinstance(payload, list) and all(isinstance(x, (int, str)) for x in payload):
lists = [payload]
if not lists:
print(f"No BinaryData found in {os.path.basename(path)}")
return []
decoded_all = []
for idx, lst in enumerate(lists):
b = list_to_bytes(lst)
decoded = decode_msgpack_from_bytes(b)
if not decoded:
print(f" Warning: no msgpack decoded for BinaryData #{idx} in {os.path.basename(path)}")
# convert bytes fields for JSON safety
decoded_safe = [bytes_to_displayable(o) for o in decoded]
decoded_all.append(
{
"source_index": idx,
"decoded_count": len(decoded_safe),
"decoded": decoded_safe,
}
)
return decoded_all
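
# Each output file is a JSON array with one entry per BinaryData array found:
#   [{"source_index": 0, "decoded_count": 1, "decoded": [...]}, ...]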
def output_path_for(input_path: str, out_dir: Optional[str], suffix: str) -> str:
base = os.path.basename(input_path)
name, ext = os.path.splitext(base)
new_name = f"{name}{suffix}{ext}"
if out_dir:
os.makedirs(out_dir, exist_ok=True)
return os.path.join(out_dir, new_name)
return os.path.join(os.path.dirname(input_path), new_name)
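
# Example (POSIX-style paths; --out-dir is created on demand as a side effect):
#   output_path_for("assets/a.json", None, "Decoded") -> "assets/aDecoded.json"
#   output_path_for("assets/a.json", "out", "_dec")   -> "out/a_dec.json"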
def main():
p = argparse.ArgumentParser(description="Batch decode MessagePack BinaryData in JSON files.")
p.add_argument("--input-dir", required=True, help="Directory containing .json files to decode.")
p.add_argument("--out-dir", help="Output directory (defaults to same folder as input files).")
p.add_argument("--suffix", default="Decoded", help="Suffix to append to filename (default: 'Decoded').")
p.add_argument("--recursive", action="store_true", help="Search directories recursively.")
args = p.parse_args()
input_dir = args.input_dir
if not os.path.isdir(input_dir):
print(f"Error: input-dir '{input_dir}' is not a directory.", file=sys.stderr)
sys.exit(2)
# collect json files
json_files = []
if args.recursive:
for root, _, files in os.walk(input_dir):
for fn in files:
if fn.lower().endswith(".json"):
json_files.append(os.path.join(root, fn))
else:
for fn in os.listdir(input_dir):
if fn.lower().endswith(".json"):
json_files.append(os.path.join(input_dir, fn))
if not json_files:
print("No .json files found in the input directory.")
sys.exit(0)
print(f"Found {len(json_files)} .json file(s). Processing...")
processed = 0
for path in sorted(json_files):
print(f"Processing: {path}")
decoded_all = process_json_file(path)
        if decoded_all is None:
            print(" Skipped due to read error.")
            continue
        if not decoded_all:
            continue  # no BinaryData arrays found; skip writing an empty output file
out_path = output_path_for(path, args.out_dir, args.suffix)
try:
with open(out_path, "w", encoding="utf-8") as f:
json.dump(decoded_all, f, ensure_ascii=False, indent=2)
print(f" Wrote decoded output to: {out_path}")
processed += 1
except Exception as e:
print(f" ERROR writing {out_path}: {e}", file=sys.stderr)
print(f"Done — processed {processed}/{len(json_files)} files.")
if __name__ == "__main__":
main()