diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..f753f71
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,230 @@
+/__private_info
+
+# Created by https://www.toptal.com/developers/gitignore/api/macos,python,visualstudiocode
+# Edit at https://www.toptal.com/developers/gitignore?templates=macos,python,visualstudiocode
+
+### macOS ###
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+### macOS Patch ###
+# iCloud generated files
+*.icloud
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+### Python Patch ###
+# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
+poetry.toml
+
+# ruff
+.ruff_cache/
+
+# LSP config files
+pyrightconfig.json
+
+### VisualStudioCode ###
+.vscode/*
+!.vscode/settings.json
+!.vscode/tasks.json
+!.vscode/launch.json
+!.vscode/extensions.json
+!.vscode/*.code-snippets
+
+# Local History for Visual Studio Code
+.history/
+
+# Built Visual Studio Code Extensions
+*.vsix
+
+### VisualStudioCode Patch ###
+# Ignore all local history of files
+.history
+.ionide
+
+# End of https://www.toptal.com/developers/gitignore/api/macos,python,visualstudiocode
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..edbe737
--- /dev/null
+++ b/README.md
@@ -0,0 +1,24 @@
+# ADManager Tool
+
+This is a simple backup management tool for TIGI Software Apps Manager. It allows you to unpack and repack the encrypted `__private_info` file, which contains the backup's keychain dump. The repacked file is a bit-perfect match of the original.
+
+## Usage
+
+```
+TIGI Software Apps Manager backup tool.
+
+positional arguments:
+  {pack,unpack}  Operation to perform.
+  input          File to read.
+  output         File to save.
+
+options:
+  -h, --help     show this help message and exit
+```
+
+Examples:
+
+```bash
+./admanager-tool.py unpack __private_info unpacked
+./admanager-tool.py pack unpacked __private_info
+```
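+
+## Format notes
+
+The `__private_info` container is obfuscated with a repeating 20-byte XOR key that is hard-coded in `admanager-tool.py`. Because XOR is its own inverse, the same transform both encodes and decodes. A minimal sketch of the per-chunk transform (the `XOR_KEY`/`xor_transform` names here are only illustrative; note the tool restarts the key at offset 0 for every header field and every 0x2000-byte data chunk, so the transform is applied chunk by chunk rather than to the whole file at once):
+
+```python
+# Same key constant as in admanager-tool.py; bytes.fromhex() ignores the spaces.
+XOR_KEY = bytes.fromhex("1A F2 53 18 69 76 B7 A8 00 C2 1A F2 53 18 69 76 B7 A8 00 C2")
+
+
+def xor_transform(chunk: bytes) -> bytes:
+    # XOR with a repeating key is symmetric: applying it twice restores the input.
+    return bytes(b ^ XOR_KEY[i % len(XOR_KEY)] for i, b in enumerate(chunk))
+```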
diff --git a/admanager-tool.py b/admanager-tool.py
new file mode 100755
index 0000000..fde62f5
--- /dev/null
+++ b/admanager-tool.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python3
+
+import argparse
+from pathlib import Path
+
+xor_key = "1A F2 53 18 69 76 B7 A8 00 C2 1A F2 53 18 69 76 B7 A8 00 C2"
+xor_key = bytes.fromhex(xor_key)
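+
+# Container layout, as implemented by unpack()/pack() below. All integers are
+# little-endian uint32 and every byte on disk is XORed with the repeating key
+# above; the key restarts at offset 0 for each field and each 0x2000-byte chunk.
+#
+#   [count][data offset * count]   <- "header"
+#   [name offset * count]
+#   [thumb offset * count]
+#   per entry: [size][data]  [name len][name]  [thumb len][thumb]
+#
+# pack() writes the per-entry records back to back; unpack() reaches them
+# through the offset tables instead of assuming that layout.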
+
+
+def chunks(lst, n):
+    for i in range(0, len(lst), n):
+        yield lst[i : i + n]
+
+
+def unpack(src_file: Path, dst_dir: Path):
+    assert src_file.is_file(), "Source must be a file"
+
+    with open(src_file, "rb") as f:
+
+        def read_xor(size: int):
+            result = bytearray(f.read(size))
+            for i, _ in enumerate(result):
+                result[i] ^= xor_key[i % len(xor_key)]
+            return result
+
+        def get_int(x: bytes):
+            return int.from_bytes(x, "little")
+
+        count = get_int(read_xor(4))
+        header_size = 4 * count + 4
+
+        # Re-read from the start: count + data offsets, then the name and thumb offset tables.
+        f.seek(0, 0)
+        header = read_xor(header_size)
+        name_list = read_xor(header_size - 4)
+        # TODO: Not currently used
+        thumb_list = read_xor(header_size - 4)
+
+        name_list = [get_int(x) for x in chunks(name_list, 4)]
+        names = []
+        for offset in name_list:
+            f.seek(offset, 0)
+            size = get_int(read_xor(4))
+            names.append(read_xor(size).decode())
+
+        dst_dir.mkdir(exist_ok=True)
+
+        for i, name in enumerate(names):
+            offset = get_int(header[4 * i + 4 : 4 * i + 8])
+            f.seek(offset, 0)
+            size = get_int(read_xor(4))
+            with open(dst_dir / name, "wb") as f2:
+                offset = 0
+                while offset < size:
+                    buffer_size = min(0x2000, size - offset)
+                    f2.write(read_xor(buffer_size))
+                    offset += buffer_size
+
+
+def pack(src_dir: Path, dst_file: Path):
+    assert src_dir.is_dir(), "Source must be a directory"
+
+    with open(dst_file, "wb") as f:
+
+        def write_xor(data: bytes):
+            result = bytearray(data)
+            for i, _ in enumerate(result):
+                result[i] ^= xor_key[i % len(xor_key)]
+            f.write(result)
+
+        def get_bytes(x: int):
+            return x.to_bytes(4, "little")
+
+        files = sorted(f for f in src_dir.glob("*") if f.is_file())
+        file_sizes = [f.stat().st_size for f in files]
+        # TODO: Not currently used
+        thumbs = [bytes(1) for i in range(len(files))]
+
+        count = len(files)
+        header_size = 4 * count + 4
+        name_list_size = 4 * count
+        thumb_list_size = 4 * count
+
+        # Pre-compute where each entry's data, name and thumb records will land.
+        header = get_bytes(len(files))
+        name_list = bytes(0)
+        thumb_list = bytes(0)
+        offset = header_size + name_list_size + thumb_list_size
+        for i, file in enumerate(files):
+            header += get_bytes(offset)
+            name_offset = offset + 4 + file_sizes[i]
+            name_list += get_bytes(name_offset)
+            thumb_offset = name_offset + 4 + len(file.name)
+            thumb_list += get_bytes(thumb_offset)
+            offset = thumb_offset + 4 + len(thumbs[i])
+
+        write_xor(header)
+        write_xor(name_list)
+        write_xor(thumb_list)
+
+        for i, file in enumerate(files):
+            size = file_sizes[i]
+            write_xor(get_bytes(size))
+            with open(file, "rb") as f2:
+                offset = 0
+                while offset < size:
+                    buffer_size = min(0x2000, size - offset)
+                    write_xor(f2.read(buffer_size))
+                    offset += buffer_size
+            write_xor(get_bytes(len(file.name)))
+            write_xor(file.name.encode())
+            write_xor(get_bytes(len(thumbs[i])))
+            write_xor(thumbs[i])
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="TIGI Software Apps Manager backup tool.",
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+    )
+    subparsers = parser.add_subparsers(dest="cmd", help="Operation to perform.", required=True)
+
+    pack_parser = subparsers.add_parser("pack", formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    unpack_parser = subparsers.add_parser("unpack", formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+
+    parser.add_argument(
+        "input",
+        type=str,
+        help="File to read.",
+    )
+    parser.add_argument(
+        "output",
+        type=str,
+        help="File to save.",
+    )
+
+    args = parser.parse_args()
+    args.input = Path(args.input)
+    args.output = Path(args.output)
+
+    assert args.input.exists(), "Input path does not exist"
+    assert args.output.parent.exists(), "Output file's directory does not exist"
+
+    if args.cmd == "unpack":
+        print(f"Unpacking {args.input}...")
+        unpack(args.input, args.output)
+        print("Done! Unpacked to:", args.output)
+    else:
+        print(f"Packing {args.input}...")
+        pack(args.input, args.output)
+        print("Done! Packed to:", args.output)