// https://github.com/electron/asar
// http://formats.kaitai.io/python_pickle/
import { BinaryReader } from "./util/binary";

/*
 The asar format is kinda bad, especially because it uses multiple pickle
 entries. It spams sizes, expecting us to read small buffers and parse each one
 separately, but we can just read the whole thing in one pass without creating
 multiple BinaryReaders. This implementation might be wrong, though.

 An entry either has size/offset or files, but I can't get the type to
 cooperate, so pretend AsarEntry below is a union.
*/

type AsarEntry = {
  size: number;
  offset: `${number}`; // who designed this

  files?: Record<string, AsarEntry>;
};

export default function extractAsar(file: ArrayBuffer) {
  const array = new Uint8Array(file);
  const br = new BinaryReader(array);

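  // Rough layout of the header (little-endian uint32s), as I understand it:
  //   [0..4)    always 4 (the size of the next field)
  //   [4..8)    size of the header pickle that follows
  //   [8..12)   pickle payload size
  //   [12..16)  length of the JSON header string
  //   [16..)    the JSON header string itself
  // File contents are stored after the header pickle, at offsets relative to its end.
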
  // two uints, one containing the number '4', to signify that the other uint takes up 4 bytes
  // bravo, electron, bravo
  const _payloadSize = br.readUInt32();
  const _headerSize = br.readUInt32();

  const headerStringStart = br.position;
  const headerStringSize = br.readUInt32(); // how big the pickle block is
  const actualStringSize = br.readUInt32(); // how big the string inside that block is

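  // headerStringSize doesn't count its own 4-byte length field, so file data
  // should start 4 bytes past headerStringStart + headerStringSize.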
  const base = headerStringStart + headerStringSize + 4;

  const string = br.readString(actualStringSize);
  const header: AsarEntry = JSON.parse(string);

  const ret: Record<string, Uint8Array> = {};
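  // Recursively walk the header tree: directory entries have `files`, while
  // file entries have `size` and `offset` (offset is relative to `base`).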
  function addDirectory(dir: AsarEntry, path: string) {
    for (const [name, data] of Object.entries(dir.files!)) {
      const fullName = path + "/" + name;
      if (data.files != null) {
        addDirectory(data, fullName);
      } else {
        br.position = base + parseInt(data.offset);
        const file = br.read(data.size);
        ret[fullName] = file;
      }
    }
  }

  addDirectory(header, "");

  return ret;
}
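
// Example usage (illustrative sketch; assumes Node.js and that this module
// lives at "./asar"):
//
//   import { readFile } from "node:fs/promises";
//   import extractAsar from "./asar";
//
//   const buf = await readFile("app.asar");
//   const files = extractAsar(
//     buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength),
//   );
//   console.log(Object.keys(files)); // paths like "/package.json"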