asar.js

  1. "use strict";
  2. Object.defineProperty(exports, "__esModule", { value: true });
  3. exports.readAsarJson = exports.readAsar = exports.readAsarHeader = exports.AsarFilesystem = exports.Node = void 0;
  4. const chromium_pickle_js_1 = require("chromium-pickle-js");
  5. const fs_extra_1 = require("fs-extra");
  6. const path = require("path");
  7. class Node {
  8. }
  9. exports.Node = Node;
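// In-memory view of an asar archive: "src" is the archive path, "header" is the root
// of the Node tree parsed from the archive's JSON header, "headerSize" is the byte
// length of that header, and "offset" is the running byte position assigned to the
// next packed file when the tree is being built via addFileNode.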
class AsarFilesystem {
    constructor(src, header = new Node(), headerSize = -1) {
        this.src = src;
        this.header = header;
        this.headerSize = headerSize;
        this.offset = 0;
        if (this.header.files == null) {
            this.header.files = {};
        }
    }
    searchNodeFromDirectory(p, isCreate) {
        let node = this.header;
        for (const dir of p.split(path.sep)) {
            if (dir !== ".") {
                let child = node.files[dir];
                if (child == null) {
                    if (!isCreate) {
                        return null;
                    }
                    child = new Node();
                    child.files = {};
                    node.files[dir] = child;
                }
                node = child;
            }
        }
        return node;
    }
    getOrCreateNode(p) {
        if (p == null || p.length === 0) {
            return this.header;
        }
        const name = path.basename(p);
        const dirNode = this.searchNodeFromDirectory(path.dirname(p), true);
        if (dirNode.files == null) {
            dirNode.files = {};
        }
        let result = dirNode.files[name];
        if (result == null) {
            result = new Node();
            dirNode.files[name] = result;
        }
        return result;
    }
    addFileNode(file, dirNode, size, unpacked, stat, integrity) {
        // a single file cannot exceed 0xFFFFFFFF (4294967295) bytes
        if (size > 4294967295) {
            throw new Error(`${file}: file size cannot be larger than 4.2GB`);
        }
        const node = new Node();
        node.size = size;
        if (integrity) {
            node.integrity = integrity;
        }
        if (unpacked) {
            node.unpacked = true;
        }
        else {
            // electron expects string
            node.offset = this.offset.toString();
            // preserve the owner-execute bit on non-Windows platforms
            if (process.platform !== "win32" && stat.mode & 0o100) {
                node.executable = true;
            }
            this.offset += node.size;
        }
        let children = dirNode.files;
        if (children == null) {
            children = {};
            dirNode.files = children;
        }
        children[path.basename(file)] = node;
        return node;
    }
    getNode(p) {
        const node = this.searchNodeFromDirectory(path.dirname(p), false);
        return node.files[path.basename(p)];
    }
    getFile(p, followLinks = true) {
        const info = this.getNode(p);
        // if followLinks is false we don't resolve symlinks
        return followLinks && info.link != null ? this.getFile(info.link) : info;
    }
    async readJson(file) {
        return JSON.parse((await this.readFile(file)).toString());
    }
    readFile(file) {
        return readFileFromAsar(this, file, this.getFile(file));
    }
}
exports.AsarFilesystem = AsarFilesystem;
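// Archive layout as assumed by readAsarHeader and readFileFromAsar below: an 8-byte
// pickle whose uint32 payload gives the length of the pickled JSON header that follows,
// then the header itself, then the concatenated file contents addressed by each node's
// "offset" relative to the end of the header.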
async function readAsarHeader(archive) {
    const fd = await (0, fs_extra_1.open)(archive, "r");
    let size;
    let headerBuf;
    try {
        const sizeBuf = Buffer.allocUnsafe(8);
        if ((await (0, fs_extra_1.read)(fd, sizeBuf, 0, 8, null)).bytesRead !== 8) {
            throw new Error("Unable to read header size");
        }
        const sizePickle = (0, chromium_pickle_js_1.createFromBuffer)(sizeBuf);
        size = sizePickle.createIterator().readUInt32();
        headerBuf = Buffer.allocUnsafe(size);
        if ((await (0, fs_extra_1.read)(fd, headerBuf, 0, size, null)).bytesRead !== size) {
            throw new Error("Unable to read header");
        }
    }
    finally {
        await (0, fs_extra_1.close)(fd);
    }
    const headerPickle = (0, chromium_pickle_js_1.createFromBuffer)(headerBuf);
    return { header: headerPickle.createIterator().readString(), size };
}
exports.readAsarHeader = readAsarHeader;
async function readAsar(archive) {
    const { header, size } = await readAsarHeader(archive);
    return new AsarFilesystem(archive, JSON.parse(header), size);
}
exports.readAsar = readAsar;
async function readAsarJson(archive, file) {
    const fs = await readAsar(archive);
    return await fs.readJson(file);
}
exports.readAsarJson = readAsarJson;
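// Example usage (a minimal sketch, not part of this module): reading package.json out
// of a packed Electron app. The "/path/to/app.asar" path and the "./asar" require
// specifier are placeholders/assumptions for illustration only.
//
//   const { readAsar, readAsarJson } = require("./asar");
//   readAsarJson("/path/to/app.asar", "package.json").then(pkg => console.log(pkg.version));
//   // or, for arbitrary files inside the archive:
//   // const fs = await readAsar("/path/to/app.asar");
//   // const buffer = await fs.readFile("main.js");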
async function readFileFromAsar(filesystem, filename, info) {
    const size = info.size;
    const buffer = Buffer.allocUnsafe(size);
    if (size <= 0) {
        return buffer;
    }
    if (info.unpacked) {
        return await (0, fs_extra_1.readFile)(path.join(`${filesystem.src}.unpacked`, filename));
    }
    const fd = await (0, fs_extra_1.open)(filesystem.src, "r");
    try {
        // file data starts after the 8-byte size pickle and the JSON header
        const offset = 8 + filesystem.headerSize + parseInt(info.offset, 10);
        await (0, fs_extra_1.read)(fd, buffer, 0, size, offset);
    }
    finally {
        await (0, fs_extra_1.close)(fd);
    }
    return buffer;
}
//# sourceMappingURL=asar.js.map