
single-file-cli-api.js 8.1 KB

/*
 * Copyright 2010-2020 Gildas Lormeau
 * contact : gildas.lormeau <at> gmail.com
 *
 * This file is part of SingleFile.
 *
 * The code in this file is free software: you can redistribute it and/or
 * modify it under the terms of the GNU Affero General Public License
 * (GNU AGPL) as published by the Free Software Foundation, either version 3
 * of the License, or (at your option) any later version.
 *
 * The code in this file is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
 * General Public License for more details.
 *
 * As additional permission under GNU AGPL version 3 section 7, you may
 * distribute UNMODIFIED VERSIONS OF THIS file without the copy of the GNU
 * AGPL normally required by section 4, provided you include this license
 * notice and a URL through which recipients can access the Corresponding
 * Source.
 */

/* global require, module, URL */

const fs = require("fs");

const VALID_URL_TEST = /^(https?|file):\/\//;
const STATE_PROCESSING = "processing";
const STATE_PROCESSED = "processed";

const backEnds = {
	jsdom: "./back-ends/jsdom.js",
	puppeteer: "./back-ends/puppeteer.js",
	"puppeteer-firefox": "./back-ends/puppeteer-firefox.js",
	"webdriver-chromium": "./back-ends/webdriver-chromium.js",
	"webdriver-gecko": "./back-ends/webdriver-gecko.js",
	"playwright-firefox": "./back-ends/playwright-firefox.js",
	"playwright-chromium": "./back-ends/playwright-chromium.js"
};

let backend, tasks = [], maxParallelWorkers = 8, sessionFilename;

module.exports = initialize;
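
// Loads the selected back-end, optionally restores a previously saved crawl
// session, and returns the public API: capture(urls), finish() and the
// VALID_URL_TEST regular expression.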
async function initialize(options) {
	maxParallelWorkers = options.maxParallelWorkers;
	backend = require(backEnds[options.backEnd]);
	await backend.initialize(options);
	if (options.crawlSyncSession || options.crawlLoadSession) {
		try {
			tasks = JSON.parse(fs.readFileSync(options.crawlSyncSession || options.crawlLoadSession).toString());
		} catch (error) {
			if (options.crawlLoadSession) {
				throw error;
			}
		}
	}
	if (options.crawlSyncSession || options.crawlSaveSession) {
		sessionFilename = options.crawlSyncSession || options.crawlSaveSession;
	}
	return {
		capture: urls => capture(urls, options),
		finish: () => finish(options),
		VALID_URL_TEST
	};
}
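
// Queues the given URLs as new tasks, skipping URLs that are already queued,
// persists the session if one is configured, and processes the queue.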
async function capture(urls, options) {
	let newTasks;
	const taskUrls = tasks.map(task => task.url);
	newTasks = urls.map(url => createTask(url, options));
	newTasks = newTasks.filter(task => task && !taskUrls.includes(task.url));
	if (newTasks.length) {
		tasks = tasks.concat(newTasks);
		saveTasks();
	}
	await runTasks();
}
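
// Waits for all pending captures, optionally rewrites the URLs of captured
// pages so they point to the local files (crawlReplaceURLs), and closes the
// browser unless options.browserDebug is set.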
async function finish(options) {
	const promiseTasks = tasks.map(task => task.promise);
	await Promise.all(promiseTasks);
	if (options.crawlReplaceURLs) {
		tasks.forEach(task => {
			try {
				let pageContent = fs.readFileSync(task.filename).toString();
				tasks.forEach(otherTask => {
					if (otherTask.filename) {
						pageContent = pageContent.replace(new RegExp(escapeRegExp("\"" + otherTask.originalUrl + "\""), "gi"), "\"" + otherTask.filename + "\"");
						pageContent = pageContent.replace(new RegExp(escapeRegExp("'" + otherTask.originalUrl + "'"), "gi"), "'" + otherTask.filename + "'");
						const filename = otherTask.filename.replace(/ /g, "%20");
						pageContent = pageContent.replace(new RegExp(escapeRegExp("=" + otherTask.originalUrl + " "), "gi"), "=" + filename + " ");
						pageContent = pageContent.replace(new RegExp(escapeRegExp("=" + otherTask.originalUrl + ">"), "gi"), "=" + filename + ">");
					}
				});
				fs.writeFileSync(task.filename, pageContent);
			} catch (error) {
				// ignored
			}
		});
	}
	if (!options.browserDebug) {
		return backend.closeBrowser();
	}
}
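
// Starts as many workers as the maxParallelWorkers budget allows, taking the
// number of tasks already being processed into account.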
async function runTasks() {
	const availableTasks = tasks.filter(task => !task.status).length;
	const processingTasks = tasks.filter(task => task.status == STATE_PROCESSING).length;
	const promisesTasks = [];
	for (let workerIndex = 0; workerIndex < Math.min(availableTasks, maxParallelWorkers - processingTasks); workerIndex++) {
		promisesTasks.push(runNextTask());
	}
	return Promise.all(promisesTasks);
}
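
// Processes the next pending task: captures the page, stores the resulting
// filename and, when link crawling is enabled, appends child tasks derived
// from the page links before re-entering the scheduler.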
async function runNextTask() {
	const task = tasks.find(task => !task.status);
	if (task) {
		const options = task.options;
		let taskOptions = JSON.parse(JSON.stringify(options));
		taskOptions.url = task.url;
		task.status = STATE_PROCESSING;
		saveTasks();
		task.promise = capturePage(taskOptions);
		const pageData = await task.promise;
		task.status = STATE_PROCESSED;
		if (pageData) {
			task.filename = pageData.filename;
			if (options.crawlLinks && testMaxDepth(task)) {
				let newTasks = pageData.links
					.map(urlLink => createTask(urlLink, options, task, tasks[0]))
					.filter(task => task &&
						testMaxDepth(task) &&
						!tasks.find(otherTask => otherTask.url == task.url) &&
						(!options.crawlInnerLinksOnly || task.isInnerLink) &&
						(!options.crawlNoParent || task.isChild));
				tasks.splice(tasks.length, 0, ...newTasks);
			}
		}
		saveTasks();
		await runTasks();
	}
}
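
// Returns true when the task is within the configured crawl depth limits
// (a limit of 0 means unlimited).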
function testMaxDepth(task) {
	const options = task.options;
	return (options.crawlMaxDepth == 0 || task.depth <= options.crawlMaxDepth) &&
		(options.crawlExternalLinksMaxDepth == 0 || task.externalLinkDepth < options.crawlExternalLinksMaxDepth);
}
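
// Builds a task object for a URL. Crawled links are rewritten first (fragment
// removal, rewrite rules); the task records whether the URL shares the root
// task's host (isInnerLink) or base path (isChild), and its crawl depths.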
function createTask(url, options, parentTask, rootTask) {
	url = parentTask ? rewriteURL(url, options.crawlRemoveURLFragment, options.crawlRewriteRules) : url;
	if (VALID_URL_TEST.test(url)) {
		const isInnerLink = rootTask && url.startsWith(getHostURL(rootTask.url));
		const rootBaseURIMatch = rootTask && rootTask.url.match(/(.*?)[^/]*$/);
		const isChild = isInnerLink && rootBaseURIMatch && rootBaseURIMatch[1] && url.startsWith(rootBaseURIMatch[1]);
		return {
			url,
			isInnerLink,
			isChild,
			originalUrl: url,
			rootBaseURI: rootBaseURIMatch && rootBaseURIMatch[1],
			depth: parentTask ? parentTask.depth + 1 : 0,
			externalLinkDepth: isInnerLink ? -1 : parentTask ? parentTask.externalLinkDepth + 1 : -1,
			options
		};
	}
}
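
// Persists the task queue to the session file; in-progress tasks are saved
// without a status so they are retried on reload, and promises as well as the
// options of processed tasks are dropped.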
function saveTasks() {
	if (sessionFilename) {
		fs.writeFileSync(sessionFilename, JSON.stringify(
			tasks.map(task => Object.assign({}, task, {
				status: task.status == STATE_PROCESSING ? undefined : task.status,
				promise: undefined,
				options: task.status && task.status == STATE_PROCESSED ? undefined : task.options
			}))
		));
	}
}
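
// Normalizes a crawled URL: trims it, optionally strips the fragment, and
// applies the space-separated "regexp replacement" rewrite rules.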
function rewriteURL(url, crawlRemoveURLFragment, crawlRewriteRules) {
	url = url.trim();
	if (crawlRemoveURLFragment) {
		url = url.replace(/^(.*?)#.*$/, "$1");
	}
	crawlRewriteRules.forEach(rewriteRule => {
		const parts = rewriteRule.trim().split(/ +/);
		if (parts.length) {
			url = url.replace(new RegExp(parts[0]), parts[1] || "").trim();
		}
	});
	return url;
}
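
// Returns the host part of a URL (protocol, optional credentials, hostname),
// used to detect whether a link is internal to the crawled site.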
function getHostURL(url) {
	url = new URL(url);
	return url.protocol + "//" + (url.username ? url.username + (url.password || "") + "@" : "") + url.hostname;
}
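
// Captures a single page via the back-end and writes the result to the
// requested output file, a templated filename, or stdout; errors are appended
// to the error file when one is configured, otherwise printed to stderr.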
async function capturePage(options) {
	try {
		const pageData = await backend.getPageData(options);
		if (options.output) {
			fs.writeFileSync(getFilename(options.output), pageData.content);
		} else {
			if (options.filenameTemplate && pageData.filename) {
				fs.writeFileSync(getFilename(pageData.filename), pageData.content);
			} else {
				console.log(pageData.content); // eslint-disable-line no-console
			}
		}
		return pageData;
	} catch (error) {
		const message = "URL: " + options.url + "\nStack: " + error.stack + "\n";
		if (options.errorFile) {
			fs.writeFileSync(options.errorFile, message, { flag: "a" });
		} else {
			console.error(message); // eslint-disable-line no-console
		}
	}
}
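
// Returns a filename that does not overwrite an existing file, appending
// " - 2", " - 3", etc. before the extension until a free name is found.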
function getFilename(filename, index = 1) {
	let newFilename = filename;
	if (index > 1) {
		const regExpMatchExtension = /(\.[^.]+)$/;
		const matchExtension = newFilename.match(regExpMatchExtension);
		if (matchExtension && matchExtension[1]) {
			newFilename = newFilename.replace(regExpMatchExtension, " - " + index + matchExtension[1]);
		} else {
			newFilename += " - " + index;
		}
	}
	if (fs.existsSync(newFilename)) {
		return getFilename(filename, index + 1);
	} else {
		return newFilename;
	}
}
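
// Escapes regular expression metacharacters so a literal string can be used
// inside new RegExp().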
function escapeRegExp(string) {
	return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}