single-file-cli-api.js
/*
 * Copyright 2010-2020 Gildas Lormeau
 * contact : gildas.lormeau <at> gmail.com
 *
 * This file is part of SingleFile.
 *
 * The code in this file is free software: you can redistribute it and/or
 * modify it under the terms of the GNU Affero General Public License
 * (GNU AGPL) as published by the Free Software Foundation, either version 3
 * of the License, or (at your option) any later version.
 *
 * The code in this file is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
 * General Public License for more details.
 *
 * As additional permission under GNU AGPL version 3 section 7, you may
 * distribute UNMODIFIED VERSIONS OF THIS file without the copy of the GNU
 * AGPL normally required by section 4, provided you include this license
 * notice and a URL through which recipients can access the Corresponding
 * Source.
 */
/* global require, module, URL */
const fs = require("fs");
const VALID_URL_TEST = /^(https?|file):\/\//;
const backEnds = {
	jsdom: "./back-ends/jsdom.js",
	puppeteer: "./back-ends/puppeteer.js",
	"puppeteer-firefox": "./back-ends/puppeteer-firefox.js",
	"webdriver-chromium": "./back-ends/webdriver-chromium.js",
	"webdriver-gecko": "./back-ends/webdriver-gecko.js"
};
let backend, tasks = [], maxParallelWorkers = 8, sessionFilename;
module.exports = initialize;
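// Loads the requested back-end, optionally restores a previously saved crawl
// session, and returns the public API: capture(urls), finish() and the
// VALID_URL_TEST pattern used to validate URLs.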
async function initialize(options) {
	maxParallelWorkers = options.maxParallelWorkers;
	backend = require(backEnds[options.backEnd]);
	await backend.initialize(options);
	if (options.crawlSyncSession || options.crawlLoadSession) {
		try {
			tasks = JSON.parse(fs.readFileSync(options.crawlSyncSession || options.crawlLoadSession).toString());
		} catch (error) {
			if (options.crawlLoadSession) {
				throw error;
			}
		}
	}
	if (options.crawlSyncSession || options.crawlSaveSession) {
		sessionFilename = options.crawlSyncSession || options.crawlSaveSession;
	}
	return {
		capture: urls => capture(urls, options),
		finish: () => finish(options),
		VALID_URL_TEST
	};
}
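// Queues the given URLs as new tasks (skipping URLs that are already queued),
// persists the session if one is configured, and starts processing.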
async function capture(urls, options) {
	let newTasks;
	const taskUrls = tasks.map(task => task.url);
	newTasks = urls.map(url => createTask(url, options));
	newTasks = newTasks.filter(task => task && !taskUrls.includes(task.url));
	if (newTasks.length) {
		tasks = tasks.concat(newTasks);
		saveTasks();
	}
	await runTasks();
}
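// Waits for all pending captures, optionally rewrites URLs inside the saved
// files so they point at the local copies (crawlReplaceURLs), then closes the
// browser unless browserDebug is set.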
async function finish(options) {
	const promiseTasks = tasks.map(task => task.promise);
	await Promise.all(promiseTasks);
	if (options.crawlReplaceURLs) {
		tasks.forEach(task => {
			try {
				let pageContent = fs.readFileSync(task.filename).toString();
				tasks.forEach(otherTask => {
					if (otherTask.filename) {
						pageContent = pageContent.replace(new RegExp(escapeRegExp("\"" + otherTask.originalUrl + "\""), "gi"), "\"" + otherTask.filename + "\"");
						pageContent = pageContent.replace(new RegExp(escapeRegExp("'" + otherTask.originalUrl + "'"), "gi"), "'" + otherTask.filename + "'");
						const filename = otherTask.filename.replace(/ /g, "%20");
						pageContent = pageContent.replace(new RegExp(escapeRegExp("=" + otherTask.originalUrl + " "), "gi"), "=" + filename + " ");
						pageContent = pageContent.replace(new RegExp(escapeRegExp("=" + otherTask.originalUrl + ">"), "gi"), "=" + filename + ">");
					}
				});
				fs.writeFileSync(task.filename, pageContent);
			} catch (error) {
				// ignored
			}
		});
	}
	if (!options.browserDebug) {
		return backend.closeBrowser();
	}
}
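// Starts as many workers as maxParallelWorkers allows, minus the tasks that
// are already being processed.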
async function runTasks() {
	const availableTasks = tasks.filter(task => !task.status).length;
	const processingTasks = tasks.filter(task => task.status == "processing").length;
	const promisesTasks = [];
	for (let workerIndex = 0; workerIndex < Math.min(availableTasks, maxParallelWorkers - processingTasks); workerIndex++) {
		promisesTasks.push(runNextTask());
	}
	return Promise.all(promisesTasks);
}
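// Picks the next pending task, captures it and, when link crawling is enabled,
// queues the page's links as new tasks before scheduling more work.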
async function runNextTask() {
	const task = tasks.find(task => !task.status);
	if (task) {
		const options = task.options;
		let taskOptions = JSON.parse(JSON.stringify(options));
		taskOptions.url = task.url;
		task.status = "processing";
		saveTasks();
		task.promise = capturePage(taskOptions);
		const pageData = await task.promise;
		task.status = "processed";
		if (pageData) {
			task.filename = pageData.filename;
			if (options.crawlLinks && testMaxDepth(task)) {
				let newTasks = pageData.links
					.map(urlLink => createTask(urlLink, options, task, tasks[0]))
					.filter(task => task &&
						testMaxDepth(task) &&
						!tasks.find(otherTask => otherTask.url == task.url) &&
						(!options.crawlInnerLinksOnly || task.isInnerLink));
				tasks.splice(tasks.length, 0, ...newTasks);
			}
		}
		saveTasks();
		await runTasks();
	}
}
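// Returns true when the task is still within the configured crawl depth limits
// (a limit of 0 means unlimited).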
function testMaxDepth(task) {
	const options = task.options;
	return (options.crawlMaxDepth == 0 || task.depth <= options.crawlMaxDepth) &&
		(options.crawlExternalLinksMaxDepth == 0 || task.externalLinkDepth < options.crawlExternalLinksMaxDepth);
}
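// Builds a task object for a URL; child tasks inherit their depth from the
// parent and are flagged as inner links when they share the root task's host.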
function createTask(url, options, parentTask, rootTask) {
	url = parentTask ? rewriteURL(url, options.crawlRemoveURLFragment, options.crawlRewriteRules) : url;
	if (VALID_URL_TEST.test(url)) {
		const isInnerLink = rootTask && url.startsWith(getHostURL(rootTask.url));
		return {
			url,
			isInnerLink,
			originalUrl: url,
			depth: parentTask ? parentTask.depth + 1 : 0,
			externalLinkDepth: isInnerLink ? -1 : parentTask ? parentTask.externalLinkDepth + 1 : -1,
			options
		};
	}
}
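// Persists the task list to the session file, dropping promises and clearing
// "processing" statuses so an interrupted crawl can be resumed.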
function saveTasks() {
	if (sessionFilename) {
		fs.writeFileSync(sessionFilename, JSON.stringify(
			tasks.map(task => Object.assign({}, task, {
				status: task.status == "processing" ? undefined : task.status,
				promise: undefined,
				options: task.status && task.status == "processed" ? undefined : task.options
			}))
		));
	}
}
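// Optionally strips the URL fragment and applies the user-provided rewrite
// rules, each rule being a space-separated "<regexp> <replacement>" pair.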
function rewriteURL(url, crawlRemoveURLFragment, crawlRewriteRules) {
	url = url.trim();
	if (crawlRemoveURLFragment) {
		url = url.replace(/^(.*?)#.*$/, "$1");
	}
	crawlRewriteRules.forEach(rewriteRule => {
		const parts = rewriteRule.trim().split(/ +/);
		if (parts.length) {
			url = url.replace(new RegExp(parts[0]), parts[1] || "").trim();
		}
	});
	return url;
}
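// Returns the protocol, credentials and hostname of a URL, used above to
// detect inner links.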
function getHostURL(url) {
	url = new URL(url);
	return url.protocol + "//" + (url.username ? url.username + (url.password || "") + "@" : "") + url.hostname;
}
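// Captures a single page through the back-end and writes the result to disk
// (or stdout); errors are appended to errorFile or logged to the console.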
async function capturePage(options) {
	try {
		const pageData = await backend.getPageData(options);
		if (options.output) {
			fs.writeFileSync(getFilename(options.output), pageData.content);
		} else {
			if (options.filenameTemplate && pageData.filename) {
				fs.writeFileSync(getFilename(pageData.filename), pageData.content);
			} else {
				console.log(pageData.content); // eslint-disable-line no-console
			}
		}
		return pageData;
	} catch (error) {
		const message = "URL: " + options.url + "\nStack: " + error.stack + "\n";
		if (options.errorFile) {
			fs.writeFileSync(options.errorFile, message, { flag: "a" });
		} else {
			console.error(message); // eslint-disable-line no-console
		}
	}
}
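// Finds an unused filename by appending " - 2", " - 3", ... before the file
// extension until the name does not exist on disk.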
function getFilename(filename, index = 1) {
	let newFilename = filename;
	if (index > 1) {
		const regExpMatchExtension = /(\.[^.]+)$/;
		const matchExtension = newFilename.match(regExpMatchExtension);
		if (matchExtension && matchExtension[1]) {
			newFilename = newFilename.replace(regExpMatchExtension, " - " + index + matchExtension[1]);
		} else {
			newFilename += " - " + index;
		}
	}
	if (fs.existsSync(newFilename)) {
		return getFilename(filename, index + 1);
	} else {
		return newFilename;
	}
}
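// Escapes regular expression special characters so a literal string can be
// used inside a RegExp.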
function escapeRegExp(string) {
	return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
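
// Usage sketch (not part of the original file): one way this module might be
// consumed from a Node.js script. The option names are the ones read above;
// the chosen back-end, URL and worker count are illustrative assumptions.
//
// const initialize = require("./single-file-cli-api.js");
// (async () => {
// 	const singlefile = await initialize({
// 		backEnd: "puppeteer",
// 		maxParallelWorkers: 4,
// 		crawlRewriteRules: []
// 	});
// 	await singlefile.capture(["https://www.example.com/"]);
// 	await singlefile.finish();
// })();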