diff --git a/eslint/pack.js b/eslint/pack.js index 53b6d8aa2..0b9724ded 100644 --- a/eslint/pack.js +++ b/eslint/pack.js @@ -14,10 +14,9 @@ var extractFileListFromHtml = function(htmlAsString) { return []; }; -var getFileList = function(fileName) { -    return readFilePromise(fileName).then(function(data) { -        return extractFileListFromHtml(data.toString()); -    }); +var getFileList = async function(fileName) { +    let data = await readFilePromise(fileName); +    return extractFileListFromHtml(data.toString()); }; var adjustedFileListForEslint = function(fileList) { @@ -55,18 +54,15 @@ var writeFilePromise = function(fileName, buffer) { }; // package geting all the files -var readAllFiles = function(fileList, loadedFiles) { -    return fileList.reduce(function(sequence, fileName) { -        return sequence.then(function() { -            return readFilePromise(fileName); -        }).then(function(data) { -            console.log("saving file: " + fileName); -            loadedFiles.push({ -                fileName: fileName, -                text: data.toString() -            }); +var readAllFiles = async function(fileList, loadedFiles) { +    for (let fileName of fileList) { +        let data = await readFilePromise(fileName); +        console.log("saving file: " + fileName); +        loadedFiles.push({ +            fileName: fileName, +            text: data.toString() }); -    }, Promise.resolve()); +    } }; var countLines = function(fileText) { @@ -77,56 +73,25 @@ var makeIndexLine = function(fileName, startIndex, count) { return "\"" + fileName + "\", " + (startIndex + 1) + ", " + (startIndex + count) + "\r\n"; }; - -//================================================================= -// pack source into packed.js So its easy to be examined with eslint -// just run eslint against packed.js - -var loadedFiles = []; -getFileList("../plugin/popup.html").then(function(fileList) { -    fileList = adjustedFileListForEslint(fileList); -    console.log(fileList); -    return readAllFiles(fileList, loadedFiles); -}).then(function() { -    let temp = ""; -    let lineCount = 0; -    let index = ""; -    for (let f
of loadedFiles) { -        temp += f.text; -        let count = countLines(f.text); -        index += makeIndexLine(f.fileName, lineCount, count); -        lineCount += count; -    } -    fs.writeFileSync("packed.js", temp); -    fs.writeFileSync("index.csv", index); -}).catch(function(err) { -    console.log(err); -}); - //================================================================= // This is bit where we pack the extension into a zip & xpi files. -var addToZipFile = function(zip, nameInZip, filePath) { -    return readFilePromise(filePath).then(function(data) { -        zip.add(nameInZip, new zipjs.Uint8ArrayReader(data)); -    }); +var addToZipFile = async function(zip, nameInZip, filePath) { +    let data = await readFilePromise(filePath); +    zip.add(nameInZip, new zipjs.Uint8ArrayReader(data)); }; -var writeZipToDisk = function(zip, filePath) { +var writeZipToDisk = async function(zip, filePath) { console.log("writeZipToDisk " + filePath); -    return zip.close().then(function(buffer) { -        buffer.arrayBuffer().then(function(arraybuffer) { -            return writeFilePromise(filePath, arraybuffer); -        }); -    }); +    let buffer = await zip.close(); +    let arraybuffer = await buffer.arrayBuffer(); +    await writeFilePromise(filePath, arraybuffer); }; -var addFilesToZip = function(zip, fileList) { -    return fileList.reduce(function(sequence, fileName) { -        return sequence.then(function() { -            return addToZipFile(zip, fileName, "../plugin/" + fileName); -        }); -    }, Promise.resolve()); +var addFilesToZip = async function(zip, fileList) { +    for (let fileName of fileList) { +        await addToZipFile(zip, fileName, "../plugin/" + fileName); +    } }; var getLocaleFilesNames = function() { @@ -141,22 +106,18 @@ var getLocaleFilesNames = function() { }); }; -var addPopupHtmlToZip = function(zip) { -    return readFilePromise("../plugin/popup.html") -        .then(function(data) { -            let htmlAsString = data.toString() -                .split("\r") -                .filter(s => !s.includes("/experimental/")) -                .join("\r"); -            zip.add("popup.html", new
zipjs.TextReader(htmlAsString)); - }); +var addPopupHtmlToZip = async function(zip) { + let data = await readFilePromise("../plugin/popup.html"); + let htmlAsString = data.toString() + .split("\r") + .filter(s => !s.includes("/experimental/")) + .join("\r"); + zip.add("popup.html", new zipjs.TextReader(htmlAsString)); }; -var addBinaryFileToZip = function(zip, fileName, nameInZip) { - return readFilePromise(fileName) - .then(function(data) { - zip.add(nameInZip, new zipjs.Uint8ArrayReader(data)); - }); +var addBinaryFileToZip = async function(zip, fileName, nameInZip) { + let data = await readFilePromise(fileName); + zip.add(nameInZip, new zipjs.Uint8ArrayReader(data)); }; var addImageFileToZip = function(zip, fileName) { @@ -169,44 +130,29 @@ var addCssFileToZip = function(zip, fileName) { return addBinaryFileToZip(zip, "../plugin/" + dest, dest); }; -var packNonManifestExtensionFiles = function(zip, packedFileName) { - return addBinaryFileToZip(zip, "../plugin/book128.png", "book128.png") - .then(function() { - return addImageFileToZip(zip, "ChapterStateDownloading.svg"); - }).then(function() { - return addImageFileToZip(zip, "ChapterStateLoaded.svg"); - }).then(function() { - return addImageFileToZip(zip, "ChapterStateNone.svg"); - }).then(function() { - return addImageFileToZip(zip, "ChapterStateSleeping.svg"); - }).then(function() { - return addImageFileToZip(zip, "FileEarmarkCheck.svg"); - }).then(function() { - return addImageFileToZip(zip, "FileEarmarkCheckFill.svg"); - }).then(function() { - return addCssFileToZip(zip, "default.css"); - }).then(function() { - return addCssFileToZip(zip, "alwaysDark.css"); - }).then(function() { - return addCssFileToZip(zip, "autoDark.css"); - }).then(function() { - return getFileList("../plugin/popup.html"); - }).then(function(fileList) { - return getLocaleFilesNames().then(function(localeNames) { - return ["js/ContentScript.js"].concat(localeNames) - .concat(fileList.filter(n => !n.includes("/experimental/"))); - }); - 
}).then(function(fileList) { - return addFilesToZip(zip, fileList); - }).then(function() { - return addPopupHtmlToZip(zip); - }).then(function() { - return writeZipToDisk(zip, packedFileName); - }).then(function() { - console.log("Wrote Zip to disk"); - }).catch(function(err) { - console.log(err); - }); +var packNonManifestExtensionFiles = async function(zip, packedFileName) { + try { + await addBinaryFileToZip(zip, "../plugin/book128.png", "book128.png"); + await addImageFileToZip(zip, "ChapterStateDownloading.svg"); + await addImageFileToZip(zip, "ChapterStateLoaded.svg"); + await addImageFileToZip(zip, "ChapterStateNone.svg"); + await addImageFileToZip(zip, "ChapterStateSleeping.svg"); + await addImageFileToZip(zip, "FileEarmarkCheck.svg"); + await addImageFileToZip(zip, "FileEarmarkCheckFill.svg"); + await addCssFileToZip(zip, "default.css"); + await addCssFileToZip(zip, "alwaysDark.css"); + await addCssFileToZip(zip, "autoDark.css"); + let fileList = await getFileList("../plugin/popup.html"); + let localeNames = await getLocaleFilesNames(); + ["js/ContentScript.js"].concat(localeNames).concat(fileList.filter(n => !n.includes("/experimental/"))); + await addFilesToZip(zip, fileList); + await addPopupHtmlToZip(zip); + await writeZipToDisk(zip, packedFileName); + console.log("Wrote Zip to disk"); + } + catch (err) { + console.log(err); + } }; var makeManifestForFirefox = function(data) { @@ -239,18 +185,42 @@ var makeManifestForChrome = function(data) { return manifest; }; -var packExtension = function(manifest, fileExtension) { +var packExtension = async function(manifest, fileExtension) { let zipFileWriter = new zipjs.BlobWriter("application/epub+zip"); let zipWriter = new zipjs.ZipWriter(zipFileWriter, {useWebWorkers: false,compressionMethod: 8, extendedTimestamp: false}); zipWriter.add("manifest.json", new zipjs.TextReader(JSON.stringify(manifest))); - return packNonManifestExtensionFiles(zipWriter, "WebToEpub" + manifest.version + fileExtension); -}; - -// 
pack the extensions for Chrome and firefox -readFilePromise("../plugin/manifest.json") - .then(function(data) { - packExtension(makeManifestForFirefox(data), ".xpi"); - packExtension(makeManifestForChrome(data), ".zip"); - }).catch(function(err) { + return await packNonManifestExtensionFiles(zipWriter, "WebToEpub" + manifest.version + fileExtension); +}; + + +(async () => { + //================================================================= + // pack source into packed.js So its easy to be examined with eslint + // just run eslint against packed.js + try { + var loadedFiles = []; + let fileList = await getFileList("../plugin/popup.html"); + fileList = adjustedFileListForEslint(fileList); + console.log(fileList); + + await readAllFiles(fileList, loadedFiles); + let temp = ""; + let lineCount = 0; + let index = ""; + for (let f of loadedFiles) { + temp += f.text; + let count = countLines(f.text); + index += makeIndexLine(f.fileName, lineCount, count); + lineCount += count; + } + fs.writeFileSync("packed.js", temp); + fs.writeFileSync("index.csv", index); + + // pack the extensions for Chrome and firefox + let data = await readFilePromise("../plugin/manifest.json"); + await packExtension(makeManifestForFirefox(data), ".xpi"); + await packExtension(makeManifestForChrome(data), ".zip"); + } catch (err) { console.log(err); - }); + } +})(); diff --git a/plugin/js/Download.js b/plugin/js/Download.js index de1ae1e59..f5973cf3e 100644 --- a/plugin/js/Download.js +++ b/plugin/js/Download.js @@ -87,18 +87,19 @@ class Download { } } - static saveOnFirefox(options, cleanup) { - return browser.runtime.getPlatformInfo().then(platformInfo => { + static async saveOnFirefox(options, cleanup) { + try { + let platformInfo = await browser.runtime.getPlatformInfo(); if (Download.isAndroid(platformInfo)) { Download.saveOnFirefoxForAndroid(options, cleanup); } else { - return browser.downloads.download(options).then( - // on Firefox, resolves when "Save As" dialog CLOSES, so no - // need 
to delay past this point. -                downloadId => Download.onDownloadStarted(downloadId, cleanup) -            ); +                // on Firefox, resolves when "Save As" dialog CLOSES, so no +                // need to delay past this point. +                Download.onDownloadStarted(await browser.downloads.download(options), cleanup); } -        }).catch(cleanup); +        } catch { +            cleanup(); +        } } static saveOnFirefoxForAndroid(options, cleanup) { diff --git a/plugin/js/HttpClient.js b/plugin/js/HttpClient.js index a9a49d39b..dee21ff92 100644 --- a/plugin/js/HttpClient.js +++ b/plugin/js/HttpClient.js @@ -297,42 +297,35 @@ class FetchResponseHandler { this.contentType = response.headers.get("content-type"); } -    extractContentFromResponse(response) { +    async extractContentFromResponse(response) { if (this.isHtml()) { -            return this.responseToHtml(response); +            return await this.responseToHtml(response); } else { -            return this.responseToBinary(response); +            return await this.responseToBinary(response); } } -    responseToHtml(response) { -        return response.arrayBuffer().then(function(rawBytes) { -            let data = this.makeTextDecoder(response).decode(rawBytes); -            let html = new DOMParser().parseFromString(data, "text/html"); -            util.setBaseTag(this.response.url, html); -            this.responseXML = html; -            return this; -        }.bind(this)); +    async responseToHtml(response) { +        let rawBytes = await response.arrayBuffer(); +        let data = this.makeTextDecoder(response).decode(rawBytes); +        let html = new DOMParser().parseFromString(data, "text/html"); +        util.setBaseTag(this.response.url, html); +        this.responseXML = html; return this; } -    responseToBinary(response) { -        return response.arrayBuffer().then(function(data) { -            this.arrayBuffer = data; -            return this; -        }.bind(this)); +    async responseToBinary(response) { +        let data = await response.arrayBuffer(); +        this.arrayBuffer = data; return this; } -    responseToText(response) { -        return response.arrayBuffer().then(function(rawBytes) { -            return this.makeTextDecoder(response).decode(rawBytes); -        }.bind(this)); +    async responseToText(response) { + 
let rawBytes = await response.arrayBuffer(); +        return this.makeTextDecoder(response).decode(rawBytes); } -    responseToJson(response) { -        return response.text().then(function(data) { -            this.json = JSON.parse(data); -            return this; -        }.bind(this)); +    async responseToJson(response) { +        let data = await response.text(); +        this.json = JSON.parse(data); return this; } makeTextDecoder(response) { @@ -358,8 +351,8 @@ class FetchJsonResponseHandler extends FetchResponseHandler { super(); } -    extractContentFromResponse(response) { -        return super.responseToJson(response); +    async extractContentFromResponse(response) { +        return await super.responseToJson(response); } } @@ -368,8 +361,8 @@ class FetchTextResponseHandler extends FetchResponseHandler { super(); } -    extractContentFromResponse(response) { -        return super.responseToText(response); +    async extractContentFromResponse(response) { +        return await super.responseToText(response); } } @@ -378,7 +371,7 @@ class FetchHtmlResponseHandler extends FetchResponseHandler { super(); } -    extractContentFromResponse(response) { -        return super.responseToHtml(response); +    async extractContentFromResponse(response) { +        return await super.responseToHtml(response); } } diff --git a/plugin/js/Library.js b/plugin/js/Library.js index e484ceea0..a12255549 100644 --- a/plugin/js/Library.js +++ b/plugin/js/Library.js @@ -819,17 +819,18 @@ class Library { // eslint-disable-line no-unused-vars Library.LibClearFields(); } -    static LibSearchNewChapter(objbtn) { +    static async LibSearchNewChapter(objbtn) { let LibGetURL = ["LibStoryURL" + objbtn.dataset.libepubid]; -        chrome.storage.local.get(LibGetURL, function(items) { +        await chrome.storage.local.get(LibGetURL, async function(items) { Library.LibClearFields(); document.getElementById("startingUrlInput").value = items[LibGetURL]; //document.getElementById("libinvisbutton").click(); // load page via XmlHTTPRequest -            main.onLoadAndAnalyseButtonClick().then(function() { -            },function(e) { +            try { +                await 
main.onLoadAndAnalyseButtonClick(); + } catch (e) { ErrorLog.showErrorMessage(e); -            }); +            } }); } diff --git a/plugin/js/parsers/BakaTsukiParser.js b/plugin/js/parsers/BakaTsukiParser.js index b48353dc0..df04421ae 100644 --- a/plugin/js/parsers/BakaTsukiParser.js +++ b/plugin/js/parsers/BakaTsukiParser.js @@ -391,23 +391,23 @@ class BakaTsukiParser extends Parser { } } -    onFetchImagesClicked() { +    async onFetchImagesClicked() { if (0 == this.imageCollector.imageInfoList.length) { ErrorLog.showErrorMessage(UIText.Error.noImagesFound); } else { -            this.fetchContent(); +            await this.fetchContent(); } } -    fetchContent() { +    async fetchContent() { this.rebuildImagesToFetch(); this.setUiToShowLoadingProgress(this.imageCollector.numberOfImagesToFetch()); -        return this.imageCollector.fetchImages(() => this.updateProgressBarOneStep(), this.state.firstPageDom.baseURI) -            .then(function() { -                main.getPackEpubButton().disabled = false; -            }).catch(function(err) { -                ErrorLog.log(err); -            }); +        try { +            await this.imageCollector.fetchImages(() => this.updateProgressBarOneStep(), this.state.firstPageDom.baseURI); +            main.getPackEpubButton().disabled = false; +        } catch (err) { +            ErrorLog.log(err); +        } } updateProgressBarOneStep() { diff --git a/plugin/js/parsers/ComrademaoParser.js b/plugin/js/parsers/ComrademaoParser.js index c7fba5c6d..106339aed 100644 --- a/plugin/js/parsers/ComrademaoParser.js +++ b/plugin/js/parsers/ComrademaoParser.js @@ -16,12 +16,12 @@ class ComrademaoParser extends Parser { document.getElementById("removeOriginalRow").hidden = false; } -    getChapterUrls(dom, chapterUrlsUI) { -        return this.getChapterUrlsFromMultipleTocPages(dom, +    async getChapterUrls(dom, chapterUrlsUI) { +        return (await this.getChapterUrlsFromMultipleTocPages(dom, ComrademaoParser.extractPartialChapterList, ComrademaoParser.getUrlsOfTocPages, chapterUrlsUI -        ).then(urls => urls.reverse()); +        )).reverse(); } static getUrlsOfTocPages(dom) { diff --git a/plugin/js/parsers/FlyingLinesParser.js
b/plugin/js/parsers/FlyingLinesParser.js index 565c8f65a..7f7ab7695 100644 --- a/plugin/js/parsers/FlyingLinesParser.js +++ b/plugin/js/parsers/FlyingLinesParser.js @@ -34,13 +34,11 @@ class FlyingLinesParser extends Parser { } // this is basically identical to NovelSpread - fetchChapter(url) { - return HttpClient.wrapFetch(url).then(function(xhr) { - let restUrl = FlyingLinesParser.extractRestUrl(xhr.responseXML); - return HttpClient.fetchJson(restUrl); - }).then(function(handler) { - return FlyingLinesParser.buildChapter(handler.json.data); - }); + async fetchChapter(url) { + let xhr = await HttpClient.wrapFetch(url) + let restUrl = FlyingLinesParser.extractRestUrl(xhr.responseXML); + let handler = await HttpClient.fetchJson(restUrl); + return FlyingLinesParser.buildChapter(handler.json.data); } static extractRestUrl(dom) { diff --git a/plugin/js/parsers/GravityTalesParser.js b/plugin/js/parsers/GravityTalesParser.js index c97371965..5248a27fb 100644 --- a/plugin/js/parsers/GravityTalesParser.js +++ b/plugin/js/parsers/GravityTalesParser.js @@ -23,7 +23,7 @@ class GravityTalesParser extends Parser { // older logic let novelId = GravityTalesParser.getNovelId(dom); if (novelId !== null) { - return GravityTalesParser.fetchUrlsOfChapters(novelId, dom.baseURI, HttpClient.fetchJson); + return await GravityTalesParser.fetchUrlsOfChapters(novelId, dom.baseURI, HttpClient.fetchJson); } let content = this.findContent(dom) || dom.querySelector("chapters") || @@ -87,26 +87,22 @@ class GravityTalesParser extends Parser { return param.split("=").map(s => s.trim()); } - static fetchUrlsOfChapters(novelId, baseUri, fetchJson) { + static async fetchUrlsOfChapters(novelId, baseUri, fetchJson) { let chapterGroupsUrl = `https://gravitytales.com/api/novels/chaptergroups/${novelId}`; - return fetchJson(chapterGroupsUrl).then(function(handler) { - return Promise.all( - handler.json.map(group => GravityTalesParser.fetchChapterListForGroup(novelId, group, fetchJson)) - ); - 
}).then(function(chapterLists) { - return GravityTalesParser.mergeChapterLists(chapterLists, baseUri); - }); + let handler = await fetchJson(chapterGroupsUrl); + let chapterLists = await Promise.all(handler.json.map(group => + GravityTalesParser.fetchChapterListForGroup(novelId, group, fetchJson))); + return GravityTalesParser.mergeChapterLists(chapterLists, baseUri); } - static fetchChapterListForGroup(novelId, chapterGroup, fetchJson) { + static async fetchChapterListForGroup(novelId, chapterGroup, fetchJson) { let groupId = chapterGroup.ChapterGroupId; let chaptersUrl = `https://gravitytales.com/api/novels/chaptergroup/${groupId}`; - return fetchJson(chaptersUrl).then(function(handler) { - return { - groupTitle: chapterGroup.Title, - chapters: handler.json - }; - }); + let handler = await fetchJson(chaptersUrl) + return { + groupTitle: chapterGroup.Title, + chapters: handler.json + }; } static mergeChapterLists(chapterLists, baseUri) { diff --git a/plugin/js/parsers/KakaoParser.js b/plugin/js/parsers/KakaoParser.js index 5f69690fb..4f8d35a90 100644 --- a/plugin/js/parsers/KakaoParser.js +++ b/plugin/js/parsers/KakaoParser.js @@ -63,44 +63,43 @@ class KakaoParser extends Parser { jsonUrl = url + "/body"; } - return this.wrapFetch(jsonUrl).then((jsonResponse) => { - let json = jsonResponse.json; - - let doc = Parser.makeEmptyDocForContent(url); - - let metaChapId = doc.dom.createElement("meta"); - metaChapId.id = "chapterId"; - metaChapId.content = url.split("/")[6]; - doc.content.appendChild(metaChapId); - - let metaNovelId = doc.dom.createElement("meta"); - metaNovelId.id = "novelId"; - metaNovelId.content = url.split("/")[4]; - doc.content.appendChild(metaNovelId); - - let novelTitle = doc.dom.createElement("meta"); - novelTitle.id = "novelTitle"; - novelTitle.content = json.novelTitle; - doc.content.appendChild(novelTitle); - - let body = json.body.split("\n").filter(s => !util.isNullOrEmpty(s)); - - let title = doc.dom.createElement("h1"); - title.id = 
"title"; - title.textContent = json.title + " - " + body[0]; - doc.content.appendChild(title); - - let div = doc.dom.createElement("div"); - div.id = "content"; - for (let i = 1; i < body.length; ++i) { - let p = doc.dom.createElement("p"); - p.textContent = body[i]; - div.appendChild(p); - } - doc.content.appendChild(div); - - return doc.dom; - }); + let jsonResponse = await this.wrapFetch(jsonUrl) + let json = jsonResponse.json; + + let doc = Parser.makeEmptyDocForContent(url); + + let metaChapId = doc.dom.createElement("meta"); + metaChapId.id = "chapterId"; + metaChapId.content = url.split("/")[6]; + doc.content.appendChild(metaChapId); + + let metaNovelId = doc.dom.createElement("meta"); + metaNovelId.id = "novelId"; + metaNovelId.content = url.split("/")[4]; + doc.content.appendChild(metaNovelId); + + let novelTitle = doc.dom.createElement("meta"); + novelTitle.id = "novelTitle"; + novelTitle.content = json.novelTitle; + doc.content.appendChild(novelTitle); + + let body = json.body.split("\n").filter(s => !util.isNullOrEmpty(s)); + + let title = doc.dom.createElement("h1"); + title.id = "title"; + title.textContent = json.title + " - " + body[0]; + doc.content.appendChild(title); + + let div = doc.dom.createElement("div"); + div.id = "content"; + for (let i = 1; i < body.length; ++i) { + let p = doc.dom.createElement("p"); + p.textContent = body[i]; + div.appendChild(p); + } + doc.content.appendChild(div); + + return doc.dom; } findContent(dom) { @@ -109,30 +108,28 @@ class KakaoParser extends Parser { async getChapterUrls(dom) { let jsonUrl = dom.baseURI.replace("pagestage", "api-pagestage"); - return this.wrapFetch(jsonUrl).then(jsonResponse => { - let json = jsonResponse.json; - - jsonUrl = dom.baseURI.replace("pagestage", "api-pagestage") - + "/episodes?size=" + json.publishedEpisodeCount - + "&sort=publishedAt,id,asc"; - return this.wrapFetch(jsonUrl); - }).then(jsonResponse => { - let json = jsonResponse.json; - - let chapterList = []; - for (let 
chapter of json.content) { - let url = dom.baseURI.replace("pagestage", "api-pagestage") - + "/episodes/" + chapter.id; - let chapterInfo = { - sourceUrl: url, - title: chapter.title, - newArc: null - }; - chapterList.push(chapterInfo); - } - - return chapterList; - }); + let jsonResponse = await this.wrapFetch(jsonUrl) + let json = jsonResponse.json; + + jsonUrl = dom.baseURI.replace("pagestage", "api-pagestage") + + "/episodes?size=" + json.publishedEpisodeCount + + "&sort=publishedAt,id,asc"; + let jsonChaptersResponse = await this.wrapFetch(jsonUrl); + let jsonChapters = jsonChaptersResponse.json; + + let chapterList = []; + for (let chapter of jsonChapters.content) { + let url = dom.baseURI.replace("pagestage", "api-pagestage") + + "/episodes/" + chapter.id; + let chapterInfo = { + sourceUrl: url, + title: chapter.title, + newArc: null + }; + chapterList.push(chapterInfo); + } + + return chapterList; } // extractAuthor diff --git a/plugin/js/parsers/KobatochanParser.js b/plugin/js/parsers/KobatochanParser.js index efe954bdd..83a941b0b 100644 --- a/plugin/js/parsers/KobatochanParser.js +++ b/plugin/js/parsers/KobatochanParser.js @@ -8,13 +8,12 @@ class KobatochanParser extends WordpressBaseParser { super(); } - fetchChapter(url) { - return HttpClient.wrapFetch(url).then((xhr) => { - let newDom = xhr.responseXML; - let extraPageUrls = KobatochanParser.findAdditionalPageUrls(newDom); - KobatochanParser.removePaginationElements(newDom); - return this.fetchAdditionalPages(newDom, extraPageUrls.reverse()); - }); + async fetchChapter(url) { + let xhr = await HttpClient.wrapFetch(url); + let newDom = xhr.responseXML; + let extraPageUrls = KobatochanParser.findAdditionalPageUrls(newDom); + KobatochanParser.removePaginationElements(newDom); + return await this.fetchAdditionalPages(newDom, extraPageUrls.reverse()); } static findAdditionalPageUrls(dom) { @@ -27,20 +26,19 @@ class KobatochanParser extends WordpressBaseParser { return pages; } - fetchAdditionalPages(dom, 
extraPageUrls) { + async fetchAdditionalPages(dom, extraPageUrls) { if (extraPageUrls.length === 0) { return Promise.resolve(dom); } - return HttpClient.wrapFetch(extraPageUrls.pop()).then((xhr) => { - let newDom = xhr.responseXML; - KobatochanParser.removePaginationElements(newDom); - let dest = this.findContent(dom); - let src = this.findContent(newDom); - for (let node of [...src.childNodes]) { - dest.appendChild(node); - } - return this.fetchAdditionalPages(dom, extraPageUrls); - }); + let xhr = await HttpClient.wrapFetch(extraPageUrls.pop()); + let newDom = xhr.responseXML; + KobatochanParser.removePaginationElements(newDom); + let dest = this.findContent(dom); + let src = this.findContent(newDom); + for (let node of [...src.childNodes]) { + dest.appendChild(node); + } + return await this.fetchAdditionalPages(dom, extraPageUrls); } static removePaginationElements(dom) { diff --git a/plugin/js/parsers/LibersparkParser.js b/plugin/js/parsers/LibersparkParser.js index 3af37cbe1..d2b753d8c 100644 --- a/plugin/js/parsers/LibersparkParser.js +++ b/plugin/js/parsers/LibersparkParser.js @@ -13,13 +13,12 @@ class LibersparkParser extends Parser { super(); } - getChapterUrls(dom) { + async getChapterUrls(dom) { // Page in browser has chapter links reduced to 5 // Fetch page again to get all chapter links. 
- return HttpClient.wrapFetch(dom.baseURI).then(function(xhr) { - let table = xhr.responseXML.querySelector("table#novel-chapters-list"); - return util.hyperlinksToChapterList(table).reverse(); - }); + let xhr = await HttpClient.wrapFetch(dom.baseURI); + let table = xhr.responseXML.querySelector("table#novel-chapters-list"); + return util.hyperlinksToChapterList(table).reverse(); } findContent(dom) { diff --git a/plugin/js/parsers/LiteroticaParser.js b/plugin/js/parsers/LiteroticaParser.js index cb88dbce8..0e9e49e05 100644 --- a/plugin/js/parsers/LiteroticaParser.js +++ b/plugin/js/parsers/LiteroticaParser.js @@ -113,15 +113,13 @@ class LiteroticaParser extends Parser { return dom.querySelector("h1.headline"); } - fetchChapter(url) { + async fetchChapter(url) { let dom = null; - return HttpClient.wrapFetch(url).then(function(xhr) { - dom = xhr.responseXML; - let pageUrls = LiteroticaParser.findUrlsOfAdditionalPagesMakingChapter(url, dom); - return Promise.all(pageUrls.map(LiteroticaParser.fetchAdditionalPageContent)); - }).then(function(fragments) { - return LiteroticaParser.assembleChapter(dom, fragments); - }); + let xhr = await HttpClient.wrapFetch(url); + dom = xhr.responseXML; + let pageUrls = LiteroticaParser.findUrlsOfAdditionalPagesMakingChapter(url, dom); + let fragments = await Promise.all(pageUrls.map(LiteroticaParser.fetchAdditionalPageContent)); + return LiteroticaParser.assembleChapter(dom, fragments); } static findUrlsOfAdditionalPagesMakingChapter(url, dom) { @@ -136,10 +134,9 @@ class LiteroticaParser extends Parser { return urls; } - static fetchAdditionalPageContent(url) { - return HttpClient.wrapFetch(url).then(function(xhr) { - return LiteroticaParser.contentForPage(xhr.responseXML); - }); + static async fetchAdditionalPageContent(url) { + let xhr = await HttpClient.wrapFetch(url); + return LiteroticaParser.contentForPage(xhr.responseXML); } static assembleChapter(dom, fragments) { diff --git a/plugin/js/parsers/LnmtlParser.js 
b/plugin/js/parsers/LnmtlParser.js index 89444ed42..421023f47 100644 --- a/plugin/js/parsers/LnmtlParser.js +++ b/plugin/js/parsers/LnmtlParser.js @@ -16,12 +16,11 @@ class LnmtlParser extends Parser { document.getElementById("removeTranslatedRow").hidden = false; } - getChapterUrls(dom) { + async getChapterUrls(dom) { let volumesList = LnmtlParser.findVolumesList(dom); if (volumesList.length !== 0) { - return LnmtlParser.fetchChapterLists(volumesList, HttpClient.fetchJson).then(function(lists) { - return LnmtlParser.mergeChapterLists(lists); - }); + let lists = await LnmtlParser.fetchChapterLists(volumesList, HttpClient.fetchJson); + return LnmtlParser.mergeChapterLists(lists); } let table = dom.querySelector("#volumes-container table"); @@ -67,23 +66,22 @@ class LnmtlParser extends Parser { return []; } - static fetchChapterLists(volumesList, fetchJson) { - return Promise.all( + static async fetchChapterLists(volumesList, fetchJson) { + return await Promise.all( volumesList.map(volume => LnmtlParser.fetchChapterListsForVolume(volume, fetchJson)) ); } - static fetchChapterListsForVolume(volumeInfo, fetchJson) { + static async fetchChapterListsForVolume(volumeInfo, fetchJson) { let restUrl = LnmtlParser.makeChapterListUrl(volumeInfo.id, 1); - return fetchJson(restUrl).then(function(handler) { - let firstPage = handler.json; - let pagesForVolume = [Promise.resolve(handler)]; - for ( let i = 2; i <= firstPage.last_page; ++i) { - let url = LnmtlParser.makeChapterListUrl(volumeInfo.id, i); - pagesForVolume.push(fetchJson(url)); - } - return Promise.all(pagesForVolume); - }); + let handler = await fetchJson(restUrl); + let firstPage = handler.json; + let pagesForVolume = [handler]; + for ( let i = 2; i <= firstPage.last_page; ++i) { + let url = LnmtlParser.makeChapterListUrl(volumeInfo.id, i); + pagesForVolume.push(await fetchJson(url)); + } + return pagesForVolume; } static makeChapterListUrl(volumeId, page) { diff --git a/plugin/js/parsers/NovelUniverseParser.js 
b/plugin/js/parsers/NovelUniverseParser.js index d4c25a598..9400a6d6f 100644 --- a/plugin/js/parsers/NovelUniverseParser.js +++ b/plugin/js/parsers/NovelUniverseParser.js @@ -11,20 +11,19 @@ class NovelUniverseParser extends Parser { super(); } - getChapterUrls(dom) { - return NovelUniverseParser.fetchRestOfToc(dom, []); + async getChapterUrls(dom) { + return await NovelUniverseParser.fetchRestOfToc(dom, []); } - static fetchRestOfToc(dom, chapterList) { + static async fetchRestOfToc(dom, chapterList) { let nextPage = NovelUniverseParser.urlOfNextToC(dom); let newChapters = NovelUniverseParser.extractPartialChapterList(dom); chapterList = chapterList.concat(newChapters); if (nextPage.length == 0) { return Promise.resolve(chapterList); } - return HttpClient.wrapFetch(nextPage[0].href).then(function(xhr) { - return NovelUniverseParser.fetchRestOfToc(xhr.responseXML, chapterList); - }); + let xhr = await HttpClient.wrapFetch(nextPage[0].href); + return await NovelUniverseParser.fetchRestOfToc(xhr.responseXML, chapterList); } static urlOfNextToC(dom) { diff --git a/plugin/js/parsers/NovelUpdatesParser.js b/plugin/js/parsers/NovelUpdatesParser.js index efd9d2dd4..d07cecb0f 100644 --- a/plugin/js/parsers/NovelUpdatesParser.js +++ b/plugin/js/parsers/NovelUpdatesParser.js @@ -12,28 +12,25 @@ class NovelUpdatesParser extends Parser { // returns promise with the URLs of the chapters to fetch // promise is used because may need to fetch the list of URLs from internet - getChapterUrls(dom) { - return NovelUpdatesParser.fetchChapterUrls(dom).then(function(links) { - let chapters = links.map(l => util.hyperLinkToChapter(l)); - return Promise.resolve(chapters.reverse()); - }); + async getChapterUrls(dom) { + let links = await NovelUpdatesParser.fetchChapterUrls(dom); + let chapters = links.map(l => util.hyperLinkToChapter(l)); + return chapters.reverse(); } - static fetchChapterUrls(dom) { + static async fetchChapterUrls(dom) { let extraPagesWithToc = 
NovelUpdatesParser.findPagesWithToC(dom); - return Promise.all( + let chapterLists = await Promise.all( extraPagesWithToc.map(url => NovelUpdatesParser.fetchChapterListFromPage(url)) - ).then(function(chapterLists) { - return chapterLists.reduce(function(prev, current) { - return prev.concat(current); - }, NovelUpdatesParser.chapterLinksFromDom(dom)); - }); + ); + return chapterLists.reduce(function(prev, current) { + return prev.concat(current); + }, NovelUpdatesParser.chapterLinksFromDom(dom)); } - static fetchChapterListFromPage(url) { - return HttpClient.wrapFetch(url).then(function(xhr) { - return Promise.resolve(NovelUpdatesParser.chapterLinksFromDom(xhr.responseXML)); - }); + static async fetchChapterListFromPage(url) { + let xhr = await HttpClient.wrapFetch(url); + return NovelUpdatesParser.chapterLinksFromDom(xhr.responseXML); } static chapterLinksFromDom(dom) { diff --git a/plugin/js/parsers/OnlinenovelbookParser.js b/plugin/js/parsers/OnlinenovelbookParser.js index 57d8c8255..4f1017c6b 100644 --- a/plugin/js/parsers/OnlinenovelbookParser.js +++ b/plugin/js/parsers/OnlinenovelbookParser.js @@ -8,16 +8,17 @@ class OnlinenovelbookParser extends WordpressBaseParser { super(); } - getChapterUrls(dom, chapterUrlsUI) { + async getChapterUrls(dom, chapterUrlsUI) { let paginationUrl = OnlinenovelbookParser.getLastPaginationUrl(dom); if (paginationUrl === null) { return super.getChapterUrls(dom); } - return this.getChapterUrlsFromMultipleTocPages(dom, + let urls = await this.getChapterUrlsFromMultipleTocPages(dom, OnlinenovelbookParser.extractPartialChapterList, OnlinenovelbookParser.getUrlsOfTocPages, chapterUrlsUI - ).then(c => c.reverse()); + ); + return urls.reverse(); } static getUrlsOfTocPages(dom) { diff --git a/plugin/js/parsers/QinxiaoshuoParser.js b/plugin/js/parsers/QinxiaoshuoParser.js index f2d4bc9e3..2e05d9724 100644 --- a/plugin/js/parsers/QinxiaoshuoParser.js +++ b/plugin/js/parsers/QinxiaoshuoParser.js @@ -35,27 +35,25 @@ class QinxiaoshuoParser 
extends Parser { return util.getFirstImgSrc(dom, "div.book_info"); } - fetchChapter(url) { - return HttpClient.wrapFetch(url).then(function(xhr) { - let finalDom = xhr.responseXML; - let fetchedUrls = new Set(); - fetchedUrls.add(url); - fetchedUrls.add(url + "?xiaoshuo=1"); - let nextUrl = QinxiaoshuoParser.urlOfNextPageOfChapter(finalDom, fetchedUrls); - return QinxiaoshuoParser.fetchPagesOfChapter(finalDom, fetchedUrls, nextUrl); - }); + async fetchChapter(url) { + let xhr = await HttpClient.wrapFetch(url); + let finalDom = xhr.responseXML; + let fetchedUrls = new Set(); + fetchedUrls.add(url); + fetchedUrls.add(url + "?xiaoshuo=1"); + let nextUrl = QinxiaoshuoParser.urlOfNextPageOfChapter(finalDom, fetchedUrls); + return await QinxiaoshuoParser.fetchPagesOfChapter(finalDom, fetchedUrls, nextUrl); } - static fetchPagesOfChapter(finalDom, fetchedUrls, url) { + static async fetchPagesOfChapter(finalDom, fetchedUrls, url) { if (url === null) { return Promise.resolve(finalDom); } else { - return HttpClient.wrapFetch(url).then(function(xhr) { - fetchedUrls.add(url); - QinxiaoshuoParser.copyContentNodes(finalDom, xhr.responseXML); - let nextUrl = QinxiaoshuoParser.urlOfNextPageOfChapter(xhr.responseXML, fetchedUrls); - return QinxiaoshuoParser.fetchPagesOfChapter(finalDom, fetchedUrls, nextUrl); - }); + let xhr = await HttpClient.wrapFetch(url); + fetchedUrls.add(url); + QinxiaoshuoParser.copyContentNodes(finalDom, xhr.responseXML); + let nextUrl = QinxiaoshuoParser.urlOfNextPageOfChapter(xhr.responseXML, fetchedUrls); + return await QinxiaoshuoParser.fetchPagesOfChapter(finalDom, fetchedUrls, nextUrl); } } diff --git a/plugin/js/parsers/ReadNovelFullParser.js b/plugin/js/parsers/ReadNovelFullParser.js index b9454556a..9cdea4d17 100644 --- a/plugin/js/parsers/ReadNovelFullParser.js +++ b/plugin/js/parsers/ReadNovelFullParser.js @@ -7,20 +7,19 @@ class ReadNovelFullParser extends Parser { super(); } - getChapterUrls(dom) { + async getChapterUrls(dom) { let chapters = 
ReadNovelFullParser.extractChapterList(dom); if (0 < chapters.length) { return Promise.resolve(chapters); } - return ReadNovelFullParser.fetchChapterList(dom); + return await ReadNovelFullParser.fetchChapterList(dom); } - static fetchChapterList(dom) { + static async fetchChapterList(dom) { let novelId = dom.querySelector("div#rating").getAttribute("data-novel-id"); let url = `https://readnovelfull.com/ajax/chapter-archive?novelId=${novelId}`; - return HttpClient.wrapFetch(url).then(function(xhr) { - return ReadNovelFullParser.extractChapterList(xhr.responseXML); - }); + let xhr = await HttpClient.wrapFetch(url); + return ReadNovelFullParser.extractChapterList(xhr.responseXML); } static extractChapterList(dom) { diff --git a/plugin/js/parsers/Wenku8Parser.js b/plugin/js/parsers/Wenku8Parser.js index 2b45cc5d5..cce12f77a 100644 --- a/plugin/js/parsers/Wenku8Parser.js +++ b/plugin/js/parsers/Wenku8Parser.js @@ -7,13 +7,12 @@ class Wenku8Parser extends Parser { super(); } - getChapterUrls(dom) { + async getChapterUrls(dom) { let id = Wenku8Parser.extractBookId(dom); let tocUrl = ` https://www.wenku8.net/modules/article/reader.php?aid=${id}`; - return HttpClient.wrapFetch(tocUrl, this.makeOptions()).then(function(xhr) { - let menu = xhr.responseXML.querySelector("table"); - return Promise.resolve(util.hyperlinksToChapterList(menu)); - }); + let xhr = await HttpClient.wrapFetch(tocUrl, this.makeOptions()); + let menu = xhr.responseXML.querySelector("table"); + return util.hyperlinksToChapterList(menu); } static extractBookId(dom) { @@ -42,11 +41,10 @@ class Wenku8Parser extends Parser { return util.getFirstImgSrc(dom, "div#content"); } - fetchChapter(url) { + async fetchChapter(url) { // site does not tell us GBK is used to encode text - return HttpClient.wrapFetch(url, this.makeOptions()).then(function(xhr) { - return Promise.resolve(xhr.responseXML); - }); + let xhr = await HttpClient.wrapFetch(url, this.makeOptions()); + return xhr.responseXML; } makeOptions() { diff 
--git a/plugin/js/parsers/XiaoshuoguiParser.js b/plugin/js/parsers/XiaoshuoguiParser.js index dfa00946f..9fc5e0722 100644 --- a/plugin/js/parsers/XiaoshuoguiParser.js +++ b/plugin/js/parsers/XiaoshuoguiParser.js @@ -34,11 +34,10 @@ class XiaoshuoguiParser extends Parser { return util.getFirstImgSrc(dom, "div#bookinfo"); } - fetchChapter(url) { + async fetchChapter(url) { // site does not tell us gb18030 is used to encode text - return HttpClient.wrapFetch(url, this.makeOptions()).then(function(xhr) { - return Promise.resolve(xhr.responseXML); - }); + let xhr = await HttpClient.wrapFetch(url, this.makeOptions()); + return xhr.responseXML; } makeOptions() { diff --git a/plugin/js/parsers/ZenithNovelsParser.js b/plugin/js/parsers/ZenithNovelsParser.js index 442b8c175..d7715a49e 100644 --- a/plugin/js/parsers/ZenithNovelsParser.js +++ b/plugin/js/parsers/ZenithNovelsParser.js @@ -8,12 +8,13 @@ class ZenithNovelsParser extends WordpressBaseParser { super(); } - getChapterUrls(dom, chapterUrlsUI) { - return this.getChapterUrlsFromMultipleTocPages(dom, + async getChapterUrls(dom, chapterUrlsUI) { + let urls = await this.getChapterUrlsFromMultipleTocPages(dom, ZenithNovelsParser.extractPartialChapterList, ZenithNovelsParser.getUrlsOfTocPages, chapterUrlsUI - ).then(l => l.reverse()); + ); + return urls.reverse(); } static getUrlsOfTocPages(dom) { diff --git a/unitTest/UtestChineseFantasyNovelsParser.js b/unitTest/UtestChineseFantasyNovelsParser.js index affded137..eb9a7dbfe 100644 --- a/unitTest/UtestChineseFantasyNovelsParser.js +++ b/unitTest/UtestChineseFantasyNovelsParser.js @@ -2,18 +2,15 @@ module("ChineseFantasyNovelsParser"); -test("getChapterUrls", function (assert) { +test("getChapterUrls", async function (assert) { let dom = new DOMParser().parseFromString(ChineseFantasyNovelsToCSample, "text/html"); - let done = assert.async(); let parser = new ChineseFantasyNovelsParser(); - parser.getChapterUrls(dom).then(function(chapterUrls) { - 
assert.equal(chapterUrls.length, 2); - assert.deepEqual(chapterUrls[1], { - newArc: null, - sourceUrl: "https://m.chinesefantasynovels.com/421/91906.html", - title: "Chapter 2: Living with great joy, dying with no regrets" - }); - done(); + let chapterUrls = await parser.getChapterUrls(dom); + assert.equal(chapterUrls.length, 2); + assert.deepEqual(chapterUrls[1], { + newArc: null, + sourceUrl: "https://m.chinesefantasynovels.com/421/91906.html", + title: "Chapter 2: Living with great joy, dying with no regrets" }); }); diff --git a/unitTest/UtestFanFictionParser.js b/unitTest/UtestFanFictionParser.js index 95116ccbb..762b352ba 100644 --- a/unitTest/UtestFanFictionParser.js +++ b/unitTest/UtestFanFictionParser.js @@ -11,17 +11,14 @@ function loadFanFictionSinglePageDoc() { return util.syncLoadSampleDoc("../testdata/FanFictionSinglePage.html", "https://www.fanfiction.net/s/1234567/1/WebToEpub") } -QUnit.test("getChapterUrls", function (assert) { - let done = assert.async(); +QUnit.test("getChapterUrls", async function (assert) { let parser = new FanFictionParser(); - parser.getChapterUrls(loadFanFictionMultiPageDoc()).then(function (chapterUrls) { - chapterUrls = parser.cleanWebPageUrls(chapterUrls); - assert.equal(chapterUrls.length, 5); - assert.equal(chapterUrls[0].sourceUrl, "https://www.fanfiction.net/s/1234567/1/WebToEpub"); - assert.equal(chapterUrls[1].sourceUrl, "https://www.fanfiction.net/s/1234567/2/WebToEpub"); - assert.equal(chapterUrls[4].title, "5. Using Chrome's \"Inspect Element\" to examine the DOM"); - done(); - }); + let chapterUrls = await parser.getChapterUrls(loadFanFictionMultiPageDoc()) + chapterUrls = parser.cleanWebPageUrls(chapterUrls); + assert.equal(chapterUrls.length, 5); + assert.equal(chapterUrls[0].sourceUrl, "https://www.fanfiction.net/s/1234567/1/WebToEpub"); + assert.equal(chapterUrls[1].sourceUrl, "https://www.fanfiction.net/s/1234567/2/WebToEpub"); + assert.equal(chapterUrls[4].title, "5. 
Using Chrome's \"Inspect Element\" to examine the DOM"); }); QUnit.test("findMultiPageContent", function (assert) { @@ -46,15 +43,12 @@ QUnit.test("parserFactory", function (assert) { assert.ok(parser instanceof FanFictionParser); }); -QUnit.test("getSingleChapterUrls", function (assert) { - let done = assert.async(); +QUnit.test("getSingleChapterUrls", async function (assert) { let parser = new FanFictionParser(); - parser.getChapterUrls(loadFanFictionSinglePageDoc()).then(function (chapterUrls) { - assert.equal(chapterUrls.length, 1); - assert.equal(chapterUrls[0].sourceUrl, "https://www.fanfiction.net/s/1234567/1/WebToEpub"); - assert.equal(chapterUrls[0].title, "Web to Epub"); - done(); - }); + let chapterUrls = await parser.getChapterUrls(loadFanFictionSinglePageDoc()); + assert.equal(chapterUrls.length, 1); + assert.equal(chapterUrls[0].sourceUrl, "https://www.fanfiction.net/s/1234567/1/WebToEpub"); + assert.equal(chapterUrls[0].title, "Web to Epub"); }); QUnit.test("findSinglePageContent", function (assert) { diff --git a/unitTest/UtestFicwadParser.js b/unitTest/UtestFicwadParser.js index b63f5738f..499708774 100644 --- a/unitTest/UtestFicwadParser.js +++ b/unitTest/UtestFicwadParser.js @@ -2,18 +2,15 @@ module("Ficwad"); -test("findChapters_storyIndex", function (assert) { +test("findChapters_storyIndex", async function (assert) { let dom = new DOMParser().parseFromString(FicWadStoryIndexSample, "text/html"); - let done = assert.async(); let parser = new FicwadParser(); - parser.getChapterUrls(dom).then(function(chapterUrls) { - assert.equal(chapterUrls.length, 4); - assert.deepEqual(chapterUrls[3], { - newArc: null, - sourceUrl: "https://ficwad.com/story/51728", - title: "I Love You" - }); - done(); + let chapterUrls = await parser.getChapterUrls(dom); + assert.equal(chapterUrls.length, 4); + assert.deepEqual(chapterUrls[3], { + newArc: null, + sourceUrl: "https://ficwad.com/story/51728", + title: "I Love You" }); }); @@ -24,17 +21,14 @@ 
test("extractTitleImpl_storyIndex", function (assert) { assert.equal(actual.textContent, "Beyond the Mask"); }); -test("findChapters_chapter", function (assert) { +test("findChapters_chapter", async function (assert) { let dom = new DOMParser().parseFromString(FicWadChapterSample, "text/html"); - let done = assert.async(); let parser = new FicwadParser(); - parser.getChapterUrls(dom).then(function(chapterUrls) { - assert.equal(chapterUrls.length, 7); - assert.deepEqual(chapterUrls[6], { - sourceUrl: "https://ficwad.com/story/24278", - title: "6. Part Six--The Last Chapter" - }); - done(); + let chapterUrls = await parser.getChapterUrls(dom); + assert.equal(chapterUrls.length, 7); + assert.deepEqual(chapterUrls[6], { + sourceUrl: "https://ficwad.com/story/24278", + title: "6. Part Six--The Last Chapter" }); }); diff --git a/unitTest/UtestGravityTalesParser.js b/unitTest/UtestGravityTalesParser.js index de5553096..4795e64e1 100644 --- a/unitTest/UtestGravityTalesParser.js +++ b/unitTest/UtestGravityTalesParser.js @@ -54,25 +54,15 @@ function fetchJsonStub(url) { return Promise.resolve({ json: lookup.get(url) }); } -test("fetchChapterListForGroup", function (assert) { - let done = assert.async(); +test("fetchChapterListForGroup", async function (assert) { let chapterGroup = {ChapterGroupId:1, Title:"Group 1"}; - GravityTalesParser.fetchChapterListForGroup(1, chapterGroup, fetchJsonStub).then( - function(actual) { - assert.deepEqual(actual, expectedChapterLists[0]); - done(); - } - ); + let actual = await GravityTalesParser.fetchChapterListForGroup(1, chapterGroup, fetchJsonStub); + assert.deepEqual(actual, expectedChapterLists[0]); }); -test("fetchUrlsOfChapters", function (assert) { - let done = assert.async(); - GravityTalesParser.fetchUrlsOfChapters(1, baseUri, fetchJsonStub).then( - function(actual) { - assert.deepEqual(actual, expectedFinalChapters); - done(); - } - ); +test("fetchUrlsOfChapters", async function (assert) { + let actual = await 
GravityTalesParser.fetchUrlsOfChapters(1, baseUri, fetchJsonStub); + assert.deepEqual(actual, expectedFinalChapters); }); test("searchForNovelIdinString_idNotPresent", function (assert) { diff --git a/unitTest/UtestImageCollector.js b/unitTest/UtestImageCollector.js index b92ba87b1..f4534b2a3 100644 --- a/unitTest/UtestImageCollector.js +++ b/unitTest/UtestImageCollector.js @@ -221,8 +221,7 @@ QUnit.test("findImageWrappingElement", function (assert) { assert.equal(wrapper.id, "a004"); }); -QUnit.test("replaceHyperlinksToImagesWithImages", function (assert) { - let done = assert.async(); +QUnit.test("replaceHyperlinksToImagesWithImages", async function (assert) { let dom = TestUtils.makeDomWithBody( "Insert image" + "Insert image" + @@ -233,33 +232,28 @@ QUnit.test("replaceHyperlinksToImagesWithImages", function (assert) { "" ); - ImageCollector.replaceHyperlinksToImagesWithImages(dom.body).then(function () { - assert.equal(dom.body.innerHTML, - "" + - "" + - "" + - ""+ - "Insert image"+ - "" + - "" - ); - done(); - }); + await ImageCollector.replaceHyperlinksToImagesWithImages(dom.body); + assert.equal(dom.body.innerHTML, + "" + + "" + + "" + + ""+ + "Insert image"+ + "" + + "" + ); }); -QUnit.test("dontReplaceNonImageLinksWithImages", function (assert) { - let done = assert.async(); +QUnit.test("dontReplaceNonImageLinksWithImages", async function (assert) { let dom = TestUtils.makeDomWithBody( "Insert image" + "" ); - ImageCollector.replaceHyperlinksToImagesWithImages(dom.body).then(function () { - assert.equal(dom.body.innerHTML, - "Insert image" + - "" - ); - done(); - }); + await ImageCollector.replaceHyperlinksToImagesWithImages(dom.body); + assert.equal(dom.body.innerHTML, + "Insert image" + + "" + ); }); QUnit.test("getExtensionFromUrlFilename", function (assert) { diff --git a/unitTest/UtestLnmtlParser.js b/unitTest/UtestLnmtlParser.js index d425f213e..4bd463e66 100644 --- a/unitTest/UtestLnmtlParser.js +++ b/unitTest/UtestLnmtlParser.js @@ -82,24 +82,14 @@ 
function fetchJsonStubInmtl(url) { return Promise.resolve({json: lookup.get(url)}); } -test("fetchChapterListsForVolume", function (assert) { - let done = assert.async(); - LnmtlParser.fetchChapterListsForVolume(volumesListInmtl[0], fetchJsonStubInmtl).then( - function(actual) { - assert.deepEqual(actual, expectedLnmplFetchChapterListsOutput[0]); - done(); - } - ); +test("fetchChapterListsForVolume", async function (assert) { + let actual = await LnmtlParser.fetchChapterListsForVolume(volumesListInmtl[0], fetchJsonStubInmtl); + assert.deepEqual(actual, expectedLnmplFetchChapterListsOutput[0]); }); -test("fetchChapterLists", function (assert) { - let done = assert.async(); - LnmtlParser.fetchChapterLists(volumesListInmtl, fetchJsonStubInmtl).then( - function(actual) { - assert.deepEqual(actual, expectedLnmplFetchChapterListsOutput); - done(); - } - ); +test("fetchChapterLists", async function (assert) { + let actual = await LnmtlParser.fetchChapterLists(volumesListInmtl, fetchJsonStubInmtl); + assert.deepEqual(actual, expectedLnmplFetchChapterListsOutput); }); test("mergeChapterLists", function (assert) { diff --git a/unitTest/UtestMuggleNetParser.js b/unitTest/UtestMuggleNetParser.js index 839229108..11c0aed39 100644 --- a/unitTest/UtestMuggleNetParser.js +++ b/unitTest/UtestMuggleNetParser.js @@ -17,16 +17,13 @@ function loadMuggleNetSinglePageDoc() { ); } -QUnit.test("getChapterUrls", function (assert) { - let done = assert.async(); +QUnit.test("getChapterUrls", async function (assert) { let parser = new MuggleNetParser(); - parser.getChapterUrls(loadMuggleNetMultiPageDoc()).then(function (chapterUrls) { - assert.equal(chapterUrls.length, 5); - assert.equal(chapterUrls[0].sourceUrl, "http://fanfiction.mugglenet.com/viewstory.php?sid=123456&chapter=1"); - assert.equal(chapterUrls[1].sourceUrl, "http://fanfiction.mugglenet.com/viewstory.php?sid=123456&chapter=2"); - assert.equal(chapterUrls[4].title, "5. 
Using Chrome's \"Inspect Element\" to examine the DOM"); - done(); - }); + let chapterUrls = await parser.getChapterUrls(loadMuggleNetMultiPageDoc()); + assert.equal(chapterUrls.length, 5); + assert.equal(chapterUrls[0].sourceUrl, "http://fanfiction.mugglenet.com/viewstory.php?sid=123456&chapter=1"); + assert.equal(chapterUrls[1].sourceUrl, "http://fanfiction.mugglenet.com/viewstory.php?sid=123456&chapter=2"); + assert.equal(chapterUrls[4].title, "5. Using Chrome's \"Inspect Element\" to examine the DOM"); }); QUnit.test("findMultiPageContent", function (assert) { @@ -50,15 +47,12 @@ QUnit.test("parserFactory", function (assert) { assert.ok(parser instanceof MuggleNetParser); }); -QUnit.test("getSingleChapterUrls", function (assert) { - let done = assert.async(); +QUnit.test("getSingleChapterUrls", async function (assert) { let parser = new MuggleNetParser(); - parser.getChapterUrls(loadMuggleNetSinglePageDoc()).then(function (chapterUrls) { - assert.equal(chapterUrls.length, 1); - assert.equal(chapterUrls[0].sourceUrl, "http://fanfiction.mugglenet.com/viewstory.php?sid=123457&chapter=1"); - assert.equal(chapterUrls[0].title, "Web to Epub"); - done(); - }); + let chapterUrls = await parser.getChapterUrls(loadMuggleNetSinglePageDoc()); + assert.equal(chapterUrls.length, 1); + assert.equal(chapterUrls[0].sourceUrl, "http://fanfiction.mugglenet.com/viewstory.php?sid=123457&chapter=1"); + assert.equal(chapterUrls[0].title, "Web to Epub"); }); QUnit.test("findSinglePageContent", function (assert) { diff --git a/unitTest/UtestParserFactory.js b/unitTest/UtestParserFactory.js index e72dc5800..aec1adc22 100644 --- a/unitTest/UtestParserFactory.js +++ b/unitTest/UtestParserFactory.js @@ -64,8 +64,7 @@ QUnit.test("hostNameForParserSelection", function (assert) { assert.equal("fanfiction.net", fn("https://www.fanfiction.net/s/1234567/1/WebToEpub")); }); -test("assignParsersToPages", function (assert) { - let done = assert.async(); +test("assignParsersToPages", async function 
(assert) { let webPages = [ {sourceUrl: "https://zirusmusings.com/ldm-ch84/"}, {sourceUrl: "https://zirusmusings.com/ldm-ch85/"}, @@ -76,15 +75,11 @@ test("assignParsersToPages", function (assert) { ]; let parser = new ZirusMusingsParser(); parser.state.chapterListUrl = "https://zirusmusings.com/ldm/"; - parserFactory.addParsersToPages(parser, webPages).then( - function() { - assert.equal(parser, webPages[0].parser); - assert.equal(parser, webPages[1].parser); - assert.ok(webPages[2].parser instanceof RoyalRoadParser); - assert.equal(webPages[2].parser, webPages[3].parser); - assert.ok(webPages[4].parser instanceof LightNovelBastionParser); - assert.ok(webPages[5].parser instanceof LightNovelsTranslationsParser); - done(); - } - ); + await parserFactory.addParsersToPages(parser, webPages); + assert.equal(parser, webPages[0].parser); + assert.equal(parser, webPages[1].parser); + assert.ok(webPages[2].parser instanceof RoyalRoadParser); + assert.equal(webPages[2].parser, webPages[3].parser); + assert.ok(webPages[4].parser instanceof LightNovelBastionParser); + assert.ok(webPages[5].parser instanceof LightNovelsTranslationsParser); }); diff --git a/unitTest/UtestWattpadParser.js b/unitTest/UtestWattpadParser.js index d7c37ffd3..de4c5f4d8 100644 --- a/unitTest/UtestWattpadParser.js +++ b/unitTest/UtestWattpadParser.js @@ -51,24 +51,19 @@ HttpClient.simulateFetch = function (url, handler) { return Promise.resolve(response); } -HttpClient.wrapFetchImpl = function (url, wrapOptions) { - return HttpClient.simulateFetch(url, HttpClient.makeOptions()).then(function (response) { - return HttpClient.checkResponseAndGetData(url, wrapOptions, response); - }); +HttpClient.wrapFetchImpl = async function (url, wrapOptions) { + let response = await HttpClient.simulateFetch(url, HttpClient.makeOptions()); + return HttpClient.checkResponseAndGetData(url, wrapOptions, response); } -QUnit.test("fetchExtraChapterContent", function (assert) { - let done = assert.async(); - 
+QUnit.test("fetchExtraChapterContent", async function (assert) { let extraUris = { "pages": 3, "uriStart": "https://s.wattpad.com/i", "uriEnd": "" }; - new WattpadParser().fetchExtraChapterContent(extraUris).then(function(actual) { - assert.deepEqual(actual, watpaddExtraContent); - done(); - }); + let actual = await new WattpadParser().fetchExtraChapterContent(extraUris); + assert.deepEqual(actual, watpaddExtraContent); }); QUnit.test("removeDuplicateParagraphs", function (assert) { diff --git a/unitTest/UtestWuxiaworldParser.js b/unitTest/UtestWuxiaworldParser.js index c7e88e3a9..bbf15814a 100644 --- a/unitTest/UtestWuxiaworldParser.js +++ b/unitTest/UtestWuxiaworldParser.js @@ -41,19 +41,16 @@ QUnit.test("getChapterArc", function (assert) { assert.equal(actual, "Alien Massacre"); }); -QUnit.test("getChapterUrls-withArcTitles", function (assert) { - let done = assert.async(); +QUnit.test("getChapterUrls-withArcTitles", async function (assert) { let dom = new DOMParser().parseFromString(WuxiaworldiSamplePage2, "text/html"); - new WuxiaworldParser().getChapterUrls(dom).then(function(actual) { - assert.deepEqual(actual, - [ - {title: "Vol 1 Chapter 1-1", sourceUrl: "https://www.wuxiaworld.com/novel/terror-infinity/ti-vol-1-chapter-1-1", newArc: "The name is Resident Evil"}, - {title: "Vol 1 Chapter 1-2", sourceUrl: "https://www.wuxiaworld.com/novel/terror-infinity/ti-vol-1-chapter-1-2", newArc: null}, - {title: "Vol 2 Chapter 1-1", sourceUrl: "https://www.wuxiaworld.com/novel/terror-infinity/ti-vol-2-chapter-1-1", newArc: "Alien Massacre"} - ] - ); - done(); - }); + let actual = await new WuxiaworldParser().getChapterUrls(dom); + assert.deepEqual(actual, + [ + {title: "Vol 1 Chapter 1-1", sourceUrl: "https://www.wuxiaworld.com/novel/terror-infinity/ti-vol-1-chapter-1-1", newArc: "The name is Resident Evil"}, + {title: "Vol 1 Chapter 1-2", sourceUrl: "https://www.wuxiaworld.com/novel/terror-infinity/ti-vol-1-chapter-1-2", newArc: null}, + {title: "Vol 2 Chapter 1-1", 
sourceUrl: "https://www.wuxiaworld.com/novel/terror-infinity/ti-vol-2-chapter-1-1", newArc: "Alien Massacre"} + ] + ); }); QUnit.test("removeArcsWhenOnlyOne-singleArc", function (assert) {