diff --git a/.npmrelease b/.npmrelease index 3727dec..6d8fbff 100644 --- a/.npmrelease +++ b/.npmrelease @@ -1 +1 @@ -Sat Aug 24 12:02:25 CST 2024 +Thu Aug 29 19:39:08 CST 2024 diff --git a/package-lock.json b/package-lock.json index c5c4421..5505324 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "downloadnet", - "version": "4.1.3", + "version": "4.2.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "downloadnet", - "version": "4.1.3", + "version": "4.2.0", "license": "AGPL-3.0", "dependencies": { "@667/ps-list": "latest", @@ -500,9 +500,9 @@ } }, "node_modules/@eslint/config-array": { - "version": "0.17.1", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.17.1.tgz", - "integrity": "sha512-BlYOpej8AQ8Ev9xVqroV7a02JK3SkBAaN9GfMMH9W6Ch8FlQlkjGw4Ir7+FgYwfirivAf4t+GtzuAxqfukmISA==", + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.18.0.tgz", + "integrity": "sha512-fTxvnS1sRMu3+JjXwJG0j/i4RT9u4qJ+lqS/yCGap4lH4zZGzQ7tu+xZqQmcMZq5OBZDL4QRxQzRjkWcGt8IVw==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -552,9 +552,9 @@ } }, "node_modules/@eslint/js": { - "version": "9.9.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.9.0.tgz", - "integrity": "sha512-hhetes6ZHP3BlXLxmd8K2SNgkhNSi+UcecbnwWKwpP7kyi/uC75DJ1lOOBO3xrC4jyojtGE3YxKZPHfk4yrgug==", + "version": "9.9.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.9.1.tgz", + "integrity": "sha512-xIDQRsfg5hNBqHz04H1R3scSVwmI+KUbqjsQKHKQ1DAUSaUjYPReZZmS/5PNiKu1fUvzDd6H7DEDKACSEhu+TQ==", "dev": true, "license": "MIT", "engines": { @@ -923,9 +923,9 @@ } }, "node_modules/@types/node": { - "version": "22.5.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.0.tgz", - "integrity": "sha512-DkFrJOe+rfdHTqqMg0bSNlGlQ85hSoh2TPzZyhHsXnMtligRWpxUySiyw8FY14ITt24HVCiQPWxS3KO/QlGmWg==", + "version": "22.5.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.1.tgz", + "integrity": "sha512-KkHsxej0j9IW1KKOOAA/XBA0z08UFSrRQHErzEfA3Vgq57eXIMYboIlHJuYIfd+lwCQjtKqUu3UnmKbtUc9yRw==", "license": "MIT", "dependencies": { "undici-types": "~6.19.2" @@ -1597,17 +1597,17 @@ } }, "node_modules/eslint": { - "version": "9.9.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.9.0.tgz", - "integrity": "sha512-JfiKJrbx0506OEerjK2Y1QlldtBxkAlLxT5OEcRF8uaQ86noDe2k31Vw9rnSWv+MXZHj7OOUV/dA0AhdLFcyvA==", + "version": "9.9.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.9.1.tgz", + "integrity": "sha512-dHvhrbfr4xFQ9/dq+jcVneZMyRYLjggWjk6RVsIiHsP8Rz6yZ8LvZ//iU4TrZF+SXWG+JkNF2OyiZRvzgRDqMg==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.11.0", - "@eslint/config-array": "^0.17.1", + "@eslint/config-array": "^0.18.0", "@eslint/eslintrc": "^3.1.0", - "@eslint/js": "9.9.0", + "@eslint/js": "9.9.1", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.3.0", "@nodelib/fs.walk": "^1.2.8", @@ -2647,9 +2647,9 @@ } }, "node_modules/mongodb": { - "version": "6.7.0", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.7.0.tgz", - "integrity": "sha512-TMKyHdtMcO0fYBNORiYdmM25ijsHs+Njs963r4Tro4OQZzqYigAzYQouwWRg4OIaiLRUEGUh/1UAcH5lxdSLIA==", + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.8.0.tgz", + "integrity": "sha512-HGQ9NWDle5WvwMnrvUxsFYPd3JEbqD3RgABHBQRuoCEND0qzhsd0iH5ypHsf1eJ+sXmvmyKpP+FLOKY8Il7jMw==", "license": 
"Apache-2.0", "dependencies": { "@mongodb-js/saslprep": "^1.1.5", @@ -2703,14 +2703,14 @@ } }, "node_modules/mongoose": { - "version": "8.5.3", - "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-8.5.3.tgz", - "integrity": "sha512-OubSDbsAclDFGHjV82MsKyIGQWFc42Ot1l+0dhRS6U9xODM7rm/ES/WpOQd8Ds9j0Mx8QzxZtrSCnBh6o9wUqw==", + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-8.6.0.tgz", + "integrity": "sha512-p6VSbYKvD4ZIabqo8C0kS5eKX1Xpji+opTAIJ9wyuPJ8Y/FblgXSMnFRXnB40bYZLKPQT089K5KU8+bqIXtFdw==", "license": "MIT", "dependencies": { "bson": "^6.7.0", "kareem": "2.6.3", - "mongodb": "6.7.0", + "mongodb": "6.8.0", "mpath": "0.9.0", "mquery": "5.0.0", "ms": "2.1.3", @@ -3365,9 +3365,9 @@ "license": "MIT" }, "node_modules/safe-stable-stringify": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz", - "integrity": "sha512-e2bDA2WJT0wxseVd4lsDP4+3ONX6HpMXQa1ZhFQ7SU+GjvORCmShbCMltrtIDfkYhVHrOcPtj+KhmDBdPdZD1g==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", + "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", "license": "MIT", "engines": { "node": ">=10" @@ -3663,9 +3663,9 @@ } }, "node_modules/tslib": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", - "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.7.0.tgz", + "integrity": "sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==", "license": "0BSD" }, "node_modules/type-check": { diff --git a/package.json b/package.json index b845db1..4f2ee41 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "downloadnet", - "version": "4.1.3", + "version": "4.2.0", "type": "module", "description": "Library server and an archivist browser controller.", "main": "global-run.cjs", diff --git a/show b/show deleted file mode 100644 index 0f613b9..0000000 --- a/show +++ /dev/null @@ -1 +0,0 @@ -Show HN: dn - Full-text search and archiving for your Chromium-based browser diff --git a/src/archivist.js b/src/archivist.js index 3b94617..29a7af9 100644 --- a/src/archivist.js +++ b/src/archivist.js @@ -213,626 +213,634 @@ // main async function collect({chrome_port:port, mode} = {}) { - const {library_path} = args; - const exitHandlers = []; - process.on('beforeExit', runHandlers); - process.on('SIGUSR2', code => runHandlers(code, 'SIGUSR2', {exit: true})); - process.on('exit', code => runHandlers(code, 'exit', {exit: true})); - State.connection = State.connection || await connect({port}); - State.onExit = { - addHandler(h) { - exitHandlers.push(h); - } - }; - const {send, on, close} = State.connection; - //const DELAY = 100; // 500 ? 
- Close = close; - - let requestStage; - - await loadFiles(); + try { + console.log('Starting collect'); + const {library_path} = args; + const exitHandlers = []; + process.on('beforeExit', runHandlers); + process.on('SIGUSR2', code => runHandlers(code, 'SIGUSR2', {exit: true})); + process.on('exit', code => runHandlers(code, 'exit', {exit: true})); + State.connection = State.connection || await connect({port}); + console.log('Connection established'); + State.onExit = { + addHandler(h) { + exitHandlers.push(h); + } + }; + const {send, on, close} = State.connection; + //const DELAY = 100; // 500 ? + Close = close; - clearSavers(); + let requestStage; + + console.log('Loading files...'); + await loadFiles(); - Mode = mode; - console.log({Mode}); - if ( Mode == 'save' || Mode == 'select' ) { - requestStage = "Response"; - // in case we get a updateBasePath call before an interval - // and we don't clear it in time, leading us to erroneously save the old - // cache to the new path, we always used our saved copy - State.saver = setInterval(() => saveCache(State.SavedCacheFilePath), 17000); - // we use timeout because we can trigger this ourself - // so in order to not get a race condition (overlapping calls) we ensure - // only 1 call at 1 time - State.indexSaver = setTimeout(() => saveIndex(State.SavedIndexFilePath), 11001); - State.ftsIndexSaver = setTimeout(() => saveFTS(State.SavedFTSIndexDirPath), 31001); - } else if ( Mode == 'serve' ) { - requestStage = "Request"; clearSavers(); - } else { - throw new TypeError(`Must specify mode, and must be one of: save, serve, select`); - } - on("Target.targetInfoChanged", attachToTarget); - on("Target.targetInfoChanged", updateTargetInfo); - on("Target.targetInfoChanged", indexURL); - on("Target.attachedToTarget", installForSession); - on("Page.loadEventFired", reloadIfNotLive); - on("Fetch.requestPaused", cacheRequest); - on("Runtime.consoleAPICalled", handleMessage); - - await send("Target.setDiscoverTargets", {discover:true}); - await send("Target.setAutoAttach", {autoAttach:true, waitForDebuggerOnStart:false, flatten: true}); - await send("Security.setIgnoreCertificateErrors", {ignore:true}); - await send("Fetch.enable", { - patterns: [ - { - urlPattern: "http*://*", - requestStage - } - ], - }); + Mode = mode; + console.log({Mode}); + if ( Mode == 'save' || Mode == 'select' ) { + requestStage = "Response"; + // in case we get a updateBasePath call before an interval + // and we don't clear it in time, leading us to erroneously save the old + // cache to the new path, we always used our saved copy + State.saver = setInterval(() => saveCache(State.SavedCacheFilePath), 17000); + // we use timeout because we can trigger this ourself + // so in order to not get a race condition (overlapping calls) we ensure + // only 1 call at 1 time + State.indexSaver = setTimeout(() => saveIndex(State.SavedIndexFilePath), 11001); + State.ftsIndexSaver = setTimeout(() => saveFTS(State.SavedFTSIndexDirPath), 31001); + } else if ( Mode == 'serve' ) { + requestStage = "Request"; + clearSavers(); + } else { + throw new TypeError(`Must specify mode, and must be one of: save, serve, select`); + } - const {targetInfos:targets} = await send("Target.getTargets", {}); - const pageTargets = targets.filter(({type}) => type == 'page').map(targetInfo => ({targetInfo})); - await Promise.all(pageTargets.map(attachToTarget)); - sleep(5000).then(() => Promise.all(pageTargets.map(reloadIfNotLive))); + on("Target.targetInfoChanged", attachToTarget); + on("Target.targetInfoChanged", 
updateTargetInfo); + on("Target.targetInfoChanged", indexURL); + on("Target.attachedToTarget", installForSession); + on("Page.loadEventFired", reloadIfNotLive); + on("Fetch.requestPaused", cacheRequest); + on("Runtime.consoleAPICalled", handleMessage); + + await send("Target.setDiscoverTargets", {discover:true}); + await send("Target.setAutoAttach", {autoAttach:true, waitForDebuggerOnStart:false, flatten: true}); + await send("Security.setIgnoreCertificateErrors", {ignore:true}); + await send("Fetch.enable", { + patterns: [ + { + urlPattern: "http*://*", + requestStage + } + ], + }); - State.bookmarkObserver = State.bookmarkObserver || startObservingBookmarkChanges(); + const {targetInfos:targets} = await send("Target.getTargets", {}); + DEBUG.debug && console.log({targets}); + const pageTargets = targets.filter(({type}) => type == 'page').map(targetInfo => ({targetInfo})); + await Promise.all(pageTargets.map(attachToTarget)); + sleep(5000).then(() => Promise.all(pageTargets.map(reloadIfNotLive))); - Status.loaded = true; + State.bookmarkObserver = State.bookmarkObserver || startObservingBookmarkChanges(); - return Status.loaded; + Status.loaded = true; - async function runHandlers(reason, err, {exit = false} = {}) { - debug.verbose && console.log('before exit running', exitHandlers, {reason, err}); - while(exitHandlers.length) { - const h = exitHandlers.shift(); - try { - h(); - } catch(e) { - console.warn(`Error in exit handler`, h, e); - } - } - if ( exit ) { - console.log(`Exiting in 3 seconds...`); - await sleep(3000); - process.exit(0); - } - } + return Status.loaded; - function handleMessage(args) { - const {type, args:[{value:strVal}]} = args; - if ( type == 'info' ) { - try { - const val = JSON.parse(strVal); - // possible messages - const {install, titleChange, textChange} = val; - switch(true) { - case !!install: { - confirmInstall({install}); - } break; - case !!titleChange: { - reindexOnContentChange({titleChange}); - } break; - case !!textChange: { - reindexOnContentChange({textChange}); - } break; - default: { - if ( DEBUG ) { - console.warn(`Unknown message`, strVal); - } - } break; + async function runHandlers(reason, err, {exit = false} = {}) { + debug.verbose && console.log('before exit running', exitHandlers, {reason, err}); + while(exitHandlers.length) { + const h = exitHandlers.shift(); + try { + h(); + } catch(e) { + console.warn(`Error in exit handler`, h, e); } - } catch(e) { - DEBUG.verboseSlow && console.info('Not the message we expected to confirm install. This is OK.', {originalMessage:args}); - } + } + if ( exit ) { + console.log(`Exiting in 3 seconds...`); + await sleep(3000); + process.exit(0); + } } - } - function confirmInstall({install}) { - const {sessionId} = install; - if ( ! State.ConfirmedInstalls.has(sessionId) ) { - State.ConfirmedInstalls.add(sessionId); - DEBUG.verboseSlow && console.log({confirmedInstall:install}); + function handleMessage(args) { + const {type, args:[{value:strVal}]} = args; + if ( type == 'info' ) { + try { + const val = JSON.parse(strVal); + // possible messages + const {install, titleChange, textChange} = val; + switch(true) { + case !!install: { + confirmInstall({install}); + } break; + case !!titleChange: { + reindexOnContentChange({titleChange}); + } break; + case !!textChange: { + reindexOnContentChange({textChange}); + } break; + default: { + if ( DEBUG ) { + console.warn(`Unknown message`, strVal); + } + } break; + } + } catch(e) { + DEBUG.verboseSlow && console.info('Not the message we expected to confirm install. 
This is OK.', {originalMessage:args}); + } + } } - } - async function reindexOnContentChange({titleChange, textChange}) { - const data = titleChange || textChange; - if ( data ) { - const {sessionId} = data; - const latestTargetInfo = clone(await untilHas(Targets, sessionId)); - if ( titleChange ) { - const {currentTitle} = titleChange; - DEBUG.verboseSlow && console.log('Received titleChange', titleChange); - latestTargetInfo.title = currentTitle; - Targets.set(sessionId, latestTargetInfo); - DEBUG.verboseSlow && console.log('Updated stored target info', latestTargetInfo); - } else { - DEBUG.verboseSlow && console.log('Received textChange', textChange); + function confirmInstall({install}) { + const {sessionId} = install; + if ( ! State.ConfirmedInstalls.has(sessionId) ) { + State.ConfirmedInstalls.add(sessionId); + DEBUG.verboseSlow && console.log({confirmedInstall:install}); } - if ( ! dontCache(latestTargetInfo) ) { - DEBUG.verboseSlow && console.log( - `Will reindex because we were told ${titleChange ? 'title' : 'text'} content maybe changed.`, - data - ); - indexURL({targetInfo:latestTargetInfo}); + } + + async function reindexOnContentChange({titleChange, textChange}) { + const data = titleChange || textChange; + if ( data ) { + const {sessionId} = data; + const latestTargetInfo = clone(await untilHas(Targets, sessionId)); + if ( titleChange ) { + const {currentTitle} = titleChange; + DEBUG.verboseSlow && console.log('Received titleChange', titleChange); + latestTargetInfo.title = currentTitle; + Targets.set(sessionId, latestTargetInfo); + DEBUG.verboseSlow && console.log('Updated stored target info', latestTargetInfo); + } else { + DEBUG.verboseSlow && console.log('Received textChange', textChange); + } + if ( ! dontCache(latestTargetInfo) ) { + DEBUG.verboseSlow && console.log( + `Will reindex because we were told ${titleChange ? 
'title' : 'text'} content maybe changed.`, + data + ); + indexURL({targetInfo:latestTargetInfo}); + } } } - } - function updateTargetInfo({targetInfo}) { - if ( targetInfo.type === 'page' ) { - const sessionId = State.Sessions.get(targetInfo.targetId); - DEBUG.verboseSlow && console.log('Updating target info', targetInfo, sessionId); - if ( sessionId ) { - const existingTargetInfo = Targets.get(sessionId); - // if we have an existing target info for this URL and have saved an updated title - DEBUG.verboseSlow && console.log('Existing target info', existingTargetInfo); - if ( existingTargetInfo && existingTargetInfo.url === targetInfo.url ) { - // keep that title (because targetInfo does not reflect the latest title) - if ( existingTargetInfo.title !== existingTargetInfo.url ) { - DEBUG.verboseSlow && console.log('Setting title to existing', existingTargetInfo); - targetInfo.title = existingTargetInfo.title; + function updateTargetInfo({targetInfo}) { + if ( targetInfo.type === 'page' ) { + const sessionId = State.Sessions.get(targetInfo.targetId); + DEBUG.verboseSlow && console.log('Updating target info', targetInfo, sessionId); + if ( sessionId ) { + const existingTargetInfo = Targets.get(sessionId); + // if we have an existing target info for this URL and have saved an updated title + DEBUG.verboseSlow && console.log('Existing target info', existingTargetInfo); + if ( existingTargetInfo && existingTargetInfo.url === targetInfo.url ) { + // keep that title (because targetInfo does not reflect the latest title) + if ( existingTargetInfo.title !== existingTargetInfo.url ) { + DEBUG.verboseSlow && console.log('Setting title to existing', existingTargetInfo); + targetInfo.title = existingTargetInfo.title; + } } + Targets.set(sessionId, clone(targetInfo)); } - Targets.set(sessionId, clone(targetInfo)); } } - } - async function reloadIfNotLive({targetInfo, sessionId} = {}) { - if ( Mode == 'serve' ) return; - if ( !targetInfo && !!sessionId ) { - targetInfo = Targets.get(sessionId); - console.log(targetInfo); - } - if ( neverCache(targetInfo?.url) ) return; - const {attached, type} = targetInfo; - if ( attached && type == 'page' ) { - const {url, targetId} = targetInfo; - const sessionId = State.Sessions.get(targetId); - if ( !!sessionId && !State.ConfirmedInstalls.has(sessionId) ) { - DEBUG.verboseSlow && console.log({ - reloadingAsNotConfirmedInstalled:{ - url, - sessionId - }, - confirmedInstalls: State.ConfirmedInstalls - }); - await sleep(600); - send("Page.stopLoading", {}, sessionId); - send("Page.reload", {}, sessionId); + async function reloadIfNotLive({targetInfo, sessionId} = {}) { + if ( Mode == 'serve' ) return; + if ( !targetInfo && !!sessionId ) { + targetInfo = Targets.get(sessionId); + console.log(targetInfo); + } + if ( neverCache(targetInfo?.url) ) return; + const {attached, type} = targetInfo; + if ( attached && type == 'page' ) { + const {url, targetId} = targetInfo; + const sessionId = State.Sessions.get(targetId); + if ( !!sessionId && !State.ConfirmedInstalls.has(sessionId) ) { + DEBUG.verboseSlow && console.log({ + reloadingAsNotConfirmedInstalled:{ + url, + sessionId + }, + confirmedInstalls: State.ConfirmedInstalls + }); + await sleep(600); + send("Page.stopLoading", {}, sessionId); + send("Page.reload", {}, sessionId); + } } } - } - - function neverCache(url) { - if ( ! 
url ) return true; - try { - url = new URL(url); - return url?.href == "about:blank" || url?.href?.startsWith('chrome') || NEVER_CACHE.has(url.origin); - } catch(e) { - DEBUG.debug && console.warn('Could not form url', url, e); - return true; - } - } - async function installForSession({sessionId, targetInfo, waitingForDebugger}) { - if ( waitingForDebugger ) { - console.warn(targetInfo); - throw new TypeError(`Target not ready for install`); - } - if ( ! sessionId ) { - throw new TypeError(`installForSession needs a sessionId`); + function neverCache(url) { + if ( ! url ) return true; + try { + url = new URL(url); + return url?.href == "about:blank" || url?.href?.startsWith('chrome') || NEVER_CACHE.has(url.origin); + } catch(e) { + DEBUG.debug && console.warn('Could not form url', url, e); + return true; + } } - const {targetId, url} = targetInfo; - - const installUneeded = dontInstall(targetInfo) || - State.Installations.has(sessionId) - ; + async function installForSession({sessionId, targetInfo, waitingForDebugger}) { + if ( waitingForDebugger ) { + console.warn(targetInfo); + throw new TypeError(`Target not ready for install`); + } + if ( ! sessionId ) { + throw new TypeError(`installForSession needs a sessionId`); + } - if ( installUneeded ) return; + const {targetId, url} = targetInfo; - DEBUG.verboseSlow && console.log("installForSession running on target " + targetId); + const installUneeded = dontInstall(targetInfo) || + State.Installations.has(sessionId) + ; - State.Sessions.set(targetId, sessionId); - Targets.set(sessionId, clone(targetInfo)); + if ( installUneeded ) return; - if ( Mode == 'save' || Mode == 'select' ) { - send("Network.setCacheDisabled", {cacheDisabled:true}, sessionId); - send("Network.setBypassServiceWorker", {bypass:true}, sessionId); - - await send("Runtime.enable", {}, sessionId); - await send("Page.enable", {}, sessionId); - await send("Page.setAdBlockingEnabled", {enabled: true}, sessionId); - await send("DOMSnapshot.enable", {}, sessionId); - - on("Page.frameNavigated", updateFrameNode); - on("Page.frameAttached", addFrameNode); - // on("Page.frameDetached", updateFrameNodes); // necessary? maybe not - - await send("Page.addScriptToEvaluateOnNewDocument", { - source: getInjection({sessionId}), - worldName: "Context-22120-Indexing", - runImmediately: true - }, sessionId); + DEBUG.verboseSlow && console.log("installForSession running on target " + targetId); - DEBUG.verboseSlow && console.log("Just request install", targetId, url); - } + State.Sessions.set(targetId, sessionId); + Targets.set(sessionId, clone(targetInfo)); - State.Installations.add(sessionId); + if ( Mode == 'save' || Mode == 'select' ) { + send("Network.setCacheDisabled", {cacheDisabled:true}, sessionId); + send("Network.setBypassServiceWorker", {bypass:true}, sessionId); - DEBUG.verboseSlow && console.log('Installed sessionId', sessionId); - if ( Mode == 'save' ) { - indexURL({targetInfo}); - } - } + await send("Runtime.enable", {}, sessionId); + await send("Page.enable", {}, sessionId); + await send("Page.setAdBlockingEnabled", {enabled: true}, sessionId); + await send("DOMSnapshot.enable", {}, sessionId); - async function indexURL({targetInfo:info = {}, sessionId, waitingForDebugger} = {}) { - if ( waitingForDebugger ) { - console.warn(info); - throw new TypeError(`Target not ready for install`); - } - if ( Mode == 'serve' ) return; - if ( info.type != 'page' ) return; - if ( ! 
info.url || info.url == 'about:blank' ) return; - if ( info.url.startsWith('chrome') ) return; - if ( dontCache(info) ) return; + on("Page.frameNavigated", updateFrameNode); + on("Page.frameAttached", addFrameNode); + // on("Page.frameDetached", updateFrameNodes); // necessary? maybe not - DEBUG.verboseSlow && console.log('Index URL', info); + await send("Page.addScriptToEvaluateOnNewDocument", { + source: getInjection({sessionId}), + worldName: "Context-22120-Indexing", + runImmediately: true + }, sessionId); - DEBUG.verboseSlow && console.log('Index URL called', info); + DEBUG.verboseSlow && console.log("Just request install", targetId, url); + } - if ( State.Indexing.has(info.targetId) ) return; - State.Indexing.add(info.targetId); + State.Installations.add(sessionId); - if ( ! sessionId ) { - sessionId = await untilHas( - State.Sessions, info.targetId, - {timeout: State.crawling && State.crawlTimeout} - ); + DEBUG.verboseSlow && console.log('Installed sessionId', sessionId); + if ( Mode == 'save' ) { + indexURL({targetInfo}); + } } - if ( !State.Installations.has(sessionId) ) { - await untilHas( - State.Installations, sessionId, - {timeout: State.crawling && State.crawlTimeout} - ); - } + async function indexURL({targetInfo:info = {}, sessionId, waitingForDebugger} = {}) { + if ( waitingForDebugger ) { + console.warn(info); + throw new TypeError(`Target not ready for install`); + } + if ( Mode == 'serve' ) return; + if ( info.type != 'page' ) return; + if ( ! info.url || info.url == 'about:blank' ) return; + if ( info.url.startsWith('chrome') ) return; + if ( dontCache(info) ) return; + + DEBUG.verboseSlow && console.log('Index URL', info); - send("DOMSnapshot.enable", {}, sessionId); + DEBUG.verboseSlow && console.log('Index URL called', info); - await sleep(500); + if ( State.Indexing.has(info.targetId) ) return; + State.Indexing.add(info.targetId); - const flatDoc = await send("DOMSnapshot.captureSnapshot", { - computedStyles: [], - }, sessionId); - const pageText = processDoc(flatDoc).replace(STRIP_CHARS, ' '); + if ( ! sessionId ) { + sessionId = await untilHas( + State.Sessions, info.targetId, + {timeout: State.crawling && State.crawlTimeout} + ); + } - if ( State.crawling ) { - const has = await untilTrue(() => State.CrawlData.has(info.targetId)); + if ( !State.Installations.has(sessionId) ) { + await untilHas( + State.Installations, sessionId, + {timeout: State.crawling && State.crawlTimeout} + ); + } - const {url} = Targets.get(sessionId); - if ( ! dontCache({url}) ) { - if ( has ) { - const {depth,links} = State.CrawlData.get(info.targetId); - DEBUG.verboseSlow && console.log(info, {depth,links}); + send("DOMSnapshot.enable", {}, sessionId); - const {result:{value:{title,links:crawlLinks}}} = await send("Runtime.evaluate", { - expression: `(function () { - return { - links: Array.from( - document.querySelectorAll('a[href].titlelink') - ).map(a => a.href), - title: document.title - }; - }())`, - returnByValue: true - }, sessionId); + await sleep(500); - if ( (depth + 1) <= State.crawlDepth ) { - links.length = 0; - links.push(...crawlLinks.map(url => ({url,depth:depth+1}))); + const flatDoc = await send("DOMSnapshot.captureSnapshot", { + computedStyles: [], + }, sessionId); + const pageText = processDoc(flatDoc).replace(STRIP_CHARS, ' '); + + if ( State.crawling ) { + const has = await untilTrue(() => State.CrawlData.has(info.targetId)); + + const {url} = Targets.get(sessionId); + if ( ! 
dontCache({url}) ) { + if ( has ) { + const {depth,links} = State.CrawlData.get(info.targetId); + DEBUG.verboseSlow && console.log(info, {depth,links}); + + const {result:{value:{title,links:crawlLinks}}} = await send("Runtime.evaluate", { + expression: `(function () { + return { + links: Array.from( + document.querySelectorAll('a[href].titlelink') + ).map(a => a.href), + title: document.title + }; + }())`, + returnByValue: true + }, sessionId); + + if ( (depth + 1) <= State.crawlDepth ) { + links.length = 0; + links.push(...crawlLinks.map(url => ({url,depth:depth+1}))); + } + if ( logStream ) { + console.log(`Writing ${links.length} entries to ${logName}`); + logStream.cork(); + links.forEach(url => { + logStream.write(`${url}\n`); + }); + logStream.uncork(); + } + console.log(`Just crawled: ${title} (${info.url})`); } - if ( logStream ) { - console.log(`Writing ${links.length} entries to ${logName}`); - logStream.cork(); - links.forEach(url => { - logStream.write(`${url}\n`); + + if ( ! State.titles ) { + State.titles = new Map(); + State.onExit.addHandler(() => { + Fs.writeFileSync( + Path.resolve(args.CONFIG_DIR, `titles-${(new Date).toISOString()}.txt`), + JSON.stringify([...State.titles.entries()], null, 2) + '\n' + ); }); - logStream.uncork(); } - console.log(`Just crawled: ${title} (${info.url})`); - } - - if ( ! State.titles ) { - State.titles = new Map(); - State.onExit.addHandler(() => { - Fs.writeFileSync( - Path.resolve(args.CONFIG_DIR, `titles-${(new Date).toISOString()}.txt`), - JSON.stringify([...State.titles.entries()], null, 2) + '\n' - ); - }); - } - const {result:{value:data}} = await send("Runtime.evaluate", - { - expression: `(function () { - return { - url: document.location.href, - title: document.title, - }; - }())`, - returnByValue: true - }, - sessionId - ); + const {result:{value:data}} = await send("Runtime.evaluate", + { + expression: `(function () { + return { + url: document.location.href, + title: document.title, + }; + }())`, + returnByValue: true + }, + sessionId + ); - State.titles.set(data.url, data.title); - console.log(`Saved ${State.titles.size} titles`); + State.titles.set(data.url, data.title); + console.log(`Saved ${State.titles.size} titles`); - if ( State.program && ! dontCache(info) ) { - const targetInfo = info; - const fs = Fs; - const path = Path; - try { - await sleep(500); - await eval(`(async () => { - try { - ${State.program} - } catch(e) { - console.warn('Error in program', e, State.program); - } - })();`); - await sleep(500); - } catch(e) { - console.warn(`Error evaluate program`, e); + if ( State.program && ! 
dontCache(info) ) { + const targetInfo = info; + const fs = Fs; + const path = Path; + try { + await sleep(500); + await eval(`(async () => { + try { + ${State.program} + } catch(e) { + console.warn('Error in program', e, State.program); + } + })();`); + await sleep(500); + } catch(e) { + console.warn(`Error evaluate program`, e); + } } } } - } - const {title, url} = Targets.get(sessionId); - let id, ndx_id; - if ( State.Index.has(url) ) { - ({ndx_id, id} = State.Index.get(url)); - } else { - Id++; - id = Id; - } - const doc = toNDXDoc({id, url, title, pageText}); - State.Index.set(url, {date:Date.now(),id:doc.id, ndx_id:doc.ndx_id, title}); - State.Index.set(doc.id, url); - State.Index.set('ndx'+doc.ndx_id, url); - - const contentSignature = getContentSig(doc); - - //Flex code - Flex.update(doc.id, contentSignature); - - //New NDX code - NDX_FTSIndex.update(doc, ndx_id); - - // Fuzzy - // eventually we can use this update logic for everyone - let updateFuzz = true; - if ( State.Docs.has(url) ) { - const current = State.Docs.get(url); - if ( current.contentSignature === contentSignature ) { - updateFuzz = false; + const {title, url} = Targets.get(sessionId); + let id, ndx_id; + if ( State.Index.has(url) ) { + ({ndx_id, id} = State.Index.get(url)); + } else { + Id++; + id = Id; + } + const doc = toNDXDoc({id, url, title, pageText}); + State.Index.set(url, {date:Date.now(),id:doc.id, ndx_id:doc.ndx_id, title}); + State.Index.set(doc.id, url); + State.Index.set('ndx'+doc.ndx_id, url); + + const contentSignature = getContentSig(doc); + + //Flex code + Flex.update(doc.id, contentSignature); + + //New NDX code + NDX_FTSIndex.update(doc, ndx_id); + + // Fuzzy + // eventually we can use this update logic for everyone + let updateFuzz = true; + if ( State.Docs.has(url) ) { + const current = State.Docs.get(url); + if ( current.contentSignature === contentSignature ) { + updateFuzz = false; + } + } + if ( updateFuzz ) { + doc.contentSignature = contentSignature; + fuzzy.add(doc); + State.Docs.set(url, doc); + DEBUG.verboseSlow && console.log({updateFuzz: {doc,url}}); } - } - if ( updateFuzz ) { - doc.contentSignature = contentSignature; - fuzzy.add(doc); - State.Docs.set(url, doc); - DEBUG.verboseSlow && console.log({updateFuzz: {doc,url}}); - } - DEBUG.verboseSlow && console.log("NDX updated", doc.ndx_id); + DEBUG.verboseSlow && console.log("NDX updated", doc.ndx_id); - UpdatedKeys.add(url); + UpdatedKeys.add(url); - DEBUG.verboseSlow && console.log({id: doc.id, title, url, indexed: true}); + DEBUG.verboseSlow && console.log({id: doc.id, title, url, indexed: true}); - State.Indexing.delete(info.targetId); - State.CrawlIndexing.delete(info.targetId); - } + State.Indexing.delete(info.targetId); + State.CrawlIndexing.delete(info.targetId); + } - async function attachToTarget({targetInfo}, retryCount = 0) { - if ( dontInstall(targetInfo) ) return; - const {url} = targetInfo; - if ( url && targetInfo.type == 'page' ) { - try { - if ( ! targetInfo.attached ) { - const {sessionId} = (await send("Target.attachToTarget", { - targetId: targetInfo.targetId, - flatten: true - })); - State.Sessions.set(targetInfo.targetId, sessionId); + async function attachToTarget({targetInfo}, retryCount = 0) { + if ( dontInstall(targetInfo) ) return; + const {url} = targetInfo; + if ( url && targetInfo.type == 'page' ) { + try { + if ( ! 
targetInfo.attached ) { + const {sessionId} = (await send("Target.attachToTarget", { + targetId: targetInfo.targetId, + flatten: true + })); + State.Sessions.set(targetInfo.targetId, sessionId); + } + } catch(e) { + DEBUG.verboseSlow && console.error(`Attach to target failed`, targetInfo); + if ( retryCount < 3 ) { + const ms = 1500; + DEBUG.verboseSlow && console.log(`Retrying attach in ${ms/1000} seconds...`); + setTimeout(() => attachToTarget({targetInfo}, (retryCount || 1) + 1), ms); + } } - } catch(e) { - DEBUG.verboseSlow && console.error(`Attach to target failed`, targetInfo); - if ( retryCount < 3 ) { - const ms = 1500; - DEBUG.verboseSlow && console.log(`Retrying attach in ${ms/1000} seconds...`); - setTimeout(() => attachToTarget({targetInfo}, (retryCount || 1) + 1), ms); - } } } - } - async function cacheRequest(pausedRequest) { - const { - requestId, request, resourceType, - frameId, - responseStatusCode, responseHeaders, responseErrorReason - } = pausedRequest; - const isNavigationRequest = resourceType == "Document"; - const isFont = resourceType == "Font"; - - if ( dontCache(request) ) { - DEBUG.verboseSlow && console.log("Not caching", request.url); - send(`Fetch.continue${requestStage}`, {requestId}); - return; - } - const key = serializeRequestKey(request); - if ( Mode == 'serve' ) { - if ( State.Cache.has(key) ) { - let {body, responseCode, responseHeaders} = await getResponseData(State.Cache.get(key)); - responseCode = responseCode || 200; - //DEBUG.verboseSlow && console.log("Fulfilling", key, responseCode, responseHeaders, body.slice(0,140)); - DEBUG.verboseSlow && console.log("Fulfilling", key, responseCode, body.slice(0,140)); - await send("Fetch.fulfillRequest", { - requestId, body, responseCode, responseHeaders - }); - } else { - DEBUG.verboseSlow && console.log("Sending cache stub", key); - await send("Fetch.fulfillRequest", { - requestId, ...UNCACHED - }); - } - } else { - let saveIt = false; - if ( Mode == 'select' ) { - const rootFrameURL = getRootFrameURL(frameId); - const frameDescendsFromBookmarkedURLFrame = hasBookmark(rootFrameURL); - saveIt = frameDescendsFromBookmarkedURLFrame; - DEBUG.verboseSlow && console.log({rootFrameURL, frameId, mode, saveIt}); - } else if ( Mode == 'save' ) { - saveIt = true; + async function cacheRequest(pausedRequest) { + const { + requestId, request, resourceType, + frameId, + responseStatusCode, responseHeaders, responseErrorReason + } = pausedRequest; + const isNavigationRequest = resourceType == "Document"; + const isFont = resourceType == "Font"; + + if ( dontCache(request) ) { + DEBUG.verboseSlow && console.log("Not caching", request.url); + send(`Fetch.continue${requestStage}`, {requestId}); + return; } - if ( saveIt ) { - const response = {key, responseCode: responseStatusCode, responseHeaders}; - const resp = await getBody({requestId, responseStatusCode}); - if ( resp ) { - let {body, base64Encoded} = resp; - if ( ! 
base64Encoded ) { - body = b64(body); - } - response.body = body; - const responsePath = await saveResponseData(key, request.url, response); - State.Cache.set(key, responsePath); + const key = serializeRequestKey(request); + if ( Mode == 'serve' ) { + if ( State.Cache.has(key) ) { + let {body, responseCode, responseHeaders} = await getResponseData(State.Cache.get(key)); + responseCode = responseCode || 200; + //DEBUG.verboseSlow && console.log("Fulfilling", key, responseCode, responseHeaders, body.slice(0,140)); + DEBUG.verboseSlow && console.log("Fulfilling", key, responseCode, body.slice(0,140)); + await send("Fetch.fulfillRequest", { + requestId, body, responseCode, responseHeaders + }); } else { - DEBUG.verboseSlow && console.warn("get response body error", key, responseStatusCode, responseHeaders, pausedRequest.responseErrorReason); - response.body = ''; + DEBUG.verboseSlow && console.log("Sending cache stub", key); + await send("Fetch.fulfillRequest", { + requestId, ...UNCACHED + }); + } + } else { + let saveIt = false; + if ( Mode == 'select' ) { + const rootFrameURL = getRootFrameURL(frameId); + const frameDescendsFromBookmarkedURLFrame = hasBookmark(rootFrameURL); + saveIt = frameDescendsFromBookmarkedURLFrame; + DEBUG.verboseSlow && console.log({rootFrameURL, frameId, mode, saveIt}); + } else if ( Mode == 'save' ) { + saveIt = true; } - //await sleep(DELAY); - if ( !isFont && responseErrorReason ) { - if ( isNavigationRequest ) { - await send("Fetch.fulfillRequest", { - requestId, - responseHeaders: BLOCKED_HEADERS, - responseCode: BLOCKED_CODE, - body: Buffer.from(responseErrorReason).toString("base64"), - }, - ); + if ( saveIt ) { + const response = {key, responseCode: responseStatusCode, responseHeaders}; + const resp = await getBody({requestId, responseStatusCode}); + if ( resp ) { + let {body, base64Encoded} = resp; + if ( ! base64Encoded ) { + body = b64(body); + } + response.body = body; + const responsePath = await saveResponseData(key, request.url, response); + State.Cache.set(key, responsePath); } else { - await send("Fetch.failRequest", { - requestId, - errorReason: responseErrorReason - }, - ); + DEBUG.verboseSlow && console.warn("get response body error", key, responseStatusCode, responseHeaders, pausedRequest.responseErrorReason); + response.body = ''; } - return; - } - } - send(`Fetch.continue${requestStage}`, {requestId}).catch( - e => console.warn("Issue with continuing request", {e, requestStage, requestId}) - ); + //await sleep(DELAY); + if ( !isFont && responseErrorReason ) { + if ( isNavigationRequest ) { + await send("Fetch.fulfillRequest", { + requestId, + responseHeaders: BLOCKED_HEADERS, + responseCode: BLOCKED_CODE, + body: Buffer.from(responseErrorReason).toString("base64"), + }, + ); + } else { + await send("Fetch.failRequest", { + requestId, + errorReason: responseErrorReason + }, + ); + } + return; + } + } + send(`Fetch.continue${requestStage}`, {requestId}).catch( + e => console.warn("Issue with continuing request", {e, requestStage, requestId}) + ); + } } - } - async function getBody({requestId, responseStatusCode}) { - let resp; - if ( ! BODYLESS.has(responseStatusCode) ) { - resp = await send("Fetch.getResponseBody", {requestId}); - } else { - resp = {body:'', base64Encoded:true}; + async function getBody({requestId, responseStatusCode}) { + let resp; + if ( ! 
BODYLESS.has(responseStatusCode) ) { + resp = await send("Fetch.getResponseBody", {requestId}); + } else { + resp = {body:'', base64Encoded:true}; + } + return resp; } - return resp; - } - - function dontInstall(targetInfo) { - return targetInfo.type !== 'page'; - } - - async function getResponseData(path) { - try { - return JSON.parse(await Fs.promises.readFile(path)); - } catch(e) { - console.warn(`Error with ${path}`, e); - return UNCACHED; + + function dontInstall(targetInfo) { + return targetInfo.type !== 'page'; } - } - async function saveResponseData(key, url, response) { - const origin = (new URL(url).origin); - let originDir = State.Cache.get(origin); - if ( ! originDir ) { - originDir = Path.resolve(library_path(), origin.replace(TBL, '_')); + async function getResponseData(path) { try { - await Fs.promises.mkdir(originDir, {recursive:true}); + return JSON.parse(await Fs.promises.readFile(path)); } catch(e) { - console.warn(`Issue with origin directory ${Path.dirname(responsePath)}`, e); + console.warn(`Error with ${path}`, e); + return UNCACHED; } - State.Cache.set(origin, originDir); } - const fileName = `${await sha1(key)}.json`; + async function saveResponseData(key, url, response) { + const origin = (new URL(url).origin); + let originDir = State.Cache.get(origin); + if ( ! originDir ) { + originDir = Path.resolve(library_path(), origin.replace(TBL, '_')); + try { + await Fs.promises.mkdir(originDir, {recursive:true}); + } catch(e) { + console.warn(`Issue with origin directory ${Path.dirname(responsePath)}`, e); + } + State.Cache.set(origin, originDir); + } - const responsePath = Path.resolve(originDir, fileName); - await Fs.promises.writeFile(responsePath, JSON.stringify(response,null,2)); + const fileName = `${await sha1(key)}.json`; - return responsePath; - } + const responsePath = Path.resolve(originDir, fileName); + await Fs.promises.writeFile(responsePath, JSON.stringify(response,null,2)); - async function sha1(key) { - return crypto.createHash('sha1').update(key).digest('hex'); - } - - async function rainbow(key) { - return rainbowHash(128, 0, new Uint8Array(Buffer.from(key))); - } - - function serializeRequestKey(request) { - const {url, /*urlFragment,*/ method, /*headers, postData, hasPostData*/} = request; + return responsePath; + } - /** - let sortedHeaders = ''; - for( const key of Object.keys(headers).sort() ) { - sortedHeaders += `${key}:${headers[key]}/`; + async function sha1(key) { + return crypto.createHash('sha1').update(key).digest('hex'); + } + + async function rainbow(key) { + return rainbowHash(128, 0, new Uint8Array(Buffer.from(key))); } - **/ - return `${method}${url}`; - //return `${url}${urlFragment}:${method}:${sortedHeaders}:${postData}:${hasPostData}`; - } + function serializeRequestKey(request) { + const {url, /*urlFragment,*/ method, /*headers, postData, hasPostData*/} = request; - async function startObservingBookmarkChanges() { - console.info("Not observing"); - return; - for await ( const change of bookmarkChanges() ) { - if ( Mode == 'select' ) { - switch(change.type) { - case 'new': { - DEBUG.verboseSlow && console.log(change); - archiveAndIndexURL(change.url); - } break; - case 'delete': { - DEBUG.verboseSlow && console.log(change); - deleteFromIndexAndSearch(change.url); + /** + let sortedHeaders = ''; + for( const key of Object.keys(headers).sort() ) { + sortedHeaders += `${key}:${headers[key]}/`; + } + **/ + + return `${method}${url}`; + //return `${url}${urlFragment}:${method}:${sortedHeaders}:${postData}:${hasPostData}`; + } + + 
async function startObservingBookmarkChanges() { + console.info("Not observing"); + return; + for await ( const change of bookmarkChanges() ) { + if ( Mode == 'select' ) { + switch(change.type) { + case 'new': { + DEBUG.verboseSlow && console.log(change); + archiveAndIndexURL(change.url); + } break; + case 'delete': { + DEBUG.verboseSlow && console.log(change); + deleteFromIndexAndSearch(change.url); + } break; + default: { + console.log(`We don't do anything about this bookmark change, currently`, change); } break; - default: { - console.log(`We don't do anything about this bookmark change, currently`, change); - } break; + } } } } + } catch(e) { + console.error('Error while collect', e); } } @@ -894,7 +902,7 @@ } async function isReady() { - return await untilHas(Status, 'loaded'); + return await untilTrue(() => Status.loaded); } async function loadFuzzy({fromMemOnly: fromMemOnly = false} = {}) { diff --git a/src/common.js b/src/common.js index 1f745b2..c2646e9 100644 --- a/src/common.js +++ b/src/common.js @@ -16,7 +16,7 @@ export const DEBUG = { verboseSlow: process.env.VERBOSE_DEBUG_22120 || DEEB, debug: process.env.DEBUG_22120 || DEEB, verbose: false, - checkPred: false + checkPred: false, } export const SHOW_FETCH = false; diff --git a/src/protocol.js b/src/protocol.js index a5b57d4..e73a9b5 100644 --- a/src/protocol.js +++ b/src/protocol.js @@ -1,26 +1,49 @@ import Ws from 'ws'; -import Fetch from 'node-fetch'; -import {untilTrue, SHOW_FETCH, DEBUG, ERROR_CODE_SAFE_TO_IGNORE} from './common.js'; +import {sleep, untilTrue, SHOW_FETCH, DEBUG, ERROR_CODE_SAFE_TO_IGNORE} from './common.js'; const ROOT_SESSION = "browser"; const MESSAGES = new Map(); +const RANDOM_LOCAL = () => [ + '127.0.0.1', + '[::1]', + 'localhost', + '127.0.0.1', + '[::1]', + 'localhost' +][Math.floor(Math.random()*6)]; + export async function connect({port:port = 9222} = {}) { let webSocketDebuggerUrl, socket; + let url; try { await untilTrue(async () => { let result = false; try { - const {webSocketDebuggerUrl} = await Fetch(`http://127.0.0.1:${port}/json/version`).then(r => r.json()); + url = `http://${RANDOM_LOCAL()}:${port}/json/version`; + DEBUG.verbose && console.log(`Trying browser at ${url}...`, url); + const {webSocketDebuggerUrl} = await Promise.race([ + fetch(url).then(r => r.json()), + (async () => { + await sleep(2500); + throw new Error(`Connect took too long.`) + })(), + ]); if ( webSocketDebuggerUrl ) { result = true; } + } catch(e) { + DEBUG.verbose && console.error('Error while checking browser', e); } finally { return result; } }); - ({webSocketDebuggerUrl} = await Fetch(`http://127.0.0.1:${port}/json/version`).then(r => r.json())); + ({webSocketDebuggerUrl} = await fetch(url).then(r => r.json())); + let isOpen = false; socket = new Ws(webSocketDebuggerUrl); + socket.on('open', () => { isOpen = true }); + await untilTrue(() => isOpen); + DEBUG.verbose && console.log(`Connected to browser`); } catch(e) { console.log("Error communicating with browser", e); process.exit(1); @@ -31,10 +54,18 @@ export async function connect({port:port = 9222} = {}) { socket.on('message', handle); let id = 0; - let resolve; - const promise = new Promise(res => resolve = res); + let resolve, reject; + const promise = new Promise((res, rej) => (resolve = res, reject = rej)); - socket.on('open', () => resolve()); + switch(socket.readyState) { + case Ws.CONNECTING: + socket.on('open', () => resolve()); break; + case Ws.OPEN: + resolve(); break; + case Ws.CLOSED: + case Ws.CLOSING: + reject(); break; + } await promise;
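
Note on the src/protocol.js hunk above: the connector no longer depends on node-fetch and now probes the Chromium DevTools /json/version endpoint on a rotating set of loopback hostnames, bounding each probe with a timeout before opening the WebSocket (and it resolves the open promise based on the socket's readyState rather than assuming the 'open' event is still pending). Below is a minimal, standalone sketch of that polling pattern, not the project's exact code: it assumes Node 18+ for the global fetch, defines its own sleep helper instead of importing from src/common.js, and cycles hosts round-robin where the diff picks one at random.

// Sketch: poll a Chromium DevTools endpoint until it answers, bounding each attempt.
// Assumes Node 18+ (global fetch). Host list, port, and delays mirror the diff but are illustrative.
const HOSTS = ['127.0.0.1', '[::1]', 'localhost'];
const sleep = ms => new Promise(res => setTimeout(res, ms));

async function waitForDevTools(port = 9222, attemptTimeout = 2500) {
  for (let i = 0; ; i = (i + 1) % HOSTS.length) {
    const url = `http://${HOSTS[i]}:${port}/json/version`;
    try {
      // Race the probe against a timer so a hung request cannot stall the loop.
      const version = await Promise.race([
        fetch(url).then(r => r.json()),
        sleep(attemptTimeout).then(() => { throw new Error('probe timed out'); }),
      ]);
      if (version?.webSocketDebuggerUrl) return version.webSocketDebuggerUrl;
    } catch {
      // Browser not up yet, or this hostname is unreachable; fall through and retry.
    }
    await sleep(250);
  }
}

The patched connect() keeps the same shape but drives the retry loop with untilTrue from src/common.js and only then constructs the Ws client, waiting for its 'open' state (or rejecting if the socket is already closing) before sending any CDP messages.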