diff --git a/README.md b/README.md index 5ca04d367..e2e3eacbf 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ To use Replay with a Desktop Browser, visit [replay.io](https://www.replay.io/) ## Packages -- [`/replay`](./packages/replay/README.md) CLI for viewing + uploading recordings +- [`/replayio`](./packages/replayio/README.md) CLI for viewing and uploading recordings - [`/cypress`](./packages/cypress/README.md) Beta Plugin for recording and capturing metadata for Cypress tests. - [`/playwright`](./packages/playwright/README.md) Beta Plugin for recording and capturing metadata for Playwright tests. - [`/puppeteer`](./packages/puppeteer/README.md) Experimental Plugin for recording Puppeteer tests. diff --git a/examples/create-react-app-typescript/package.json b/examples/create-react-app-typescript/package.json index 0fd8dd19d..1231b843d 100644 --- a/examples/create-react-app-typescript/package.json +++ b/examples/create-react-app-typescript/package.json @@ -4,7 +4,6 @@ "dependencies": { "@playwright/test": "^1.40.1", "@replayio/playwright": "workspace:^", - "@replayio/replay": "workspace:^", "@testing-library/jest-dom": "^5.16.2", "@testing-library/react": "^12.1.4", "@testing-library/user-event": "^13.5.0", diff --git a/packages/replay/.gitignore b/packages/replay/.gitignore deleted file mode 100644 index 473912655..000000000 --- a/packages/replay/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -/node_modules/ -/metadata/ diff --git a/packages/replay/CHANGELOG.md b/packages/replay/CHANGELOG.md deleted file mode 100644 index 395fc74dc..000000000 --- a/packages/replay/CHANGELOG.md +++ /dev/null @@ -1,46 +0,0 @@ -# @replayio/replay - -## 0.22.12 - -### Patch Changes - -- [#594](https://github.com/replayio/replay-cli/pull/594) [`78f5c72`](https://github.com/replayio/replay-cli/commit/78f5c72a62a38772cad90cccf1283b80eea49b61) Thanks [@miriambudayr](https://github.com/miriambudayr)! - Add missing stack-utils dependencies - -## 0.22.11 - -### Patch Changes - -- [#573](https://github.com/replayio/replay-cli/pull/573) [`9494138`](https://github.com/replayio/replay-cli/commit/9494138fe6235fd365ce952be384524d30415f21) Thanks [@hbenl](https://github.com/hbenl)! - Log broken entries when reading the recordings.log file - -## 0.22.10 - -### Patch Changes - -- [#567](https://github.com/replayio/replay-cli/pull/567) [`40beb19`](https://github.com/replayio/replay-cli/commit/40beb199c1d1dec640611fec0e04e911e24b5fe3) Thanks [@Andarist](https://github.com/Andarist)! - Fixed an issue with missing some files realted to `/metadata` entrypoint - -## 0.22.9 - -### Patch Changes - -- Updated dependencies [[`e7bd234`](https://github.com/replayio/replay-cli/commit/e7bd234980e9dfc7ab9584d47ebaf1812712f291)]: - - @replayio/sourcemap-upload@2.0.6 - -## 0.22.8 - -### Patch Changes - -- Updated dependencies [[`89c5082`](https://github.com/replayio/replay-cli/commit/89c5082a06265255ffdc8b4f1e87dcb1d3d9c2d2)]: - - @replayio/sourcemap-upload@2.0.5 - -## 0.22.7 - -### Patch Changes - -- [#519](https://github.com/replayio/replay-cli/pull/519) [`34b1ba7`](https://github.com/replayio/replay-cli/commit/34b1ba705d5c6918333482707b5232fc8edf6170) Thanks [@Andarist](https://github.com/Andarist)! 
- Export a new version of the new test run metadata validator - -## 0.22.6 - -### Patch Changes - -- Updated dependencies [[`75d475a`](https://github.com/replayio/replay-cli/commit/75d475ad5aed0c331cfc3b36bdcd8e7822b58c39)]: - - @replayio/sourcemap-upload@2.0.4 diff --git a/packages/replay/LICENSE b/packages/replay/LICENSE deleted file mode 100644 index 51cd180d5..000000000 --- a/packages/replay/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2021, Record Replay Inc. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/packages/replay/README.md b/packages/replay/README.md deleted file mode 100644 index 6b31f5b27..000000000 --- a/packages/replay/README.md +++ /dev/null @@ -1,205 +0,0 @@ -# @replayio/replay - -CLI tool and Node module for managing and uploading [Replay](https://replay.io) recordings and installing Replay Browsers. - -## Overview - -When using the Replay plugins to record automated tests or the Replay version of Node, recordings which are created are saved to disk, by default in `$HOME/.replay`. This package is used to manage these recordings and upload them to the record/replay web service so that they can be viewed. - -**Check out the ["Recording Automated Tests Guide"](https://docs.replay.io/docs/recording-automated-tests-5bf7d91b65cd46deab1867b07bd12bdf) to get started with recording Cypress or Playwright tests.** - -## Installation - -`npm i @replayio/replay --global` - -## Usage - -```bash -npx @replayio/replay -``` - -Possible commands are given below. These may be used with the `--directory ` option to override the default recording directory, or `--server
` to override the default server address. When uploading, an API key is required, which can be passed via `--api-key ` or by setting the `REPLAY_API_KEY` environment variable. - -### launch - -Launches the Replay browser and starts recording. - -Options: - -- `[url]`: URL to open on launch -- `-b, --browser`: Browser to launch - `chromium` (default), `firefox` - -### ls - -View information about all known recordings. - -Options: - -- `--all`: Include `uploaded`, `crashUploaded` and `unusable` recordings in the output. -- `--filter`: Filter the recordings to upload using a [JSONata-compatible filter function](https://docs.jsonata.org/higher-order-functions#filter). If used with `--all`, the filter is applied after including all status values. -- `--json`: Prints a JSON array with one descriptor element for each recording. - -Recording descriptors have the following required properties: - -- `id`: ID used to refer to this recording in other commands. -- `createTime`: Time when the recording was created. -- `runtime`: Runtime used to create the recording: either `gecko`, `chromium`, or `node`. -- `metadata`: Any information the runtime associated with this recording. For gecko/chromium recordings this is the URI of the first page loaded, and for node recordings this is the original command line arguments. -- `status`: Status of the recording, see below for possible values. - -The possible status values for a recording are as follows: - -- `onDisk`: The recording was fully written out to disk. -- `uploaded`: The recording was fully uploaded to the record/replay web service. -- `startedWrite`: The recording started being written to disk but wasn't finished. Either the recording process is still running, or the recording process was killed and didn't shut down normally. -- `startedUpload`: The recording started being uploaded but didn't finish. -- `unusable`: The recording was marked as unusable for some reason, such as a stack overflow occurring. -- `crashed`: The recording process crashed before finishing. -- `crashUploaded`: The recording process crashed and the crash data was uploaded to the record/replay web service for analysis. - -Depending on the status the recording descriptor can have some of the following additional properties: - -- `path`: If the recording started being written to disk (including before being uploaded), the path to the recording file. -- `server`: If the recording started being uploaded, the address of the server it was uploaded to. -- `recordingId`: If the recording started being uploaded, the server-assigned ID for this recording which can be used to view it. -- `unusableReason`: If the recording is unusable, the reason it was marked unusable. - -### upload `` - -Upload the recording with the given ID to the web service. - -### process `` - -Upload a recording, and then process it to ensure it can be replayed successfully. - -### upload-all - -Upload all recordings to the web service which can be uploaded. - -Options: - -- `--filter`: Filter the recordings to upload using a [JSONata-compatible filter function](https://docs.jsonata.org/higher-order-functions#filter) - -### view `` - -View the the given recording in the system's default browser, uploading it first if necessary. - -### view-latest - -View the most recently created recording in the system's default browser, uploading it first if necessary. - -### rm `` - -Remove the recording with the given ID and any on disk file for it. - -### rm-all - -Remove all recordings and on disk recording files. 
- -### update-browsers - -Updates any installed browsers used for recording in automation: [playwright](https://www.npmjs.com/package/@replayio/playwright), [puppeteer](https://www.npmjs.com/package/@replayio/puppeteer), and [cypress](https://www.npmjs.com/package/@replayio/cypress). - -### upload-sourcemaps - -Allows uploading production sourcemaps to Replay's servers so that they can be used when viewing recordings. - -The CLI command `replay upload-sourcemaps [opts] ` has the following options: - -- ``: (Required) A set of files or directories to search for generated files and sourcemap files. -- `--group`: (Required) To allow for tracking and browsing of maps that have been uploaded, we - require uploaded names to have an overall group name associated with them. - This could for instance be a version number, or commit hash. -- `--api-key`: The API key to use when connecting to Replay's servers. - Defaults to `process.env.REPLAY_API_KEY`. -- `--root`: Set the directory that relative paths should be computed with respect to. The relative path - of sourcemaps is included in the uploaded entry, and will be visible in the uploaded-asset UI, so this - can be used to strip off unimportant directories in the build path. Defaults to `process.cwd()`. -- `--ignore`: Provide an ignore pattern for files to ignore when searching for sourcemap-related data. - This may be passed multiple times to ignore multiple things. -- `--quiet`: Tell the CLI to output nothing to stdout. Errors will still log to stderr. -- `--verbose`: Output additional information about the sourcemap map search. -- `--dry-run`: Run all of the local processing and searching for maps, but skip uploading them. -- `--extensions`: The comma-separated set of file extensions to search for sourcemap-related data. - Defaults to `".js,.map"`. - -To programmatically upload from a node script, use [`@replayio/sourcemap-upload`](https://www.npmjs.com/package/@replayio/sourcemap-upload). - -### metadata - -Sets metadata on local recordings. With no options, this command will add the provided `metadata` to each local recording. - -``` -# Sets the provided x-build metadata and attempts to generate the source -# metadata from relevant environment variables -replay metadata --init '{"x-build": {"id": 1234}}' --keys source --warn -``` - -The CLI command `replay metadata [opts]` has the following options: - -- `--init `: Initializes the metadata object from the provided JSON-formatted `metadata` string -- `--keys `: Initializes known metadata keys by retrieving values from environment variables. -- `--warn`: Warn instead of exit with an error when metadata cannot be initialized -- `--filter`: Filter the recordings to which the metadata is applied using a [JSONata-compatible filter function](https://docs.jsonata.org/higher-order-functions#filter) - -## Node Module Usage - -This package can be used as a node module to directly access its functionality rather than going through the CLI tool. - -Installation: - -```bash -npm i @replayio/replay -``` - -Usage: - -```js -const interface = require("@replayio/replay"); -``` - -The interface includes the following members. Options objects can include `directory`, `server`, and `apiKey` properties which behave the same as `--directory`, `--server`, and `--api-key` arguments to the CLI tool, and a `verbose` property which can be set to log the same output as the CLI tool. Any of these properties or the options object themselves can be omitted to use default values. 
- -### launchBrowser(browserName, args) - -Equivalent to `replay launch`, launches the browser specified by `browserName` with the provided additional arguments in `args`. Returns a handle to the detached, spawned child process. - -### listAllRecordings(opts) - -Equivalent to `replay ls`, returns the JSON object for the recordings. - -### uploadRecording(id, opts) - -Equivalent to `replay upload `, returns a promise that resolves with a recording ID if the upload succeeded, or null if uploading failed. - -### processRecording(id, opts) - -Equivalent to `replay process `, returns a promise that resolves with a recording ID if the upload and processing succeeded, or null if either failed. - -### uploadAllRecordings(opts) - -Equivalent to `replay upload-all`, returns a promise that resolves with whether all uploads succeeded. - -### viewRecording(id, opts) - -Equivalent to `replay view `, returns a promise that resolves with whether the recording is being viewed. - -### viewLatestRecording(opts) - -Equivalent to `replay view-latest`, returns a promise that resolves with whether the latest recording is being viewed. - -### removeRecording(id, opts) - -Equivalent to `replay rm `, returns whether the recording was removed. - -### removeAllRecordings(opts) - -Equivalent to `replay rm-all`. - -### updateBrowsers(opts) - -Equivalent to `replay update-browsers`. - -## Contributing - -Contributing guide can be found [here](contributing.md). diff --git a/packages/replay/bin.js b/packages/replay/bin.js deleted file mode 100755 index cb9ede04f..000000000 --- a/packages/replay/bin.js +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env node -"use strict"; - -require("./dist/bin.js"); diff --git a/packages/replay/contributing.md b/packages/replay/contributing.md deleted file mode 100644 index bd44d4d0f..000000000 --- a/packages/replay/contributing.md +++ /dev/null @@ -1,7 +0,0 @@ -Testing changes to `replay-cli/replay` locally - -1. Make your changes -2. Finish the build step `npm run build` -3. Symlink the package folder with `cd dist; npm link .` -4. Double check that it works by running `which replay` -5. 
šŸ‘ diff --git a/packages/replay/jest.config.js b/packages/replay/jest.config.js deleted file mode 100644 index 59daf89bc..000000000 --- a/packages/replay/jest.config.js +++ /dev/null @@ -1,9 +0,0 @@ -/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ -module.exports = { - preset: "ts-jest", - testEnvironment: "node", - testMatch: ["**/__tests__/**/*.[jt]s?(x)", "**/(*.)+(spec|test).[jt]s?(x)"], - moduleNameMapper: { - uuid: require.resolve("uuid"), - }, -}; diff --git a/packages/replay/package.json b/packages/replay/package.json deleted file mode 100644 index f73c8291b..000000000 --- a/packages/replay/package.json +++ /dev/null @@ -1,80 +0,0 @@ -{ - "name": "@replayio/replay", - "version": "0.22.12", - "description": "CLI tool for uploading and managing recordings", - "bin": "./bin.js", - "main": "./dist/main.js", - "exports": { - ".": "./dist/main.js", - "./metadata": "./dist/metadata/index.js", - "./metadata/test": "./dist/metadata/test/index.js", - "./metadata/*": "./dist/metadata/*.js", - "./utils": "./dist/utils.js", - "./package.json": "./package.json" - }, - "files": [ - "dist", - "metadata", - "*.js", - "*.d.ts" - ], - "scripts": { - "prepare": "yarn run build", - "build": "pkg-build && node ./scripts/mirror-dist-metadata.js", - "test": "jest --ci", - "typecheck": "tsc --noEmit" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/replayio/replay-cli.git" - }, - "author": "", - "license": "BSD-3-Clause", - "bugs": { - "url": "https://github.com/replayio/replay-cli/issues" - }, - "homepage": "https://github.com/replayio/replay-cli/blob/main/packages/replay/README.md", - "dependencies": { - "@replayio/sourcemap-upload": "workspace:^", - "@types/semver": "^7.5.6", - "commander": "^12.0.0", - "debug": "^4.3.4", - "fs-extra": "^11.2.0", - "is-uuid": "^1.0.2", - "jsonata": "^1.8.6", - "launchdarkly-node-client-sdk": "^3.2.1", - "node-fetch": "^2.6.8", - "p-map": "^4.0.0", - "query-registry": "^2.6.0", - "semver": "^7.5.4", - "stack-utils": "^2.0.6", - "superstruct": "^1.0.4", - "text-table": "^0.2.0", - "undici": "^5.28.4", - "winston": "^3.13.0", - "winston-loki": "^6.1.2", - "ws": "^7.5.0" - }, - "devDependencies": { - "@replay-cli/pkg-build": "workspace:^", - "@replay-cli/shared": "workspace:^", - "@replay-cli/tsconfig": "workspace:^", - "@types/debug": "^4.1.7", - "@types/jest": "^28.1.5", - "@types/node-fetch": "^2.6.3", - "@types/stack-utils": "^2.0.3", - "@types/text-table": "^0.2.2", - "@types/ws": "^8.5.10", - "jest": "^28.1.3", - "ts-jest": "^28.0.6", - "typescript": "^5.5.2" - }, - "@replay-cli/pkg-build": { - "entrypoints": [ - "./src/bin.ts", - "./src/main.ts", - "./src/utils.ts", - "./src/metadata/*.ts" - ] - } -} diff --git a/packages/replay/scripts/mirror-dist-metadata.js b/packages/replay/scripts/mirror-dist-metadata.js deleted file mode 100644 index 8b70a98d7..000000000 --- a/packages/replay/scripts/mirror-dist-metadata.js +++ /dev/null @@ -1,59 +0,0 @@ -const fs = require("fs/promises"); -const path = require("path"); - -async function mirrorDistFiles(sourceDir, targetDir) { - const sourceFiles = await fs.readdir(sourceDir); - - await Promise.all( - sourceFiles.map(async filename => { - const sourcePath = path.join(sourceDir, filename); - const targetPath = path.join(targetDir, filename); - - if ((await fs.stat(sourcePath)).isDirectory()) { - try { - await fs.mkdir(targetPath); - } catch (err) { - if (err.code !== "EEXIST") { - throw err; - } - } - await mirrorDistFiles(sourcePath, targetPath); - return; - } - if 
(!filename.endsWith(".js")) { - return; - } - const relativeSourcePath = path.relative(path.dirname(targetPath), sourcePath); - const hasDefaultExport = ( - await fs.readFile(sourcePath.replace(/\.js$/, ".d.ts"), "utf8") - ).includes("export default"); - await Promise.all([ - fs.writeFile(targetPath, `module.exports = require("${relativeSourcePath}");\n`), - fs.writeFile( - targetPath.replace(/\.js$/, ".d.ts"), - [ - `export * from "${relativeSourcePath}";`, - hasDefaultExport && `export { default } from "${relativeSourcePath}";`, - ] - .filter(Boolean) - .join("\n") + "\n" - ), - ]); - }) - ); -} - -(async () => { - const source = path.join(__dirname, "..", "dist", "metadata"); - const target = path.join(__dirname, "..", "metadata"); - - try { - await fs.mkdir(target); - } catch (err) { - if (err.code !== "EEXIST") { - throw err; - } - } - - await mirrorDistFiles(source, target); -})(); diff --git a/packages/replay/src/auth.ts b/packages/replay/src/auth.ts deleted file mode 100644 index 652b72c17..000000000 --- a/packages/replay/src/auth.ts +++ /dev/null @@ -1,395 +0,0 @@ -import fetch from "node-fetch"; -import { spawn } from "child_process"; -import { createHash } from "crypto"; -import dbg from "./debug"; -import { readFile, writeFile, mkdir } from "fs/promises"; -import path from "path"; - -import { queryGraphQL } from "@replay-cli/shared/graphql/queryGraphQL"; -import { getDirectory, maybeLog, openExecutable } from "./utils"; -import { Options } from "./types"; -import { getLaunchDarkly } from "./launchdarkly"; - -const debug = dbg("replay:cli:auth"); - -class GraphQLError extends Error { - constructor(message: string, public errors: any[]) { - const errorsMessage = errors - .map((e: any) => e.message) - .filter(Boolean) - .join(", "); - super(`${message}: ${errorsMessage}`); - } -} - -function isInternalError(e: unknown): e is { id: string } { - return typeof e === "object" && !!e && "id" in e && typeof e.id === "string"; -} - -function getAuthHost() { - return process.env.REPLAY_AUTH_HOST || "webreplay.us.auth0.com"; -} - -function getAuthClientId() { - return process.env.REPLAY_AUTH_CLIENT_ID || "4FvFnJJW4XlnUyrXQF8zOLw6vNAH1MAo"; -} - -function tokenInfo(token: string) { - const [_header, encPayload, _cypher] = token.split(".", 3); - if (typeof encPayload !== "string") { - debug("Token did not contain a valid payload: %s", maskToken(token)); - return null; - } - - let payload; - try { - payload = JSON.parse(Buffer.from(encPayload, "base64").toString()); - } catch (err) { - debug("Failed to decode token: %s %e", maskToken(token), err); - return null; - } - - if (typeof payload !== "object") { - debug("Token payload was not an object"); - return null; - } - - return { payload }; -} - -function hasTokenExpired(token: string) { - const userInfo = tokenInfo(token); - const exp: number | undefined = userInfo?.payload?.exp; - debug("token expiration time: %d", exp ? 
exp * 1000 : 0); - - return exp != null && Date.now() - exp * 1000 > 0; -} - -function maskToken(token: string) { - return token.replace(/.(?!.{0,2}$)/g, "*"); -} - -async function refresh(refreshToken: string) { - try { - const resp = await fetch(`https://${getAuthHost()}/oauth/token`, { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ - audience: "https://api.replay.io", - scope: "openid profile", - grant_type: "refresh_token", - client_id: getAuthClientId(), - refresh_token: refreshToken, - }), - }); - - const json: any = await resp.json(); - - if (json.error) { - debug("OAuth token request failed: %O", json.error); - - throw { - id: "auth0-error", - message: json.error, - }; - } - - if (!json.access_token) { - debug("OAuth token request was missing access token: %O", json); - - throw { - id: "no-access-token", - }; - } - - return json.access_token; - } catch (e: any) { - throw { - ...e, - refreshToken: maskToken(refreshToken), - }; - } -} - -function generateAuthKey() { - const hash = createHash("sha256"); - hash.write(String(globalThis.performance.now())); - return hash.digest("hex").toString(); -} - -function initAuthRequest(options: Options = {}) { - maybeLog(options.verbose, "šŸŒŽ Launching browser to login to replay.io"); - const key = generateAuthKey(); - const server = process.env.REPLAY_APP_SERVER || "https://app.replay.io"; - spawn(openExecutable(), [`${server}/api/browser/auth?key=${key}&source=cli`]); - - return key; -} - -async function fetchToken(key: string) { - const resp = await queryGraphQL( - "CloseAuthRequest", - ` - mutation CloseAuthRequest($key: String!) { - closeAuthRequest(input: {key: $key}) { - success - token - } - } - `, - { - key, - } - ); - - if (resp.errors) { - if ( - resp.errors.length === 1 && - resp.errors[0].message === "Authentication request does not exist" - ) { - throw { - id: "missing-request", - }; - } else { - throw { - id: "close-graphql-error", - message: resp.errors - .map((e: any) => e.message) - .filter(Boolean) - .join(", "), - }; - } - } else if (!resp.data.closeAuthRequest.token) { - // there's no obvious reason this would occur but for completeness ... - throw { - id: "close-missing-token", - message: JSON.stringify(resp), - }; - } - - const refreshToken = resp.data.closeAuthRequest.token; - - return refreshToken; -} - -export async function pollForToken(key: string, options: Options = {}) { - let timedOut = false; - setTimeout(() => { - timedOut = true; - }, 60 * 1000); - - while (true) { - if (timedOut) { - debug("Timed out waiting for auth request"); - throw { id: "timeout" }; - } - - try { - const refreshToken = await fetchToken(key); - maybeLog(options.verbose, "šŸ”‘ Fetching token"); - - return await refresh(refreshToken); - } catch (e: any) { - if (e.id === "missing-request") { - debug("Auth request was not found. 
Retrying."); - await new Promise(resolve => setTimeout(resolve, 3000)); - } else { - throw e; - } - } - } -} - -function getTokenPath(options: Options = {}) { - const directory = getDirectory(options); - return path.resolve(path.join(directory, "profile", "auth.json")); -} - -export async function readToken(options: Options = {}) { - try { - const tokenPath = getTokenPath(options); - const tokenJson = await readFile(tokenPath, { encoding: "utf-8" }); - const { token } = JSON.parse(tokenJson); - - if (hasTokenExpired(token)) { - await writeFile(tokenPath, "{}"); - return; - } - - if (typeof token !== "string") { - throw new Error("Unexpect token value: " + token); - } - - return token; - } catch (e) { - debug("Failed to read/write token file: %o", e); - return; - } -} - -async function getApiKey(options: Options = {}) { - return ( - options.apiKey ?? - process.env.REPLAY_API_KEY ?? - process.env.RECORD_REPLAY_API_KEY ?? - (await readToken(options)) - ); -} - -async function writeToken(token: string, options: Options = {}) { - maybeLog(options.verbose, "āœļø Saving token"); - const tokenPath = getTokenPath(options); - await mkdir(path.dirname(tokenPath), { recursive: true }); - await writeFile( - tokenPath, - JSON.stringify( - { - "// Docs": "This contains your app.replay.io authentication token. Do not share!", - token, - }, - undefined, - 2 - ), - { encoding: "utf-8" } - ); -} - -export async function maybeAuthenticateUser(options: Options = {}) { - try { - const key = initAuthRequest(options); - const token = await pollForToken(key, options); - await writeToken(token); - - maybeLog(options.verbose, "āœ… Authentication complete!"); - - return true; - } catch (e) { - debug("Failed to authenticate user: %o", e); - - if (isInternalError(e)) { - if (e.id === "timeout") { - console.error("Timed out waiting for browser authentication. Please try again."); - } else { - console.error("Failed to authenticate (reason: %s)", e.id); - } - } else { - console.error("Failed to authenticate"); - } - - return false; - } -} - -async function getAuthInfo(key: string): Promise { - const resp = await queryGraphQL( - "AuthInfo", - ` - query AuthInfo { - viewer { - user { - id - } - } - auth { - workspaces { - edges { - node { - id - } - } - } - } - } - `, - undefined, - key - ); - - if (resp.errors) { - throw new GraphQLError("Failed to fetch auth info", resp.errors); - } - - const response = resp.data as { - viewer: { - user: { - id: string | null; - } | null; - }; - auth: { - workspaces: { - edges: { - node: { - id: string; - }; - }[]; - }; - }; - }; - - const { viewer, auth } = response; - - if (viewer?.user?.id) { - return viewer.user.id; - } - - if (auth?.workspaces?.edges?.[0]?.node?.id) { - return auth.workspaces.edges[0].node.id; - } - - throw new Error("Unrecognized type of an API key: Missing both user ID and workspace ID."); -} - -function getAuthInfoCachePath(options: Options = {}) { - const directory = getDirectory(options); - return path.resolve(path.join(directory, "profile", "authInfo.json")); -} - -// We don't want to store the API key in plain text, especially when provided -// via env or CLI arg. 
Hashing it would prevent leaking the key -function authInfoCacheKey(key: string) { - return createHash("sha256").update(key).digest("hex"); -} - -async function writeAuthInfoCache( - key: string, - authInfo: string, - options: Options = {} -): Promise<{ - [key: string]: string; -}> { - const cachePath = getAuthInfoCachePath(options); - await mkdir(path.dirname(getAuthInfoCachePath(options)), { recursive: true }); - const cache = { - [authInfoCacheKey(key)]: authInfo, - }; - await writeFile(cachePath, JSON.stringify(cache, undefined, 2), { encoding: "utf-8" }); - return cache; -} - -async function readAuthInfoCache(key: string, options: Options = {}): Promise { - try { - const cachePath = getAuthInfoCachePath(options); - const cacheJson = await readFile(cachePath, { encoding: "utf-8" }); - const cache = JSON.parse(cacheJson); - return cache[authInfoCacheKey(key)]; - } catch (e) { - debug("Failed to read auth info cache: %o", e); - return; - } -} - -export async function initLDContextFromApiKey(options: Options = {}) { - const apiKey = await getApiKey(options); - if (!apiKey) { - return; - } - - let targetId: string | undefined = await readAuthInfoCache(apiKey, options); - if (!targetId) { - debug("Fetching auth info from server"); - targetId = await getAuthInfo(apiKey); - await writeAuthInfoCache(apiKey, targetId, options); - } - - await getLaunchDarkly().initialize().identify({ type: "user", id: targetId }); -} diff --git a/packages/replay/src/bin.ts b/packages/replay/src/bin.ts deleted file mode 100644 index e0b871275..000000000 --- a/packages/replay/src/bin.ts +++ /dev/null @@ -1,498 +0,0 @@ -import { LogCallback, uploadSourceMaps } from "@replayio/sourcemap-upload"; -import { program } from "commander"; -import dbg, { printLogPath } from "./debug"; -import { formatAllRecordingsHumanReadable, formatAllRecordingsJson } from "./cli/formatRecordings"; -import { - listAllRecordings, - uploadRecording, - processRecording, - uploadAllRecordings, - viewRecording, - viewLatestRecording, - removeRecording, - removeAllRecordings, - updateBrowsers, - updateMetadata, - launchBrowser, - version, -} from "./main"; -import { - FilterOptions, - LaunchOptions, - MetadataOptions, - Options, - SourcemapUploadOptions, - UploadAllOptions, -} from "./types"; -import { assertValidBrowserName, fuzzyBrowserName } from "./utils"; -import { initLDContextFromApiKey, maybeAuthenticateUser } from "./auth"; -import { getLaunchDarkly } from "./launchdarkly"; - -export interface CommandLineOptions extends Options { - /** - * JSON output - */ - json?: boolean; - - /** - * Warn of failures but do not quit with a non-zero exit code - */ - warn?: boolean; - - /** - * Pass along browser commandline arguments - */ - browserArgs: string; -} - -const debug = dbg("replay:cli"); - -// Create command with global options -function commandWithGlobalOptions(cmdString: string) { - return program - .command(cmdString) - .option("--warn", "Terminate with a 0 exit code on error") - .option("--directory ", "Alternate recording directory") - .option("--server
", "Alternate server to upload recordings to") - .hook("preAction", async cmd => { - try { - await initLDContextFromApiKey(cmd.opts()); - } catch (e) { - debug("LaunchDarkly profile is anonymous %o", e); - } - }); -} - -// TODO(dmiller): `--json` should probably be a global option that applies to all commands. -commandWithGlobalOptions("ls") - .description("List information about all recordings.") - .option("-a, --all", "Include all recordings") - .option("--json", "Output in JSON format") - .option("--filter ", "String to filter recordings") - .option("--include-crashes", "Always include crash reports") - .action(commandListAllRecordings); - -commandWithGlobalOptions("upload ") - .description("Upload a recording to the remote server.") - .option("--api-key ", "Authentication API Key") - .action(commandUploadRecording); - -commandWithGlobalOptions("launch [url]") - .description("Launch the replay browser") - .option("-b, --browser ", "Browser to launch", "chromium") - .option( - "--attach ", - "Whether to attach to the browser process after launching", - false - ) - .allowUnknownOption() - .action(commandLaunchBrowser); - -commandWithGlobalOptions("record [url]") - .description("Launch the replay browser and start recording") - .option("-b, --browser ", "Browser to launch", "chromium") - .option("--browser-args ", "Browser arguments", "") - .option( - "--attach ", - "Whether to attach to the browser process after launching", - false - ) - .allowUnknownOption() - .action(commandLaunchBrowserAndRecord); - -commandWithGlobalOptions("process ") - .description("Upload a recording to the remote server and process it.") - .option("--api-key ", "Authentication API Key") - .action(commandProcessRecording); - -commandWithGlobalOptions("upload-all") - .description("Upload all recordings to the remote server.") - .option("--api-key ", "Authentication API Key") - .option("--filter ", "String to filter recordings") - .option("--batch-size ", "Number of recordings to upload in parallel (max 25)") - .option("--include-crashes", "Always include crash reports") - .action(commandUploadAllRecordings); - -commandWithGlobalOptions("view ") - .description("Load the devtools on a recording, uploading it if needed.") - .option("--view-server ", "Alternate server to view recording from.") - .option("--api-key ", "Authentication API Key") - .action(commandViewRecording); - -commandWithGlobalOptions("view-latest") - .description("Load the devtools on the latest recording, uploading it if needed.") - .option("--view-server ", "Alternate server to view recording from.") - .option("--api-key ", "Authentication API Key") - .action(commandViewLatestRecording); - -commandWithGlobalOptions("rm ") - .description("Remove a specific recording.") - .action(commandRemoveRecording); - -commandWithGlobalOptions("rm-all") - .description("Remove all recordings.") - .action(commandRemoveAllRecordings); - -commandWithGlobalOptions("update-browsers") - .description( - "Update your installed Replay runtimes. Optional argument: Comma-separated list of replay runtimes. Possible values: chromium,firefox.\n Node not yet supported." - ) - .arguments("[]") - .action(commandUpdateBrowsers); - -commandWithGlobalOptions("upload-sourcemaps") - .requiredOption( - "-g, --group ", - "The name to group this sourcemap into, e.g. A commit SHA or release version." 
- ) - .option("--api-key ", "Authentication API Key") - .option("--dry-run", "Perform all of the usual CLI logic, but the final sourcemap upload.") - .option( - "-x, --extensions ", - "A comma-separated list of extensions to process. Defaults to '.js,.map'.", - collectExtensions - ) - .option("-i, --ignore ", "Ignore files that match this pattern", collectIgnorePatterns) - .option("-q, --quiet", "Silence all stdout logging.") - .option("-v, --verbose", "Output extra data to stdout when processing files.") - .option("--batch-size ", "Number of sourcemaps to upload in parallel (max 25)") - .option("--root ", "The base directory to use when computing relative paths") - .arguments("") - .action((filepaths, opts) => commandUploadSourcemaps(filepaths, opts)); - -commandWithGlobalOptions("metadata") - .option("--init [metadata]") - .option("--keys ", "Metadata keys to initialize") - .option("--filter ", "String to filter recordings") - .action(commandMetadata); - -commandWithGlobalOptions("login") - .description("Log in interactively with your browser") - .action(commandLogin); - -commandWithGlobalOptions("version") - .description("Returns the current version of the CLI") - .option("--json", "Output in JSON format") - .action(commandVersion); - -async function exitCommand(exitCode: number) { - await getLaunchDarkly().close(); - process.exit(exitCode); -} - -program.parseAsync().catch(async err => { - console.error(err); - await exitCommand(1); -}); - -function collectExtensions(value: string) { - return value.split(","); -} -function collectIgnorePatterns(value: string, previous: Array = []) { - return previous.concat([value]); -} - -async function commandListAllRecordings( - opts: Pick & FilterOptions -) { - try { - debug("Options", opts); - - const recordings = listAllRecordings({ ...opts, verbose: true }); - if (opts.json) { - console.log(formatAllRecordingsJson(recordings)); - } else { - console.log(formatAllRecordingsHumanReadable(recordings)); - } - - await exitCommand(0); - } catch (e) { - console.error("Failed to list all recordings"); - printLogPath(); - debug("removeRecording error %o", e); - - await exitCommand(opts.warn ? 0 : 1); - } -} - -async function commandUploadRecording(id: string, opts: CommandLineOptions) { - try { - debug("Options", opts); - - const recordingId = await uploadRecording(id, { ...opts, verbose: true }); - if (!recordingId) { - printLogPath(); - } - - await exitCommand(recordingId || opts.warn ? 0 : 1); - } catch (e) { - console.error("Failed to upload recording"); - printLogPath(); - debug("uploadRecording error %o", e); - - await exitCommand(opts.warn ? 0 : 1); - } -} - -async function commandLaunchBrowser( - url: string | undefined, - opts: Pick & LaunchOptions -) { - try { - debug("Options", opts); - - const browser = fuzzyBrowserName(opts.browser) || "chromium"; - assertValidBrowserName(browser); - - await launchBrowser(browser, [url || "about:blank"], false, { ...opts, verbose: true }); - await exitCommand(0); - } catch (e) { - console.error("Failed to launch browser"); - printLogPath(); - debug("launchBrowser error %o", e); - - await exitCommand(opts.warn ? 
0 : 1); - } -} - -async function commandLaunchBrowserAndRecord( - url: string | undefined, - opts: Pick & LaunchOptions -) { - try { - debug("Options", opts); - - const browser = fuzzyBrowserName(opts.browser) || "chromium"; - assertValidBrowserName(browser); - - await launchBrowser(browser, [url || "about:blank", opts.browserArgs], true, { - ...opts, - verbose: true, - }); - await exitCommand(0); - } catch (e) { - console.error("Failed to launch browser"); - printLogPath(); - debug("launchBrowser error %o", e); - - await exitCommand(opts.warn ? 0 : 1); - } -} - -async function commandProcessRecording(id: string, opts: CommandLineOptions) { - try { - debug("Options", opts); - - const recordingId = await processRecording(id, { ...opts, verbose: true }); - if (!recordingId) { - printLogPath(); - } - - await exitCommand(recordingId || opts.warn ? 0 : 1); - } catch (e) { - console.error("Failed to process recording"); - printLogPath(); - debug("processRecording error %o", e); - - await exitCommand(opts.warn ? 0 : 1); - } -} - -async function commandUploadAllRecordings(opts: CommandLineOptions & UploadAllOptions) { - try { - debug("Options", opts); - - const uploadedAll = await uploadAllRecordings({ ...opts, verbose: true }); - await exitCommand(uploadedAll || opts.warn ? 0 : 1); - } catch (e) { - console.error("Failed to upload all recordings"); - printLogPath(); - debug("uploadAllRecordings error %o", e); - - await exitCommand(opts.warn ? 0 : 1); - } -} - -async function commandViewRecording(id: string, opts: CommandLineOptions) { - try { - debug("Options", opts); - - const viewed = await viewRecording(id, { ...opts, verbose: true }); - await exitCommand(viewed || opts.warn ? 0 : 1); - } catch (e) { - console.error("Failed to view recording"); - printLogPath(); - debug("viewRecording error %o", e); - - await exitCommand(opts.warn ? 0 : 1); - } -} - -async function commandViewLatestRecording(opts: CommandLineOptions) { - try { - debug("Options", opts); - - const viewed = await viewLatestRecording({ ...opts, verbose: true }); - if (!viewed) { - printLogPath(); - } - - await exitCommand(viewed || opts.warn ? 0 : 1); - } catch (e) { - console.error("Failed to view recording"); - printLogPath(); - debug("viewLatestRecording error %o", e); - - await exitCommand(opts.warn ? 0 : 1); - } -} - -async function commandRemoveRecording( - id: string, - opts: Pick -) { - try { - debug("Options", opts); - - const removed = removeRecording(id, { ...opts, verbose: true }); - await exitCommand(removed || opts.warn ? 0 : 1); - } catch (e) { - console.error("Failed to remove recording"); - printLogPath(); - debug("removeRecording error %o", e); - - await exitCommand(opts.warn ? 0 : 1); - } -} - -async function commandRemoveAllRecordings(opts: Pick) { - try { - debug("Options", opts); - - removeAllRecordings({ ...opts, verbose: true }); - await exitCommand(0); - } catch (e) { - console.error("Failed to remove all recordings"); - printLogPath(); - debug("removeAllRecordings error %o", e); - - await exitCommand(opts.warn ? 0 : 1); - } -} - -async function commandUpdateBrowsers( - browsers: string, - opts: Pick -) { - try { - debug("Options", opts); - - await updateBrowsers({ - ...opts, - browsers: browsers?.split(",").map(fuzzyBrowserName), - verbose: true, - }); - await exitCommand(0); - } catch (e) { - console.error("Failed to updated browsers"); - printLogPath(); - debug("updateBrowser error %o", e); - - await exitCommand(opts.warn ? 
0 : 1); - } -} - -async function commandUploadSourcemaps( - filepaths: Array, - cliOpts: SourcemapUploadOptions & Pick -): Promise { - debug("Options", cliOpts); - - const { quiet, verbose, apiKey, batchSize, warn, ...uploadOpts } = cliOpts; - - let log: LogCallback | undefined; - if (!quiet) { - if (verbose) { - log = (_level, message) => { - console.log(message); - }; - } else { - log = (level, message) => { - if (level === "normal") { - console.log(message); - } - }; - } - } - - try { - await uploadSourceMaps({ - filepaths, - key: apiKey, - ...uploadOpts, - concurrency: batchSize, - log, - }); - - await exitCommand(0); - } catch (e) { - console.error("Failed to upload source maps"); - debug("uploadSourceMaps error %o", e); - - await exitCommand(warn ? 0 : 1); - } -} - -async function commandMetadata(opts: MetadataOptions & FilterOptions) { - try { - debug("Options", opts); - - await updateMetadata({ ...opts, verbose: true }); - printLogPath(); - await exitCommand(0); - } catch (e) { - console.error("Failed to update recording metadata"); - debug("updateMetadata error %o", e); - - await exitCommand(opts.warn ? 0 : 1); - } -} - -async function commandLogin(opts: CommandLineOptions) { - try { - const ok = await maybeAuthenticateUser({ - ...opts, - verbose: true, - }); - await exitCommand(ok || opts.warn ? 0 : 1); - } catch (e) { - console.error("Failed to login"); - printLogPath(); - debug("maybeAuthenticateUser error %o", e); - - await exitCommand(opts.warn ? 0 : 1); - } -} - -async function commandVersion(opts: CommandLineOptions) { - try { - const versionInfo = await version(); - - if (opts.json) { - console.log(JSON.stringify(versionInfo)); - } else { - const { version, update, latest } = versionInfo; - console.log(`\n@replayio/replay version ${version}`); - if (update) { - console.log(`A newer version (${latest}) of the Replay CLI is available`); - } - } - await exitCommand(0); - } catch (e) { - console.error("Failed to get version information"); - printLogPath(); - debug("commandVersion error %o", e); - - await exitCommand(opts.warn ? 
0 : 1); - } -} diff --git a/packages/replay/src/cli/__snapshots__/formatRecordings.test.ts.snap b/packages/replay/src/cli/__snapshots__/formatRecordings.test.ts.snap deleted file mode 100644 index 97d6765d2..000000000 --- a/packages/replay/src/cli/__snapshots__/formatRecordings.test.ts.snap +++ /dev/null @@ -1,90 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`formatAllRecordingsHumanReadable formats one basic recording 1`] = ` -"ID Status Title Created -1 onDisk 2 days ago" -`; - -exports[`formatAllRecordingsHumanReadable sorts recording by createTime, most recent recording first 1`] = ` -"ID Status Title Created -2 onDisk Replay of test 1 day ago -1 onDisk Replay of test 2 days ago" -`; - -exports[`formatAllRecordingsHumanReadable uses the metadata title when it exists 1`] = ` -"ID Status Title Created -1 onDisk A Node Recording 2 days ago" -`; - -exports[`formatAllReordingsJson matches snapshot 1`] = ` -"[ - { - \\"id\\": \\"1\\", - \\"createTime\\": \\"2020-01-01T00:00:00.000Z\\", - \\"runtime\\": \\"node\\", - \\"metadata\\": { - \\"uri\\": \\"test\\" - }, - \\"status\\": \\"onDisk\\", - \\"sourcemaps\\": [] - }, - { - \\"id\\": \\"2\\", - \\"createTime\\": \\"2020-01-02T00:00:00.000Z\\", - \\"runtime\\": \\"node\\", - \\"metadata\\": { - \\"argv\\": [ - \\"test\\", - \\"foo\\", - \\"bar\\" - ] - }, - \\"status\\": \\"onDisk\\", - \\"sourcemaps\\": [] - }, - { - \\"id\\": \\"48703074-c19b-4cbd-be85-b860d26e8b24\\", - \\"createTime\\": \\"2022-07-14T03:35:07.000Z\\", - \\"runtime\\": \\"node\\", - \\"metadata\\": { - \\"argv\\": [ - \\"/Users/dan/.nvm/versions/node/v16.13.0/bin/npx\\", - \\"@replayio/replay\\", - \\"ls\\" - ], - \\"title\\": \\"Replay of npx\\" - }, - \\"sourcemaps\\": [ - { - \\"id\\": \\"1\\", - \\"originalSources\\": [], - \\"path\\": \\"/Users/dan/.replay/sourcemap-7182615795.map\\", - \\"baseURL\\": \\"file:///Users/dan/.nvm/versions/node/v16.13.0/lib/node_modules/npm/node_modules/@tootallnate/once/dist/index.js.map\\", - \\"targetContentHash\\": \\"sha256:b9d3770080970a3e2923463bd5f5dc4e5f15493cc4d4d762eb60b7cd3eaeca14\\", - \\"targetURLHash\\": \\"sha256:e2bdfba8215cc5dd4ec43adc0417b5107aed8be39e4d904401f43268a546f5bc\\", - \\"targetMapURLHash\\": \\"sha256:b25adc50556e34b0ee1c13ec52717a1c23118970538906fe908693036d6ee912\\" - }, - { - \\"id\\": \\"2\\", - \\"originalSources\\": [], - \\"path\\": \\"/Users/dan/.replay/sourcemap-2269761573.map\\", - \\"baseURL\\": \\"file:///Users/dan/.nvm/versions/node/v16.13.0/lib/node_modules/npm/node_modules/agent-base/dist/src/index.js.map\\", - \\"targetContentHash\\": \\"sha256:13b6d658b492796461358e19fe1de30665ab2efb04c726b82530352cd364d4ac\\", - \\"targetURLHash\\": \\"sha256:41cb32196455d4d70440862ba8d8aa45e7a2eb1a8d9cef9d5f48558550f8edb9\\", - \\"targetMapURLHash\\": \\"sha256:105675b27ce1fc36fa2ce03826873962ae5abc76c849ee40a52863cbcd73557f\\" - }, - { - \\"id\\": \\"3\\", - \\"originalSources\\": [], - \\"path\\": \\"/Users/dan/.replay/sourcemap-5611864844.map\\", - \\"baseURL\\": \\"file:///Users/dan/.nvm/versions/node/v16.13.0/lib/node_modules/npm/node_modules/http-proxy-agent/dist/index.js.map\\", - \\"targetContentHash\\": \\"sha256:37c871632157431d22c0667a1688d54644e5d8172400cf21c747dd2f46cc4f47\\", - \\"targetURLHash\\": \\"sha256:bc5cddb6dab652efb92d4aecfbf67026f9d458a83101222d990e58433446a566\\", - \\"targetMapURLHash\\": \\"sha256:bbfa1057c6af9c45732ce5f57bbb38e55200ba21eb19dc7a2d939a472962f5c9\\" - } - ], - \\"status\\": \\"startedWrite\\", - \\"path\\": 
\\"/Users/dan/.replay/recording-48703074-c19b-4cbd-be85-b860d26e8b24.dat\\" - } -]" -`; diff --git a/packages/replay/src/cli/formatRecordings.test.ts b/packages/replay/src/cli/formatRecordings.test.ts deleted file mode 100644 index 76bb8200f..000000000 --- a/packages/replay/src/cli/formatRecordings.test.ts +++ /dev/null @@ -1,169 +0,0 @@ -import { ExternalRecordingEntry } from "../types"; -import { formatAllRecordingsHumanReadable, formatAllRecordingsJson } from "./formatRecordings"; - -describe("formatAllRecordingsHumanReadable", () => { - let now = Date.now; - beforeEach(() => { - Date.now = jest.fn(() => new Date("2020-01-03").getTime()); - }); - - afterEach(() => { - Date.now = now; - }); - - it("formats one basic recording", () => { - const recordings: ExternalRecordingEntry[] = [ - { - id: "1", - createTime: new Date("2020-01-01"), - runtime: "node", - metadata: {}, - status: "onDisk", - sourcemaps: [], - }, - ]; - const result = formatAllRecordingsHumanReadable(recordings); - expect(result).toMatchSnapshot(); - }); - - it("uses the metadata title when it exists", () => { - const recordings: ExternalRecordingEntry[] = [ - { - id: "1", - createTime: new Date("2020-01-01"), - runtime: "node", - metadata: { - title: "A Node Recording", - }, - status: "onDisk", - sourcemaps: [], - }, - ]; - const result = formatAllRecordingsHumanReadable(recordings); - expect(result).toMatchSnapshot(); - }); - - it("sorts recording by createTime, most recent recording first", () => { - const recordings: ExternalRecordingEntry[] = [ - { - id: "1", - createTime: new Date("2020-01-01"), - runtime: "node", - metadata: { - uri: "test", - }, - status: "onDisk", - sourcemaps: [], - }, - { - id: "2", - createTime: new Date("2020-01-02"), - runtime: "node", - metadata: { - argv: ["test", "foo", "bar"], - }, - status: "onDisk", - sourcemaps: [], - }, - ]; - const result = formatAllRecordingsHumanReadable(recordings); - expect(result).toMatchSnapshot(); - }); -}); - -describe("formatAllReordingsJson", () => { - it("formats a recording as JSON", () => { - const recordings: ExternalRecordingEntry[] = [ - { - id: "1", - createTime: new Date("2020-01-01"), - runtime: "node", - metadata: {}, - status: "onDisk", - sourcemaps: [], - }, - ]; - const result = formatAllRecordingsJson(recordings); - const parsedJson = JSON.parse(result); - expect(parsedJson).toBeInstanceOf(Array); - }); - - it("matches snapshot", () => { - const recordings: ExternalRecordingEntry[] = [ - { - id: "1", - createTime: new Date("2020-01-01"), - runtime: "node", - metadata: { - uri: "test", - }, - status: "onDisk", - sourcemaps: [], - }, - { - id: "2", - createTime: new Date("2020-01-02"), - runtime: "node", - metadata: { - argv: ["test", "foo", "bar"], - }, - status: "onDisk", - sourcemaps: [], - }, - { - id: "48703074-c19b-4cbd-be85-b860d26e8b24", - createTime: new Date("Wed Jul 13 2022 20:35:07 GMT-0700 (Pacific Daylight Time)"), - runtime: "node", - metadata: { - argv: ["/Users/dan/.nvm/versions/node/v16.13.0/bin/npx", "@replayio/replay", "ls"], - title: "Replay of npx", - }, - sourcemaps: [ - { - id: "1", - originalSources: [], - path: "/Users/dan/.replay/sourcemap-7182615795.map", - baseURL: - "file:///Users/dan/.nvm/versions/node/v16.13.0/lib/node_modules/npm/node_modules/@tootallnate/once/dist/index.js.map", - targetContentHash: - "sha256:b9d3770080970a3e2923463bd5f5dc4e5f15493cc4d4d762eb60b7cd3eaeca14", - targetURLHash: - "sha256:e2bdfba8215cc5dd4ec43adc0417b5107aed8be39e4d904401f43268a546f5bc", - targetMapURLHash: - 
"sha256:b25adc50556e34b0ee1c13ec52717a1c23118970538906fe908693036d6ee912", - }, - { - id: "2", - originalSources: [], - path: "/Users/dan/.replay/sourcemap-2269761573.map", - baseURL: - "file:///Users/dan/.nvm/versions/node/v16.13.0/lib/node_modules/npm/node_modules/agent-base/dist/src/index.js.map", - targetContentHash: - "sha256:13b6d658b492796461358e19fe1de30665ab2efb04c726b82530352cd364d4ac", - targetURLHash: - "sha256:41cb32196455d4d70440862ba8d8aa45e7a2eb1a8d9cef9d5f48558550f8edb9", - targetMapURLHash: - "sha256:105675b27ce1fc36fa2ce03826873962ae5abc76c849ee40a52863cbcd73557f", - }, - { - id: "3", - originalSources: [], - path: "/Users/dan/.replay/sourcemap-5611864844.map", - baseURL: - "file:///Users/dan/.nvm/versions/node/v16.13.0/lib/node_modules/npm/node_modules/http-proxy-agent/dist/index.js.map", - targetContentHash: - "sha256:37c871632157431d22c0667a1688d54644e5d8172400cf21c747dd2f46cc4f47", - targetURLHash: - "sha256:bc5cddb6dab652efb92d4aecfbf67026f9d458a83101222d990e58433446a566", - targetMapURLHash: - "sha256:bbfa1057c6af9c45732ce5f57bbb38e55200ba21eb19dc7a2d939a472962f5c9", - }, - ], - status: "startedWrite", - path: "/Users/dan/.replay/recording-48703074-c19b-4cbd-be85-b860d26e8b24.dat", - }, - ]; - const result = formatAllRecordingsJson(recordings); - expect(result).toMatchSnapshot(); - }); -}); diff --git a/packages/replay/src/cli/formatRecordings.ts b/packages/replay/src/cli/formatRecordings.ts deleted file mode 100644 index 599f58f5b..000000000 --- a/packages/replay/src/cli/formatRecordings.ts +++ /dev/null @@ -1,54 +0,0 @@ -/* - * This module is responsible for formatting recordings that are going to be printed to the console. - */ -import table from "text-table"; - -import { generateDefaultTitle } from "../generateDefaultTitle"; -import { ExternalRecordingEntry } from "../types"; - -const MsPerSecond = 1000; -const MsPerMinute = MsPerSecond * 60; -const MsPerHour = MsPerMinute * 60; -const MsPerDay = MsPerHour * 24; - -function formatTime(time: Date) { - const fmt = new Intl.RelativeTimeFormat("en", { - style: "long", - }); - - const ds = Date.now() - time.getTime(); - if (ds < MsPerMinute) { - return fmt.format(Math.round(-ds / MsPerSecond), "second"); - } else if (ds < MsPerHour) { - return fmt.format(Math.round(-ds / MsPerMinute), "minute"); - } else if (ds < MsPerDay) { - return fmt.format(Math.round(-ds / MsPerHour), "hour"); - } - - return fmt.format(Math.round(-ds / MsPerDay), "day"); -} - -export function formatAllRecordingsHumanReadable(recordings: ExternalRecordingEntry[]) { - // sort by created at date - recordings.sort((a, b) => { - return b.createTime.getTime() - a.createTime.getTime(); - }); - const formattedRecordings = recordings.map(recording => { - const title = - typeof recording.metadata?.title === "string" - ? 
recording.metadata.title - : generateDefaultTitle(recording.metadata); - return [recording.id, recording.status, title || "", formatTime(recording.createTime)]; - }); - - const tableBody: Array> = [ - ["ID", "Status", "Title", "Created"], - ...formattedRecordings, - ]; - - return table(tableBody); -} - -export function formatAllRecordingsJson(recordings: ExternalRecordingEntry[]) { - return JSON.stringify(recordings, null, 2); -} diff --git a/packages/replay/src/client.ts b/packages/replay/src/client.ts deleted file mode 100644 index 337107d3e..000000000 --- a/packages/replay/src/client.ts +++ /dev/null @@ -1,137 +0,0 @@ -import dbg from "./debug"; -import WebSocket from "ws"; -import { defer } from "./utils"; -import { Agent } from "http"; - -const debug = dbg("replay:protocol"); - -// Simple protocol client for use in writing standalone applications. - -interface Callbacks { - onOpen: (socket: WebSocket) => void; - onClose: (socket: WebSocket) => void; - onError: (socket: WebSocket) => void; -} - -type ErrorDataValue = string | number | boolean | null; -type ErrorData = Record; -type ProtocolErrorBase = { - code: number; - message: string; - data: ErrorData; -}; - -export class ProtocolError extends Error { - readonly protocolCode: number; - readonly protocolMessage: string; - readonly protocolData: unknown; - - constructor(err: ProtocolErrorBase) { - super(`protocol error ${err.code}: ${err.message}`); - this.protocolCode = err.code; - this.protocolMessage = err.message; - this.protocolData = err.data ?? {}; - } - - toString() { - return `Protocol error ${this.protocolCode}: ${this.protocolMessage}`; - } -} - -class ProtocolClient { - socket: WebSocket; - callbacks: Callbacks; - pendingMessages = new Map(); - eventListeners = new Map(); - nextMessageId = 1; - - constructor(address: string, callbacks: Callbacks, agent?: Agent) { - debug("Creating WebSocket for %s with %o", address, { agent }); - this.socket = new WebSocket(address, { - agent: agent, - }); - this.callbacks = callbacks; - - this.socket.on("open", callbacks.onOpen); - this.socket.on("close", callbacks.onClose); - this.socket.on("error", callbacks.onError); - this.socket.on("message", message => this.onMessage(message)); - } - - close() { - this.socket.close(); - } - - async setAccessToken(accessToken?: string) { - accessToken = accessToken || process.env.REPLAY_API_KEY || process.env.RECORD_REPLAY_API_KEY; - - if (!accessToken) { - throw new Error( - "Access token must be passed or set via the REPLAY_API_KEY environment variable." - ); - } - - return this.sendCommand("Authentication.setAccessToken", { - accessToken, - }); - } - - async sendCommand>( - method: string, - params: P, - data?: any, - sessionId?: string, - callback?: (err?: Error) => void - ) { - const id = this.nextMessageId++; - debug("Sending command %s: %o", method, { id, params, sessionId }); - this.socket.send( - JSON.stringify({ - id, - method, - params, - binary: data ? 
true : undefined, - sessionId, - }), - err => { - if (!err && data) { - this.socket.send(data, callback); - } else { - if (err) { - debug("Received socket error: %s", err); - } - callback?.(err); - } - } - ); - const waiter = defer(); - this.pendingMessages.set(id, waiter); - return waiter.promise; - } - - setEventListener(method: string, callback: (params: any) => void) { - this.eventListeners.set(method, callback); - } - - onMessage(contents: WebSocket.RawData) { - const msg = JSON.parse(String(contents)); - debug("Received message %o", msg); - if (msg.id) { - const { resolve, reject } = this.pendingMessages.get(msg.id); - this.pendingMessages.delete(msg.id); - if (msg.result) { - resolve(msg.result); - } else if (msg.error) { - reject(new ProtocolError(msg.error)); - } else { - reject(`Channel error: ${JSON.stringify(msg)}`); - } - } else if (this.eventListeners.has(msg.method)) { - this.eventListeners.get(msg.method)(msg.params); - } else { - console.log(`Received event without listener: ${msg.method}`); - } - } -} - -export default ProtocolClient; diff --git a/packages/replay/src/debug.ts b/packages/replay/src/debug.ts deleted file mode 100644 index 7b117d140..000000000 --- a/packages/replay/src/debug.ts +++ /dev/null @@ -1,62 +0,0 @@ -import dbg from "debug"; -import fs from "fs"; -import path from "path"; -import util from "node:util"; -import { getDirectory } from "./utils"; - -const debugDebug = dbg("replay:cli:debug"); - -const logDirPath = path.join(getDirectory(), "logs"); -export let logPath = path.join( - logDirPath, - "cli-" + - new Date() - .toISOString() - .replace(/:/g, "-") - .replace(/\.(\d+)Z$/, "-$1.log") -); - -function init() { - try { - fs.mkdirSync(logDirPath, { recursive: true }); - } catch (e) { - logPath = ""; - debugDebug("Failed to create log directory %o", e); - } -} - -let size = 0; -export default function debug(namespace: string, pathToLog: string = logPath) { - size = Math.max(size, namespace.length); - const d = dbg(namespace); - - if (process.env.REPLAY_CLI_DISABLE_LOG) { - return d; - } - - return (formatter: string, ...args: any[]) => { - d(formatter, ...args); - - if (pathToLog) { - try { - const output = util - .format(formatter, ...args) - .split("\n") - .map((l, i) => (i === 0 ? 
l : "".padStart(size + 3, " ") + l)) - .join("\n"); - const prefix = `[${namespace}] `.padStart(size + 3, " "); - fs.appendFileSync(pathToLog, `${prefix}${output}\n`); - } catch (e) { - debugDebug("Failed to write log %o", e); - } - } - }; -} - -export function printLogPath() { - if (logPath && fs.existsSync(logPath)) { - console.error("\nšŸ“„ Additional logs available in", logPath, "\n"); - } -} - -init(); diff --git a/packages/replay/src/generateDefaultTitle.ts b/packages/replay/src/generateDefaultTitle.ts deleted file mode 100644 index 554a653d4..000000000 --- a/packages/replay/src/generateDefaultTitle.ts +++ /dev/null @@ -1,17 +0,0 @@ -import path from "path"; - -export function generateDefaultTitle(metadata: Record) { - let host = metadata.uri; - if (host && typeof host === "string") { - try { - const url = new URL(host); - host = url.host; - } finally { - return `Replay of ${host}`; - } - } - - if (Array.isArray(metadata.argv) && typeof metadata.argv[0] === "string") { - return `Replay of ${path.basename(metadata.argv[0])}`; - } -} diff --git a/packages/replay/src/install.ts b/packages/replay/src/install.ts deleted file mode 100644 index d9050e683..000000000 --- a/packages/replay/src/install.ts +++ /dev/null @@ -1,252 +0,0 @@ -// Manage installation of browsers for other NPM packages. - -import { spawnSync } from "child_process"; -import dbg from "./debug"; -import fs from "fs"; -import https from "https"; -import path from "path"; -import { BrowserName, Options } from "./types"; -import { defer, getDirectory, maybeLog } from "./utils"; - -const debug = dbg("replay:cli:install"); - -type PlatformKeys = `${typeof process.platform}:${BrowserName}`; - -const EXECUTABLE_PATHS: Partial> = { - "darwin:firefox": ["firefox", "Nightly.app", "Contents", "MacOS", "firefox"], - "darwin:chromium": ["Replay-Chromium.app", "Contents", "MacOS", "Chromium"], - "linux:chromium": ["chrome-linux", "chrome"], - "linux:firefox": ["firefox", "firefox"], - "win32:chromium": ["replay-chromium", "chrome.exe"], -}; - -function getBrowserDownloadFileName(key: K): string { - switch (key) { - case "darwin:firefox": - return process.env.RECORD_REPLAY_FIREFOX_DOWNLOAD_FILE || "macOS-replay-playwright.tar.xz"; - case "darwin:chromium": - return ( - process.env.RECORD_REPLAY_CHROMIUM_DOWNLOAD_FILE || - (process.arch.startsWith("arm") - ? "macOS-replay-chromium-arm.tar.xz" - : "macOS-replay-chromium.tar.xz") - ); - - case "linux:chromium": - return process.env.RECORD_REPLAY_CHROMIUM_DOWNLOAD_FILE || "linux-replay-chromium.tar.xz"; - case "linux:firefox": - return process.env.RECORD_REPLAY_FIREFOX_DOWNLOAD_FILE || "linux-replay-playwright.tar.xz"; - - case "win32:chromium": - return process.env.RECORD_REPLAY_CHROMIUM_DOWNLOAD_FILE || "windows-replay-chromium.zip"; - } - - throw new Error("Unexpected platform"); -} - -async function ensureBrowsersInstalled( - kind: BrowserName | "all", - force: boolean, - opts: Options = {} -) { - maybeLog( - opts.verbose, - `Installing ${kind === "all" ? 
"browsers" : kind} for ${process.platform}` - ); - if (kind !== "all" && !getPlatformKey(kind)) { - console.log(`${kind} browser for Replay is not supported on ${process.platform}`); - return; - } - - switch (process.platform) { - case "darwin": - if (["all", "firefox"].includes(kind)) { - await installReplayBrowser( - getBrowserDownloadFileName("darwin:firefox"), - "firefox", - "firefox", - force, - opts - ); - } - if (["all", "chromium"].includes(kind)) { - await installReplayBrowser( - getBrowserDownloadFileName("darwin:chromium"), - "Replay-Chromium.app", - "Replay-Chromium.app", - force, - opts - ); - } - break; - case "linux": - if (["all", "firefox"].includes(kind)) { - await installReplayBrowser( - getBrowserDownloadFileName("linux:firefox"), - "firefox", - "firefox", - force, - opts - ); - } - if (["all", "chromium"].includes(kind)) { - await installReplayBrowser( - getBrowserDownloadFileName("linux:chromium"), - "replay-chromium", - "chrome-linux", - force, - opts - ); - } - break; - case "win32": - if (["all", "chromium"].includes(kind)) { - await installReplayBrowser( - getBrowserDownloadFileName("win32:chromium"), - "replay-chromium", - "replay-chromium", - force, - opts - ); - } - break; - } -} - -async function updateBrowsers(opts: Options & { browsers?: BrowserName[] }) { - if (opts.browsers) { - for (const browserName of opts.browsers) { - await ensureBrowsersInstalled(browserName, true, opts); - } - } else { - return ensureBrowsersInstalled("all", true, opts); - } -} - -function getPlatformKey(browserName: BrowserName): PlatformKeys | undefined { - const key = `${process.platform}:${browserName}`; - if (key in EXECUTABLE_PATHS) { - return key as keyof typeof EXECUTABLE_PATHS; - } - - return undefined; -} - -function getExecutablePath(browserName: BrowserName, opts?: Options) { - const overridePathKey = `REPLAY_${browserName.toUpperCase()}_EXECUTABLE_PATH`; - const overridePath = process.env[overridePathKey]; - if (overridePath) { - debug(`Using executable override for ${browserName}: ${overridePath}`); - return overridePath; - } - - const key = getPlatformKey(browserName); - if (!key) { - return null; - } - - const executablePathParts = EXECUTABLE_PATHS[key]; - return executablePathParts ? path.join(getRuntimesDirectory(opts), ...executablePathParts) : null; -} - -function extractBrowserArchive(browserDir: string, name: string) { - const fullName = path.join(browserDir, name); - const tarResult = spawnSync("tar", ["xf", name], { cwd: browserDir }); - if (tarResult.status !== 0) { - console.error("Failed to extract", fullName); - console.error(String(tarResult.stderr)); - - throw new Error("Unable to extract browser archive"); - } - - fs.unlinkSync(fullName); -} - -function getRuntimesDirectory(opts?: Options) { - const replayDir = getDirectory(opts); - return path.join(replayDir, "runtimes"); -} - -// Installs a browser if it isn't already installed. -async function installReplayBrowser( - name: string, - srcName: string, - dstName: string, - force = false, - opts: Options = {} -) { - const replayDir = getDirectory(opts); - const browserDir = getRuntimesDirectory(opts); - const dstDir = path.join(browserDir, dstName); - const dstExists = fs.existsSync(dstDir); - - if (dstExists) { - if (force) { - debug("Removing %s from %s before updating", name, dstDir); - // Remove the browser so installReplayBrowser will reinstall it. We don't have a way - // to see that the current browser is up to date. 
- fs.rmSync(dstDir, { force: true, recursive: true }); - } else { - maybeLog(opts.verbose, `Skipping ${dstName}. Already exists in ${browserDir}`); - return; - } - } - - debug("Installing %s from %s to %s", name, srcName, path.join(browserDir, name)); - - const contents = await downloadReplayFile(name, opts); - - for (const dir of [replayDir, browserDir]) { - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir); - } - } - - maybeLog(opts.verbose, `Saving ${dstName} to ${browserDir}`); - fs.writeFileSync(path.join(browserDir, name), contents); - extractBrowserArchive(browserDir, name); - - if (srcName != dstName) { - fs.renameSync(path.join(browserDir, srcName), path.join(browserDir, dstName)); - } -} - -async function downloadReplayFile(downloadFile: string, opts: Options) { - const options = { - host: "static.replay.io", - port: 443, - path: `/downloads/${downloadFile}`, - }; - - for (let i = 0; i < 5; i++) { - const waiter = defer(); - maybeLog(opts.verbose, `Downloading ${downloadFile} from replay.io (Attempt ${i + 1} / 5)`); - debug("Downloading %o", options); - const request = https.get(options, response => { - if (response.statusCode != 200) { - console.log(`Download received status code ${response.statusCode}, retrying...`); - request.destroy(); - waiter.resolve(null); - return; - } - const buffers: Buffer[] = []; - response.on("data", data => buffers.push(data)); - response.on("end", () => waiter.resolve(buffers)); - }); - request.on("error", err => { - console.log(`Download error ${err}, retrying...`); - request.destroy(); - waiter.resolve(null); - }); - const buffers = await waiter.promise; - if (buffers) { - return Buffer.concat(buffers); - } - - maybeLog(opts.verbose, `Download of ${downloadFile} complete`); - } - - throw new Error("Download failed, giving up"); -} - -export { getExecutablePath, ensureBrowsersInstalled, updateBrowsers }; diff --git a/packages/replay/src/launchdarkly.ts b/packages/replay/src/launchdarkly.ts deleted file mode 100644 index 6bffa00bc..000000000 --- a/packages/replay/src/launchdarkly.ts +++ /dev/null @@ -1,104 +0,0 @@ -import dbg from "./debug"; -import { initialize, LDClient, LDLogger } from "launchdarkly-node-client-sdk"; - -const debug = dbg("replay:launchdarkly"); - -type UserFeatureProfile = { - type: "user"; - id: string; -}; - -type AnonymousFeatureProfile = { - type: "anonymous"; - id: "anonymous"; -}; - -type FeatureProfile = AnonymousFeatureProfile | UserFeatureProfile; - -class NoOpLogger implements LDLogger { - error() {} - warn() {} - info() {} - debug() {} -} - -class LaunchDarkly { - private client: LDClient | undefined; - - public initialize() { - const key = "60ca05fb43d6f10d234bb3cf"; - const defaultProfile = { type: "anonymous", id: "anonymous" }; - this.client = initialize( - key, - { - kind: "user", - key: defaultProfile.id, - anonymous: defaultProfile.type === "anonymous", - }, - { - logger: new NoOpLogger(), - } - ); - return this; - } - - public async identify(profile: FeatureProfile): Promise { - if (!this.client) { - return; - } - try { - await this.client.waitForInitialization(); - } catch (e) { - debug("Failed to wait for LaunchDarkly initialization %j", e); - return; - } - - await this.client.identify({ - kind: "user", - key: profile.id, - anonymous: profile.type === "anonymous", - }); - } - - public async isEnabled(flag: string, defaultValue: boolean): Promise { - if (!this.client) { - return defaultValue; - } - return await this.variant(flag, defaultValue); - } - - public async variant(name: string, defaultValue: T): Promise 
{ - if (!this.client) { - return defaultValue; - } - try { - await this.client.waitForInitialization(); - } catch (e) { - debug("Failed to wait for LaunchDarkly initialization %j", e); - return defaultValue; - } - - const val = await this.client.variation(name, defaultValue); - return val; - } - - public async close() { - if (!this.client) { - return; - } - try { - await this.client.close(); - } catch (e) { - debug("Failed to close LaunchDarkly client %j", e); - } - } -} - -let launchDarkly: LaunchDarkly | undefined; -export const getLaunchDarkly = () => { - if (launchDarkly) { - return launchDarkly; - } - launchDarkly = new LaunchDarkly(); - return launchDarkly; -}; diff --git a/packages/replay/src/main.test.ts b/packages/replay/src/main.test.ts deleted file mode 100644 index 66f4443b3..000000000 --- a/packages/replay/src/main.test.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { RecordingEntry, filterRecordings } from "./main"; - -describe("filterRecordings", () => { - it("excludes crash reports by default", () => { - const recordings: RecordingEntry[] = [ - { - status: "crashed", - id: "1", - createTime: new Date(), - metadata: {}, - runtime: "chromium", - sourcemaps: [], - }, - { - status: "crashUploaded", - id: "2", - createTime: new Date(), - metadata: {}, - runtime: "chromium", - sourcemaps: [], - }, - { - status: "onDisk", - id: "3", - createTime: new Date(), - metadata: {}, - runtime: "chromium", - sourcemaps: [], - }, - ]; - - const filtered = filterRecordings(recordings, r => r.id === "3", false); - expect(filtered).toHaveLength(1); - }); - it("includes crash reports when includeCrashes is set", () => { - const recordings: RecordingEntry[] = [ - { - status: "crashed", - id: "1", - createTime: new Date(), - metadata: {}, - runtime: "chromium", - sourcemaps: [], - }, - { - status: "crashUploaded", - id: "2", - createTime: new Date(), - metadata: {}, - runtime: "chromium", - sourcemaps: [], - }, - { - status: "onDisk", - id: "3", - createTime: new Date(), - metadata: {}, - runtime: "chromium", - sourcemaps: [], - }, - ]; - - const filtered = filterRecordings(recordings, r => r.id === "3", true); - expect(filtered).toHaveLength(2); - }); -}); diff --git a/packages/replay/src/main.ts b/packages/replay/src/main.ts deleted file mode 100644 index a847c742b..000000000 --- a/packages/replay/src/main.ts +++ /dev/null @@ -1,850 +0,0 @@ -import { retryWithExponentialBackoff } from "@replay-cli/shared/async/retryOnFailure"; -import fs from "fs"; -import path from "path"; -import { getPackument } from "query-registry"; -import { compare } from "semver"; -import dbg from "./debug"; -import { getCurrentVersion, getHttpAgent } from "./utils"; -import assert from "node:assert/strict"; - -// requiring v4 explicitly because it's the last version with commonjs support. -// Should be upgraded to the latest when converting this code to es modules. 
-import pMap from "p-map"; - -import { spawn } from "child_process"; -import { Agent, AgentOptions } from "http"; -import jsonata from "jsonata"; -import { readToken } from "./auth"; -import { ProtocolError } from "./client"; -import { ensureBrowsersInstalled, getExecutablePath, updateBrowsers } from "./install"; -import { getLaunchDarkly } from "./launchdarkly"; -export { sanitizeMetadata as sanitize } from "@replay-cli/shared/recording/metadata/sanitizeMetadata"; -import * as sourceMetadata from "@replay-cli/shared/recording/metadata/legacy/source"; -import * as testMetadata from "@replay-cli/shared/recording/metadata/legacy/test/index"; -import { addMetadata } from "@replay-cli/shared/recording/metadata/addMetadata"; -import { - addRecordingEvent, - readRecordings, - removeRecordingFromLog, - removeRecordingsFile, -} from "./recordingLog"; -import { - BrowserName, - FilterOptions, - LaunchOptions, - ListOptions, - MetadataOptions, - Options, - RecordingEntry, - RecordingMetadata, - SourceMapEntry, - UploadAllOptions, - UploadOptions, - type ExternalRecordingEntry, -} from "./types"; -export type { UnstructuredMetadata } from "@replay-cli/shared/recording/types"; -import { ReplayClient } from "./upload"; -import { getDirectory, maybeLog, openExecutable } from "./utils"; -import { sanitizeMetadata } from "@replay-cli/shared/recording/metadata/sanitizeMetadata"; -export type { BrowserName, RecordingEntry } from "./types"; -export { updateStatus } from "./updateStatus"; - -const debug = dbg("replay:cli"); - -export function filterRecordings( - recordings: RecordingEntry[], - filter: FilterOptions["filter"], - includeCrashes: FilterOptions["includeCrashes"] -) { - let filteredRecordings = recordings; - debug("Recording log contains %d replays", recordings.length); - if (filter && typeof filter === "string") { - debug("Using filter: %s", filter); - const exp = jsonata(`$filter($, ${filter})[]`); - filteredRecordings = exp.evaluate(recordings) || []; - - debug("Filtering resulted in %d replays", filteredRecordings.length); - } else if (typeof filter === "function") { - debug("Using filter function"); - filteredRecordings = recordings.filter(filter); - - debug("Filtering resulted in %d replays", filteredRecordings.length); - } - - if (includeCrashes) { - recordings.forEach(r => { - if (r.status === "crashed" && !filteredRecordings.includes(r)) { - filteredRecordings.push(r); - } - }); - } - - return filteredRecordings; -} - -// Convert a recording into a format for listing. -function listRecording(recording: RecordingEntry): ExternalRecordingEntry { - // Remove properties we only use internally. - const { buildId, crashData, ...recordingWithoutInternalProperties } = recording; - return recordingWithoutInternalProperties; -} - -function listAllRecordings(opts: Options & ListOptions = {}) { - const recordings = readRecordings(opts.directory); - - if (opts.all) { - return filterRecordings(recordings, opts.filter, opts.includeCrashes).map(listRecording); - } - - const uploadableRecordings = recordings.filter(recording => - ["onDisk", "startedWrite", "crashed"].includes(recording.status) - ); - return filterRecordings(uploadableRecordings, opts.filter, opts.includeCrashes).map( - listRecording - ); -} - -function uploadSkipReason(recording: RecordingEntry) { - // Status values where there is something worth uploading. 
- const canUploadStatus = ["onDisk", "startedWrite", "startedUpload", "crashed"]; - if (!canUploadStatus.includes(recording.status)) { - return `wrong recording status ${recording.status}`; - } - if (!recording.path && recording.status != "crashed") { - return "recording not saved to disk"; - } - return null; -} - -function getServer(opts: Options) { - return ( - opts.server || - process.env.RECORD_REPLAY_SERVER || - process.env.REPLAY_SERVER || - "wss://dispatch.replay.io" - ); -} - -async function doUploadCrash( - dir: string, - server: string, - recording: RecordingEntry, - verbose?: boolean, - apiKey?: string, - agent?: Agent -) { - const client = new ReplayClient(); - maybeLog(verbose, `Starting crash data upload for ${recording.id}...`); - if (!(await client.initConnection(server, apiKey, verbose, agent))) { - maybeLog(verbose, `Crash data upload failed: can't connect to server ${server}`); - return null; - } - - const crashData = recording.crashData || []; - crashData.push({ - kind: "recordingMetadata", - recordingId: recording.id, - }); - - await Promise.all( - crashData.map(async data => { - await client.connectionReportCrash(data); - }) - ); - addRecordingEvent(dir, "crashUploaded", recording.id, { server }); - maybeLog(verbose, `Crash data upload finished.`); - client.closeConnection(); -} - -class RecordingUploadError extends Error { - interiorError?: any; - - constructor(message?: string, interiorError?: any) { - super(message); - this.name = "RecordingUploadError"; - this.interiorError = interiorError; - Object.setPrototypeOf(this, new.target.prototype); // Restore error prototype chain. - } -} - -function handleUploadingError( - err: string, - strict: boolean, - verbose?: boolean, - interiorError?: any -) { - maybeLog(verbose, `Upload failed: ${err}`); - - if (interiorError) { - debug(interiorError); - } - - if (strict) { - throw new RecordingUploadError(err, interiorError); - } -} - -async function validateMetadata( - client: ReplayClient, - metadata: Record | null, - verbose: boolean | undefined -): Promise { - return metadata ? await client.buildRecordingMetadata(metadata, { verbose }) : null; -} - -async function setMetadata( - client: ReplayClient, - recordingId: string, - metadata: RecordingMetadata | null, - strict: boolean, - verbose: boolean -) { - if (metadata) { - try { - await retryWithExponentialBackoff( - () => client.setRecordingMetadata(recordingId, metadata), - e => { - debug("Failed to set recording metadata. 
Will be retried: %j", e); - } - ); - } catch (e) { - handleUploadingError(`Failed to set recording metadata ${e}`, strict, verbose, e); - } - } -} - -const MIN_MULTIPART_UPLOAD_SIZE = 5 * 1024 * 1024; -async function multipartUploadRecording( - server: string, - client: ReplayClient, - dir: string, - recording: RecordingEntry, - metadata: RecordingMetadata | null, - size: number, - strict: boolean, - verbose: boolean, - agentOptions?: AgentOptions -) { - const requestPartChunkSize = - parseInt(process.env.REPLAY_MULTIPART_UPLOAD_CHUNK || "", 10) || undefined; - const { recordingId, uploadId, partLinks, chunkSize } = - await client.connectionBeginRecordingMultipartUpload( - recording.id, - recording.buildId!, - size, - requestPartChunkSize - ); - await setMetadata(client, recordingId, metadata, strict, verbose); - addRecordingEvent(dir, "uploadStarted", recording.id, { - server, - recordingId, - }); - const eTags = await client.uploadRecordingInParts( - recording.path!, - partLinks, - chunkSize, - agentOptions - ); - - assert(eTags.length === partLinks.length, "Mismatched eTags and partLinks"); - - await client.connectionEndRecordingMultipartUpload(recording.id, uploadId, eTags); - return recordingId; -} - -async function directUploadRecording( - server: string, - client: ReplayClient, - dir: string, - recording: RecordingEntry, - metadata: RecordingMetadata | null, - size: number, - strict: boolean, - verbose: boolean -) { - const { recordingId, uploadLink } = await client.connectionBeginRecordingUpload( - recording.id, - recording.buildId!, - size - ); - await setMetadata(client, recordingId, metadata, strict, verbose); - addRecordingEvent(dir, "uploadStarted", recording.id, { - server, - recordingId, - }); - await retryWithExponentialBackoff( - () => client.uploadRecording(recording.path!, uploadLink, size), - e => { - debug("Upload failed with error. 
Will be retried: %j", e); - } - ); - - debug("%s: Uploaded %d bytes", recordingId, size); - - await client.connectionEndRecordingUpload(recording.id); - return recordingId; -} - -async function doUploadRecording( - dir: string, - server: string, - recording: RecordingEntry, - verbose: boolean = false, - apiKey?: string, - agentOptions?: AgentOptions, - removeAssets: boolean = false, - strict: boolean = false -) { - debug("Uploading %s from %s to %s", recording.id, dir, server); - maybeLog(verbose, `Starting upload for ${recording.id}...`); - - if (recording.status == "uploaded" && recording.recordingId) { - maybeLog(verbose, `Already uploaded: ${recording.recordingId}`); - - return recording.recordingId; - } - - const reason = uploadSkipReason(recording); - if (reason) { - handleUploadingError(reason, strict, verbose); - return null; - } - - if (!apiKey) { - apiKey = await readToken({ directory: dir }); - } - - const agent = getHttpAgent(server, agentOptions); - - if (recording.status == "crashed") { - debug("Uploading crash %o", recording); - await doUploadCrash(dir, server, recording, verbose, apiKey, agent); - maybeLog(verbose, `Crash report uploaded for ${recording.id}`); - if (removeAssets) { - removeRecordingAssets(recording, { directory: dir }); - } - return recording.id; - } - - const { size } = await fs.promises.stat(recording.path!); - - debug("Uploading recording %o", recording); - const client = new ReplayClient(); - if (!(await client.initConnection(server, apiKey, verbose, agent))) { - handleUploadingError(`Cannot connect to server ${server}`, strict, verbose); - return null; - } - - // validate metadata before uploading so invalid data can block the upload - const metadata = await validateMetadata(client, recording.metadata, verbose); - - let recordingId: string; - try { - const isMultipartEnabled = await getLaunchDarkly().isEnabled("cli-multipart-upload", false); - if (size > MIN_MULTIPART_UPLOAD_SIZE && isMultipartEnabled) { - recordingId = await multipartUploadRecording( - server, - client, - dir, - recording, - metadata, - size, - strict, - verbose, - agentOptions - ); - } else { - recordingId = await directUploadRecording( - server, - client, - dir, - recording, - metadata, - size, - strict, - verbose - ); - } - } catch (err) { - handleUploadingError( - err instanceof ProtocolError ? 
err.protocolMessage : String(err), - strict, - verbose, - err - ); - return null; - } - - await pMap( - recording.sourcemaps, - async (sourcemap: SourceMapEntry) => { - try { - debug("Uploading sourcemap %s for recording %s", sourcemap.path, recording.id); - const contents = fs.readFileSync(sourcemap.path, "utf8"); - const sourcemapId = await client.connectionUploadSourcemap( - recordingId, - sourcemap, - contents - ); - await pMap( - sourcemap.originalSources, - originalSource => { - debug( - "Uploading original source %s for sourcemap %s for recording %s", - originalSource.path, - sourcemap.path, - recording.id - ); - const contents = fs.readFileSync(originalSource.path, "utf8"); - return client.connectionUploadOriginalSource( - recordingId, - sourcemapId, - originalSource, - contents - ); - }, - { concurrency: 5, stopOnError: false } - ); - } catch (e) { - handleUploadingError( - `Cannot upload sourcemap ${sourcemap.path} from disk: ${e}`, - strict, - verbose, - e - ); - } - }, - { concurrency: 10, stopOnError: false } - ); - - if (removeAssets) { - removeRecordingAssets(recording, { directory: dir }); - } - - addRecordingEvent(dir, "uploadFinished", recording.id); - maybeLog( - verbose, - `Upload finished! View your Replay at: https://app.replay.io/recording/${recordingId}` - ); - client.closeConnection(); - return recordingId; -} - -async function uploadRecording(id: string, opts: UploadOptions = {}) { - const server = getServer(opts); - const dir = getDirectory(opts); - const recordings = readRecordings(dir); - const recording = recordings.find(r => r.id == id); - - if (!recording) { - maybeLog(opts.verbose, `Unknown recording ${id}`); - return null; - } - - return doUploadRecording( - dir, - server, - recording, - opts.verbose, - opts.apiKey, - opts.agentOptions, - opts.removeAssets ?? true, - opts.strict - ); -} - -async function processUploadedRecording(recordingId: string, opts: Options) { - const server = getServer(opts); - const agent = getHttpAgent(server, opts.agentOptions); - const { verbose } = opts; - let apiKey = opts.apiKey; - - maybeLog(verbose, `Processing recording ${recordingId}...`); - - if (!apiKey) { - apiKey = await readToken(opts); - } - - const client = new ReplayClient(); - if (!(await client.initConnection(server, apiKey, verbose, agent))) { - maybeLog(verbose, `Processing failed: can't connect to server ${server}`); - return false; - } - - try { - const error = await client.connectionWaitForProcessed(recordingId); - if (error) { - maybeLog(verbose, `Processing failed: ${error}`); - return false; - } - } finally { - client.closeConnection(); - } - - maybeLog(verbose, "Finished processing."); - return true; -} - -async function processRecording(id: string, opts: Options = {}) { - const recordingId = await uploadRecording(id, opts); - if (!recordingId) { - return null; - } - const succeeded = await processUploadedRecording(recordingId, opts); - return succeeded ? 
recordingId : null; -} - -async function uploadAllRecordings(opts: UploadAllOptions = {}) { - const server = getServer(opts); - const dir = getDirectory(opts); - const allRecordings = readRecordings(dir).filter(r => !uploadSkipReason(r)); - const recordings = filterRecordings(allRecordings, opts.filter, opts.includeCrashes); - - if ( - allRecordings.some(r => r.status === "crashed") && - !recordings.some(r => r.status === "crashed") && - opts.filter && - !opts.includeCrashes - ) { - maybeLog( - opts.verbose, - `\nāš ļø Warning: Some crash reports were created but will not be uploaded because of the provided filter. Add --include-crashes to upload crash reports.\n` - ); - } - - if (recordings.length === 0) { - if (opts.filter && allRecordings.length > 0) { - maybeLog(opts.verbose, `No replays matched the provided filter`); - } else { - maybeLog(opts.verbose, `No replays were found to upload`); - } - - return true; - } - - maybeLog(opts.verbose, `Starting upload of ${recordings.length} replays`); - if (opts.batchSize) { - debug("Batching upload in groups of %d", opts.batchSize); - } - - const batchSize = Math.min(opts.batchSize || 20, 25); - - const recordingIds: (string | null)[] = await pMap( - recordings, - (r: RecordingEntry) => - doUploadRecording( - dir, - server, - r, - opts.verbose, - opts.apiKey, - opts.agentOptions, - false, - opts.strict - ), - { concurrency: batchSize, stopOnError: false } - ); - - recordingIds.forEach(id => { - const recording = recordings.find(r => r.id === id); - if (!recording) return; - - removeRecordingAssets(recording, opts); - }); - - return recordingIds.every(r => r !== null); -} - -async function doViewRecording( - dir: string, - server: string, - recording: RecordingEntry, - verbose?: boolean, - apiKey?: string, - agentOptions?: AgentOptions, - viewServer?: string -) { - let recordingId; - if (recording.status === "crashUploaded") { - maybeLog(verbose, "Crash report already uploaded"); - return true; - } else if (recording.status == "uploaded") { - recordingId = recording.recordingId; - server = recording.server!; - } else { - recordingId = await doUploadRecording( - dir, - server, - recording, - verbose, - apiKey, - agentOptions, - true - ); - - if (!recordingId) { - return false; - } else if (recording.status === "crashed") { - return true; - } - } - const devtools = viewServer ?? "https://app.replay.io"; - const dispatch = server != "wss://dispatch.replay.io" ? 
`&dispatch=${server}` : ""; - spawn(openExecutable(), [`${devtools}?id=${recordingId}${dispatch}`]); - return true; -} - -async function viewRecording(id: string, opts: Options = {}) { - let server = getServer(opts); - const dir = getDirectory(opts); - const recordings = readRecordings(dir); - const recording = recordings.find(r => r.id == id); - if (!recording) { - maybeLog(opts.verbose, `Unknown recording ${id}`); - return false; - } - return doViewRecording(dir, server, recording, opts.verbose, opts.apiKey, opts.agentOptions); -} - -async function viewLatestRecording(opts: Options = {}) { - let server = getServer(opts); - const dir = getDirectory(opts); - const recordings = readRecordings(dir); - if (!recordings.length) { - maybeLog(opts.verbose, "No recordings to view"); - return false; - } - return doViewRecording( - dir, - server, - recordings[recordings.length - 1], - opts.verbose, - opts.apiKey, - opts.agentOptions, - opts.viewServer - ); -} - -function maybeRemoveAssetFile(asset?: string) { - if (asset) { - try { - if (fs.existsSync(asset)) { - debug("Removing asset file %s", asset); - fs.unlinkSync(asset); - } - } catch (e) { - debug("Failed to remove asset file: %s", e); - } - } -} - -function removeRecording(id: string, opts: Options = {}) { - const dir = getDirectory(opts); - const recordings = readRecordings(dir); - const recording = recordings.find(r => r.id == id); - if (!recording) { - maybeLog(opts.verbose, `Unknown recording ${id}`); - return false; - } - removeRecordingAssets(recording, opts); - removeRecordingFromLog(dir, id); - return true; -} - -function getRecordingAssetFiles(recording: RecordingEntry) { - const assetFiles: string[] = []; - if (recording.path) { - assetFiles.push(recording.path); - } - - recording.sourcemaps.forEach(sm => { - assetFiles.push(sm.path); - assetFiles.push(sm.path.replace(/\.map$/, ".lookup")); - sm.originalSources.forEach(o => assetFiles.push(o.path)); - }); - - return assetFiles; -} - -function removeRecordingAssets(recording: RecordingEntry, opts?: Pick) { - const localRecordings = listAllRecordings({ - ...opts, - filter: r => r.status !== "uploaded" && r.status !== "crashUploaded" && r.id !== recording.id, - }); - - const localRecordingAssetFiles = new Set(localRecordings.flatMap(getRecordingAssetFiles)); - const assetFiles = getRecordingAssetFiles(recording); - assetFiles.forEach(file => { - if (!localRecordingAssetFiles.has(file)) { - maybeRemoveAssetFile(file); - } - }); -} - -function removeAllRecordings(opts: Options = {}) { - const dir = getDirectory(opts); - const recordings = readRecordings(dir); - recordings.forEach(r => removeRecordingAssets(r, opts)); - - removeRecordingsFile(dir); -} - -function addLocalRecordingMetadata(recordingId: string, metadata: Record) { - addMetadata(recordingId, metadata); -} - -async function updateMetadata({ - init: metadata, - keys = [], - filter, - includeCrashes, - verbose, - warn, - directory, -}: MetadataOptions & FilterOptions) { - let md: any = {}; - if (metadata) { - md = JSON.parse(metadata); - } - - const keyedObjects = await pMap | null>(keys, async v => { - try { - switch (v) { - case "source": - return await sourceMetadata.init(md.source || {}); - case "test": - return await testMetadata.init(md.test || {}); - } - } catch (e) { - debug("Metadata initialization error: %o", e); - if (!warn) { - throw e; - } - - console.warn(`Unable to initialize metadata field: "${v}"`); - if (e instanceof Error) { - console.warn(" ->", e.message); - } - } - - return null; - }); - - const data = 
Object.assign(md, ...keyedObjects); - const sanitized = await sanitizeMetadata(data); - - debug("Sanitized metadata: %O", sanitized); - - const recordings = listAllRecordings({ directory, filter, includeCrashes }); - - recordings.forEach(r => { - maybeLog(verbose, `Setting metadata for ${r.id}`); - addMetadata(r.id, sanitized); - }); -} - -async function launchBrowser( - browserName: BrowserName, - args: string[] = [], - record: boolean = false, - opts?: Options & LaunchOptions -) { - debug("launchBrowser: %s %o %s %o", browserName, args, record, opts); - const execPath = getExecutablePath(browserName, opts); - if (!execPath) { - throw new Error(`${browserName} not supported on the current platform`); - } - - if (!fs.existsSync(execPath)) { - maybeLog(opts?.verbose, `Installing ${browserName}`); - await ensureBrowsersInstalled(browserName, false, opts); - } - - const profileDir = path.join(getDirectory(opts), "runtimes", "profiles", browserName); - - const browserArgs: Record = { - chromium: [ - "--no-first-run", - "--no-default-browser-check", - `--user-data-dir=${profileDir}`, - ...args, - ], - firefox: ["-foreground", ...args], - }; - - const env = { - ...process.env, - }; - - if (record) { - env.RECORD_ALL_CONTENT = "1"; - } - - if (opts?.directory) { - env.RECORD_REPLAY_DIRECTORY = opts?.directory; - } - - const proc = spawn(execPath, browserArgs[browserName], { - detached: !opts?.attach, - env, - stdio: "inherit", - }); - if (!opts?.attach) { - proc.unref(); - } else { - // Wait for the browser process to finish. - await new Promise((resolve, reject) => { - proc.on("error", reject); - proc.on("exit", (code, signal) => { - if (code || signal) { - reject(new Error(`Process failed code=${code}, signal=${signal}`)); - } else { - resolve(); - } - }); - }); - } - - return proc; -} - -async function version() { - const version = getCurrentVersion(); - let update = false; - let latest: string | null = null; - - try { - const data = await getPackument({ name: "@replayio/replay" }); - latest = data.distTags.latest; - - if (compare(version, latest) < 0) { - update = true; - } - } catch (e) { - debug("Error retrieving latest package info: %o", e); - } - - return { - version, - update, - latest, - }; -} - -export { - ExternalRecordingEntry, - addLocalRecordingMetadata, - getDirectory, - launchBrowser, - listAllRecordings, - processRecording, - removeAllRecordings, - removeRecording, - updateBrowsers, - updateMetadata, - uploadAllRecordings, - uploadRecording, - version, - viewLatestRecording, - viewRecording, -}; diff --git a/packages/replay/src/metadata/env.ts b/packages/replay/src/metadata/env.ts deleted file mode 100644 index 181296660..000000000 --- a/packages/replay/src/metadata/env.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "@replay-cli/shared/recording/metadata/legacy/env"; diff --git a/packages/replay/src/metadata/index.ts b/packages/replay/src/metadata/index.ts deleted file mode 100644 index 7960e149f..000000000 --- a/packages/replay/src/metadata/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -export { sanitizeMetadata as sanitize } from "@replay-cli/shared/recording/metadata/sanitizeMetadata"; -export { addMetadata as add } from "@replay-cli/shared/recording/metadata/addMetadata"; -import * as source from "./source"; -import * as test from "./test"; -export { source, test }; diff --git a/packages/replay/src/metadata/source.ts b/packages/replay/src/metadata/source.ts deleted file mode 100644 index cad4bd8f5..000000000 --- a/packages/replay/src/metadata/source.ts +++ /dev/null @@ -1,7 +0,0 @@ 
-import { UnstructuredMetadata } from "@replay-cli/shared/recording/types"; -import * as source from "@replay-cli/shared/recording/metadata/legacy/source"; -export * from "@replay-cli/shared/recording/metadata/legacy/source"; - -export function validate(metadata: { source?: UnstructuredMetadata } = {}) { - return source.validate(metadata.source); -} diff --git a/packages/replay/src/metadata/test.ts b/packages/replay/src/metadata/test.ts deleted file mode 100644 index c90d5df8e..000000000 --- a/packages/replay/src/metadata/test.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { UnstructuredMetadata } from "@replay-cli/shared/recording/types"; -import * as test from "@replay-cli/shared/recording/metadata/legacy/test/index"; -export * from "@replay-cli/shared/recording/metadata/legacy/test/index"; - -export function validate(metadata: { test?: UnstructuredMetadata } = {}) { - return test.validate(metadata.test); -} diff --git a/packages/replay/src/recordingLog.test.ts b/packages/replay/src/recordingLog.test.ts deleted file mode 100644 index 39ee25537..000000000 --- a/packages/replay/src/recordingLog.test.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { readRecordings } from "./recordingLog"; -import { tmpdir } from "os"; -import path from "path"; -import { writeFileSync, mkdirSync, rmSync } from "fs"; - -describe("recordingLog", () => { - let dir: string; - - beforeEach(() => { - dir = path.join(tmpdir(), Math.floor(Math.random() * 100000).toString(16)); - mkdirSync(dir); - }); - - afterEach(() => { - rmSync(dir, { force: true, recursive: true }); - }); - - function writeTestCase(testCase: string) { - writeFileSync(path.join(dir, "recordings.log"), testCase, "utf-8"); - } - - it("should handle source map events before createRecording", async () => { - writeTestCase(` - {"kind":"sourcemapAdded","path":"/Users/ryan/.replay/sourcemap-eabb79b2a92a89eee953c1c052e614ad0ae19cdfafba598576b250d8cadb2140.map","recordingId":"60e36c77-3aad-46d5-902f-cf37bd3bff92","id":"eabb79b2a92a89eee953c1c052e614ad0ae19cdfafba598576b250d8cadb2140","url":"https://devtools-git-2023-11-29-testsuites-nux-1-recordreplay.vercel.app/_next/static/chunks/webpack-2b35d1198a807748.js.map","baseURL":"https://devtools-git-2023-11-29-testsuites-nux-1-recordreplay.vercel.app/_next/static/chunks/webpack-2b35d1198a807748.js.map","targetContentHash":"sha256:eabb79b2a92a89eee953c1c052e614ad0ae19cdfafba598576b250d8cadb2140","targetURLHash":"sha256:126ef596a4efc04607a183402a919a81e4057d7188cdc875746d5b25e3b614fc","targetMapURLHash":"sha256:ff0dccd318786108b6ea2c5824cd4ac5091cb3d5217cb24abb883f742a872f33"} - {"buildId":"macOS-chromium-20231130-d0df72d13090-718eb1da92df","driverVersion":"linker-macOS-12424-718eb1da92df","id":"60e36c77-3aad-46d5-902f-cf37bd3bff92","kind":"createRecording","timestamp":1701729042262} - {"id":"60e36c77-3aad-46d5-902f-cf37bd3bff92","kind":"addMetadata","metadata":{"uri":"https://devtools-git-2023-11-29-testsuites-nux-1-recordreplay.vercel.app/team/dzowNDAyOGMwYS05ZjM1LTQ2ZjktYTkwYi1jNzJkMTIzNzUxOTI=/runs/961a049e-a7db-46db-9d0c-c3954626b75e?param=dzowNDAyOGMwYS05ZjM1LTQ2ZjktYTkwYi1jNzJkMTIzNzUxOTI%3D¶m=runs"},"timestamp":1701729042262} - {"id":"60e36c77-3aad-46d5-902f-cf37bd3bff92","kind":"writeStarted","path":"/Users/ryan/.replay/recording-60e36c77-3aad-46d5-902f-cf37bd3bff92.dat","timestamp":1701729042262} - {"id":"60e36c77-3aad-46d5-902f-cf37bd3bff92","kind":"writeFinished","timestamp":1701729046248} - `); - - const recordings = readRecordings(dir); - expect(recordings[0]).not.toBeNull(); - 
expect(recordings[0].sourcemaps.length).toBe(1); - }); -}); diff --git a/packages/replay/src/recordingLog.ts b/packages/replay/src/recordingLog.ts deleted file mode 100644 index 971cd1854..000000000 --- a/packages/replay/src/recordingLog.ts +++ /dev/null @@ -1,256 +0,0 @@ -import dbg from "./debug"; -import fs from "fs"; -import path from "path"; -import { RecordingEntry } from "./types"; -import { generateDefaultTitle } from "./generateDefaultTitle"; -import { updateStatus } from "./updateStatus"; -import { getDirectory } from "./utils"; - -const debug = dbg("replay:cli:recording-log"); - -function getRecordingsFile(dir: string) { - return path.join(dir, "recordings.log"); -} -function readRecordingFile(dir: string) { - const file = getRecordingsFile(dir); - if (!fs.existsSync(file)) { - return []; - } - - return fs.readFileSync(file, "utf8").split("\n"); -} -function writeRecordingFile(dir: string, lines: string[]) { - // Add a trailing newline so the driver can safely append logs - fs.writeFileSync(getRecordingsFile(dir), lines.join("\n") + "\n"); -} -function getBuildRuntime(buildId: string) { - const match = /.*?-(.*?)-/.exec(buildId); - return match ? match[1] : "unknown"; -} -const RECORDING_LOG_KIND = [ - "createRecording", - "addMetadata", - "writeStarted", - "sourcemapAdded", - "originalSourceAdded", - "writeFinished", - "uploadStarted", - "uploadFinished", - "recordingUnusable", - "crashed", - "crashData", - "crashUploaded", -] as const; -interface RecordingLogEntry { - [key: string]: any; - kind: (typeof RECORDING_LOG_KIND)[number]; -} -export function readRecordings(dir?: string, includeHidden = false) { - dir = getDirectory({ directory: dir }); - const recordings: RecordingEntry[] = []; - const lines = readRecordingFile(dir) - .map(line => { - try { - return JSON.parse(line) as RecordingLogEntry; - } catch { - if (line) { - debug('Failed to parse recordings.log entry: "%s"', line); - } - return null; - } - }) - .filter(o => o != null) - .sort((a, b) => RECORDING_LOG_KIND.indexOf(a.kind) - RECORDING_LOG_KIND.indexOf(b.kind)); - - for (const obj of lines) { - switch (obj.kind) { - case "createRecording": { - const { id, timestamp, buildId } = obj; - recordings.push({ - id, - createTime: new Date(timestamp), - buildId, - runtime: getBuildRuntime(buildId), - metadata: {}, - sourcemaps: [], - - // We use an unknown status after the createRecording event because - // there should always be later events describing what happened to the - // recording. 
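The log being parsed here is a plain append-only JSON-lines file; a small sketch of the entry shapes involved (the directory, ids, and the `appendRecordingLogEntry` helper are illustrative stand-ins for what the recording driver normally writes):

```ts
import { appendFileSync } from "fs";
import path from "path";

// One JSON object per line; readRecordings skips and logs unparsable lines.
function appendRecordingLogEntry(dir: string, entry: Record<string, unknown>) {
  appendFileSync(path.join(dir, "recordings.log"), JSON.stringify(entry) + "\n");
}

const dir = "/tmp/.replay";
const id = "60e36c77-3aad-46d5-902f-cf37bd3bff92";
appendRecordingLogEntry(dir, {
  kind: "createRecording",
  id,
  buildId: "macOS-chromium-20231130-d0df72d13090-718eb1da92df",
  timestamp: Date.now(),
});
appendRecordingLogEntry(dir, {
  kind: "writeStarted",
  id,
  path: `${dir}/recording-${id}.dat`,
  timestamp: Date.now(),
});
appendRecordingLogEntry(dir, { kind: "writeFinished", id, timestamp: Date.now() });
```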
- status: "unknown", - }); - break; - } - case "addMetadata": { - const { id, metadata } = obj; - const recording = recordings.find(r => r.id == id); - if (recording) { - Object.assign(recording.metadata, metadata); - - if (!recording.metadata.title) { - recording.metadata.title = generateDefaultTitle(recording.metadata); - } - } - break; - } - case "writeStarted": { - const { id, path } = obj; - const recording = recordings.find(r => r.id == id); - if (recording) { - updateStatus(recording, "startedWrite"); - recording.path = path; - } - break; - } - case "writeFinished": { - const { id } = obj; - const recording = recordings.find(r => r.id == id); - if (recording) { - updateStatus(recording, "onDisk"); - } - break; - } - case "uploadStarted": { - const { id, server, recordingId } = obj; - const recording = recordings.find(r => r.id == id); - if (recording) { - updateStatus(recording, "startedUpload"); - recording.server = server; - recording.recordingId = recordingId; - } - break; - } - case "uploadFinished": { - const { id } = obj; - const recording = recordings.find(r => r.id == id); - if (recording) { - updateStatus(recording, "uploaded"); - } - break; - } - case "recordingUnusable": { - const { id, reason } = obj; - const recording = recordings.find(r => r.id == id); - if (recording) { - updateStatus(recording, "unusable"); - recording.unusableReason = reason; - } - break; - } - case "crashed": { - const { id } = obj; - const recording = recordings.find(r => r.id == id); - if (recording) { - updateStatus(recording, "crashed"); - } - break; - } - case "crashData": { - const { id, data } = obj; - const recording = recordings.find(r => r.id == id); - if (recording) { - if (!recording.crashData) { - recording.crashData = []; - } - recording.crashData.push(data); - } - break; - } - case "crashUploaded": { - const { id } = obj; - const recording = recordings.find(r => r.id == id); - if (recording) { - updateStatus(recording, "crashUploaded"); - } - break; - } - case "sourcemapAdded": { - const { - id, - recordingId, - path, - baseURL, - targetContentHash, - targetURLHash, - targetMapURLHash, - } = obj; - const recording = recordings.find(r => r.id == recordingId); - if (recording) { - recording.sourcemaps.push({ - id, - path, - baseURL, - targetContentHash, - targetURLHash, - targetMapURLHash, - originalSources: [], - }); - } - break; - } - case "originalSourceAdded": { - const { recordingId, path, parentId, parentOffset } = obj; - const recording = recordings.find(r => r.id === recordingId); - if (recording) { - const sourcemap = recording.sourcemaps.find(s => s.id === parentId); - if (sourcemap) { - sourcemap.originalSources.push({ - path, - parentOffset, - }); - } - } - break; - } - } - } - - if (includeHidden) { - return recordings; - } - - // There can be a fair number of recordings from gecko/chromium content - // processes which never loaded any interesting content. These are ignored by - // most callers. Note that we're unable to avoid generating these entries in - // the first place because the recordings log is append-only and we don't know - // when a recording process starts if it will ever do anything interesting. 
- return recordings.filter(r => !(r.unusableReason || "").includes("No interesting content")); -} - -function addRecordingEvent(dir: string, kind: string, id: string, tags = {}) { - const event = { - kind, - id, - timestamp: Date.now(), - ...tags, - }; - debug("Writing event to recording log %o", event); - const lines = readRecordingFile(dir); - lines.push(JSON.stringify(event)); - writeRecordingFile(dir, lines); -} - -function removeRecordingsFile(dir: string) { - const file = getRecordingsFile(dir); - if (fs.existsSync(file)) { - fs.unlinkSync(file); - } -} - -function removeRecordingFromLog(dir: string, id: string) { - const lines = readRecordingFile(dir).filter(line => { - try { - const obj = JSON.parse(line); - if (obj.id == id) { - return false; - } - } catch (e) { - return false; - } - return true; - }); - - writeRecordingFile(dir, lines); -} - -export { readRecordingFile, removeRecordingFromLog, removeRecordingsFile, addRecordingEvent }; diff --git a/packages/replay/src/types.ts b/packages/replay/src/types.ts deleted file mode 100644 index 2cc69e247..000000000 --- a/packages/replay/src/types.ts +++ /dev/null @@ -1,139 +0,0 @@ -import { UnstructuredMetadata } from "@replay-cli/shared/recording/types"; -import type { AgentOptions } from "http"; - -export interface Options { - /** - * Alternate recording directory - */ - directory?: string; - - /** - * Alternate server to upload recordings to - */ - server?: string; - - /** - * Alternate server to use for opening devtools - */ - viewServer?: string; - - /** - * Authentication API Key - */ - apiKey?: string; - verbose?: boolean; - agentOptions?: AgentOptions; -} - -export interface SourcemapUploadOptions { - group: string; - dryRun?: boolean; - extensions?: Array; - ignore?: Array; - quiet?: boolean; - verbose?: boolean; - root?: string; - batchSize?: number; -} - -export interface MetadataOptions { - init?: string; - keys?: string[]; - warn?: boolean; - verbose?: boolean; - directory?: string; -} - -export interface FilterOptions { - filter?: - | string - | ((recordings: RecordingEntry, index: number, allRecordings: RecordingEntry[]) => boolean); - includeCrashes?: boolean; -} - -export interface LaunchOptions { - browser?: string; - attach?: boolean; -} - -export interface ListOptions extends FilterOptions { - all?: boolean; -} - -export interface UploadOptions extends Options { - /** - * Fail the recording upload if any part of the upload fails. 
- */ - strict?: boolean; - /** - * Remove assets associated with the recording after successful upload - */ - removeAssets?: boolean; -} - -export interface UploadAllOptions extends FilterOptions, UploadOptions { - batchSize?: number; - warn?: boolean; -} - -/** - * Supported replay browsers - */ -export type BrowserName = "chromium" | "firefox"; - -export type Runner = "playwright" | "puppeteer"; - -export interface RecordingMetadata { - recordingData: { - id?: string; - duration?: number; - url?: string; - title?: string; - operations: object | null; - lastScreenData?: string; - lastScreenMimeType: string; - }; - metadata: UnstructuredMetadata; -} - -export interface OriginalSourceEntry { - path: string; - parentOffset: number; -} - -export interface SourceMapEntry { - id: string; - path: string; - baseURL: string; - targetContentHash?: string; - targetURLHash?: string; - targetMapURLHash: string; - originalSources: OriginalSourceEntry[]; -} - -export interface RecordingEntry { - id: string; - createTime: Date; - runtime: string; - metadata: TMetadata; - sourcemaps: SourceMapEntry[]; - buildId?: string; - status: - | "onDisk" - | "unknown" - | "uploaded" - | "crashed" - | "startedWrite" - | "startedUpload" - | "crashUploaded" - | "unusable"; - path?: string; - server?: string; - recordingId?: string; - crashData?: any[]; - unusableReason?: string; -} - -export type ExternalRecordingEntry< - TRecordingMetadata extends UnstructuredMetadata = UnstructuredMetadata -> = Omit, "buildId" | "crashData">; diff --git a/packages/replay/src/updateStatus.ts b/packages/replay/src/updateStatus.ts deleted file mode 100644 index 1984e6c16..000000000 --- a/packages/replay/src/updateStatus.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { RecordingEntry } from "./types"; - -export function updateStatus(recording: RecordingEntry, status: RecordingEntry["status"]) { - // Once a recording enters an unusable or crashed status, don't change it - // except to mark crashes as uploaded. 
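A brief illustration of the guard described in the comment above; the entry literal is an invented example:

```ts
import { updateStatus } from "./updateStatus";
import { RecordingEntry } from "./types";

const recording: RecordingEntry = {
  id: "example",
  createTime: new Date(),
  runtime: "chromium",
  metadata: {},
  sourcemaps: [],
  status: "unknown",
};

updateStatus(recording, "crashed");       // unknown -> crashed
updateStatus(recording, "onDisk");        // ignored: crashed entries keep their status...
updateStatus(recording, "crashUploaded"); // ...except for the crashed -> crashUploaded transition
```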
- if ( - recording.status == "unusable" || - recording.status == "crashUploaded" || - (recording.status == "crashed" && status != "crashUploaded") - ) { - return; - } - recording.status = status; -} diff --git a/packages/replay/src/upload.ts b/packages/replay/src/upload.ts deleted file mode 100644 index c1254bd5a..000000000 --- a/packages/replay/src/upload.ts +++ /dev/null @@ -1,331 +0,0 @@ -import { retryWithLinearBackoff } from "@replay-cli/shared/async/retryOnFailure"; -import crypto from "crypto"; -import fs from "fs"; -import type { Agent, AgentOptions } from "http"; -import fetch from "node-fetch"; -import pMap from "p-map"; -import path from "path"; -import { Worker } from "worker_threads"; -import ProtocolClient from "./client"; -import dbg, { logPath } from "./debug"; -import { sanitizeMetadata } from "@replay-cli/shared/recording/metadata/sanitizeMetadata"; -import { Options, OriginalSourceEntry, RecordingMetadata, SourceMapEntry } from "./types"; -import { defer, getUserAgent, isValidUUID, maybeLog } from "./utils"; - -const debug = dbg("replay:cli:upload"); - -function sha256(text: string) { - return crypto.createHash("sha256").update(text).digest("hex"); -} - -class ReplayClient { - client: ProtocolClient | undefined; - clientReady = defer(); - - async initConnection(server: string, accessToken?: string, verbose?: boolean, agent?: Agent) { - if (!this.client) { - let { resolve } = this.clientReady; - this.client = new ProtocolClient( - server, - { - onOpen: async () => { - try { - await this.client!.setAccessToken(accessToken); - resolve(true); - } catch (err) { - maybeLog(verbose, `Error authenticating with server: ${err}`); - resolve(false); - } - }, - onClose() { - resolve(false); - }, - onError(e) { - maybeLog(verbose, `Error connecting to server: ${e}`); - resolve(false); - }, - }, - agent - ); - } - - return this.clientReady.promise; - } - - async connectionBeginRecordingUpload(id: string, buildId: string, size: number) { - if (!this.client) throw new Error("Protocol client is not initialized"); - - const { recordingId, uploadLink } = await this.client.sendCommand<{ - recordingId: string; - uploadLink: string; - }>("Internal.beginRecordingUpload", { - buildId, - // 3/22/2022: Older builds use integers instead of UUIDs for the recording - // IDs written to disk. These are not valid to use as recording IDs when - // uploading recordings to the backend. - recordingId: isValidUUID(id) ? id : undefined, - recordingSize: size, - }); - return { recordingId, uploadLink }; - } - - async connectionBeginRecordingMultipartUpload( - id: string, - buildId: string, - size: number, - multiPartChunkSize?: number - ) { - if (!this.client) throw new Error("Protocol client is not initialized"); - - const { recordingId, uploadId, chunkSize, partLinks } = await this.client.sendCommand<{ - recordingId: string; - uploadId: string; - partLinks: string[]; - chunkSize: number; - }>("Internal.beginRecordingMultipartUpload", { - buildId, - // 3/22/2022: Older builds use integers instead of UUIDs for the recording - // IDs written to disk. These are not valid to use as recording IDs when - // uploading recordings to the backend. - recordingId: isValidUUID(id) ? 
id : undefined, - recordingSize: size, - chunkSize: multiPartChunkSize, - }); - return { recordingId, uploadId, chunkSize, partLinks }; - } - - async buildRecordingMetadata( - metadata: Record, - _opts: Options = {} - ): Promise { - // extract the "standard" metadata and route the `rest` through the sanitizer - const { duration, url, uri, title, operations, ...rest } = metadata; - - const metadataUrl = url || uri; - - return { - recordingData: { - duration: typeof duration === "number" ? duration : 0, - url: typeof metadataUrl === "string" ? metadataUrl : "", - title: typeof title === "string" ? title : "", - operations: - operations && typeof operations === "object" - ? operations - : { - scriptDomains: [], - }, - lastScreenData: "", - lastScreenMimeType: "", - }, - metadata: await sanitizeMetadata(rest), - }; - } - - async setRecordingMetadata(id: string, metadata: RecordingMetadata) { - if (!this.client) throw new Error("Protocol client is not initialized"); - - metadata.recordingData.id = id; - await this.client.sendCommand("Internal.setRecordingMetadata", metadata); - } - - connectionProcessRecording(recordingId: string) { - if (!this.client) throw new Error("Protocol client is not initialized"); - - this.client.sendCommand("Recording.processRecording", { recordingId }); - } - - async connectionWaitForProcessed(recordingId: string) { - if (!this.client) throw new Error("Protocol client is not initialized"); - - const { sessionId } = await this.client.sendCommand<{ sessionId: string }>( - "Recording.createSession", - { - recordingId, - } - ); - const waiter = defer(); - - this.client.setEventListener("Recording.sessionError", ({ message }) => - waiter.resolve(`session error ${sessionId}: ${message}`) - ); - - this.client.setEventListener("Session.unprocessedRegions", () => {}); - - this.client - .sendCommand("Session.ensureProcessed", { level: "basic" }, null, sessionId) - .then(() => waiter.resolve(null)); - - const error = await waiter.promise; - return error; - } - - async connectionReportCrash(data: any) { - if (!this.client) throw new Error("Protocol client is not initialized"); - - await this.client.sendCommand("Internal.reportCrash", { data }); - } - - async uploadRecording(path: string, uploadLink: string, size: number) { - const file = fs.createReadStream(path); - const resp = await fetch(uploadLink, { - method: "PUT", - headers: { "Content-Length": size.toString(), "User-Agent": getUserAgent() }, - body: file, - }); - - if (!resp.ok) { - debug(await resp.text()); - throw new Error(`Failed to upload recording. 
Response was ${resp.status} ${resp.statusText}`); - } - } - - async uploadPart( - link: string, - partMeta: { filePath: string; start: number; end: number }, - size: number, - agentOptions?: AgentOptions - ): Promise { - return new Promise((resolve, reject) => { - const worker = new Worker(path.join(__dirname, "./uploadWorker.js")); - - worker.on("message", resolve); - worker.on("error", reject); - worker.on("exit", code => { - if (code !== 0) { - reject(new Error(`Worker stopped with exit code ${code}`)); - } - }); - - worker.postMessage({ link, partMeta, size, logPath, agentOptions }); - }); - } - - async uploadRecordingInParts( - filePath: string, - partUploadLinks: string[], - partSize: number, - agentOptions?: AgentOptions - ) { - const stats = fs.statSync(filePath); - const totalSize = stats.size; - const results = await pMap( - partUploadLinks, - async (url, index) => { - return retryWithLinearBackoff( - async () => { - const partNumber = index + 1; - const start = index * partSize; - const end = Math.min(start + partSize, totalSize) - 1; // -1 because end is inclusive - - debug("Uploading part %o", { - partNumber, - start, - end, - totalSize, - partSize, - }); - return this.uploadPart(url, { filePath, start, end }, end - start + 1, agentOptions); - }, - e => { - debug(`Failed to upload part ${index + 1}. Will be retried: %o`, e); - }, - 10 - ); - }, - { concurrency: 10 } - ); - - return results; - } - - async connectionEndRecordingUpload(recordingId: string) { - if (!this.client) throw new Error("Protocol client is not initialized"); - - await this.client.sendCommand<{ recordingId: string }>("Internal.endRecordingUpload", { - recordingId, - }); - } - - async connectionEndRecordingMultipartUpload( - recordingId: string, - uploadId: string, - eTags: string[] - ) { - if (!this.client) throw new Error("Protocol client is not initialized"); - - await this.client.sendCommand<{ recordingId: string; uploadId: string; partETags: string[] }>( - "Internal.endRecordingMultipartUpload", - { - recordingId, - uploadId, - partIds: eTags, - } - ); - } - - async connectionUploadSourcemap(recordingId: string, metadata: SourceMapEntry, content: string) { - if (!this.client) throw new Error("Protocol client is not initialized"); - - const resource = await this.createResource(content); - - const { baseURL, targetContentHash, targetURLHash, targetMapURLHash } = metadata; - const result = await this.client.sendCommand<{ id: string }>("Recording.addSourceMap", { - recordingId, - resource, - baseURL, - targetContentHash, - targetURLHash, - targetMapURLHash, - }); - return result.id; - } - - async connectionUploadOriginalSource( - recordingId: string, - parentId: string, - metadata: OriginalSourceEntry, - content: string - ) { - if (!this.client) throw new Error("Protocol client is not initialized"); - - const resource = await this.createResource(content); - - const { parentOffset } = metadata; - await this.client.sendCommand("Recording.addOriginalSource", { - recordingId, - resource, - parentId, - parentOffset, - }); - } - - async createResource(content: string) { - if (!this.client) throw new Error("Protocol client is not initialized"); - - const hash = "sha256:" + sha256(content); - const { token } = await this.client.sendCommand<{ token: string }>("Resource.token", { hash }); - let resource = { - token, - saltedHash: "sha256:" + sha256(token + content), - }; - - const { exists } = await this.client.sendCommand<{ exists: boolean }>("Resource.exists", { - resource, - }); - if (!exists) { - ({ resource } = 
await this.client.sendCommand("Resource.create", { content })); - } - - return resource; - } - - closeConnection() { - if (this.client) { - this.client.close(); - this.client = undefined; - this.clientReady = defer(); - } - } -} - -export { ReplayClient }; diff --git a/packages/replay/src/uploadWorker.ts b/packages/replay/src/uploadWorker.ts deleted file mode 100644 index 2d925e345..000000000 --- a/packages/replay/src/uploadWorker.ts +++ /dev/null @@ -1,65 +0,0 @@ -import type { AgentOptions } from "http"; - -import { parentPort } from "worker_threads"; -import fs from "fs"; -import fetch from "node-fetch"; -import { getHttpAgent, getUserAgent } from "./utils"; -import dbg from "./debug"; - -if (parentPort === null) { - throw new Error("Must be run as a worker"); -} - -parentPort.on( - "message", - async ({ - link, - partMeta, - size, - logPath, - agentOptions, - }: { - link: string; - partMeta: { filePath: string; start: number; end: number }; - size: number; - logPath: string; - agentOptions?: AgentOptions; - }) => { - const { filePath, start, end } = partMeta; - const debug = dbg("replay:cli:upload-worker", logPath); - - if (parentPort === null) { - throw new Error("Must be run as a worker"); - } - - debug("Uploading chunk %o", { filePath, size, start, end }); - - const stream = fs.createReadStream(filePath, { start, end }); - const resp = await fetch(link, { - method: "PUT", - agent: getHttpAgent(link, { - keepAlive: true, - ...agentOptions, - }), - headers: { - Connection: "keep-alive", - "Content-Length": size.toString(), - "User-Agent": getUserAgent(), - }, - body: stream, - }); - - debug(`Fetch response received. Status: ${resp.status}, Status Text: ${resp.statusText}`); - - if (!resp.ok) { - const respText = await resp.text(); - debug(`Fetch response text: ${respText}`); - throw new Error(`Failed to upload recording. Response was ${resp.status} ${resp.statusText}`); - } - - const etag = resp.headers.get("etag"); - debug("Etag received %o", { etag, filePath, size, start, end }); - - parentPort.postMessage(etag); - } -); diff --git a/packages/replay/src/utils.ts b/packages/replay/src/utils.ts deleted file mode 100644 index 8d5ab387b..000000000 --- a/packages/replay/src/utils.ts +++ /dev/null @@ -1,125 +0,0 @@ -// This module is meant to be somewhat browser-friendly. -// It can't lead to importing node builtin modules like like worker_threads. -// Cypress bundles this file and runs it in the browser, -// some imports like path and http are OK because they are aliased~ by their webpack config: -// https://github.com/cypress-io/cypress/blob/fb87950d6337ba99d13cb5fa3ce129e5f5cac02b/npm/webpack-batteries-included-preprocessor/index.js#L151 -// TODO: decouple this more so we never run into problems with this - we shouldn't rely on implementation details of Cypress bundling -import dbg from "debug"; -import path from "path"; -import { Agent as HttpAgent, AgentOptions } from "http"; -import { Agent as HttpsAgent } from "https"; - -import { BrowserName, Options } from "./types"; - -const debug = dbg("replay:cli"); - -// Get the executable name to use when opening a URL. -// It would be nice to use an existing npm package for this, -// but the obvious choice of "open" didn't actually work on linux -// when testing... 
-export function openExecutable() {
-  switch (process.platform) {
-    case "darwin":
-      return "open";
-    case "linux":
-      return "xdg-open";
-    default:
-      throw new Error("Unsupported platform");
-  }
-}
-
-function defer<T>() {
-  let resolve: (value: T) => void = () => {};
-  let reject: (reason?: any) => void = () => {};
-  const promise = new Promise<T>((res, rej) => {
-    resolve = res;
-    reject = rej;
-  });
-  return { promise, resolve, reject };
-}
-
-function maybeLog(verbose: boolean | undefined, str: string) {
-  debug(str);
-  if (verbose) {
-    console.log(str);
-  }
-}
-
-function getDirectory(opts?: Pick<Options, "directory">) {
-  const home = process.env.HOME || process.env.USERPROFILE;
-  return (
-    (opts && opts.directory) || process.env.RECORD_REPLAY_DIRECTORY || path.join(home!, ".replay")
-  );
-}
-
-function isValidUUID(str: unknown) {
-  if (typeof str != "string" || str.length != 36) {
-    return false;
-  }
-  for (let i = 0; i < str.length; i++) {
-    if ("0123456789abcdef-".indexOf(str[i]) == -1) {
-      return false;
-    }
-  }
-  return true;
-}
-
-function fuzzyBrowserName(browser?: string): BrowserName {
-  browser = browser?.toLowerCase()?.trim();
-
-  switch (browser) {
-    case "chrome":
-      return "chromium";
-    case "gecko":
-      return "firefox";
-  }
-
-  return browser as BrowserName;
-}
-
-function assertValidBrowserName(browser?: string): asserts browser is BrowserName {
-  if (!browser || (browser !== "chromium" && browser !== "firefox")) {
-    throw new Error("Unsupported browser: " + browser);
-  }
-}
-
-function getCurrentVersion() {
-  const pkg = require("@replayio/replay/package.json");
-  return pkg.version;
-}
-
-function getNameAndVersion() {
-  const pkg = require("@replayio/replay/package.json");
-  return `${pkg.name}/${pkg.version}`;
-}
-
-function getUserAgent() {
-  return getNameAndVersion();
-}
-
-function getHttpAgent(server: string, agentOptions?: AgentOptions) {
-  const serverURL = new URL(server);
-  if (!agentOptions) {
-    return;
-  }
-
-  if (["wss:", "https:"].includes(serverURL.protocol)) {
-    return new HttpsAgent(agentOptions);
-  } else if (["ws:", "http:"].includes(serverURL.protocol)) {
-    return new HttpAgent(agentOptions);
-  }
-
-  throw new Error(`Unsupported protocol: ${serverURL.protocol} for URL ${serverURL}`);
-}
-
-export {
-  assertValidBrowserName,
-  fuzzyBrowserName,
-  defer,
-  maybeLog,
-  getDirectory,
-  isValidUUID,
-  getCurrentVersion,
-  getUserAgent,
-  getHttpAgent,
-};
diff --git a/packages/replay/tsconfig.json b/packages/replay/tsconfig.json
deleted file mode 100644
index 826547ad1..000000000
--- a/packages/replay/tsconfig.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
-  "extends": "@replay-cli/tsconfig/base.json",
-  "compilerOptions": {
-    "lib": ["ES2019"],
-    "types": ["node", "jest"]
-  },
-  "include": ["src/**/*.ts"],
-  "exclude": ["**/*.test.ts"],
-  "references": [
-    {
-      "path": "../sourcemap-upload"
-    }
-  ]
-}
diff --git a/packages/replay/utils.d.ts b/packages/replay/utils.d.ts
deleted file mode 100644
index 605e84c3e..000000000
--- a/packages/replay/utils.d.ts
+++ /dev/null
@@ -1 +0,0 @@
-export * from "./dist/utils.js";
diff --git a/packages/replay/utils.js b/packages/replay/utils.js
deleted file mode 100644
index 1df1353e8..000000000
--- a/packages/replay/utils.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require("./dist/utils.js");
diff --git a/tsconfig.json b/tsconfig.json
index 02e806280..f08764351 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -5,7 +5,6 @@
     { "path": "./packages/jest" },
     { "path": "./packages/playwright" },
     { "path": "./packages/puppeteer" },
-    { "path": 
"./packages/replay" }, { "path": "./packages/replayio" }, { "path": "./packages/shared" }, { "path": "./packages/sourcemap-upload-webpack-plugin" }, diff --git a/yarn.lock b/yarn.lock index d968f9406..e97b86727 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3660,46 +3660,6 @@ __metadata: languageName: unknown linkType: soft -"@replayio/replay@workspace:^, @replayio/replay@workspace:packages/replay": - version: 0.0.0-use.local - resolution: "@replayio/replay@workspace:packages/replay" - dependencies: - "@replay-cli/pkg-build": "workspace:^" - "@replay-cli/shared": "workspace:^" - "@replay-cli/tsconfig": "workspace:^" - "@replayio/sourcemap-upload": "workspace:^" - "@types/debug": "npm:^4.1.7" - "@types/jest": "npm:^28.1.5" - "@types/node-fetch": "npm:^2.6.3" - "@types/semver": "npm:^7.5.6" - "@types/stack-utils": "npm:^2.0.3" - "@types/text-table": "npm:^0.2.2" - "@types/ws": "npm:^8.5.10" - commander: "npm:^12.0.0" - debug: "npm:^4.3.4" - fs-extra: "npm:^11.2.0" - is-uuid: "npm:^1.0.2" - jest: "npm:^28.1.3" - jsonata: "npm:^1.8.6" - launchdarkly-node-client-sdk: "npm:^3.2.1" - node-fetch: "npm:^2.6.8" - p-map: "npm:^4.0.0" - query-registry: "npm:^2.6.0" - semver: "npm:^7.5.4" - stack-utils: "npm:^2.0.6" - superstruct: "npm:^1.0.4" - text-table: "npm:^0.2.0" - ts-jest: "npm:^28.0.6" - typescript: "npm:^5.5.2" - undici: "npm:^5.28.4" - winston: "npm:^3.13.0" - winston-loki: "npm:^6.1.2" - ws: "npm:^7.5.0" - bin: - replay: ./bin.js - languageName: unknown - linkType: soft - "@replayio/sourcemap-upload-webpack-plugin@workspace:packages/sourcemap-upload-webpack-plugin": version: 0.0.0-use.local resolution: "@replayio/sourcemap-upload-webpack-plugin@workspace:packages/sourcemap-upload-webpack-plugin" @@ -4576,7 +4536,7 @@ __metadata: languageName: node linkType: hard -"@types/node-fetch@npm:^2.5.10, @types/node-fetch@npm:^2.6.2, @types/node-fetch@npm:^2.6.3": +"@types/node-fetch@npm:^2.5.10, @types/node-fetch@npm:^2.6.2": version: 2.6.11 resolution: "@types/node-fetch@npm:2.6.11" dependencies: @@ -4822,13 +4782,6 @@ __metadata: languageName: node linkType: hard -"@types/text-table@npm:^0.2.2": - version: 0.2.5 - resolution: "@types/text-table@npm:0.2.5" - checksum: 10c0/967054ba7509bf6ba4dda8adf81d048a7773b35295edb8670c045b6e27bda556a1917c8a29d4ea6b7d7e5b494785500779a002508c4415ef2e8b2a5351ca2066 - languageName: node - linkType: hard - "@types/through@npm:*": version: 0.0.33 resolution: "@types/through@npm:0.0.33" @@ -7106,7 +7059,6 @@ __metadata: dependencies: "@playwright/test": "npm:^1.40.1" "@replayio/playwright": "workspace:^" - "@replayio/replay": "workspace:^" "@testing-library/jest-dom": "npm:^5.16.2" "@testing-library/react": "npm:^12.1.4" "@testing-library/user-event": "npm:^13.5.0" @@ -13316,7 +13268,7 @@ __metadata: languageName: node linkType: hard -"node-fetch@npm:^2.5.0, node-fetch@npm:^2.6.1, node-fetch@npm:^2.6.7, node-fetch@npm:^2.6.8": +"node-fetch@npm:^2.5.0, node-fetch@npm:^2.6.1, node-fetch@npm:^2.6.7": version: 2.7.0 resolution: "node-fetch@npm:2.7.0" dependencies: