From 416fe3cb90c6d225822fc9d31eb1fa56fdd41401 Mon Sep 17 00:00:00 2001 From: Rik Nauta Date: Thu, 27 Jun 2024 13:55:22 +0200 Subject: [PATCH 01/11] Make BFG/Symf downloads parallel process safe --- .vscode/settings.json | 3 + agent/src/cli/cody-bench/strategy-fix.ts | 4 +- lib/shared/src/sourcegraph-api/graphql/url.ts | 4 +- lib/shared/src/utils.ts | 22 ++ vscode/src/graph/bfg/bfg.test.ts | 84 ++++++ vscode/src/graph/bfg/download-bfg.ts | 279 ++++++++++-------- vscode/src/graph/bfg/spawn-bfg.ts | 4 +- vscode/src/local-context/download-symf.ts | 233 ++++++++------- vscode/src/local-context/symf.test.ts | 103 +++++-- vscode/src/local-context/utils.ts | 121 ++++++++ vscode/src/testutils/mocks.ts | 23 +- 11 files changed, 611 insertions(+), 269 deletions(-) create mode 100644 vscode/src/graph/bfg/bfg.test.ts create mode 100644 vscode/src/local-context/utils.ts diff --git a/.vscode/settings.json b/.vscode/settings.json index cf59f1700dcb..fb52789c99c2 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -26,6 +26,9 @@ }, "editor.insertSpaces": true, "cSpell.words": ["Supercompletion", "Supercompletions"], + "[json]": { + "editor.defaultFormatter": "biomejs.biome", + }, "[typescript]": { "editor.defaultFormatter": "biomejs.biome" }, diff --git a/agent/src/cli/cody-bench/strategy-fix.ts b/agent/src/cli/cody-bench/strategy-fix.ts index c6bf18c84b8f..84bd42272615 100644 --- a/agent/src/cli/cody-bench/strategy-fix.ts +++ b/agent/src/cli/cody-bench/strategy-fix.ts @@ -3,7 +3,7 @@ import { PromptString, ps } from '@sourcegraph/cody-shared' import { glob } from 'glob' import * as vscode from 'vscode' import { ProtocolTextDocumentWithUri } from '../../../../vscode/src/jsonrpc/TextDocumentWithUri' -import { fileExists } from '../../../../vscode/src/local-context/download-symf' +import { pathExists } from '../../../../vscode/src/local-context/utils' import { redactAuthorizationHeader } from '../../../../vscode/src/testutils/CodyPersister' import { AgentTextDocument } from '../../AgentTextDocument' import { TestClient } from '../../TestClient' @@ -31,7 +31,7 @@ export async function evaluateFixStrategy( token: options.srcAccessToken, }, }) - if (!(await fileExists(path.join(options.workspace, 'node_modules')))) { + if (!(await pathExists(path.join(options.workspace, 'node_modules')))) { // Run pnpm install only when `node_modules` doesn't exist. await runVoidCommand(options.installCommand, options.workspace) } diff --git a/lib/shared/src/sourcegraph-api/graphql/url.ts b/lib/shared/src/sourcegraph-api/graphql/url.ts index 36a7ff7f5e18..60a4f802e9ba 100644 --- a/lib/shared/src/sourcegraph-api/graphql/url.ts +++ b/lib/shared/src/sourcegraph-api/graphql/url.ts @@ -1,5 +1,3 @@ -import { trimEnd } from 'lodash' - const GRAPHQL_URI = '/.api/graphql' interface BuildGraphQLUrlOptions { @@ -12,5 +10,5 @@ interface BuildGraphQLUrlOptions { export const buildGraphQLUrl = ({ request, baseUrl }: BuildGraphQLUrlOptions): string => { const nameMatch = request ? request.match(/^\s*(?:query|mutation)\s+(\w+)/) : '' const apiURL = `${GRAPHQL_URI}${nameMatch ? `?${nameMatch[1]}` : ''}` - return baseUrl ? new URL(trimEnd(baseUrl, '/') + apiURL).href : apiURL + return baseUrl ? 
new URL(apiURL, baseUrl).href : apiURL } diff --git a/lib/shared/src/utils.ts b/lib/shared/src/utils.ts index a51ebba9e02a..b462d7913f53 100644 --- a/lib/shared/src/utils.ts +++ b/lib/shared/src/utils.ts @@ -99,3 +99,25 @@ export function createSubscriber(): Subscriber { export function nextTick() { return new Promise(resolve => process.nextTick(resolve)) } + +export type SemverString<Prefix extends string> = `${Prefix}${number}.${number}.${number}` + +export namespace SemverString { + const splitPrefixRegex = /^(?<prefix>.*)(?<version>\d+\.\d+\.\d+)$/ + export function forcePrefix<P extends string>(prefix: P, value: string): SemverString<P> { + const match = splitPrefixRegex.exec(value) + if (!match || !match.groups?.version) { + throw new Error(`Invalid semver string: ${value}`) + } + return `${prefix}${match.groups?.version}` as SemverString<P>
+ } +} + +type TupleFromUnion = [T] extends [never] + ? [] + : T extends any + ? [T, ...TupleFromUnion>] + : [] + +// Helper type to ensure an array contains all members of T +export type ArrayContainsAll = TupleFromUnion diff --git a/vscode/src/graph/bfg/bfg.test.ts b/vscode/src/graph/bfg/bfg.test.ts new file mode 100644 index 000000000000..c4bbfa40df11 --- /dev/null +++ b/vscode/src/graph/bfg/bfg.test.ts @@ -0,0 +1,84 @@ +import { mkdtemp, open, rm } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import path from 'node:path' +import { describe, expect, it, vi } from 'vitest' +import { downloadFile } from '../../local-context/utils' +import { getOSArch } from '../../os' +import { _config, _getNamesForPlatform, _upsertBfgForPlatform, defaultBfgVersion } from './download-bfg' + +//@ts-ignore +_config.FILE_DOWNLOAD_LOCK_DURATION = 10 +//@ts-ignore +_config.FILE_LOCK_RETRY_DELAY = 1 + +vi.mock('../../local-context/utils', async importOriginal => { + const mod = await importOriginal() + let firstDownload = true + return { + ...mod, + downloadFile: vi.fn(async (url: string, dest: string) => { + // we abandon the first download + if (firstDownload) { + await makeEmptyFile(dest) + firstDownload = false + throw new Error('Test Mock Deliberate Abandon') + } + await sleep(2) + // make an empty file + await makeEmptyFile(dest) + }), + unzip: vi.fn(async (zipPath: string, dest: string) => { + await sleep(2) + // just check the zip file exists first + if (!(await mod.fileExists(zipPath))) { + throw new Error("File doesn't exist") + } + // we ensure that at leats the expected file exists + const { platform, arch } = getOSArch() + const { bfgUnzippedFilename } = _getNamesForPlatform(platform!, arch!, defaultBfgVersion) + const bfgUnzippedPath = path.join(dest, bfgUnzippedFilename) + await makeEmptyFile(bfgUnzippedPath) + }), + } +}) + +describe('upsertBfgForPlatform', () => { + // NOTE: This really only checks downloads in the same Node process Instead + // we probably want to mock the fs and network layer directly and ensure + // that this works regardless of Mutex locks + it('prevents parallel downloads', async () => { + const dir = await mkdtemp(path.join(tmpdir(), 'bfg-')) + try { + // we first create a "abandoned" download so that we can ensure that + // after some expiration time one of the processes will forcefully + // download regardless + const abandonedDownload = _upsertBfgForPlatform(dir, defaultBfgVersion) + expect(await abandonedDownload).toBeNull() + vi.mocked(downloadFile).mockClear() + + // // we now start parallel async functions + const results = await Promise.all([ + _upsertBfgForPlatform(dir, defaultBfgVersion), + _upsertBfgForPlatform(dir, defaultBfgVersion), + _upsertBfgForPlatform(dir, defaultBfgVersion), + _upsertBfgForPlatform(dir, defaultBfgVersion), + ]) + // // only one actual download should have happened + expect(downloadFile).toHaveBeenCalledOnce() + // // expect all results to be the same and valid strings + expect(new Set(results).size).toBe(1) + expect(results[0]).toBeTruthy() + } finally { + await rm(dir, { recursive: true }) + } + }) +}) + +async function makeEmptyFile(filePath: string) { + const file = await open(filePath, 'w') + await file.close() +} + +function sleep(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)) +} diff --git a/vscode/src/graph/bfg/download-bfg.ts b/vscode/src/graph/bfg/download-bfg.ts index 2a1ff2336059..c12491500881 100644 --- a/vscode/src/graph/bfg/download-bfg.ts +++ 
b/vscode/src/graph/bfg/download-bfg.ts @@ -1,146 +1,189 @@ -import * as fs from 'node:fs' -import { promises as fspromises } from 'node:fs' +import fs from 'node:fs/promises' +import os from 'node:os' import path from 'node:path' - -import axios from 'axios' -import * as unzipper from 'unzipper' +import { SemverString } from '@sourcegraph/cody-shared/src/utils' +import { Mutex } from 'async-mutex' import * as vscode from 'vscode' - -import { fileExists } from '../../local-context/download-symf' -import { logDebug } from '../../log' +import { sleep } from '../../completions/utils' +import { downloadFile, fileExists, unzip, upsertFile } from '../../local-context/utils' +import { logDebug, logError } from '../../log' import { Arch, Platform, getOSArch } from '../../os' import { captureException } from '../../services/sentry/sentry' // Available releases: https://github.com/sourcegraph/bfg/releases -// Do not include 'v' in this string. -const defaultBfgVersion = '5.4.6040' - -// We use this Promise to only have one downloadBfg running at once. -let serializeBfgDownload: Promise = Promise.resolve(null) - -export async function downloadBfg(context: vscode.ExtensionContext): Promise { - // First, wait for any in-progress downloads. - await serializeBfgDownload - - // Now we are the in-progress download. - serializeBfgDownload = (async () => { - const config = vscode.workspace.getConfiguration() - const userBfgPath = config.get('cody.experimental.cody-engine.path') - if (userBfgPath) { - const bfgStat = await fspromises.stat(userBfgPath) - if (!bfgStat.isFile()) { - throw new Error(`not a file: ${userBfgPath}`) - } - logDebug('CodyEngine', `using user-provided path: ${userBfgPath} ${bfgStat.isFile()}`) - return userBfgPath +export type BfgVersionString = SemverString<''> +export const defaultBfgVersion: BfgVersionString = '5.4.6040' + +export const _config = { + FILE_DOWNLOAD_LOCK_DURATION: 5000, + FILE_LOCK_RETRY_DELAY: 1000, +} as const + +/** + * Get the path to `bfg` binary. If possible it will be downloaded. + */ +export async function getBfgPath(context: vscode.ExtensionContext): Promise { + // If user-specified symf path is set, use that + // TODO: maybe we do want an option to download bfg if it's not found? + const config = vscode.workspace.getConfiguration() + const userBfgPath = config.get('cody.experimental.cody-engine.path') + if (userBfgPath) { + if (!(await fileExists(userBfgPath))) { + throw new Error(`bfg can't be loaded from user provided path: ${userBfgPath}`) } + logDebug('CodyEngine', `Skipping download. Using user-provided bfg path: ${userBfgPath}`) + return userBfgPath + } - const osArch = getOSArch() - if (!osArch) { - logDebug('CodyEngine', 'getOSArch returned nothing') - return null - } - const { platform, arch } = osArch + const bfgContainingDir = + typeof process !== 'undefined' && process.env.CODY_TESTING_BFG_DIR + ? 
process.env.CODY_TESTING_BFG_DIR + : path.join(context.globalStorageUri.fsPath, 'cody-engine') - if (!arch) { - logDebug('CodyEngine', 'getOSArch returned undefined arch') - return null - } + // remove any preceding v symbol + const bfgVersion = SemverString.forcePrefix( + '', + config.get('cody.experimental.cody-engine.version', defaultBfgVersion) + ) - if (!platform) { - logDebug('CodyEngine', 'getOSArch returned undefined platform') - return null - } - // Rename returned architecture to match RFC 795 conventions - // https://docs.google.com/document/d/11cw-7dAp93JmasITNSNCtx31xrQsNB1L2OoxVE6zrTc/edit - const archRenames = new Map([ - ['aarch64', 'arm64'], - ['x86_64', 'x64'], - ]) - let rfc795Arch = archRenames.get(arch ?? '') ?? arch - if (rfc795Arch === Arch.Arm64 && platform === Platform.Windows) { - // On Windows Arm PCs, we rely on emulation and use the x64 binary. - // See https://learn.microsoft.com/en-us/windows/arm/apps-on-arm-x86-emulation - rfc795Arch = Arch.X64 - } + const bfgPath = await _upsertBfgForPlatform(bfgContainingDir, bfgVersion) + return bfgPath +} - const bfgContainingDir = path.join(context.globalStorageUri.fsPath, 'cody-engine') - const bfgVersion = config.get('cody.experimental.cody-engine.version', defaultBfgVersion) - await fspromises.mkdir(bfgContainingDir, { recursive: true }) - const bfgFilename = `cody-engine-${bfgVersion}-${platform}-${rfc795Arch}` - const bfgPath = path.join(bfgContainingDir, bfgFilename) - const isAlreadyDownloaded = await fileExists(bfgPath) - if (isAlreadyDownloaded) { - logDebug('CodyEngine', `using downloaded path "${bfgPath}"`) - return bfgPath - } +// this protects agains multiple async functions in the same node process from +// starting a download +const processDownloadLock = new Mutex() + +export async function _upsertBfgForPlatform( + containingDir: string, + version: BfgVersionString +): Promise { + const { platform, arch } = getOSArch() + if (!platform || !arch) { + // show vs code error message + void vscode.window.showErrorMessage( + `No bfg binary available for ${os.platform()}/${os.machine()}` + ) + logError('CodyEngine', `No bfg binary available for ${os.platform()}/${os.machine()}`) + return null + } + const { bfgFilename, bfgUnzippedFilename, rfc795Arch } = _getNamesForPlatform( + platform, + arch, + version + ) + const bfgPath = path.join(containingDir, bfgFilename) + + if (await fileExists(bfgPath)) { + logDebug('CodyEngine', `using downloaded bfg path "${bfgPath}"`) + return bfgPath + } - const bfgURL = `https://github.com/sourcegraph/bfg/releases/download/v${bfgVersion}/bfg-${platform}-${rfc795Arch}.zip` + const bfgURL = `https://github.com/sourcegraph/bfg/releases/download/v${version}/bfg-${platform}-${rfc795Arch}.zip` + + return await processDownloadLock.runExclusive(async () => { try { - await vscode.window.withProgress( - { - location: vscode.ProgressLocation.Window, - title: 'Downloading cody-engine', - cancellable: false, - }, - async progress => { - progress.report({ message: 'Downloading cody-engine' }) - const bfgZip = path.join(bfgContainingDir, 'bfg.zip') - await downloadBfgBinary(bfgURL, bfgZip) - await unzipBfg(bfgZip, bfgContainingDir) - logDebug('CodyEngine', bfgPath) - // The zip file contains a binary named `bfg` or `bfg.exe`. We unzip it with that name first and then rename into - // a version-specific binary so that we can delete old versions of bfg. - const unzipPath = platform === Platform.Windows ? 
'bfg.exe' : 'bfg' - await fspromises.rename(path.join(bfgContainingDir, unzipPath), bfgPath) - await fspromises.chmod(bfgPath, 0o755) - await fspromises.rm(bfgZip) - logDebug('CodyEngine', `downloaded cody-engine to ${bfgPath}`) - } - ) - void removeOldBfgBinaries(bfgContainingDir, bfgFilename) + const wasDownloaded = await downloadBfgBinary({ + bfgPath, + bfgURL, + bfgFilename, + bfgUnzippedFilename, + }) + if (wasDownloaded) { + void removeOldBfgBinaries(containingDir, bfgFilename) + } + return bfgPath } catch (error) { captureException(error) - void vscode.window.showErrorMessage(`Failed to download bfg from URL ${bfgURL}: ${error}`) + void vscode.window.showErrorMessage(`Failed to download bfg: ${error}`) return null } - return bfgPath - })() - return serializeBfgDownload + }) } -async function unzipBfg(zipFile: string, destinationDir: string): Promise { - const zip = fs.createReadStream(zipFile).pipe(unzipper.Parse({ forceStream: true })) - for await (const entry of zip) { - if (entry.path.endsWith('/')) { - continue - } - entry.pipe(fs.createWriteStream(path.join(destinationDir, entry.path))) +export function _getNamesForPlatform( + platform: Platform, + arch: Arch, + version: BfgVersionString +): { bfgFilename: string; bfgUnzippedFilename: string; rfc795Arch: string } { + // Rename returned architecture to match RFC 795 conventions + // https://docs.google.com/document/d/11cw-7dAp93JmasITNSNCtx31xrQsNB1L2OoxVE6zrTc/edit + const archRenames = new Map([ + ['aarch64', 'arm64'], + ['x86_64', 'x64'], + ]) + let rfc795Arch = archRenames.get(arch ?? '') ?? arch + if (rfc795Arch === Arch.Arm64 && platform === Platform.Windows) { + // On Windows Arm PCs, we rely on emulation and use the x64 binary. + // See https://learn.microsoft.com/en-us/windows/arm/apps-on-arm-x86-emulation + rfc795Arch = Arch.X64 } -} -async function downloadBfgBinary(url: string, destination: string): Promise { - logDebug('CodyEngine', `downloading from URL ${url}`) - const response = await axios({ - url, - method: 'GET', - responseType: 'stream', - maxRedirects: 10, - }) - - const stream = fs.createWriteStream(destination) - response.data.pipe(stream) + const bfgFilename = `cody-engine-${version}-${platform}-${rfc795Arch}` + const bfgUnzippedFilename = platform === Platform.Windows ? 
'bfg.exe' : 'bfg' + return { bfgFilename, rfc795Arch, bfgUnzippedFilename } +} - await new Promise((resolve, reject) => { - stream.on('finish', resolve) - stream.on('error', reject) - }) +async function downloadBfgBinary({ + bfgPath, + bfgFilename, + bfgUnzippedFilename, + bfgURL, +}: { + bfgPath: string + bfgFilename: string + bfgUnzippedFilename: string + bfgURL: string +}): Promise { + logDebug('CodyEngine', `downloading bfg from ${bfgURL}`) + return await vscode.window.withProgress( + { + location: vscode.ProgressLocation.Notification, + title: 'Downloading Cody search engine (bfg)', + cancellable: false, + }, + async (progress, cancel) => { + progress.report({ message: 'Downloading bfg' }) + while (!cancel.isCancellationRequested) { + if (await fileExists(bfgPath)) { + logDebug('CodyEngine', 'bfg already downloaded, reusing') + return false + } + const bfgTmpDir = `${bfgPath}.tmp` + await fs.mkdir(bfgTmpDir, { recursive: true }) + + const bfgZipFile = path.join(bfgTmpDir, `${bfgFilename}.zip`) + // try and acquire a file lock, giving another process some grace to write data to it + const bfgZipFileLock = await upsertFile(bfgZipFile, _config.FILE_DOWNLOAD_LOCK_DURATION) + if (!bfgZipFileLock) { + logDebug('CodyEngine', 'Another process is already downloading bfg, waiting...') + await sleep(_config.FILE_DOWNLOAD_LOCK_DURATION) + continue + } + await downloadFile(bfgURL, bfgZipFile, cancel) + progress.report({ message: 'Extracting bfg' }) + await unzip(bfgZipFile, bfgTmpDir) + logDebug('CodyEngine', `downloaded bfg to ${bfgTmpDir}`) + + const tmpFile = path.join(bfgTmpDir, bfgUnzippedFilename) + await fs.chmod(tmpFile, 0o755) + await fs.rename(tmpFile, bfgPath) + await fs.rm(bfgTmpDir, { recursive: true }) + + logDebug('CodyEngine', `extracted bfg to ${bfgPath}`) + return true + } + return false + } + ) } async function removeOldBfgBinaries(containingDir: string, currentBfgPath: string): Promise { - const bfgDirContents = await fspromises.readdir(containingDir) - const oldBfgBinaries = bfgDirContents.filter(f => f.startsWith('bfg') && f !== currentBfgPath) + const bfgDirContents = await fs.readdir(containingDir) + const oldBfgBinaries = bfgDirContents.filter( + f => f.startsWith('cody-engine-') && f !== currentBfgPath + ) for (const oldBfgBinary of oldBfgBinaries) { - await fspromises.rm(path.join(containingDir, oldBfgBinary)) + await fs.rm(path.join(containingDir, oldBfgBinary)) } } diff --git a/vscode/src/graph/bfg/spawn-bfg.ts b/vscode/src/graph/bfg/spawn-bfg.ts index 92067648eae4..3168c7d7a2a9 100644 --- a/vscode/src/graph/bfg/spawn-bfg.ts +++ b/vscode/src/graph/bfg/spawn-bfg.ts @@ -5,13 +5,13 @@ import * as vscode from 'vscode' import { StreamMessageReader, StreamMessageWriter, createMessageConnection } from 'vscode-jsonrpc/node' import { MessageHandler } from '../../jsonrpc/jsonrpc' import { logDebug } from '../../log' -import { downloadBfg } from './download-bfg' +import { getBfgPath } from './download-bfg' export async function spawnBfg( context: vscode.ExtensionContext, reject: (reason?: any) => void ): Promise { - const codyrpc = await downloadBfg(context) + const codyrpc = await getBfgPath(context) if (!codyrpc) { throw new Error( 'Failed to download BFG binary. 
To fix this problem, set the "cody.experimental.cody-engine.path" configuration to the path of your BFG binary' diff --git a/vscode/src/local-context/download-symf.ts b/vscode/src/local-context/download-symf.ts index 1f4dddf34644..21f8ac29b490 100644 --- a/vscode/src/local-context/download-symf.ts +++ b/vscode/src/local-context/download-symf.ts @@ -1,68 +1,75 @@ -import * as fs from 'node:fs' -import fspromises from 'node:fs/promises' -import * as os from 'node:os' -import * as path from 'node:path' - -import axios from 'axios' -import * as unzipper from 'unzipper' -import * as vscode from 'vscode' - +import fs from 'node:fs/promises' +import os from 'node:os' +import path from 'node:path' +import type { SemverString } from '@sourcegraph/cody-shared/src/utils' import { Mutex } from 'async-mutex' -import { logDebug } from '../log' -import { Platform, getOSArch } from '../os' +import * as vscode from 'vscode' +import { sleep } from '../completions/utils' +import { logDebug, logError } from '../log' +import { type Arch, Platform, getOSArch } from '../os' import { captureException } from '../services/sentry/sentry' +import { downloadFile, fileExists, unzip, upsertFile } from './utils' -const symfVersion = 'v0.0.12' +export type SymfVersionString = SemverString<'v'> +const symfVersion: SymfVersionString = 'v0.0.12' + +export const _config = { + //how long to consider a file "active" before we consider it "stale" + FILE_DOWNLOAD_LOCK_DURATION: 5000, + //delay before trying to re-lock a active file + FILE_LOCK_RETRY_DELAY: 1000, +} as const /** - * Get the path to `symf`. If the symf binary is not found, download it. + * Get the path to `symf` binary. If possible it will be downloaded. */ export async function getSymfPath(context: vscode.ExtensionContext): Promise { // If user-specified symf path is set, use that + // TODO: maybe we do want an option to download symf if it's not found? const config = vscode.workspace.getConfiguration() const userSymfPath = config.get('cody.experimental.symf.path') ?? config.get('cody.internal.symf.path') if (userSymfPath) { - logDebug('symf', `using user symf: ${userSymfPath}`) + if (!(await fileExists(userSymfPath))) { + throw new Error(`symf can't be loaded from user provided path: ${userSymfPath}`) + } + logDebug('symf', `Skipping download. Using user specified symf path: ${userSymfPath}`) return userSymfPath } - const symfContainingDir = path.join(context.globalStorageUri.fsPath, 'symf') - return await _getSymfPath(symfContainingDir) + //TODO(rnauta): move all test overrides to helper class + const symfContainingDir = + typeof process !== 'undefined' && process.env.CODY_TESTING_SYMF_DIR + ? 
process.env.CODY_TESTING_SYMF_DIR + : path.join(context.globalStorageUri.fsPath, 'symf') + + const symfPath = await _upsertSymfForPlatform(symfContainingDir) + return symfPath } -const downloadLock = new Mutex() - -export async function _getSymfPath( - symfContainingDir: string, - actualDownloadSymf: (op: { - symfPath: string - symfFilename: string - symfUnzippedFilename: string - symfURL: string - }) => Promise = downloadSymf -): Promise { +// this protects agains multiple async functions in the same node process from +// starting a download +const processDownloadLock = new Mutex() + +/** + * Returns the platform specific symf path or downloads it if needed + * @param containingDir the directory in which the symf binary will be stored + * @returns symf path for platform + */ +export async function _upsertSymfForPlatform(containingDir: string): Promise { const { platform, arch } = getOSArch() if (!platform || !arch) { // show vs code error message void vscode.window.showErrorMessage( `No symf binary available for ${os.platform()}/${os.machine()}` ) + logError('CodyEngine', `No symf binary available for ${os.platform()}/${os.machine()}`) return null } - // Releases (eg at https://github.com/sourcegraph/symf/releases) are named with the Zig platform - // identifier (linux-musl, windows-gnu, macos). - const zigPlatform = - platform === Platform.Linux - ? 'linux-musl' - : platform === Platform.Windows - ? 'windows-gnu' - : platform + const { symfFilename, symfUnzippedFilename, zigPlatform } = _getNamesForPlatform(platform, arch) + const symfPath = path.join(containingDir, symfFilename) - const symfFilename = `symf-${symfVersion}-${arch}-${platform}` - const symfUnzippedFilename = `symf-${arch}-${zigPlatform}` // the filename inside the zip - const symfPath = path.join(symfContainingDir, symfFilename) if (await fileExists(symfPath)) { logDebug('symf', `using downloaded symf "${symfPath}"`) return symfPath @@ -71,28 +78,49 @@ export async function _getSymfPath( const symfURL = `https://github.com/sourcegraph/symf/releases/download/${symfVersion}/symf-${arch}-${zigPlatform}.zip` // Download symf binary with vscode progress api - try { - await downloadLock.acquire() - // Re-check if it has been downloaded - if (await fileExists(symfPath)) { - logDebug('symf', 'symf already downloaded, reusing') + return await processDownloadLock.runExclusive(async () => { + try { + const wasDownloaded = await downloadSymfBinary({ + symfPath, + symfURL, + symfFilename, + symfUnzippedFilename, + }) + if (wasDownloaded) { + void removeOldSymfBinaries(containingDir, symfFilename) + } return symfPath + } catch (error) { + captureException(error) + void vscode.window.showErrorMessage(`Failed to download symf: ${error}`) + return null } + }) +} - await actualDownloadSymf({ symfPath, symfURL, symfFilename, symfUnzippedFilename }) - void removeOldSymfBinaries(symfContainingDir, symfFilename) - } catch (error) { - captureException(error) - void vscode.window.showErrorMessage(`Failed to download symf: ${error}`) - return null - } finally { - downloadLock.release() - } +export function _getNamesForPlatform( + platform: Platform, + arch: Arch +): { symfFilename: string; symfUnzippedFilename: string; zigPlatform: string } { + // Releases (eg at https://github.com/sourcegraph/symf/releases) are named with the Zig platform + // identifier (linux-musl, windows-gnu, macos). + const zigPlatform = + platform === Platform.Linux + ? 'linux-musl' + : platform === Platform.Windows + ? 
'windows-gnu' + : platform - return symfPath + const symfFilename = `symf-${symfVersion}-${arch}-${platform}` + const symfUnzippedFilename = `symf-${arch}-${zigPlatform}` // the filename inside the zip + return { symfFilename, symfUnzippedFilename, zigPlatform } } -async function downloadSymf({ +/** + * Downloads symf from the given URL to a given path. + * @returns true if the file was downloaded new or false if the file already existed + */ +async function downloadSymfBinary({ symfPath, symfFilename, symfUnzippedFilename, @@ -102,76 +130,57 @@ async function downloadSymf({ symfFilename: string symfUnzippedFilename: string symfURL: string -}): Promise { +}): Promise { logDebug('symf', `downloading symf from ${symfURL}`) - - await vscode.window.withProgress( + return await vscode.window.withProgress( { location: vscode.ProgressLocation.Notification, title: 'Downloading Cody search engine (symf)', cancellable: false, }, - async progress => { - const symfTmpDir = `${symfPath}.tmp` - progress.report({ message: 'Downloading symf and extracting symf' }) - - await fspromises.mkdir(symfTmpDir, { recursive: true }) - const symfZipFile = path.join(symfTmpDir, `${symfFilename}.zip`) - await downloadFile(symfURL, symfZipFile) - await unzipSymf(symfZipFile, symfTmpDir) - logDebug('symf', `downloaded symf to ${symfTmpDir}`) - - const tmpFile = path.join(symfTmpDir, symfUnzippedFilename) - await fspromises.chmod(tmpFile, 0o755) - await fspromises.rename(tmpFile, symfPath) - await fspromises.rm(symfTmpDir, { recursive: true }) - - logDebug('symf', `extracted symf to ${symfPath}`) + async (progress, cancel) => { + progress.report({ message: 'Downloading symf' }) + while (!cancel.isCancellationRequested) { + if (await fileExists(symfPath)) { + logDebug('symf', 'symf already downloaded, reusing') + return false + } + const symfTmpDir = `${symfPath}.tmp` + await fs.mkdir(symfTmpDir, { recursive: true }) + const symfZipFile = path.join(symfTmpDir, `${symfFilename}.zip`) + + // try and acquire a file lock, giving another process some grace to write data to it + const symfZipFileLock = await upsertFile( + symfZipFile, + _config.FILE_DOWNLOAD_LOCK_DURATION + ) + if (!symfZipFileLock) { + logDebug('symf', 'Another process is already downloading symf, waiting...') + await sleep(_config.FILE_LOCK_RETRY_DELAY) + continue + } + await downloadFile(symfURL, symfZipFile, cancel) + progress.report({ message: 'Extracting symf' }) + await unzip(symfZipFile, symfTmpDir) + logDebug('symf', `downloaded symf to ${symfTmpDir}`) + + const tmpFile = path.join(symfTmpDir, symfUnzippedFilename) + await fs.chmod(tmpFile, 0o755) + await fs.rename(tmpFile, symfPath) + await fs.rm(symfTmpDir, { recursive: true }) + + logDebug('symf', `extracted symf to ${symfPath}`) + return true + } + return false } ) } -export async function fileExists(path: string): Promise { - try { - await fspromises.access(path) - return true - } catch { - return false - } -} - -async function downloadFile(url: string, outputPath: string): Promise { - logDebug('Symf', `downloading from URL ${url}`) - const response = await axios({ - url, - method: 'GET', - responseType: 'stream', - maxRedirects: 10, - }) - - const stream = fs.createWriteStream(outputPath) - response.data.pipe(stream) - - await new Promise((resolve, reject) => { - stream.on('finish', resolve) - stream.on('error', reject) - }) -} - -async function unzipSymf(zipFile: string, destinationDir: string): Promise { - const zip = fs.createReadStream(zipFile).pipe(unzipper.Parse({ forceStream: true })) - 
for await (const entry of zip) { - if (entry.path.endsWith('/')) { - continue - } - entry.pipe(fs.createWriteStream(path.join(destinationDir, entry.path))) - } -} - async function removeOldSymfBinaries(containingDir: string, currentSymfPath: string): Promise { - const symfDirContents = await fspromises.readdir(containingDir) + const symfDirContents = await fs.readdir(containingDir) const oldSymfBinaries = symfDirContents.filter(f => f.startsWith('symf-') && f !== currentSymfPath) for (const oldSymfBinary of oldSymfBinaries) { - await fspromises.rm(path.join(containingDir, oldSymfBinary)) + await fs.rm(path.join(containingDir, oldSymfBinary)) } } diff --git a/vscode/src/local-context/symf.test.ts b/vscode/src/local-context/symf.test.ts index b2d69c420cd8..4773fc8196d5 100644 --- a/vscode/src/local-context/symf.test.ts +++ b/vscode/src/local-context/symf.test.ts @@ -1,36 +1,87 @@ -import { describe, expect, it } from 'vitest' - -import { _getSymfPath } from './download-symf' - -import { mkdtemp, open, rmdir } from 'node:fs/promises' +import { mkdtemp, open, rm } from 'node:fs/promises' import { tmpdir } from 'node:os' import path from 'node:path' +import { describe, expect, it, vi } from 'vitest' +import { getOSArch } from '../os' +import { _config, _getNamesForPlatform, _upsertSymfForPlatform } from './download-symf' +import { downloadFile } from './utils' + +//@ts-ignore +_config.FILE_DOWNLOAD_LOCK_DURATION = 10 +//@ts-ignore +_config.FILE_LOCK_RETRY_DELAY = 1 + +vi.mock('./utils', async importOriginal => { + //use the vscode mock inside this mock too + const mod = await importOriginal() + let firstDownload = true + return { + ...mod, + downloadFile: vi.fn(async (url: string, dest: string) => { + // we abandon the first download + if (firstDownload) { + await makeEmptyFile(dest) + firstDownload = false + throw new Error('Test Mock Deliberate Abandon') + } + await sleep(2) + // make an empty file + await makeEmptyFile(dest) + }), + unzip: vi.fn(async (zipPath: string, dest: string) => { + await sleep(2) + // just check the zip file exists first + if (!(await mod.fileExists(zipPath))) { + throw new Error("File doesn't exist") + } + // we ensure that at leats the expected file exists + const { platform, arch } = getOSArch() + const { symfUnzippedFilename } = _getNamesForPlatform(platform!, arch!) 
+ const symfUnzippedPath = path.join(dest, symfUnzippedFilename) + await makeEmptyFile(symfUnzippedPath) + }), + } +}) -describe('download-symf', () => { - it('no parallel download', async () => { +describe('upsertSymfForPlatform', () => { + // NOTE: This really only checks downloads in the same Node process Instead + // we probably want to mock the fs and network layer directly and ensure + // that this works regardless of Mutex locks + it('prevents parallel downloads', async () => { const dir = await mkdtemp(path.join(tmpdir(), 'symf-')) try { - const makeEmptyFile = async (filePath: string) => { - const file = await open(filePath, 'w') - await file.close() - } + // we first create a "abandoned" download so that we can ensure that + // after some expiration time one of the processes will forcefully + // download regardless + const abandonedDownload = _upsertSymfForPlatform(dir) + expect(await abandonedDownload).toBeNull() - let mockDownloadSymfCalled = 0 - const mockDownloadSymf = async (op: { - symfPath: string - symfFilename: string - symfURL: string - }): Promise => { - mockDownloadSymfCalled++ - await makeEmptyFile(op.symfPath) - } - const symfPaths = await Promise.all( - [...Array(10).keys()].map(() => _getSymfPath(dir, mockDownloadSymf)) - ) - expect(symfPaths.every(p => p === symfPaths[0])).toBeTruthy() - expect(mockDownloadSymfCalled).toEqual(1) + vi.mocked(downloadFile).mockClear() + + // we now start parallel async functions + const results = await Promise.all([ + _upsertSymfForPlatform(dir), + _upsertSymfForPlatform(dir), + _upsertSymfForPlatform(dir), + _upsertSymfForPlatform(dir), + ]) + // only one actual download should have happened + expect(downloadFile).toHaveBeenCalledOnce() + + // expect all results to be the same and valid strings + expect(new Set(results).size).toBe(1) + expect(results[0]).toBeTruthy() } finally { - await rmdir(dir, { recursive: true }) + await rm(dir, { recursive: true }) } }) }) + +async function makeEmptyFile(filePath: string) { + const file = await open(filePath, 'w') + await file.close() +} + +function sleep(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)) +} diff --git a/vscode/src/local-context/utils.ts b/vscode/src/local-context/utils.ts new file mode 100644 index 000000000000..756b1212afe9 --- /dev/null +++ b/vscode/src/local-context/utils.ts @@ -0,0 +1,121 @@ +import syncfs from 'node:fs' +import fs from 'node:fs/promises' +import path from 'node:path' +import axios from 'axios' +import unzipper from 'unzipper' +import type * as vscode from 'vscode' +import { logDebug } from '../log' + +export async function pathExists(path: string): Promise { + try { + await fs.access(path) + return true + } catch { + return false + } +} + +/** + * Determines wether the path exists and it is a file + * @param path + * @returns file exists at the specified path + */ +export async function fileExists(path: string): Promise { + try { + const stat = await fs.stat(path) + return stat.isFile() + } catch (err: any) { + if (err.code === 'ENOENT') { + return false + } + //throw on other errors + throw err + } +} + +/** + * Atomically creates the file if it does not exist but leaves it untouched otherwise. + * @param filePath the file to create/touch + * @param maxMtimeMs if the file hasn't been touched for maxMtimeMs, a new file will be created instead + * @returns True if a new file has been created. 
False if the existing file has been left in place + */ +export async function upsertFile( + filePath: string, + maxMtimeMs?: number, + cancellationToken?: vscode.CancellationToken +): Promise { + while (!cancellationToken?.isCancellationRequested) { + try { + const openFileHandle = await fs.open(filePath, 'wx') + try { + await openFileHandle.close() + } catch { + /*Ignore*/ + } + return true + } catch (error: any) { + if (error.code !== 'EEXIST') { + throw error + } + if (maxMtimeMs === undefined) { + return false + } + // We now know the file exists but we'll just check that someone has + // actually been writing to it within the maxAge time span. + // otherwise we assume it's abandoned and we'll give ourselves + + // Note: this could fail if the file has been deleted by another + // process right as we check this...I can live with that. + const fileStats = await fs.stat(filePath) + const age = Date.now() - fileStats.mtimeMs + if (age < maxMtimeMs) { + // this file has not been abandoned + return false + } + logDebug('symf', `file ${filePath} is abandoned, removing it`) + // we'll just remove the old file and retry. This way if another + // process was doing the same thing only one should win out + await fs.unlink(filePath) + } + } + return false +} + +/** + * This downloads a url to a specific location and overwrites the existing file + * if it exists + */ +export async function downloadFile( + url: string, + outputPath: string, + cancellationToken?: vscode.CancellationToken +): Promise { + logDebug('Symf', `downloading from URL ${url}`) + const abort = !cancellationToken ? undefined : new AbortController() + cancellationToken?.onCancellationRequested(() => abort?.abort()) + const response = await axios({ + url, + method: 'GET', + responseType: 'stream', + maxRedirects: 10, + signal: abort?.signal, + }) + + const stream = syncfs.createWriteStream(outputPath, { autoClose: true, flags: 'w' }) + response.data.pipe(stream) + + await new Promise((resolve, reject) => { + stream.on('finish', resolve) + stream.on('error', reject) + }) +} + +export async function unzip(zipFile: string, destinationDir: string): Promise { + const zip = syncfs.createReadStream(zipFile).pipe(unzipper.Parse({ forceStream: true })) + for await (const entry of zip) { + if (entry.path.endsWith('/')) { + continue + } + entry.pipe(syncfs.createWriteStream(path.join(destinationDir, entry.path))) + } +} diff --git a/vscode/src/testutils/mocks.ts b/vscode/src/testutils/mocks.ts index 97906aea36aa..62dfd0aef0f7 100644 --- a/vscode/src/testutils/mocks.ts +++ b/vscode/src/testutils/mocks.ts @@ -704,6 +704,12 @@ export enum UIKind { Web = 2, } +export enum ProgressLocation { + SourceControl = 1, + Window = 10, + Notification = 15, +} + export class FileSystemError extends Error { public code = 'FileSystemError' } @@ -753,6 +759,16 @@ export const vsCodeMocks = { key: 'foo', dispose: () => {}, }), + withProgress: async ( + options: vscode_types.ProgressOptions, + task: ( + progress: vscode_types.Progress<{ message?: string; increment?: number }>, + token: CancellationToken + ) => Thenable + ) => { + const cancel = new CancellationTokenSource() + return await task({ report: () => {} }, cancel.token) + }, visibleTextEditors: [], tabGroups: { all: [] }, }, @@ -804,14 +820,9 @@ export const vsCodeMocks = { DiagnosticSeverity, ViewColumn, TextDocumentChangeReason, + ProgressLocation, } as const -export enum ProgressLocation { - SourceControl = 1, - Window = 10, - Notification = 15, -} - export class MockFeatureFlagProvider extends 
FeatureFlagProvider { constructor(private readonly enabledFlags: Set) { super(null as any) From 10f01db0f063332933f15fb81f236c9f3adabbf3 Mon Sep 17 00:00:00 2001 From: Rik Nauta Date: Thu, 27 Jun 2024 19:24:24 +0200 Subject: [PATCH 02/11] Initial Playwright V2 E2E framework --- .gitignore | 1 + pnpm-lock.yaml | 347 +++++----- vscode/.gitignore | 1 + vscode/e2e/README.md | 6 + vscode/e2e/TODO.md | 6 + vscode/e2e/example.test.ts | 57 ++ vscode/e2e/issues/CODY-2392.test.ts | 78 +++ vscode/e2e/issues/README.md | 28 + vscode/e2e/issues/ignore.test.ts | 0 vscode/e2e/utils/helpers.ts | 47 ++ vscode/e2e/utils/symlink-extensions.setup.ts | 73 +++ vscode/e2e/utils/uix.test.ts | 20 + vscode/e2e/utils/vscody/fixture.ts | 654 +++++++++++++++++++ vscode/e2e/utils/vscody/index.ts | 3 + vscode/e2e/utils/vscody/uix/README.md | 1 + vscode/e2e/utils/vscody/uix/cody.ts | 79 +++ vscode/e2e/utils/vscody/uix/index.ts | 7 + vscode/e2e/utils/vscody/uix/vscode.ts | 170 +++++ vscode/e2e/utils/vscody/uix/workspace.ts | 29 + vscode/package.json | 11 +- vscode/playwright.v2.config.ts | 85 +++ vscode/src/testutils/CodyPersister.ts | 7 +- vscode/src/testutils/polly.ts | 84 +-- vscode/test/e2e/install-deps.ts | 21 +- vscode/tsconfig.json | 1 + 25 files changed, 1595 insertions(+), 221 deletions(-) create mode 100644 vscode/e2e/README.md create mode 100644 vscode/e2e/TODO.md create mode 100644 vscode/e2e/example.test.ts create mode 100644 vscode/e2e/issues/CODY-2392.test.ts create mode 100644 vscode/e2e/issues/README.md create mode 100644 vscode/e2e/issues/ignore.test.ts create mode 100644 vscode/e2e/utils/helpers.ts create mode 100644 vscode/e2e/utils/symlink-extensions.setup.ts create mode 100644 vscode/e2e/utils/uix.test.ts create mode 100644 vscode/e2e/utils/vscody/fixture.ts create mode 100644 vscode/e2e/utils/vscody/index.ts create mode 100644 vscode/e2e/utils/vscody/uix/README.md create mode 100644 vscode/e2e/utils/vscody/uix/cody.ts create mode 100644 vscode/e2e/utils/vscody/uix/index.ts create mode 100644 vscode/e2e/utils/vscody/uix/vscode.ts create mode 100644 vscode/e2e/utils/vscody/uix/workspace.ts create mode 100644 vscode/playwright.v2.config.ts diff --git a/.gitignore b/.gitignore index 771961ced618..3384fc1ae5f8 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ out/ .env .idea/ .run/ +.test/ **/*.iml **/*.vsix index.scip diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 96c7af702b4d..df4abd4030ed 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -56,7 +56,7 @@ importers: version: 8.0.5(react-dom@18.2.0)(react@18.2.0)(typescript@5.4.2)(vite@5.2.9) '@testing-library/jest-dom': specifier: ^6.4.2 - version: 6.4.2(vitest@1.5.0) + version: 6.4.2(vitest@1.6.0) '@testing-library/react': specifier: ^14.2.2 version: 14.2.2(react-dom@18.2.0)(react@18.2.0) @@ -104,7 +104,7 @@ importers: version: 5.2.9(@types/node@20.12.7) vitest: specifier: ^1.5.0 - version: 1.5.0(@types/node@20.12.7)(happy-dom@14.3.10)(jsdom@22.1.0) + version: 1.6.0(@types/node@20.12.7)(happy-dom@14.3.10)(jsdom@22.1.0) agent: dependencies: @@ -567,9 +567,15 @@ importers: '@google-cloud/pubsub': specifier: ^3.7.3 version: 3.7.3 + '@npmcli/promise-spawn': + specifier: ^7.0.2 + version: 7.0.2 '@playwright/test': specifier: 1.44.1 version: 1.44.1 + '@pollyjs/adapter': + specifier: ^6.0.6 + version: 6.0.6 '@pollyjs/adapter-node-http': specifier: ^6.0.6 version: 6.0.6 @@ -621,6 +627,9 @@ importers: '@types/mocha': specifier: ^10.0.6 version: 10.0.6 + '@types/npmcli__promise-spawn': + specifier: ^6.0.3 + version: 6.0.3 '@types/pako': specifier: 
^2.0.3 version: 2.0.3 @@ -657,6 +666,9 @@ importers: ajv-formats: specifier: ^3.0.1 version: 3.0.1(ajv@8.14.0) + chokidar: + specifier: ^3.6.0 + version: 3.6.0 concurrently: specifier: ^8.2.0 version: 8.2.0 @@ -678,6 +690,9 @@ importers: fuzzysort: specifier: ^2.0.4 version: 2.0.4 + http-proxy-middleware: + specifier: ^3.0.0 + version: 3.0.0 mocha: specifier: ^10.2.0 version: 10.2.0 @@ -717,6 +732,9 @@ importers: yaml: specifier: ^2.3.4 version: 2.3.4 + zod: + specifier: ^3.23.8 + version: 3.23.8 web: dependencies: @@ -829,7 +847,7 @@ packages: '@babel/traverse': 7.24.5 '@babel/types': 7.24.5 convert-source-map: 2.0.0 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -910,7 +928,7 @@ packages: '@babel/core': 7.24.5 '@babel/helper-compilation-targets': 7.23.6 '@babel/helper-plugin-utils': 7.24.5 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 lodash.debounce: 4.0.8 resolve: 1.22.8 transitivePeerDependencies: @@ -2076,7 +2094,7 @@ packages: '@babel/helper-split-export-declaration': 7.24.5 '@babel/parser': 7.24.5 '@babel/types': 7.24.5 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -2860,7 +2878,7 @@ packages: resolution: {integrity: sha512-ribfPYfHb+Uw3b27Eiw6NPqjhIhTpVFzEWLwyc/1Xp+DCdwRRyIlAUODX+9bPARF6aQtUu1+/PHzdNvRzcs/+Q==} engines: {node: '>= 12'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 http-errors: 2.0.0 koa-compose: 4.1.0 methods: 1.1.2 @@ -3182,6 +3200,13 @@ packages: rimraf: 3.0.2 dev: false + /@npmcli/promise-spawn@7.0.2: + resolution: {integrity: sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==} + engines: {node: ^16.14.0 || >=18.0.0} + dependencies: + which: 4.0.0 + dev: true + /@openctx/client@0.0.19: resolution: {integrity: sha512-zyfCojoQlkqsNwEJERdCpAs1ytJnAQ/bFEfPM0wv6npkGDpjG0pagQonx7wY7gR2g1+gRDonS29hYeRU/i98lQ==} dependencies: @@ -5403,7 +5428,7 @@ packages: pretty-format: 27.5.1 dev: true - /@testing-library/jest-dom@6.4.2(vitest@1.5.0): + /@testing-library/jest-dom@6.4.2(vitest@1.6.0): resolution: {integrity: sha512-CzqH0AFymEMG48CpzXFriYYkOjk6ZGPCLMhW9e9jg3KMCn5OfJecF8GtGW7yGfR/IgCe3SX8BSwjdzI6BBbZLw==} engines: {node: '>=14', npm: '>=6', yarn: '>=1'} peerDependencies: @@ -5432,7 +5457,7 @@ packages: dom-accessibility-api: 0.6.3 lodash: 4.17.21 redent: 3.0.0 - vitest: 1.5.0(@types/node@20.12.7)(happy-dom@14.3.10)(jsdom@22.1.0) + vitest: 1.6.0(@types/node@20.12.7)(happy-dom@14.3.10)(jsdom@22.1.0) dev: true /@testing-library/react@14.2.2(react-dom@18.2.0)(react@18.2.0): @@ -5669,6 +5694,12 @@ packages: resolution: {integrity: sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==} dev: true + /@types/http-proxy@1.17.14: + resolution: {integrity: sha512-SSrD0c1OQzlFX7pGu1eXxSEjemej64aaNPRhhVYUGqXh0BtldAAx37MG8btcumvpgKyZp1F5Gn3JkktdxiFv6w==} + dependencies: + '@types/node': 20.12.7 + dev: true + /@types/ini@4.1.0: resolution: {integrity: sha512-mTehMtc+xtnWBBvqizcqYCktKDBH2WChvx1GU3Sfe4PysFDXiNe+1YwtpVX1MDtCa4NQrSPw2+3HmvXHY3gt1w==} dev: true @@ -5776,6 +5807,12 @@ packages: resolution: {integrity: sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==} dev: true + /@types/npmcli__promise-spawn@6.0.3: + resolution: {integrity: sha512-4EQlesp5HtYHPXMXd4uuI+Q9hELEU0eVg/HBRLkqGC5U2ohwXZduCottmzPpb4tWCB+w4kQ3XNPlHIdXvCTyFw==} + dependencies: + '@types/node': 20.12.7 + dev: true + 
/@types/pako@2.0.3: resolution: {integrity: sha512-bq0hMV9opAcrmE0Byyo0fY3Ew4tgOevJmQ9grUhpXQhYfyLJ1Kqg3P33JT5fdbT2AjeAjR51zqqVjAL/HMkx7Q==} dev: true @@ -5951,29 +5988,12 @@ packages: - supports-color dev: true - /@vitest/expect@1.5.0: - resolution: {integrity: sha512-0pzuCI6KYi2SIC3LQezmxujU9RK/vwC1U9R0rLuGlNGcOuDWxqWKu6nUdFsX9tH1WU0SXtAxToOsEjeUn1s3hA==} - dependencies: - '@vitest/spy': 1.5.0 - '@vitest/utils': 1.5.0 - chai: 4.4.1 - dev: true - /@vitest/expect@1.6.0: resolution: {integrity: sha512-ixEvFVQjycy/oNgHjqsL6AZCDduC+tflRluaHIzKIsdbzkLn2U/iBnVeJwB6HsIjQBdfMR8Z0tRxKUsvFJEeWQ==} dependencies: '@vitest/spy': 1.6.0 '@vitest/utils': 1.6.0 chai: 4.4.1 - dev: false - - /@vitest/runner@1.5.0: - resolution: {integrity: sha512-7HWwdxXP5yDoe7DTpbif9l6ZmDwCzcSIK38kTSIt6CFEpMjX4EpCgT6wUmS0xTXqMI6E/ONmfgRKmaujpabjZQ==} - dependencies: - '@vitest/utils': 1.5.0 - p-limit: 5.0.0 - pathe: 1.1.2 - dev: true /@vitest/runner@1.6.0: resolution: {integrity: sha512-P4xgwPjwesuBiHisAVz/LSSZtDjOTPYZVmNAnpHHSR6ONrf8eCJOFRvUwdHn30F5M1fxhqtl7QZQUk2dprIXAg==} @@ -5981,15 +6001,6 @@ packages: '@vitest/utils': 1.6.0 p-limit: 5.0.0 pathe: 1.1.2 - dev: false - - /@vitest/snapshot@1.5.0: - resolution: {integrity: sha512-qpv3fSEuNrhAO3FpH6YYRdaECnnRjg9VxbhdtPwPRnzSfHVXnNzzrpX4cJxqiwgRMo7uRMWDFBlsBq4Cr+rO3A==} - dependencies: - magic-string: 0.30.10 - pathe: 1.1.2 - pretty-format: 29.7.0 - dev: true /@vitest/snapshot@1.6.0: resolution: {integrity: sha512-+Hx43f8Chus+DCmygqqfetcAZrDJwvTj0ymqjQq4CvmpKFSTVteEOBzCusu1x2tt4OJcvBflyHUE0DZSLgEMtQ==} @@ -5997,28 +6008,11 @@ packages: magic-string: 0.30.10 pathe: 1.1.2 pretty-format: 29.7.0 - dev: false - - /@vitest/spy@1.5.0: - resolution: {integrity: sha512-vu6vi6ew5N5MMHJjD5PoakMRKYdmIrNJmyfkhRpQt5d9Ewhw9nZ5Aqynbi3N61bvk9UvZ5UysMT6ayIrZ8GA9w==} - dependencies: - tinyspy: 2.2.1 - dev: true /@vitest/spy@1.6.0: resolution: {integrity: sha512-leUTap6B/cqi/bQkXUu6bQV5TZPx7pmMBKBQiI0rJA8c3pB56ZsaTbREnF7CJfmvAS4V2cXIBAh/3rVwrrCYgw==} dependencies: tinyspy: 2.2.1 - dev: false - - /@vitest/utils@1.5.0: - resolution: {integrity: sha512-BDU0GNL8MWkRkSRdNFvCUCAVOeHaUlVJ9Tx0TYBZyXaaOTmGtUFObzchCivIBrIwKzvZA7A9sCejVhXM2aY98A==} - dependencies: - diff-sequences: 29.6.3 - estree-walker: 3.0.3 - loupe: 2.3.7 - pretty-format: 29.7.0 - dev: true /@vitest/utils@1.6.0: resolution: {integrity: sha512-21cPiuGMoMZwiOHa2i4LXkMkMkCGzA+MVFV70jRwHo95dL4x/ts5GZhML1QWuy7yfp3WzK3lRvZi3JnXTYqrBw==} @@ -6027,7 +6021,6 @@ packages: estree-walker: 3.0.3 loupe: 2.3.7 pretty-format: 29.7.0 - dev: false /@vitest/web-worker@1.4.0(vitest@1.6.0): resolution: {integrity: sha512-JgLAVtPpF2/AJTI3y79eq8RrKEdK4lFS7gxT9O2gAbke1rbhRqpPoM/acQHWA6RrrX9Jci+Yk+ZQuOGON4D4ZA==} @@ -6189,12 +6182,12 @@ packages: acorn: 7.4.1 dev: true - /acorn-jsx@5.3.2(acorn@8.11.3): + /acorn-jsx@5.3.2(acorn@8.12.0): resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} peerDependencies: acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 dependencies: - acorn: 8.11.3 + acorn: 8.12.0 dev: true /acorn-walk@7.2.0: @@ -6206,6 +6199,12 @@ packages: resolution: {integrity: sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==} engines: {node: '>=0.4.0'} + /acorn-walk@8.3.3: + resolution: {integrity: sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==} + engines: {node: '>=0.4.0'} + dependencies: + acorn: 8.12.0 + /acorn@7.4.1: resolution: {integrity: 
sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==} engines: {node: '>=0.4.0'} @@ -6217,6 +6216,11 @@ packages: engines: {node: '>=0.4.0'} hasBin: true + /acorn@8.12.0: + resolution: {integrity: sha512-RTvkC4w+KNXrM39/lWCUaG0IbRkWdCv7W/IOW9oU6SawyxulvkQy5HQPVTKxEjczcUvapcrw3cFx/60VN/NRNw==} + engines: {node: '>=0.4.0'} + hasBin: true + /address@1.2.2: resolution: {integrity: sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA==} engines: {node: '>= 10.0.0'} @@ -6226,7 +6230,7 @@ packages: resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} engines: {node: '>= 6.0.0'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color @@ -6234,7 +6238,7 @@ packages: resolution: {integrity: sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==} engines: {node: '>= 14'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color @@ -6497,7 +6501,7 @@ packages: /axios@1.3.6: resolution: {integrity: sha512-PEcdkk7JcdPiMDkvM4K6ZBRYq9keuVJsToxm2zQIM70Qqo2WHTdJZMXcG9X+RmRp2VPNUQC8W1RAGbgt6b1yMg==} dependencies: - follow-redirects: 1.15.6 + follow-redirects: 1.15.6(debug@4.3.5) form-data: 4.0.0 proxy-from-env: 1.1.0 transitivePeerDependencies: @@ -6963,7 +6967,7 @@ packages: dependencies: assertion-error: 1.1.0 check-error: 1.0.3 - deep-eql: 4.1.3 + deep-eql: 4.1.4 get-func-name: 2.0.2 loupe: 2.3.7 pathval: 1.1.1 @@ -7574,6 +7578,17 @@ packages: ms: 2.1.2 supports-color: 8.1.1 + /debug@4.3.5: + resolution: {integrity: sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.2 + /decamelize-keys@1.1.1: resolution: {integrity: sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==} engines: {node: '>=0.10.0'} @@ -7623,8 +7638,8 @@ packages: /dedent@0.7.0: resolution: {integrity: sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==} - /deep-eql@4.1.3: - resolution: {integrity: sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==} + /deep-eql@4.1.4: + resolution: {integrity: sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==} engines: {node: '>=6'} dependencies: type-detect: 4.0.8 @@ -7819,7 +7834,7 @@ packages: hasBin: true dependencies: address: 1.2.2 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color dev: true @@ -8070,7 +8085,7 @@ packages: peerDependencies: esbuild: '>=0.12 <1' dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 esbuild: 0.18.20 transitivePeerDependencies: - supports-color @@ -8195,8 +8210,8 @@ packages: resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dependencies: - acorn: 8.11.3 - acorn-jsx: 5.3.2(acorn@8.11.3) + acorn: 8.12.0 + acorn-jsx: 5.3.2(acorn@8.12.0) eslint-visitor-keys: 3.4.3 dev: true @@ -8239,6 +8254,10 @@ packages: resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} 
engines: {node: '>=6'} + /eventemitter3@4.0.7: + resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} + dev: true + /events@3.3.0: resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} engines: {node: '>=0.8.x'} @@ -8621,7 +8640,7 @@ packages: engines: {node: '>=0.4.0'} dev: true - /follow-redirects@1.15.6: + /follow-redirects@1.15.6(debug@4.3.5): resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} engines: {node: '>=4.0'} peerDependencies: @@ -8629,6 +8648,8 @@ packages: peerDependenciesMeta: debug: optional: true + dependencies: + debug: 4.3.5 /for-each@0.3.3: resolution: {integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==} @@ -8859,7 +8880,7 @@ packages: dependencies: basic-ftp: 5.0.5 data-uri-to-buffer: 6.0.2 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 fs-extra: 11.2.0 transitivePeerDependencies: - supports-color @@ -9098,7 +9119,7 @@ packages: source-map: 0.6.1 wordwrap: 1.0.0 optionalDependencies: - uglify-js: 3.17.4 + uglify-js: 3.18.0 dev: true /happy-dom@14.3.10: @@ -9361,7 +9382,7 @@ packages: resolution: {integrity: sha512-Ci5LRufQ8AtrQ1U26AevS8QoMXDOhnAHCJI3eZu1com7mZGHxREmw3dNj85ftpQokQCvak8nI2pnFS8zyM1M+Q==} engines: {node: '>=4.0.0'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color @@ -9371,7 +9392,7 @@ packages: dependencies: '@tootallnate/once': 1.1.2 agent-base: 6.0.2 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color dev: true @@ -9382,7 +9403,7 @@ packages: dependencies: '@tootallnate/once': 2.0.0 agent-base: 6.0.2 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color @@ -9395,12 +9416,37 @@ packages: transitivePeerDependencies: - supports-color + /http-proxy-middleware@3.0.0: + resolution: {integrity: sha512-36AV1fIaI2cWRzHo+rbcxhe3M3jUDCNzc4D5zRl57sEWRAxdXYtw7FSQKYY6PDKssiAKjLYypbssHk+xs/kMXw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + '@types/http-proxy': 1.17.14 + debug: 4.3.5 + http-proxy: 1.18.1(debug@4.3.5) + is-glob: 4.0.3 + is-plain-obj: 3.0.0 + micromatch: 4.0.5 + transitivePeerDependencies: + - supports-color + dev: true + + /http-proxy@1.18.1(debug@4.3.5): + resolution: {integrity: sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==} + engines: {node: '>=8.0.0'} + dependencies: + eventemitter3: 4.0.7 + follow-redirects: 1.15.6(debug@4.3.5) + requires-port: 1.0.0 + transitivePeerDependencies: + - debug + dev: true + /https-proxy-agent@5.0.1: resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} engines: {node: '>= 6'} dependencies: agent-base: 6.0.2 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color @@ -9805,6 +9851,11 @@ packages: engines: {node: '>=8'} dev: true + /is-plain-obj@3.0.0: + resolution: {integrity: sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==} + engines: {node: '>=10'} + dev: true + /is-plain-obj@4.1.0: resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} engines: {node: '>=12'} @@ -9916,6 +9967,11 @@ packages: /isexe@2.0.0: 
resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + /isexe@3.1.1: + resolution: {integrity: sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==} + engines: {node: '>=16'} + dev: true + /isobject@2.1.0: resolution: {integrity: sha512-+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA==} engines: {node: '>=0.10.0'} @@ -10277,7 +10333,7 @@ packages: resolution: {integrity: sha512-rm71jaA/P+6HeCpoRhmCv8KVBIi0tfGuO/dMKicbQnQW/YJntJ6MnnspkodoA4QstMVEZArsCphmd0bJEtoMjQ==} engines: {node: '>= 7.6.0'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 koa-compose: 4.1.0 transitivePeerDependencies: - supports-color @@ -10287,7 +10343,7 @@ packages: resolution: {integrity: sha512-tmcyQ/wXXuxpDxyNXv5yNNkdAMdFRqwtegBXUaowiQzUKqJehttS0x2j0eOZDQAyloAth5w6wwBImnFzkUz3pQ==} engines: {node: '>= 8'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 http-errors: 1.8.1 resolve-path: 1.4.0 transitivePeerDependencies: @@ -10313,7 +10369,7 @@ packages: content-disposition: 0.5.4 content-type: 1.0.5 cookies: 0.9.1 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 delegates: 1.0.0 depd: 2.0.0 destroy: 1.2.0 @@ -10400,8 +10456,8 @@ packages: resolution: {integrity: sha512-ok6z3qlYyCDS4ZEU27HaU6x/xZa9Whf8jD4ptH5UZTQYZVYeb9bnZ3ojVhiJNLiXK1Hfc0GNbLXcmZ5plLDDBg==} engines: {node: '>=14'} dependencies: - mlly: 1.6.1 - pkg-types: 1.1.0 + mlly: 1.7.1 + pkg-types: 1.1.1 /locate-path@3.0.0: resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==} @@ -11183,7 +11239,7 @@ packages: resolution: {integrity: sha512-o/sd0nMof8kYff+TqcDx3VSrgBTcZpSvYcAHIfHhv5VAuNmisCxjhx6YmxS8PFEpb9z5WKWKPdzf0jM23ro3RQ==} dependencies: '@types/debug': 4.1.12 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 decode-named-character-reference: 1.0.2 devlop: 1.1.0 micromark-core-commonmark: 2.0.1 @@ -11400,12 +11456,12 @@ packages: hasBin: true dev: false - /mlly@1.6.1: - resolution: {integrity: sha512-vLgaHvaeunuOXHSmEbZ9izxPx3USsk8KCQ8iC+aTlp5sKRSoZvwhHh5L9VbKSaVC6sJDqbyohIS76E2VmHIPAA==} + /mlly@1.7.1: + resolution: {integrity: sha512-rrVRZRELyQzrIUAVMHxP97kv+G786pHmOKzuFII8zDYahFBS7qnHh2AlYSl1GAHhaMPCz6/oHjVMcfFYgFYHgA==} dependencies: - acorn: 8.11.3 + acorn: 8.12.0 pathe: 1.1.2 - pkg-types: 1.1.0 + pkg-types: 1.1.1 ufo: 1.5.3 /mocha@10.2.0: @@ -11551,7 +11607,7 @@ packages: resolution: {integrity: sha512-yAyTfdeNJGGBFxWdzSKCBYxs5FxLbCg5X5Q4ets974hcQzG1+qCxvIyOo4j2Ry6MUlhWVMX4OoYDefAIIwupjw==} engines: {node: '>= 10.13'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 json-stringify-safe: 5.0.1 propagate: 2.0.1 transitivePeerDependencies: @@ -11868,7 +11924,7 @@ packages: dependencies: '@vscode/vsce': 2.22.0 commander: 6.2.1 - follow-redirects: 1.15.6 + follow-redirects: 1.15.6(debug@4.3.5) is-ci: 2.0.0 leven: 3.1.0 semver: 7.6.0 @@ -11940,7 +11996,7 @@ packages: dependencies: '@tootallnate/quickjs-emscripten': 0.23.0 agent-base: 7.1.1 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 get-uri: 6.0.3 http-proxy-agent: 7.0.2 https-proxy-agent: 7.0.4 @@ -12164,11 +12220,11 @@ packages: find-up: 5.0.0 dev: true - /pkg-types@1.1.0: - resolution: {integrity: sha512-/RpmvKdxKf8uILTtoOhAgf30wYbP2Qw+L9p3Rvshx1JZVX+XQNZQFjlbmGHEGIm4CkVPlSn+NXmIM8+9oWQaSA==} + /pkg-types@1.1.1: + resolution: {integrity: 
sha512-ko14TjmDuQJ14zsotODv7dBlwxKhUKQEhuhmbqo1uCi9BB0Z2alo/wAXg6q1dTR5TyuqYyWhjtfe/Tsh+X28jQ==} dependencies: confbox: 0.1.7 - mlly: 1.6.1 + mlly: 1.7.1 pathe: 1.1.2 /playwright-core@1.43.1: @@ -13069,7 +13125,7 @@ packages: resolution: {integrity: sha512-nQFEv9gRw6SJAwWD2LrL0NmQvAcO7FBwJbwmr2ttPAacfy0xuiOjE5zt+zM4xDyuyvUaxBi/9gb2SoCyNEVJcw==} engines: {node: '>=8.6.0'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 module-details-from-path: 1.0.3 resolve: 1.22.8 transitivePeerDependencies: @@ -13133,7 +13189,7 @@ packages: resolution: {integrity: sha512-wfI3pk7EE80lCIXprqh7ym48IHYdwmAAzESdbU8Q9l7pnRCk9LEhpbOTNKjz6FARLm/Bl5m+4F0ABxOkYUujSQ==} engines: {node: '>=12'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 extend: 3.0.2 transitivePeerDependencies: - supports-color @@ -13536,7 +13592,7 @@ packages: engines: {node: '>= 10'} dependencies: agent-base: 6.0.2 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 socks: 2.8.3 transitivePeerDependencies: - supports-color @@ -13558,7 +13614,7 @@ packages: engines: {node: '>= 14'} dependencies: agent-base: 7.1.1 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 socks: 2.8.3 transitivePeerDependencies: - supports-color @@ -14519,6 +14575,14 @@ packages: requiresBuild: true dev: true + /uglify-js@3.18.0: + resolution: {integrity: sha512-SyVVbcNBCk0dzr9XL/R/ySrmYf0s372K6/hFklzgcp2lBFyXtw4I7BOdDjlLhE1aVqaI/SHWXWmYdlZxuyF38A==} + engines: {node: '>=0.8.0'} + hasBin: true + requiresBuild: true + dev: true + optional: true + /underscore@1.13.6: resolution: {integrity: sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==} dev: true @@ -14697,7 +14761,7 @@ packages: resolution: {integrity: sha512-d6Mhq8RJeGA8UfKCu54Um4lFA0eSaRa3XxdAJg8tIdxbu1ubW0hBCZUL7yI2uGyYCRndvbK8FLHzqy2XKfeMsg==} engines: {node: '>=14.0.0'} dependencies: - acorn: 8.11.3 + acorn: 8.12.0 chokidar: 3.6.0 webpack-sources: 3.2.3 webpack-virtual-modules: 0.6.1 @@ -14873,34 +14937,13 @@ packages: vfile-message: 4.0.2 dev: false - /vite-node@1.5.0(@types/node@20.12.7): - resolution: {integrity: sha512-tV8h6gMj6vPzVCa7l+VGq9lwoJjW8Y79vst8QZZGiuRAfijU+EEWuc0kFpmndQrWhMMhet1jdSF+40KSZUqIIw==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - dependencies: - cac: 6.7.14 - debug: 4.3.4(supports-color@8.1.1) - pathe: 1.1.2 - picocolors: 1.0.1 - vite: 5.2.9(@types/node@20.12.7) - transitivePeerDependencies: - - '@types/node' - - less - - lightningcss - - sass - - stylus - - sugarss - - supports-color - - terser - dev: true - /vite-node@1.6.0(@types/node@20.12.7): resolution: {integrity: sha512-de6HJgzC+TFzOu0NTC4RAIsyf/DY/ibWDYQUcuEA84EMHhcefTUGkjFHKKEJhQN4A+6I0u++kr3l36ZF2d7XRw==} engines: {node: ^18.0.0 || >=20.0.0} hasBin: true dependencies: cac: 6.7.14 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 pathe: 1.1.2 picocolors: 1.0.1 vite: 5.2.11(@types/node@20.12.7) @@ -14913,7 +14956,6 @@ packages: - sugarss - supports-color - terser - dev: false /vite-plugin-svgr@4.2.0(typescript@5.4.2)(vite@5.2.11): resolution: {integrity: sha512-SC7+FfVtNQk7So0XMjrrtLAbEC8qjFPifyD7+fs/E6aaNdVde6umlVVh0QuwDLdOMu7vp5RiGFsB70nj5yo0XA==} @@ -15001,64 +15043,6 @@ packages: fsevents: 2.3.3 dev: true - /vitest@1.5.0(@types/node@20.12.7)(happy-dom@14.3.10)(jsdom@22.1.0): - resolution: {integrity: sha512-d8UKgR0m2kjdxDWX6911uwxout6GHS0XaGH1cksSIVVG8kRlE7G7aBw7myKQCvDI5dT4j7ZMa+l706BIORMDLw==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - peerDependencies: - '@edge-runtime/vm': '*' - '@types/node': ^18.0.0 || 
>=20.0.0 - '@vitest/browser': 1.5.0 - '@vitest/ui': 1.5.0 - happy-dom: '*' - jsdom: '*' - peerDependenciesMeta: - '@edge-runtime/vm': - optional: true - '@types/node': - optional: true - '@vitest/browser': - optional: true - '@vitest/ui': - optional: true - happy-dom: - optional: true - jsdom: - optional: true - dependencies: - '@types/node': 20.12.7 - '@vitest/expect': 1.5.0 - '@vitest/runner': 1.5.0 - '@vitest/snapshot': 1.5.0 - '@vitest/spy': 1.5.0 - '@vitest/utils': 1.5.0 - acorn-walk: 8.3.2 - chai: 4.4.1 - debug: 4.3.4(supports-color@8.1.1) - execa: 8.0.1 - happy-dom: 14.3.10 - jsdom: 22.1.0 - local-pkg: 0.5.0 - magic-string: 0.30.10 - pathe: 1.1.2 - picocolors: 1.0.0 - std-env: 3.7.0 - strip-literal: 2.1.0 - tinybench: 2.8.0 - tinypool: 0.8.4 - vite: 5.2.9(@types/node@20.12.7) - vite-node: 1.5.0(@types/node@20.12.7) - why-is-node-running: 2.2.2 - transitivePeerDependencies: - - less - - lightningcss - - sass - - stylus - - sugarss - - supports-color - - terser - dev: true - /vitest@1.6.0(@types/node@20.12.7)(happy-dom@14.3.10)(jsdom@22.1.0): resolution: {integrity: sha512-H5r/dN06swuFnzNFhq/dnz37bPXnq8xB2xB5JOVk8K09rUtoeNN+LHWkoQ0A/i3hvbUKKcCei9KpbxqHMLhLLA==} engines: {node: ^18.0.0 || >=20.0.0} @@ -15090,9 +15074,9 @@ packages: '@vitest/snapshot': 1.6.0 '@vitest/spy': 1.6.0 '@vitest/utils': 1.6.0 - acorn-walk: 8.3.2 + acorn-walk: 8.3.3 chai: 4.4.1 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 execa: 8.0.1 happy-dom: 14.3.10 jsdom: 22.1.0 @@ -15115,7 +15099,6 @@ packages: - sugarss - supports-color - terser - dev: false /vlq@0.2.3: resolution: {integrity: sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==} @@ -15276,6 +15259,14 @@ packages: dependencies: isexe: 2.0.0 + /which@4.0.0: + resolution: {integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==} + engines: {node: ^16.13.0 || >=18.0.0} + hasBin: true + dependencies: + isexe: 3.1.1 + dev: true + /why-is-node-running@2.2.2: resolution: {integrity: sha512-6tSwToZxTOcotxHeA+qGCq1mVzKR3CwcJGmVcY+QE8SHy6TnpFnh8PAvPNHYr7EcuVeG0QSMxtYCuO1ta/G/oA==} engines: {node: '>=8'} @@ -15580,6 +15571,10 @@ packages: resolution: {integrity: sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==} dev: true + /zod@3.23.8: + resolution: {integrity: sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==} + dev: true + /zwitch@2.0.4: resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} dev: false diff --git a/vscode/.gitignore b/vscode/.gitignore index 261d34db33b7..b6a7aeae3dc0 100644 --- a/vscode/.gitignore +++ b/vscode/.gitignore @@ -3,6 +3,7 @@ out/ .vscode-test/ dist/ .vscode-test-web/ +.test-reports resources/wasm/ GITHUB_CHANGELOG.md walkthroughs/cody_tutorial.py diff --git a/vscode/e2e/README.md b/vscode/e2e/README.md new file mode 100644 index 000000000000..59ac888b31e4 --- /dev/null +++ b/vscode/e2e/README.md @@ -0,0 +1,6 @@ +TODO: This will be a nice guide + +- `pnpm run test:e2e2:run --ui` to view trace UI locally +- How to update network recordings +- How to record a test + - Issue tests diff --git a/vscode/e2e/TODO.md b/vscode/e2e/TODO.md new file mode 100644 index 000000000000..915bfa1e8840 --- /dev/null +++ b/vscode/e2e/TODO.md @@ -0,0 +1,6 @@ +- [ ] Fail test on proxy failure +- [ ] Credentials loading +- [ ] Configurable endpoint proxies +- [ ] Migrate existing e2e tests +- [ ] 
Fixutres repo +- [ ] Wait for Cody progress-bars diff --git a/vscode/e2e/example.test.ts b/vscode/e2e/example.test.ts new file mode 100644 index 000000000000..470e38991da3 --- /dev/null +++ b/vscode/e2e/example.test.ts @@ -0,0 +1,57 @@ +import { expect } from '@playwright/test' +import { fixture as test, uix } from './utils/vscody' + +test.describe('Demonstrations', () => { + test.use({ + templateWorkspaceDir: 'test/fixtures/workspace', + }) + test('Show off v2 features', async ({ + page, + sourcegraphMitM, + vscodeUI, + polly, + executeCommand, + workspaceDir, + }) => { + polly.server.host(sourcegraphMitM.target, () => { + polly.server + .post('/.api/graphql') + .filter(req => 'RecordTelemetryEvents' in req.query) + .intercept((req, res, interceptor) => { + console.log('Custom interceptor') + res.sendStatus(500) + }) + }) + await uix.workspace.modifySettings( + existing => ({ ...existing, 'workbench.colorTheme': 'Default Light Modern' }), + { workspaceDir } + ) + await uix.vscode.startSession({ page, vscodeUI, executeCommand, workspaceDir }) + await uix.cody.waitForStartup() + + await executeCommand('workbench.action.closeAllEditors') + await executeCommand('workbench.action.showRuntimeExtensions') + + await page.click('[aria-label="Cody"]') + + await executeCommand('workbench.explorer.fileView.focus') + + await page.click('[aria-label="Cody"]') + + const [signInView, ...otherWebviews] = await uix.cody.WebView.all({ page }, { atLeast: 1 }) + + expect(signInView).toBeTruthy() + expect(otherWebviews).toHaveLength(0) + + await signInView.waitUntilReady() + await expect(signInView.wrapper).toBeVisible() + + await expect( + signInView.content.getByRole('button', { name: 'Sign In to Your Enterprise Instance' }) + ).toBeVisible() + }) + + test('also works', async ({ page, sourcegraphMitM, vscodeUI, executeCommand }) => { + await uix.cody.dummy() + }) +}) diff --git a/vscode/e2e/issues/CODY-2392.test.ts b/vscode/e2e/issues/CODY-2392.test.ts new file mode 100644 index 000000000000..e5d0a8c08cb8 --- /dev/null +++ b/vscode/e2e/issues/CODY-2392.test.ts @@ -0,0 +1,78 @@ +// // CTX(linear-issue): https://linear.app/sourcegraph/issue/CODY-2392 +import { expect } from '@playwright/test' +import { fixture as test } from '../utils/vscody' + +test.fixme('CODY-2392', () => { + expect(true).toBeFalsy() + // import { expect } from '@playwright/test' + // import { + // chatMessageRows, + // createEmptyChatPanel, + // disableNotifications, + // focusSidebar, + // openFileInEditorTab, + // selectLineRangeInEditorTab, + // } from '../../e2e/common' + // import { + // type ExpectedEvents, + // type ExpectedV2Events, + // type TestConfiguration, + // executeCommandInPalette, + // test, + // } from '../../e2e/helpers' + + // test.extend({ + // expectedEvents: [], + // expectedV2Events: [], + // preAuthenticate: true, + // })('@issue [CODY-2392](https://linear.app/sourcegraph/issue/CODY-2392)', async ({ page }) => { + // await disableNotifications(page) + + // //open a file + // await openFileInEditorTab(page, 'buzz.ts') + // await focusSidebar(page) + // const [chatPanel, lastChatInput, firstChatInput, chatInputs] = await createEmptyChatPanel(page) + // await firstChatInput.fill('show me a code snippet') + // await firstChatInput.press('Enter') + + // // wait for assistant response + // const messageRows = chatMessageRows(chatPanel) + // const assistantRow = messageRows.nth(1) + // await expect(assistantRow).toContainText('Here is a code snippet:') + + // // we now start editing the original message + // await 
firstChatInput.click() + // //now write some text + // await firstChatInput.fill('I want to include some context') + // await selectLineRangeInEditorTab(page, 1, 10) + // await executeCommandInPalette(page, 'Cody: Add Selection to Cody Chat') + + // // we now expect the first input to contain the selected context + // // the last input should still be empty + // const lastChatInputText = await lastChatInput.textContent() + // await expect(lastChatInput).toBeEmpty() + // await expect(firstChatInput).toContainText('@buzz.ts:1-10') + // }) +}) + +//TODO: Make a template +/** + * // TODO: update this file-level comment +// CTX(linear): https://linear.app/sourcegraph/issue/CODY-1234 + +import { disableNotifications } from '../../e2e/common' +import type { TestConfiguration } from '../../e2e/helpers' +import { test } from '../../e2e/helpers' + +// TODO: add a .only() to actually run this test +test.extend({ + expectedEvents: [], + expectedV2Events: [], + preAuthenticate: true, +})('@issue [CODY-1234](https://linear.app/sourcegraph/issue/CODY-1234)', async ({ page }) => { + // TODO: The test name should include the @issue tag and a markdown link to the issue + await disableNotifications(page) + //do your worst +}) + + */ diff --git a/vscode/e2e/issues/README.md b/vscode/e2e/issues/README.md new file mode 100644 index 000000000000..73de6416c774 --- /dev/null +++ b/vscode/e2e/issues/README.md @@ -0,0 +1,28 @@ +# Issue Tests + +This as a "low-threshold staging area" to put tests that can help replicate or diagnose a problem. + +These tests don't run as part of CI. Instead, the goal is to make it easier for anyone to contribute even rough or partial test as part of every bug report. + +Doing so will make diagnosing and verifying the results a lot easier for everyone. + +## Rules: +- Ideally test files are named with the Linear/Github Issue ID to make it easy to find them or pull in additional context. +- Only tests explicitly marked with `only()` should run. Issue tests are by definition very tied to the specific issue someone is trying to diagnose, so running other tests would just be noise. This should already be if you extend the base test for the specific type. See [e2e/template.test.ts](./e2e/template.test.ts) +- (Optional) I'm hoping to do [some experiments soon](#sidenote-openctx-experiment). So if you can please: + - start each test with a `//CTX(linear-issue): ` comment + - use the `@issue` tag and and a markdown link to the issue in the test title. + + + + +### Sidenote: OpenCtx Experiment: + +I'd like to see how we can use Cody to assist with replicating issues from bug-reports. So it would be really helpful if each Issue test created contains context on what issue it was trying to demonstrate / replicate. + +There is some rudimentary OpenCtx support for Linear issues [in the works](https://github.com/sourcegraph/openctx/pull/154), providing both additional context to the UI & the AI. So for now, if nothing else, it should at least make these tests a bit easier to understand. Especially as test comments and issue comments might drift. 
+ +## TODO: +- [ ] Make sure we automatically clean up tests for issues marked as closed +- [ ] Automatically tag issues in Linear that have corresponding tests +- [ ] CI fast-path to limit the amount of needless tests to run when just trying to merge a Test-Only PR diff --git a/vscode/e2e/issues/ignore.test.ts b/vscode/e2e/issues/ignore.test.ts new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/vscode/e2e/utils/helpers.ts b/vscode/e2e/utils/helpers.ts new file mode 100644 index 000000000000..509ef66b6397 --- /dev/null +++ b/vscode/e2e/utils/helpers.ts @@ -0,0 +1,47 @@ +import type { TestInfo } from '@playwright/test' + +/** + * Stretches the test with at most the `max` amount of ms but never more than + * needed to finish the operation. This way you can effectively nullify the time + * a certain operation takes. + */ +export async function stretchTimeout( + fn: () => Promise, + { + max, + testInfo, + }: { + max: number + testInfo: TestInfo + } +): Promise { + // Warning: For some reason Playwright doesn't report the modified timeout + // correctly so we can't rely on it being updated after we call setTimeout + const timeout = testInfo.timeout + if (timeout === 0) { + return await fn() + } + testInfo.setTimeout(timeout + max) + const startTime = Date.now() + try { + return await fn() + } finally { + const totalTime = Date.now() - startTime + testInfo.setTimeout(timeout + totalTime) + } +} + +export async function retry(fn: () => Promise, retries = 5, delay = 1000): Promise { + for (let i = 0; i < retries; i++) { + try { + return await fn() + } catch (err) { + if (i < retries - 1) { + await new Promise(res => setTimeout(res, delay)) + } else { + throw err + } + } + } + throw new Error('Could not execute retryable function') +} diff --git a/vscode/e2e/utils/symlink-extensions.setup.ts b/vscode/e2e/utils/symlink-extensions.setup.ts new file mode 100644 index 000000000000..aa7a7771be3b --- /dev/null +++ b/vscode/e2e/utils/symlink-extensions.setup.ts @@ -0,0 +1,73 @@ +import fs from 'node:fs/promises' +import path from 'node:path' +import { test as setup } from '@playwright/test' + +//TODO: make options with nice descriptions and validation +export type SymlinkExtensions = + | { + vscodeExtensionCacheDir: string + symlinkExtensions: [string, ...string[]] + } + | { + vscodeExtensionCacheDir?: unknown + symlinkExtensions?: [] | null // these paths will get symlinked to the shared extension cache as pre-installed extensions + } + +// biome-ignore lint/complexity/noBannedTypes: +setup.extend<{}, SymlinkExtensions>({ + vscodeExtensionCacheDir: [undefined, { scope: 'worker', option: true }], + symlinkExtensions: [undefined, { scope: 'worker', option: true }], +})('symlink extensions', async ({ vscodeExtensionCacheDir, symlinkExtensions }) => { + if (typeof vscodeExtensionCacheDir === 'string') { + await fs.mkdir(vscodeExtensionCacheDir, { recursive: true }) + } + if (!symlinkExtensions || symlinkExtensions.length === 0) { + return + } + if (typeof vscodeExtensionCacheDir !== 'string') { + throw new TypeError('vscodeTmpDir is required to symlink extensions') + } + for (const extension of symlinkExtensions) { + const absoluteDir = path.resolve(process.cwd(), extension) + //read the package.json as json + const packageJsonPath = await fs.readFile(path.join(absoluteDir, 'package.json')) + const packageJson = JSON.parse(packageJsonPath.toString()) + const { publisher, name, version } = packageJson + if (!publisher || !name || !version) { + throw new TypeError( + `package.json for extension 
${extension} must have publisher, name, and version` + ) + } + try { + // we look for any extensions with that same name (because they could be an older version) + const extensions = await fs.readdir(vscodeExtensionCacheDir) + const removePromises = [ + fs.unlink(path.join(vscodeExtensionCacheDir, 'extensions.json')).catch(() => void 0), + fs.unlink(path.join(vscodeExtensionCacheDir, '.obsolete')).catch(() => void 0), + ] + for (const extension of extensions) { + if (path.basename(extension).startsWith(`${publisher}.${name}-`)) { + // check if this is a symlink or a directory + const extensionPath = path.join(vscodeExtensionCacheDir, extension) + console.log(extensionPath) + removePromises.push( + fs.lstat(extensionPath).then(async stat => { + if (stat.isSymbolicLink()) { + await fs.unlink(extensionPath) + } + await fs.rm(extensionPath, { force: true, recursive: true }) + }) + ) + } + } + await Promise.all(removePromises) + } catch { + //ignore + } + await fs.symlink( + absoluteDir, + path.join(vscodeExtensionCacheDir, `${publisher}.${name}-${version}`), + 'dir' + ) + } +}) diff --git a/vscode/e2e/utils/uix.test.ts b/vscode/e2e/utils/uix.test.ts new file mode 100644 index 000000000000..cdb6cc84f65c --- /dev/null +++ b/vscode/e2e/utils/uix.test.ts @@ -0,0 +1,20 @@ +import { expect } from '@playwright/test' +import { fixture as test, uix } from './vscody' + +test.describe('UIX', () => { + test.use({ + templateWorkspaceDir: 'test/fixtures/workspace', + }) + test('VSCode Sidebar', async ({ page, vscodeUI, executeCommand, workspaceDir }) => { + await uix.vscode.startSession({ page, vscodeUI, executeCommand, workspaceDir }) + const sidebar = uix.vscode.Sidebar.get({ page }) + + await executeCommand('workbench.view.explorer') + expect(await sidebar.isVisible()).toBe(true) + expect(await sidebar.activeView).toBe('workbench.view.explorer') + await executeCommand('workbench.action.closeSidebar') + expect(await sidebar.isVisible()).toBe(false) + await executeCommand('workbench.view.extension.cody') + expect(await sidebar.activeView).toBe(uix.vscode.Sidebar.CODY_VIEW_ID) + }) +}) diff --git a/vscode/e2e/utils/vscody/fixture.ts b/vscode/e2e/utils/vscody/fixture.ts new file mode 100644 index 000000000000..01ad142af489 --- /dev/null +++ b/vscode/e2e/utils/vscody/fixture.ts @@ -0,0 +1,654 @@ +import { exec as _exec, spawn } from 'node:child_process' +import type { Dirent } from 'node:fs' +import fs from 'node:fs/promises' +import 'node:http' +import 'node:https' +import type { AddressInfo } from 'node:net' +import os from 'node:os' +import path from 'node:path' +import { promisify } from 'node:util' + +import pspawn from '@npmcli/promise-spawn' +import { test as _test, expect, mergeTests } from '@playwright/test' +import NodeHttpAdapter from '@pollyjs/adapter-node-http' +import { type EXPIRY_STRATEGY, type MODE, Polly } from '@pollyjs/core' +import type { ArrayContainsAll } from '@sourcegraph/cody-shared/src/utils' +import { ConsoleReporter, type ProgressReport, ProgressReportStage } from '@vscode/test-electron' +import { downloadAndUnzipVSCode } from '@vscode/test-electron/out/download' +import chokidar from 'chokidar' +import express from 'express' +import { copy as copyExt } from 'fs-extra' +import { createProxyMiddleware } from 'http-proxy-middleware' +import zod from 'zod' + +import { CodyPersister } from '../../../src/testutils/CodyPersister' +import { defaultMatchRequestsBy } from '../../../src/testutils/polly' +import { retry, stretchTimeout } from '../helpers' + +const exec = promisify(_exec) + 
+export type Directory = string + +const DOWNLOAD_GRACE_TIME = 5 * 60 * 1000 //5 minutes + +// TODO(rnauta): finish all variable descriptions +const workerOptionsSchema = zod.object({ + repoRootDir: zod + .string() + .describe( + 'DEPRECATED: The .git root of this project. Might still get used for some path defaults so must be set' + ), + vscodeExtensionCacheDir: zod.string(), + vscodeTmpDir: zod.string(), + binaryTmpDir: zod.string(), + recordingDir: zod.string(), +}) + +const testOptionsSchema = zod.object({ + vscodeVersion: zod.string().default('stable'), + vscodeExtensions: zod.array(zod.string()).default([]), + templateWorkspaceDir: zod.string(), + recordingMode: zod.enum([ + 'passthrough', + 'record', + 'replay', + 'stopped', + ] satisfies ArrayContainsAll), + recordIfMissing: zod.boolean(), + keepUnusedRecordings: zod.boolean().default(true), + recordingExpiryStrategy: zod + .enum(['record', 'warn', 'error'] satisfies ArrayContainsAll) + .default('record'), + recordingExpiresIn: zod.string().nullable().default(null), +}) + +export type TestOptions = zod.infer +export type WorkerOptions = zod.infer + +export interface WorkerContext { + validWorkerOptions: WorkerOptions +} +export interface TestContext { + vscodeUI: { + url: string + token: string + } + serverRootDir: Directory + validOptions: TestOptions & WorkerOptions + polly: Polly + sourcegraphMitM: { endpoint: string; target: string } + workspaceDir: Directory + //TODO(rnauta): Make the typing inferred from VSCode directly + executeCommand: (commandId: string, ...args: any[]) => Promise +} + +function schemaOptions, S extends 'worker' | 'test'>(o: T, s: S) { + return Object.fromEntries( + Object.keys(o.shape).map(key => [key, [undefined, { scope: s, option: true }]]) + ) as unknown as { [k in keyof T]: [T[k], { scope: S; option: true }] } +} + +// We split out the options fixutre from the implementation fixture so that in +// the implementaiton fixture we don't accidentally use any options directly, +// instead having to use validated options +const optionsFixture: ReturnType< + typeof _test.extend, Pick> +> = _test.extend< + TestOptions & Pick, + WorkerOptions & Pick +>({ + ...schemaOptions(workerOptionsSchema, 'worker'), + ...schemaOptions(testOptionsSchema, 'test'), + validWorkerOptions: [ + async ( + { repoRootDir, binaryTmpDir, recordingDir, vscodeTmpDir, vscodeExtensionCacheDir }, + use + ) => { + const validOptionsWithDefaults = await workerOptionsSchema.safeParseAsync( + { + repoRootDir, + binaryTmpDir, + recordingDir, + vscodeTmpDir, + vscodeExtensionCacheDir, + } satisfies { [key in keyof WorkerOptions]-?: WorkerOptions[key] }, + {} + ) + if (!validOptionsWithDefaults.success) { + throw new TypeError( + `Invalid worker arguments:\n${JSON.stringify( + validOptionsWithDefaults.error.flatten().fieldErrors, + null, + 2 + )}` + ) + } + use(validOptionsWithDefaults.data) + }, + { scope: 'worker', auto: true }, + ], + validOptions: [ + async ( + { + vscodeExtensions, + vscodeVersion, + templateWorkspaceDir, + recordIfMissing, + recordingMode, + keepUnusedRecordings, + recordingExpiresIn, + recordingExpiryStrategy, + validWorkerOptions, + }, + use + ) => { + const validOptionsWithDefaults = await testOptionsSchema.safeParseAsync( + { + vscodeExtensions, + vscodeVersion, + keepUnusedRecordings, + recordingExpiresIn, + recordingExpiryStrategy, + templateWorkspaceDir, + recordIfMissing, + recordingMode, + } satisfies { [key in keyof TestOptions]-?: TestOptions[key] }, + {} + ) + if (!validOptionsWithDefaults.success) { + throw 
new TypeError( + `Invalid test arguments:\n${JSON.stringify( + validOptionsWithDefaults.error.flatten().fieldErrors, + null, + 2 + )}` + ) + } + use({ ...validOptionsWithDefaults.data, ...validWorkerOptions }) + }, + { scope: 'test', auto: true }, + ], +}) + +const implFixture = _test.extend({ + serverRootDir: [ + // biome-ignore lint/correctness/noEmptyPattern: + async ({}, use, testInfo) => { + const dir = await fs.mkdtemp(path.resolve(os.tmpdir(), 'test-vscode-server-')) + await use(dir) + const attachmentPromises = [] + const logDir = path.join(dir, 'data/logs') + + for (const file of await getFilesRecursive(logDir)) { + const filePath = path.join(file.path, file.name) + const relativePath = path.relative(logDir, filePath) + attachmentPromises.push( + testInfo.attach(relativePath, { + path: filePath, + }) + ) + } + if (attachmentPromises.length > 0) { + await Promise.allSettled(attachmentPromises) + } + await retry(() => fs.rm(dir, { force: true, recursive: true }), 20, 500) + }, + { scope: 'test' }, + ], + workspaceDir: [ + async ({ validOptions }, use) => { + const dir = await fs.mkdtemp(path.resolve(os.tmpdir(), 'test-workspace-')) + + await copyExt(path.resolve(process.cwd(), validOptions.templateWorkspaceDir), dir, { + overwrite: true, + preserveTimestamps: true, + dereference: true, // we can't risk the test modifying the symlink + }) + await use(dir) + await retry(() => fs.rm(dir, { force: true, recursive: true }), 20, 500) + }, + { + scope: 'test', + }, + ], + //#region Polly & Proxies + sourcegraphMitM: [ + // biome-ignore lint/correctness/noEmptyPattern: + async ({}, use) => { + const app = express() + //TODO: Credentials & Configuration TODO: I can see a use-case where + //you can switch endpoints dynamically. For instance wanting to try + //signing out of one and then signing into another. You could + //probably do that already using env variables in the workspace + //config but it's not a super smooth experience yet. If you run into + //this please give me a ping so we can brainstorm. + const target = 'https://sourcegraph.com' + const middleware = createProxyMiddleware({ + target, + changeOrigin: true, + }) + app.use(middleware) + let server: ReturnType = null as any + const serverInfo = await new Promise((resolve, reject) => { + server = app.listen(0, '127.0.0.1', () => { + const address = server.address() + if (address === null || typeof address === 'string') { + reject('address is not a valid object') + } else { + resolve(address) + } + }) + }) + + await use({ + endpoint: `http://${serverInfo.address}:${serverInfo.port}`, + target, + }) + + server.closeAllConnections() + await new Promise(resolve => server.close(resolve)) + }, + { scope: 'test' }, + ], + polly: [ + async ({ validOptions, sourcegraphMitM }, use, testInfo) => { + const polly = new Polly(`${testInfo.project}`, { + flushRequestsOnStop: true, + recordIfMissing: validOptions.recordIfMissing ?? validOptions.recordingMode === 'record', + mode: validOptions.recordingMode, + persister: 'fs', + adapters: ['node-http'], + recordFailedRequests: true, + matchRequestsBy: defaultMatchRequestsBy, + persisterOptions: { + keepUnusedRequests: validOptions.keepUnusedRecordings ?? 
true, + fs: { + recordingsDir: path.resolve(process.cwd(), validOptions.recordingDir), + }, + }, + }) + + polly.server + .any() + .filter(req => !req.url.startsWith(sourcegraphMitM.target)) + .intercept((req, res, interceptor) => { + interceptor.stopPropagation() + interceptor.passthrough() + }) + polly.server.host(sourcegraphMitM.target, () => { + polly.server + .post('/.api/graphql') + .filter(req => 'RecordTelemetryEvents' in req.query) + .on('request', (req, inter) => { + //TODO(rnauta): Store telemetry & allow for custom validation (if needed) + }) + + // NOTE: this might seem counter intuitive that the user could + // override these functions given that PollyJS calls them in the + // order they were defined. However, these intercept handlers + // don't work like normal middleware in that it's the first to + // respond. Instead if you call sendStatus(400) in a subsequent + // handler you change the resoponse. So although handlers are + // called in the order they are defined, it's the last handler + // to modify the response that actually dictates the response. + // This took me ages to figure out, and feels like a terrible + // API...why they didn't just go with normal well-understood + // middleware API 🤷‍♂️ + polly.server + .post('/.api/graphql') + .filter( + req => 'RecordTelemetryEvents' in req.query || 'LogEventMutation' in req.query + ) + .intercept((req, res, interceptor) => { + res.sendStatus(200) + }) + + polly.server.get('/healthz').intercept((req, res, interceptor) => { + res.sendStatus(200) + }) + }) + + await use(polly) + await polly.flush() + await polly.stop() + }, + { scope: 'test' }, + ], + //#region vscode agent + vscodeUI: [ + async ({ validOptions, serverRootDir, sourcegraphMitM, page, context }, use, testInfo) => { + const executableDir = path.resolve(process.cwd(), validOptions.vscodeTmpDir) + await fs.mkdir(executableDir, { recursive: true }) + + // We nullify the time it takes to download VSCode as it can vary wildly! + const electronExecutable = await stretchTimeout( + async () => downloadOrWaitForVSCode({ validOptions, executableDir }), + { + max: DOWNLOAD_GRACE_TIME, + testInfo, + } + ) + + const cliExecutableDir = path.resolve( + path.dirname(electronExecutable), + '../Resources/app/bin/' + ) + //find either a code or code.exe file + const vscodeExecutableName = (await fs.readdir(cliExecutableDir)).find( + file => file.endsWith('code-tunnel') || file.endsWith('code-tunnel.exe') + ) + if (!vscodeExecutableName) { + throw new Error(`Could not find a vscode executable in ${cliExecutableDir}`) + } + const vscodeExecutable = path.join(cliExecutableDir, vscodeExecutableName) + + // Machine settings should simply serve as a baseline to ensure + // tests by default work smoothly. Any test specific preferences + // should be set in workspace settings instead. + + // Note: Not all settings can be set as machine settings, especially + // those with security implications. These are set as user settings + // which live inside the browser's IndexDB. 
There's + const machineDir = path.join(serverRootDir, 'data/Machine') + await fs.mkdir(machineDir, { recursive: true }) + await fs.writeFile( + path.join(machineDir, 'settings.json'), + JSON.stringify( + { + 'extensions.ignoreRecommendations': true, + 'workbench.editor.empty.hint': 'hidden', + 'workbench.startupEditor': 'none', + 'workbench.tips.enabled': false, + 'workbench.welcomePage.walkthroughs.openOnInstall': false, + 'workbench.colorTheme': 'Default Dark Modern', + // sane defaults + 'cody.debug.verbose': true, + }, + null, + 2 + ) + ) + + // Here we install the extensions requested. To speed things up we make use of a shared extension cache that we symlink to. + const extensionsDir = path.join(serverRootDir, 'extensions') + await fs.mkdir(extensionsDir, { recursive: true }) + + if (validOptions.vscodeExtensions.length > 0) { + //TODO(rnauta): Add lockfile wrapper to avoid race conditions + const sharedCacheDir = path.resolve(process.cwd(), validOptions.vscodeExtensionCacheDir) + const args = [ + `--extensions-dir=${sharedCacheDir.replace(/ /g, '\\ ')}`, // cli doesn't handle quotes properly so just escape spaces, + '--install-extension', + ...validOptions.vscodeExtensions, + ] + await pspawn(vscodeExecutable, args) + //we now read all the folders in the shared cache dir and + //symlink the relevant ones to our isolated extension dir + for (const sharedExtensionDir of await fs.readdir(sharedCacheDir)) { + const [_, extensionName] = /^(.*)-\d+\.\d+\.\d+$/.exec(sharedExtensionDir) ?? [] + if (!validOptions.vscodeExtensions.includes(extensionName)) { + continue + } + const sharedExtensionPath = path.join(sharedCacheDir, sharedExtensionDir) + const extensionPath = path.join(extensionsDir, sharedExtensionDir) + await fs.symlink(sharedExtensionPath, extensionPath, 'dir') + } + } + + // We can now start the server + const args = [ + 'serve-web', + '--accept-server-license-terms', + '--port=0', + `--server-data-dir=${serverRootDir.replace(/ /g, '\\ ')}`, + `--extensions-dir=${extensionsDir.replace(/ /g, '\\ ')}`, // cli doesn't handle quotes properly so just escape spaces, + ] + //TODO(rnauta): better typing + const env = { + //TODO: all env variables + TESTING_DOTCOM_URL: sourcegraphMitM.endpoint, + } + const codeProcess = spawn(vscodeExecutable, args, { + env, + stdio: ['inherit', 'pipe', 'pipe'], + detached: false, + }) + if (!codeProcess.pid) { + throw new Error('Could not start code process') + } + const token = await waitForVSCodeUI(codeProcess.stdout) + if (!token) { + throw new Error("VSCode did't provide an auth token") + } + // We started vscode with port 0 which means a random port was + // assigned. However VSCode still reports the port as 0 themselves, + // so we need to do some magic to get the actual port. 
+ // TODO: this might not be very cross-platform + const port = await getPortForPid(codeProcess.pid) + + const config = { url: `http://127.0.0.1:${port}/`, token: token } + await use(config) + + // Turn of logging browser logging and navigate away from the UI + // Otherwise we needlessly add a bunch of noisy error logs + if (page.url().startsWith(config.url)) { + await page.evaluate(() => { + console.log = () => {} + console.info = () => {} + console.warn = () => {} + console.error = () => {} + window.onerror = () => {} + }) + await page.goto('about:blank') + await page.waitForLoadState('domcontentloaded') + } + const exitPromise = new Promise(resolve => { + codeProcess.on('exit', () => { + resolve(void 0) + }) + }) + codeProcess.kill() + await exitPromise + }, + { scope: 'test', timeout: 15 * 1000 }, + ], + // This exposes some bare-bones VSCode APIs in the browser context. You can + // now simply execute a command from the chrome debugger which is a lot less + // flaky then relying on Button Clicks etc. + executeCommand: [ + async ({ page }, use) => { + const commandFn = async (command: string, ...args: any[]): Promise => { + return await _test.step( + 'executeCommand', + async () => { + await expect(page.locator('meta[name="__exposed-vscode-api__"]')).toBeAttached({ + timeout: 4000, + }) + const res = await page.evaluate( + async ({ command, args }) => { + //@ts-ignore + return await window._executeCommand(command, ...args) + }, + { + command, + args, + } + ) + return res + }, + { box: true } + ) + } + use(commandFn) + }, + { scope: 'test' }, + ], +}) + +export const fixture = mergeTests(optionsFixture, implFixture) as ReturnType< + typeof _test.extend +> + +fixture.beforeAll(async () => { + // This just registers polly adapters, it doesn't actually wire anything up + await fixture.step('Polly Register', () => { + Polly.register(NodeHttpAdapter) + Polly.register(CodyPersister) + }) +}) + +function waitForVSCodeUI(stdout: NodeJS.ReadableStream): Promise { + return new Promise((resolve, reject) => { + const listener = (data: Buffer) => { + if (data.toString().includes('available at')) { + clearTimeout(timeout) + stdout.removeListener('data', listener) + const [_, token] = /\?tkn=([a-zA-Z0-9-]+)/.exec(data.toString()) ?? [] + resolve(token) + } + } + const timeout = setTimeout(() => { + stdout.removeListener('data', listener) + reject(new Error('Could not start code process')) + }, 30_000 /*TODO(rnauta): make this configurable*/) + stdout.on('data', listener) + }) +} + +/** + * This ensures only a single process is actually downloading VSCode + */ +async function downloadOrWaitForVSCode({ + executableDir, + validOptions, +}: Pick & { executableDir: string }) { + let electronExecutable = '' + while (!electronExecutable) { + const downloadLockFilePath = path.join( + executableDir, + `${process.env.RUN_ID}.${validOptions.vscodeVersion}.lock` + ) + const createdLockFilePath = await createFileIfNotExists(downloadLockFilePath) + if (!createdLockFilePath) { + // Someone else is downloading, let's just wait for the file to no longer exist. 
+ const watcher = chokidar.watch(downloadLockFilePath) + try { + await Promise.all([ + new Promise(resolve => { + watcher.on('unlink', resolve) + watcher.on('change', resolve) + }), + //the file might have been removed as we were starting the wathcer + fileExists(downloadLockFilePath).then(exists => { + if (!exists) { + throw new Error('Abort') + } + }), + ]) + } catch { + } finally { + await watcher.close() + } + continue + } + try { + electronExecutable = await downloadAndUnzipVSCode({ + cachePath: executableDir, + version: validOptions.vscodeVersion, + reporter: new CustomConsoleReporter(process.stdout.isTTY), + }) + } finally { + await fs.unlink(downloadLockFilePath) + } + } + return electronExecutable +} + +async function createFileIfNotExists(p: string): Promise { + const openFileHandle = await fs.open(p, 'wx').catch(err => { + if (err.code === 'EEXIST') { + return null + } + throw err + }) + await openFileHandle?.close() + return openFileHandle ? p : null +} + +function fileExists(p: string): Promise { + return fs + .stat(p) + .then(s => { + return s.isFile() + }) + .catch(err => { + if (err.code === 'ENOENT') { + return false + } + throw err + }) +} + +async function getPortForPid(pid: number): Promise { + const platform = process.platform + let command: string + + switch (platform) { + case 'win32': + command = `netstat -ano | findstr ${pid}` + break + case 'darwin': + // Use `lsof` with specific options for macOS + command = `lsof -nP -i4TCP -a -p ${pid} | grep LISTEN` + break + case 'linux': + command = `ss -tlnp | grep ${pid}` + break + default: + throw new Error(`Unsupported platform: ${platform}`) + } + + const { stdout } = await exec(command, { encoding: 'utf-8' }) + const lines = stdout.split('\n') + for (const line of lines) { + const match = line.match(/:(\d+)\s/) + if (match?.[1]) { + return Number.parseInt(match[1], 10) + } + } + throw new Error(`No listening port found for PID ${pid}`) +} + +async function getFilesRecursive(dir: string): Promise> { + // lists all dirents recursively in a directory + let dirs: Array>> = [fs.readdir(dir, { withFileTypes: true })] + const files: Array = [] + while (dirs.length > 0) { + const ents = (await Promise.allSettled(dirs)).flat() + dirs = [] + for (const promise of ents) { + if (promise.status === 'rejected') { + // we don't care, we just don't want to leave out other logs that did succeed + continue + } + for (const ent of promise.value) { + if (ent.isFile()) { + files.push(ent) + } else if (ent.isDirectory()) { + dirs.push(fs.readdir(path.join(ent.path, ent.name), { withFileTypes: true })) + } + } + } + } + return files +} + +// A custom version of the VS Code download reporter that silences matching installation +// notifications as these otherwise are emitted on every test run +class CustomConsoleReporter extends ConsoleReporter { + public report(report: ProgressReport): void { + if (report.stage !== ProgressReportStage.FoundMatchingInstall) { + super.report(report) + } + } +} diff --git a/vscode/e2e/utils/vscody/index.ts b/vscode/e2e/utils/vscody/index.ts new file mode 100644 index 000000000000..bfba3678c8d6 --- /dev/null +++ b/vscode/e2e/utils/vscody/index.ts @@ -0,0 +1,3 @@ +export type * from './fixture' +export { fixture } from './fixture' +export * as uix from './uix' diff --git a/vscode/e2e/utils/vscody/uix/README.md b/vscode/e2e/utils/vscody/uix/README.md new file mode 100644 index 000000000000..91e6967a8d79 --- /dev/null +++ b/vscode/e2e/utils/vscody/uix/README.md @@ -0,0 +1 @@ +These are a set of utility functions 
and common UX patterns that make tests more readable and composable. diff --git a/vscode/e2e/utils/vscody/uix/cody.ts b/vscode/e2e/utils/vscody/uix/cody.ts new file mode 100644 index 000000000000..8e29d6c71a12 --- /dev/null +++ b/vscode/e2e/utils/vscody/uix/cody.ts @@ -0,0 +1,79 @@ +import { expect, test as t } from '@playwright/test' +import type { UIXContextFnContext } from '.' +type WebViewCtx = Pick + +/** + * A web view can be positioned anywhere + */ +export class WebView { + private constructor( + public readonly id: string, + private ctx: WebViewCtx + ) {} + + public async waitUntilReady(timeout?: number): Promise { + await this.ctx.page.waitForSelector(`iframe.webview.ready[name="${this.id}"]`, { + strict: true, + state: 'attached', + timeout: timeout, + }) + return this + } + + /** + * Can be used to check visibility + */ + public get wrapper() { + return this.ctx.page.locator(`div:has(> iframe.webview[name="${this.id}"])`) + } + + /** + * Can be used for accessing WebView Content + */ + public get content() { + return this.ctx.page.frameLocator(`.webview[name="${this.id}"]`).frameLocator('#active-frame') + } + + public static all( + ctx: WebViewCtx, + opts: { atLeast?: number; ignoring?: Array; timeout?: number } = {} + ) { + return t.step('Cody.WebView.all', async () => { + const excludedIds = opts.ignoring?.map(id => (typeof id === 'string' ? id : id.id)) ?? [] + const nots = excludedIds.map(id => `:not([name="${id}"`).join('') + const validOptions = ctx.page.locator( + `iframe.webview[src*="extensionId=sourcegraph.cody-ai"]${nots}` + ) + + if (opts.atLeast) { + await expect(validOptions.nth(opts.atLeast - 1)).toBeAttached({ timeout: opts.timeout }) + } + + const ids = await validOptions.evaluateAll(frames => { + return frames.map(frame => frame.getAttribute('name')!).filter(Boolean) + }) + return ids.map(id => new WebView(id, ctx)) + }) + } +} + +export async function dummy() { + console.log('DUMMY') +} + +export async function waitForBinaryDownloads() {} + +export async function waitForIndexing() {} + +export async function waitForStartup() { + await Promise.all([waitForBinaryDownloads(), waitForIndexing()]) +} + +export async function sidebar( + withSidebar: (sidebar: any) => Promise, + ctx: Pick +): Promise { + //todo: IFRAME Locator + const frame = await ctx.page.frameLocator('iframe') + return await withSidebar(frame) +} diff --git a/vscode/e2e/utils/vscody/uix/index.ts b/vscode/e2e/utils/vscody/uix/index.ts new file mode 100644 index 000000000000..497718ce68d1 --- /dev/null +++ b/vscode/e2e/utils/vscody/uix/index.ts @@ -0,0 +1,7 @@ +import type { PlaywrightTestArgs, PlaywrightWorkerArgs } from '@playwright/test' +import type { TestContext, WorkerContext } from '../fixture' +export * as vscode from './vscode' +export * as cody from './cody' +export * as workspace from './workspace' + +export type UIXContextFnContext = TestContext & WorkerContext & PlaywrightTestArgs & PlaywrightWorkerArgs diff --git a/vscode/e2e/utils/vscody/uix/vscode.ts b/vscode/e2e/utils/vscody/uix/vscode.ts new file mode 100644 index 000000000000..97b3ccda388c --- /dev/null +++ b/vscode/e2e/utils/vscody/uix/vscode.ts @@ -0,0 +1,170 @@ +import path from 'node:path' +import { test as t } from '@playwright/test' +import type { UIXContextFnContext } from '.' 
+ +type SidebarCtx = Pick +export class Sidebar { + public static readonly CODY_VIEW_ID = 'workbench.view.extension.cody' + + private constructor(private ctx: SidebarCtx) {} + + public static get(ctx: SidebarCtx) { + return new Sidebar(ctx) + } + + public get locator() { + return this.ctx.page.locator('#workbench\\.parts\\.sidebar') + } + + private get splitViewContainer() { + return this.locator.locator('xpath=ancestor::*[contains(@class, "split-view-view")]').last() + } + + /** + * The viewlet is the content of the sidebar. Any webview will get + * positioned as anchored to this. + */ + private get viewlet() { + return this.locator.locator('.composite.viewlet').first() + } + + public async isVisible() { + return await t.step('Sidebar.isVisible', async () => { + const classes = await this.splitViewContainer.getAttribute('class') + return classes?.split(' ').includes('visible') + }) + } + + public get activeView() { + return this.viewlet.getAttribute('id') + } +} + +export function startSession({ + page, + vscodeUI, + executeCommand, + workspaceDir, +}: Pick) { + return t.step('Start VSCode Session', async () => { + // we dummy route here so that we can modify the state etc. Which would + // otherwise be protected by the browser to match the domain + await page.route( + vscodeUI.url, + route => { + route.fulfill({ + status: 200, + body: '', + }) + }, + { times: 1 } + ) + await page.goto(vscodeUI.url) + // User settings are stored in IndexDB though so we need to get a bit + // clever. Normal "user settings" are better stored in Machine settings + // so that they can be easily edited as a normal file. Machine settings + // don't cover security sensitive settings though. + const userSettingsOk = await page.evaluate(async () => { + const openDatabase = () => { + return new Promise((resolve, reject) => { + const request = indexedDB.open('vscode-web-db') + + request.onupgradeneeded = (event: any) => { + const db = event.target.result + if (!db.objectStoreNames.contains('vscode-userdata-store')) { + db.createObjectStore('vscode-userdata-store') + } + } + + request.onsuccess = (event: any) => { + resolve(event.target.result) + } + + request.onerror = (event: any) => { + reject(event.target.errorCode) + } + }) + } + const putData = (db: any) => { + return new Promise((resolve, reject) => { + const transaction = db.transaction(['vscode-userdata-store'], 'readwrite') + const store = transaction.objectStore('vscode-userdata-store') + //TODO: Configurable overwrites + const settingsJSON = JSON.stringify( + { + 'security.workspace.trust.enabled': false, + 'extensions.autoCheckUpdates': false, + 'extensions.autoUpdate': false, + 'update.mode': 'none', + 'update.showReleaseNotes': false, + }, + null, + 2 + ) + const settingsData = new TextEncoder().encode(settingsJSON) + const putRequest = store.put(settingsData, '/User/settings.json') + putRequest.onsuccess = () => { + resolve(void 0) + } + putRequest.onerror = (event: any) => { + console.error(event) + reject(event.target.errorCode) + } + }) + } + + try { + const db = await openDatabase() + await putData(db) + return true + } catch (error) { + console.error('Error accessing IndexedDB:', error) + return false + } + }) + + if (!userSettingsOk) { + throw new Error('Failed to initialize VSCode User Settings') + } + + // We also make sure that on page loads we expose the VSCodeAPI + await page.addInitScript(async () => { + // only run this in the main frame + if (window && window.self === window.top) { + if 
(document.querySelector('meta[name="__exposed-vscode-api__"]') !== null) { + return + } + while (true) { + try { + const code = window.require('vs/workbench/workbench.web.main') + //@ts-ignore + window._vscode = code + //@ts-ignore + window._executeCommand = code.commands.executeCommand + // insert the meta tag if it doesn't already exist + // await page.waitForSelector('meta[name="__exposed-vscode-api__"]', { timeout: 1000 }) + const meta = document.createElement('meta') + meta.setAttribute('name', '__exposed-vscode-api__') + meta.setAttribute('content', 'true') + document.head.appendChild(meta) + return + } catch (err) { + // We'll try again in a bit. Eitehr require wasn't loaded yet or the module isn't imported yet + await new Promise(resolve => { + setTimeout(resolve, 100) + }) + } + } + } + }) + + // We can now authenticate and navigate to the UI + await page.goto(`${vscodeUI.url}?tkn=${vscodeUI.token}&folder=${path.resolve(workspaceDir)}`) + + // wait for the UI to be ready + await page.locator('iframe.web-worker-ext-host-iframe').waitFor({ + state: 'attached', + timeout: 10000, + }) + }) +} diff --git a/vscode/e2e/utils/vscody/uix/workspace.ts b/vscode/e2e/utils/vscody/uix/workspace.ts new file mode 100644 index 000000000000..a94fe4928a67 --- /dev/null +++ b/vscode/e2e/utils/vscody/uix/workspace.ts @@ -0,0 +1,29 @@ +import fs from 'node:fs/promises' +import path from 'node:path' +import { test as t } from '@playwright/test' +import type { UIXContextFnContext } from '.' +export function modifySettings( + modifyFn: (settings: Record | undefined) => Record, + { workspaceDir }: Pick +) { + return t.step( + 'Modify Workspace Settings', + async () => { + const existingConfig: string | undefined = await fs + .readFile(path.join(workspaceDir, '.vscode', 'settings.json'), 'utf-8') + .catch(err => { + if (err.code === 'ENOENT') { + return undefined + } + throw err + }) + const updatedConfig = modifyFn(existingConfig ? 
JSON.parse(existingConfig) : undefined) + await fs.mkdir(path.join(workspaceDir, '.vscode'), { recursive: true }) + fs.writeFile( + path.join(workspaceDir, '.vscode', 'settings.json'), + JSON.stringify(updatedConfig, null, 2) + ) + }, + { box: true } + ) +} diff --git a/vscode/package.json b/vscode/package.json index 68c1b065156e..9ae70cb56736 100644 --- a/vscode/package.json +++ b/vscode/package.json @@ -40,6 +40,9 @@ "storybook": "storybook dev -p 6007 --no-open --no-version-updates", "test:e2e": "playwright install && tsc --build && node dist/tsc/test/e2e/install-deps.js && pnpm run -s build:dev:desktop && pnpm run -s test:e2e:run", "test:e2e:run": "playwright test", + "test:e2e2": "pnpm -s test:e2e2:deps && pnpm -s build:dev:desktop && pnpm -s test:e2e2:run", + "test:e2e2:run": "playwright test -c playwright.v2.config.ts", + "test:e2e2:deps": "playwright install chromium", "test:integration": "tsc --build ./test/integration && pnpm run -s build:dev:desktop && node --inspect -r ts-node/register dist/tsc/test/integration/main.js", "test:unit": "vitest", "bench": "vitest bench", @@ -1388,7 +1391,9 @@ }, "devDependencies": { "@google-cloud/pubsub": "^3.7.3", + "@npmcli/promise-spawn": "^7.0.2", "@playwright/test": "1.44.1", + "@pollyjs/adapter": "^6.0.6", "@pollyjs/adapter-node-http": "^6.0.6", "@pollyjs/core": "^6.0.6", "@pollyjs/persister": "^6.0.6", @@ -1406,6 +1411,7 @@ "@types/lodash": "^4.14.195", "@types/marked": "^5.0.0", "@types/mocha": "^10.0.6", + "@types/npmcli__promise-spawn": "^6.0.3", "@types/pako": "^2.0.3", "@types/progress": "^2.0.5", "@types/semver": "^7.5.0", @@ -1418,6 +1424,7 @@ "ajv": "^8.14.0", "ajv-errors": "^3.0.0", "ajv-formats": "^3.0.1", + "chokidar": "^3.6.0", "concurrently": "^8.2.0", "dedent": "^0.7.0", "express": "^4.18.2", @@ -1425,6 +1432,7 @@ "franc-min": "^6.2.0", "fs-extra": "^11.2.0", "fuzzysort": "^2.0.4", + "http-proxy-middleware": "^3.0.0", "mocha": "^10.2.0", "ovsx": "^0.8.2", "pako": "^2.1.0", @@ -1437,6 +1445,7 @@ "vite-plugin-svgr": "^4.2.0", "vscode-jsonrpc": "^8.2.0", "vscode-languageserver-protocol": "^3.17.5", - "yaml": "^2.3.4" + "yaml": "^2.3.4", + "zod": "^3.23.8" } } diff --git a/vscode/playwright.v2.config.ts b/vscode/playwright.v2.config.ts new file mode 100644 index 000000000000..eb07cb9d3d31 --- /dev/null +++ b/vscode/playwright.v2.config.ts @@ -0,0 +1,85 @@ +import { type ReporterDescription, defineConfig } from '@playwright/test' +import type { SymlinkExtensions } from './e2e/utils/symlink-extensions.setup' +import type { TestOptions, WorkerOptions } from './e2e/utils/vscody' +const isWin = process.platform.startsWith('win') +const isCI = !!process.env.CI + +// This makes sure that each run gets a unique run id. This shouldn't really be +// used other than to invalidate lockfiles etc. +process.env.RUN_ID = process.env.RUN_ID || new Date().toISOString() + +export default defineConfig({ + workers: '50%', + retries: 0, // NO MORE FLAKE ALLOWED! It's a slippery slope. + forbidOnly: isCI, + fullyParallel: true, + timeout: isWin || isCI ? 30000 : 20000, + expect: { + timeout: isWin || isCI ? 10000 : 5000, + }, + use: { + // You can override options easily per project/worker/test so they are + // unlikely to need to be modified here. 
These are just some sane + // defaults + repoRootDir: '../', //deprecated + vscodeExtensions: ['sourcegraph.cody-ai'], + symlinkExtensions: ['.'], + vscodeVersion: 'stable', + vscodeTmpDir: '../.test/global/vscode', + vscodeExtensionCacheDir: '../.test/global/vscode-extensions', + binaryTmpDir: '../.test/global/bin', + recordIfMissing: + typeof process.env.CODY_RECORD_IF_MISSING === 'string' + ? process.env.CODY_RECORD_IF_MISSING === 'true' + : false, + recordingMode: (process.env.CODY_RECORDING_MODE as any) ?? 'replay', + recordingDir: '../recordings/vscode/', + + bypassCSP: true, + locale: 'en-US', + timezoneId: 'America/Los_Angeles', + permissions: ['clipboard-read', 'clipboard-write'], + geolocation: { longitude: -122.40825783227943, latitude: 37.78124453182266 }, + acceptDownloads: false, + trace: { + mode: isCI ? 'retain-on-failure' : 'on', + attachments: true, + screenshots: true, + snapshots: true, + sources: true, + }, + }, + projects: [ + { + name: 'symlink-extensions', + testDir: './e2e/utils', + testMatch: ['symlink-extensions.setup.ts'], + }, + { + name: 'utils', + testDir: './e2e/utils', + testMatch: ['**/*.test.ts'], + dependencies: ['symlink-extensions'], + }, + { + name: 'e2e', + testDir: './e2e', + testMatch: ['**/*.test.ts'], + testIgnore: ['issues/**/*', 'utils/**/*'], + dependencies: ['symlink-extensions'], + }, + { + name: 'issues', + testDir: './e2e/issues', + retries: 0, + testMatch: ['**/*.test.ts'], + dependencies: ['symlink-extensions'], + }, + ], + reporter: [ + ['line', { printSteps: true, includeProjectInTestName: true }], + ['html', { outputFolder: '.test-reports', fileName: 'report.html', open: 'never' }], + ['json', { outputFile: '.test-reports/report.json', open: 'never' }], + ...(isCI ? [['github', {}] satisfies ReporterDescription] : []), + ], +}) diff --git a/vscode/src/testutils/CodyPersister.ts b/vscode/src/testutils/CodyPersister.ts index ac60a034a408..b140e2643d21 100644 --- a/vscode/src/testutils/CodyPersister.ts +++ b/vscode/src/testutils/CodyPersister.ts @@ -153,7 +153,12 @@ export class CodyPersister extends FSPersister { private filterHeaders( headers: { name: string; value: string }[] ): { name: string; value: string }[] { - const removeHeaderNames = new Set(['set-cookie', 'server', 'via']) + const removeHeaderNames = new Set([ + 'set-cookie', + 'server', + 'via', + 'x-sourcegraph-actor-anonymous-uid', + ]) const removeHeaderPrefixes = ['x-trace', 'cf-'] return headers.filter( header => diff --git a/vscode/src/testutils/polly.ts b/vscode/src/testutils/polly.ts index 7070fb121906..f2983a46decd 100644 --- a/vscode/src/testutils/polly.ts +++ b/vscode/src/testutils/polly.ts @@ -2,7 +2,7 @@ import { execSync } from 'node:child_process' import path from 'node:path' import jsonStableStringify from 'fast-json-stable-stringify' -import { type EXPIRY_STRATEGY, type Headers, type MODE, Polly } from '@pollyjs/core' +import { type EXPIRY_STRATEGY, type Headers, type MODE, Polly, type PollyConfig } from '@pollyjs/core' import { CodyNodeHttpAdapter } from './CodyNodeHttpAdapter' import { CodyPersister, redactAuthorizationHeader } from './CodyPersister' @@ -36,50 +36,52 @@ export function startPollyRecording(userOptions: PollyOptions): Polly { recordingsDir: options.recordingDirectory, }, }, - matchRequestsBy: { - order: false, + matchRequestsBy: defaultMatchRequestsBy, + }) +} - // Canonicalize JSON bodies so that we can replay the recording even if the JSON strings - // differ by semantically meaningless things like object key enumeration order. 
- body(body) { - try { - if (typeof body === 'string' && (body.startsWith('{') || body.startsWith('['))) { - return jsonStableStringify(JSON.parse(body)) - } - } catch {} - return body - }, +export const defaultMatchRequestsBy: PollyConfig['matchRequestsBy'] = { + order: false, + + // Canonicalize JSON bodies so that we can replay the recording even if the JSON strings + // differ by semantically meaningless things like object key enumeration order. + body(body) { + try { + if (typeof body === 'string' && (body.startsWith('{') || body.startsWith('['))) { + return jsonStableStringify(JSON.parse(body)) + } + } catch {} + return body + }, - // The logic below is a bit tricky to follow. Simplified, we need to - // ensure that Polly generates the same request ID regardless if - // we're running in record mode (with an access token) or in replay - // mode (with a redacted token). The ID is computed by Polly as the - // MD5 digest of all request "identifiers", which a JSON object that - // includes a "headers" property from the result of the function - // below. To better understand what's going on, it's helpful to read - // the implementation of Polly here: - // https://sourcegraph.com/github.com/Netflix/pollyjs@9b6bede12b7ee998472b8883c9dd01e2159e00a8/-/blob/packages/@pollyjs/core/src/-private/request.js?L281 - headers(headers): Headers { - // Get the authorization token. - const { authorization } = headers - let header = - typeof authorization === 'string' - ? authorization - : Array.isArray(authorization) - ? authorization.at(0) - : undefined + // The logic below is a bit tricky to follow. Simplified, we need to + // ensure that Polly generates the same request ID regardless if + // we're running in record mode (with an access token) or in replay + // mode (with a redacted token). The ID is computed by Polly as the + // MD5 digest of all request "identifiers", which a JSON object that + // includes a "headers" property from the result of the function + // below. To better understand what's going on, it's helpful to read + // the implementation of Polly here: + // https://sourcegraph.com/github.com/Netflix/pollyjs@9b6bede12b7ee998472b8883c9dd01e2159e00a8/-/blob/packages/@pollyjs/core/src/-private/request.js?L281 + headers(headers): Headers { + // Get the authorization token. + const { authorization } = headers + let header = + typeof authorization === 'string' + ? authorization + : Array.isArray(authorization) + ? authorization.at(0) + : undefined - // Redact it so that the ID is the same regardless if we're in record or replay - // mode. - if (header) { - header = redactAuthorizationHeader(header) - } + // Redact it so that the ID is the same regardless if we're in record or replay + // mode. + if (header) { + header = redactAuthorizationHeader(header) + } - // Normalize to always be a single header value (not an array). - return header ? { authorization: header } : {} - }, - }, - }) + // Normalize to always be a single header value (not an array). + return header ? 
{ authorization: header } : {} + }, } function defaultPollyOptions( diff --git a/vscode/test/e2e/install-deps.ts b/vscode/test/e2e/install-deps.ts index e09ded581491..e6f7a08ada06 100644 --- a/vscode/test/e2e/install-deps.ts +++ b/vscode/test/e2e/install-deps.ts @@ -4,8 +4,9 @@ import { ConsoleReporter, type ProgressReport, ProgressReportStage, - downloadAndUnzipVSCode, + downloadAndUnzipVSCode as _downloadAndUnzipVSCode, } from '@vscode/test-electron' +import type { DownloadOptions } from '@vscode/test-electron/out/download' // The VS Code version to use for e2e tests (there is also a version in ../integration/main.ts used for integration tests). // @@ -24,8 +25,24 @@ class CustomConsoleReporter extends ConsoleReporter { } } +/** + * Patches the default logger but otherwise leaves all options available + * @param opts + */ +export function downloadAndUnzipVSCode(opts: Partial) { + return _downloadAndUnzipVSCode( + Object.assign( + { + version: vscodeVersion, + reporter: new CustomConsoleReporter(process.stdout.isTTY), + } satisfies Partial, + opts + ) + ) +} + export function installVsCode(): Promise { - return downloadAndUnzipVSCode( + return _downloadAndUnzipVSCode( vscodeVersion, undefined, new CustomConsoleReporter(process.stdout.isTTY) diff --git a/vscode/tsconfig.json b/vscode/tsconfig.json index 7f82d3ad061c..41263a395340 100644 --- a/vscode/tsconfig.json +++ b/vscode/tsconfig.json @@ -20,6 +20,7 @@ "playwright.config.ts", "test/e2e", "test/e2e/utils/commands.json", + "e2e", "webviews", "webviews/*.d.ts", "package.json", From 2504e5148aaa4867dfe4ebff8f6d648d872f4715 Mon Sep 17 00:00:00 2001 From: Keegan Carruthers-Smith Date: Fri, 28 Jun 2024 16:02:35 +0200 Subject: [PATCH 03/11] check linux specific directory for electronExecutable --- vscode/e2e/utils/vscody/fixture.ts | 35 ++++++++++++++++++++---------- 1 file changed, 23 insertions(+), 12 deletions(-) diff --git a/vscode/e2e/utils/vscody/fixture.ts b/vscode/e2e/utils/vscody/fixture.ts index 01ad142af489..68f90e233cca 100644 --- a/vscode/e2e/utils/vscody/fixture.ts +++ b/vscode/e2e/utils/vscody/fixture.ts @@ -329,18 +329,29 @@ const implFixture = _test.extend({ } ) - const cliExecutableDir = path.resolve( - path.dirname(electronExecutable), - '../Resources/app/bin/' - ) - //find either a code or code.exe file - const vscodeExecutableName = (await fs.readdir(cliExecutableDir)).find( - file => file.endsWith('code-tunnel') || file.endsWith('code-tunnel.exe') - ) - if (!vscodeExecutableName) { - throw new Error(`Could not find a vscode executable in ${cliExecutableDir}`) - } - const vscodeExecutable = path.join(cliExecutableDir, vscodeExecutableName) + // The location of the executable is platform dependent, try the + // first location that works. 
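+        // Promise.any settles with the first candidate whose directory actually
+        // contains a code-tunnel binary; if every candidate rejects it throws an
+        // AggregateError, which the .catch below replaces with a clearer message.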
+ const vscodeExecutable = await Promise.any( + [ + '../Resources/app/bin', // darwin + 'bin', // linux + ].map(async binPath => { + const cliExecutableDir = path.resolve(path.dirname(electronExecutable), binPath) + + // find either a code or code.exe file + const vscodeExecutableName = (await fs.readdir(cliExecutableDir)).find( + file => file.endsWith('code-tunnel') || file.endsWith('code-tunnel.exe') + ) + if (!vscodeExecutableName) { + throw new Error(`Could not find a vscode executable in ${cliExecutableDir}`) + } + return path.join(cliExecutableDir, vscodeExecutableName) + }) + ).catch(async () => { + throw new Error( + `Could not find a vscode executable under ${path.dirname(electronExecutable)}` + ) + }) // Machine settings should simply serve as a baseline to ensure // tests by default work smoothly. Any test specific preferences From a309bdcc56f66b30ef1bcce66c5bf87ab5773016 Mon Sep 17 00:00:00 2001 From: Keegan Carruthers-Smith Date: Fri, 28 Jun 2024 16:02:56 +0200 Subject: [PATCH 04/11] lower timeout for serve-web to start 30s is higher than the test timeout, which lead to hard to debug failures. 10s means in the default case if we have issues the 'Could not start code process' error will be thrown. --- vscode/e2e/utils/vscody/fixture.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vscode/e2e/utils/vscody/fixture.ts b/vscode/e2e/utils/vscody/fixture.ts index 68f90e233cca..35d555b9c84f 100644 --- a/vscode/e2e/utils/vscody/fixture.ts +++ b/vscode/e2e/utils/vscody/fixture.ts @@ -521,7 +521,7 @@ function waitForVSCodeUI(stdout: NodeJS.ReadableStream): Promise { stdout.removeListener('data', listener) reject(new Error('Could not start code process')) - }, 30_000 /*TODO(rnauta): make this configurable*/) + }, 10_000 /*TODO(rnauta): make this configurable*/) stdout.on('data', listener) }) } From 1ec99e2cd61884fcf96e881a98f4cf78b696bea3 Mon Sep 17 00:00:00 2001 From: Keegan Carruthers-Smith Date: Fri, 28 Jun 2024 16:03:48 +0200 Subject: [PATCH 05/11] inherit process.env when starting code-tunnel On linux I need my PATH and because of the environment I am on I also need other environment variables to be able to run random binaries from the internet (ie the vscode we download). --- vscode/e2e/utils/vscody/fixture.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/vscode/e2e/utils/vscody/fixture.ts b/vscode/e2e/utils/vscody/fixture.ts index 35d555b9c84f..a63bb8b5b3fb 100644 --- a/vscode/e2e/utils/vscody/fixture.ts +++ b/vscode/e2e/utils/vscody/fixture.ts @@ -416,6 +416,8 @@ const implFixture = _test.extend({ ] //TODO(rnauta): better typing const env = { + // inherit environment + ...process.env, //TODO: all env variables TESTING_DOTCOM_URL: sourcegraphMitM.endpoint, } From e75a2f6c1a4e1d3fc12682526bc45fced18b43c7 Mon Sep 17 00:00:00 2001 From: Keegan Carruthers-Smith Date: Fri, 28 Jun 2024 16:31:15 +0200 Subject: [PATCH 06/11] say bin is for windows as well I did not test on windows. 
However, I inspected the tarball from the download site and it follows the same pattern as linux --- vscode/e2e/utils/vscody/fixture.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vscode/e2e/utils/vscody/fixture.ts b/vscode/e2e/utils/vscody/fixture.ts index a63bb8b5b3fb..959ff5d9ad2c 100644 --- a/vscode/e2e/utils/vscody/fixture.ts +++ b/vscode/e2e/utils/vscody/fixture.ts @@ -334,7 +334,7 @@ const implFixture = _test.extend({ const vscodeExecutable = await Promise.any( [ '../Resources/app/bin', // darwin - 'bin', // linux + 'bin', // linux and windows ].map(async binPath => { const cliExecutableDir = path.resolve(path.dirname(electronExecutable), binPath) From 3e25a3f3880e368ea421befd3073bd9fd1cc71dc Mon Sep 17 00:00:00 2001 From: Rik Nauta Date: Mon, 1 Jul 2024 12:12:50 +0200 Subject: [PATCH 07/11] Lockfile improvements --- lib/shared/package.json | 4 + lib/shared/src/index.ts | 2 + lib/shared/src/lockfile.ts | 443 ++++++++++++++++++++++ pnpm-lock.yaml | 22 ++ vscode/e2e/utils/vscody/fixture.ts | 136 ++++--- vscode/src/graph/bfg/bfg.test.ts | 2 - vscode/src/graph/bfg/download-bfg.ts | 75 ++-- vscode/src/local-context/download-symf.ts | 77 ++-- vscode/src/local-context/symf.test.ts | 2 - vscode/src/local-context/utils.ts | 57 +-- 10 files changed, 611 insertions(+), 209 deletions(-) create mode 100644 lib/shared/src/lockfile.ts diff --git a/lib/shared/package.json b/lib/shared/package.json index bb5198339bf4..1a6348bdb373 100644 --- a/lib/shared/package.json +++ b/lib/shared/package.json @@ -27,6 +27,7 @@ "dedent": "^0.7.0", "diff": "^5.2.0", "fast-xml-parser": "^4.3.2", + "graceful-fs": "^4.2.11", "isomorphic-fetch": "^3.0.0", "js-tiktoken": "^1.0.10", "lexical": "^0.16.0", @@ -36,6 +37,7 @@ "ollama": "^0.5.1", "re2js": "^0.4.1", "semver": "^7.5.4", + "signal-exit": "^4.1.0", "vscode-uri": "^3.0.7", "win-ca": "^3.5.1" }, @@ -44,10 +46,12 @@ "@types/crypto-js": "^4.2.2", "@types/dedent": "^0.7.0", "@types/diff": "^5.0.9", + "@types/graceful-fs": "^4.1.9", "@types/isomorphic-fetch": "^0.0.39", "@types/lodash": "^4.14.195", "@types/node-fetch": "^2.6.4", "@types/semver": "^7.5.0", + "@types/signal-exit": "^3.0.4", "@types/vscode": "^1.80.0" } } diff --git a/lib/shared/src/index.ts b/lib/shared/src/index.ts index 7da79c446729..1490e7224f4b 100644 --- a/lib/shared/src/index.ts +++ b/lib/shared/src/index.ts @@ -291,6 +291,8 @@ export * from './sourcegraph-api/utils' export * from './token' export * from './token/constants' export * from './configuration' +import * as lockfile from './lockfile' +export { lockfile } export { setOpenCtxClient, openCtx, diff --git a/lib/shared/src/lockfile.ts b/lib/shared/src/lockfile.ts new file mode 100644 index 000000000000..f15b7eb2c6d1 --- /dev/null +++ b/lib/shared/src/lockfile.ts @@ -0,0 +1,443 @@ +//@ts-nocheck + +// This is a modified and sligthly stripped down version of +// https://github.com/microsoft/playwright/commit/8f62aa933562d37f344015cf4e43775fbf81716b +// The original seems no longer maintained and has a critical bug +// https://github.com/moxystudio/node-proper-lockfile/issues/111 It was stripped +// to keep dependencies minimal. TODO: This doesn't seem like a very clean +// long-term solution. + +/** + * + * The MIT License (MIT) + * + * Copyright (c) 2018 Made With MOXY Lda + * Modifications copyright (c) Microsoft Corporation. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + */ +import path from 'node:path' +import fs from 'graceful-fs' +import { onExit } from 'signal-exit' +const locks = {} +const cacheSymbol = Symbol() + +interface LockOptions { + stale?: number + update?: number + realpath?: boolean + lockfilePath?: string +} +export async function lock(file: string, options?: LockOptions): Promise<() => void> { + const release = await toPromise(_lock)(file, options) + return toPromise(release) +} + +interface WaitForLockOptions extends LockOptions { + delay: number + signal?: AbortSignal +} + +export async function waitForLock( + file: string, + { delay, signal, ...opts }: WaitForLockOptions +): Promise<() => void> { + while (!signal?.aborted) { + const unlockFn = await lock(file, opts).catch(err => { + if (err.code === 'ELOCKED') { + return undefined + } + throw err + }) + if (unlockFn) { + return unlockFn + } + await new Promise(resolve => setTimeout(resolve, delay)) + } +} + +function probe(file, fs, callback) { + const cachedPrecision = fs[cacheSymbol] + + if (cachedPrecision) { + return fs.stat(file, (err, stat) => { + /* istanbul ignore if */ + if (err) { + return callback(err) + } + + callback(null, stat.mtime, cachedPrecision) + }) + } + + // Set mtime by ceiling Date.now() to seconds + 5ms so that it's "not on the second" + const mtime = new Date(Math.ceil(Date.now() / 1000) * 1000 + 5) + + fs.utimes(file, mtime, mtime, err => { + /* istanbul ignore if */ + if (err) { + return callback(err) + } + + fs.stat(file, (err, stat) => { + /* istanbul ignore if */ + if (err) { + return callback(err) + } + + const precision = stat.mtime.getTime() % 1000 === 0 ? 
's' : 'ms' + + // Cache the precision in a non-enumerable way + Object.defineProperty(fs, cacheSymbol, { value: precision }) + + callback(null, stat.mtime, precision) + }) + }) +} + +function getMtime(precision) { + let now = Date.now() + + if (precision === 's') { + now = Math.ceil(now / 1000) * 1000 + } + + return new Date(now) +} + +function getLockFile(file, options) { + return options.lockfilePath || `${file}.lock` +} + +function resolveCanonicalPath(file, options, callback) { + if (!options.realpath) { + return callback(null, path.resolve(file)) + } + + // Use realpath to resolve symlinks + // It also resolves relative paths + options.fs.realpath(file, callback) +} + +function acquireLock(file, options, callback) { + const lockfilePath = getLockFile(file, options) + + // Use mkdir to create the lockfile (atomic operation) + options.fs.mkdir(lockfilePath, err => { + if (!err) { + // At this point, we acquired the lock! + // Probe the mtime precision + return probe(lockfilePath, options.fs, (err, mtime, mtimePrecision) => { + // If it failed, try to remove the lock.. + /* istanbul ignore if */ + if (err) { + options.fs.rmdir(lockfilePath, () => {}) + + return callback(err) + } + + callback(null, mtime, mtimePrecision) + }) + } + + // If error is not EEXIST then some other error occurred while locking + if (err.code !== 'EEXIST') { + return callback(err) + } + + // Otherwise, check if lock is stale by analyzing the file mtime + if (options.stale <= 0) { + return callback( + Object.assign(new Error('Lock file is already being held'), { code: 'ELOCKED', file }) + ) + } + + options.fs.stat(lockfilePath, (err, stat) => { + if (err) { + // Retry if the lockfile has been removed (meanwhile) + // Skip stale check to avoid recursiveness + if (err.code === 'ENOENT') { + return acquireLock(file, { ...options, stale: 0 }, callback) + } + + return callback(err) + } + + if (!isLockStale(stat, options)) { + return callback( + Object.assign(new Error('Lock file is already being held'), { + code: 'ELOCKED', + file, + }) + ) + } + + // If it's stale, remove it and try again! 
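+            // (A lock counts as stale when its mtime is older than `options.stale`
+            // milliseconds, i.e. the holder stopped refreshing it and has most
+            // likely crashed, so it is safe to remove it and take over.)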
+ // Skip stale check to avoid recursiveness + removeLock(file, options, err => { + if (err) { + return callback(err) + } + + acquireLock(file, { ...options, stale: 0 }, callback) + }) + }) + }) +} + +function isLockStale(stat, options) { + return stat.mtime.getTime() < Date.now() - options.stale +} + +function removeLock(file, options, callback) { + // Remove lockfile, ignoring ENOENT errors + options.fs.rmdir(getLockFile(file, options), err => { + if (err && err.code !== 'ENOENT') { + return callback(err) + } + + callback() + }) +} + +function updateLock(file, options) { + const lock = locks[file] + + // Just for safety, should never happen + /* istanbul ignore if */ + if (lock.updateTimeout) { + return + } + + lock.updateDelay = lock.updateDelay || options.update + lock.updateTimeout = setTimeout(() => { + lock.updateTimeout = null + + // Stat the file to check if mtime is still ours + // If it is, we can still recover from a system sleep or a busy event loop + options.fs.stat(lock.lockfilePath, (err, stat) => { + const isOverThreshold = lock.lastUpdate + options.stale < Date.now() + + // If it failed to update the lockfile, keep trying unless + // the lockfile was deleted or we are over the threshold + if (err) { + if (err.code === 'ENOENT' || isOverThreshold) { + return setLockAsCompromised(file, lock, Object.assign(err, { code: 'ECOMPROMISED' })) + } + + lock.updateDelay = 1000 + + return updateLock(file, options) + } + + const isMtimeOurs = lock.mtime.getTime() === stat.mtime.getTime() + + if (!isMtimeOurs) { + return setLockAsCompromised( + file, + lock, + Object.assign(new Error('Unable to update lock within the stale threshold'), { + code: 'ECOMPROMISED', + }) + ) + } + + const mtime = getMtime(lock.mtimePrecision) + + options.fs.utimes(lock.lockfilePath, mtime, mtime, err => { + const isOverThreshold = lock.lastUpdate + options.stale < Date.now() + + // Ignore if the lock was released + if (lock.released) { + return + } + + // If it failed to update the lockfile, keep trying unless + // the lockfile was deleted or we are over the threshold + if (err) { + if (err.code === 'ENOENT' || isOverThreshold) { + return setLockAsCompromised( + file, + lock, + Object.assign(err, { code: 'ECOMPROMISED' }) + ) + } + + lock.updateDelay = 1000 + + return updateLock(file, options) + } + + // All ok, keep updating.. + lock.mtime = mtime + lock.lastUpdate = Date.now() + lock.updateDelay = null + updateLock(file, options) + }) + }) + }, lock.updateDelay) + + // Unref the timer so that the nodejs process can exit freely + // This is safe because all acquired locks will be automatically released + // on process exit + + // We first check that `lock.updateTimeout.unref` exists because some users + // may be using this module outside of NodeJS (e.g., in an electron app), + // and in those cases `setTimeout` return an integer. 
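+    // Without unref the pending keep-alive timer would hold the Node event loop
+    // open until the lock is released and the timeout is cleared.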
+ /* istanbul ignore else */ + if (lock.updateTimeout.unref) { + lock.updateTimeout.unref() + } +} + +function setLockAsCompromised(file, lock, err) { + // Signal the lock has been released + lock.released = true + + // Cancel lock mtime update + // Just for safety, at this point updateTimeout should be null + /* istanbul ignore if */ + if (lock.updateTimeout) { + clearTimeout(lock.updateTimeout) + } + + if (locks[file] === lock) { + delete locks[file] + } + + lock.options.onCompromised(err) +} + +// ---------------------------------------------------------- + +function _lock(file, options, callback) { + /* istanbul ignore next */ + options = { + stale: 10000, + update: null, + realpath: true, + fs, + onCompromised: err => { + throw err + }, + ...options, + } + + options.stale = Math.max(options.stale || 0, 2000) + options.update = options.update == null ? options.stale / 2 : options.update || 0 + options.update = Math.max(Math.min(options.update, options.stale / 2), 1000) + + // Resolve to a canonical file path + resolveCanonicalPath(file, options, (err, file) => { + if (err) { + return callback(err) + } + + // Attempt to acquire the lock + acquireLock(file, options, (err, mtime, mtimePrecision) => { + if (err) { + return callback(err) + } + + // We now own the lock + const lockObj = { + lockfilePath: getLockFile(file, options), + mtime, + mtimePrecision, + options, + lastUpdate: Date.now(), + } + locks[file] = lockObj + + // We must keep the lock fresh to avoid staleness + updateLock(file, options) + + callback(null, releasedCallback => { + if (lockObj.released) { + return releasedCallback?.( + Object.assign(new Error('Lock is already released'), { + code: 'ERELEASED', + }) + ) + } + + // Not necessary to use realpath twice when unlocking + unlock(file, { ...options, realpath: false }, releasedCallback) + }) + }) + }) +} + +function unlock(file, options, callback) { + options = { + fs, + realpath: true, + ...options, + } + + // Resolve to a canonical file path + resolveCanonicalPath(file, options, (err, file) => { + if (err) { + return callback(err) + } + + // Skip if the lock is not acquired + const lock = locks[file] + + if (!lock) { + return callback( + Object.assign(new Error('Lock is not acquired/owned by you'), { code: 'ENOTACQUIRED' }) + ) + } + + lock.updateTimeout && clearTimeout(lock.updateTimeout) // Cancel lock mtime update + lock.released = true // Signal the lock has been released + delete locks[file] // Delete from locks + + removeLock(file, options, callback) + }) +} + +function toPromise(method) { + return (...args) => + new Promise((resolve, reject) => { + args.push((err, result) => { + if (err) { + reject(err) + } else { + resolve(result) + } + }) + method(...args) + }) +} + +// Remove acquired locks on exit +/* istanbul ignore next */ +onExit(() => { + for (const file in locks) { + const options = locks[file].options + + try { + options.fs.rmdirSync(getLockFile(file, options)) + } catch (e) { + /* Empty */ + } + } +}) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index df4abd4030ed..b67dd5d56e3e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -280,6 +280,9 @@ importers: fast-xml-parser: specifier: ^4.3.2 version: 4.3.2 + graceful-fs: + specifier: ^4.2.11 + version: 4.2.11 isomorphic-fetch: specifier: ^3.0.0 version: 3.0.0 @@ -307,6 +310,9 @@ importers: semver: specifier: ^7.5.4 version: 7.5.4 + signal-exit: + specifier: ^4.1.0 + version: 4.1.0 vscode-uri: specifier: ^3.0.7 version: 3.0.7 @@ -326,6 +332,9 @@ importers: '@types/diff': specifier: ^5.0.9 version: 5.0.9 
+ '@types/graceful-fs': + specifier: ^4.1.9 + version: 4.1.9 '@types/isomorphic-fetch': specifier: ^0.0.39 version: 0.0.39 @@ -338,6 +347,9 @@ importers: '@types/semver': specifier: ^7.5.0 version: 7.5.8 + '@types/signal-exit': + specifier: ^3.0.4 + version: 3.0.4 '@types/vscode': specifier: ^1.80.0 version: 1.80.0 @@ -5675,6 +5687,12 @@ packages: resolution: {integrity: sha512-40um9QqwHjRS92qnOaDpL7RmDK15NuZYo9HihiJRbYkMQZlWnuH8AdvbMy8/o6lgLmKbDUKa+OALCltHdbOTpQ==} dev: true + /@types/graceful-fs@4.1.9: + resolution: {integrity: sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==} + dependencies: + '@types/node': 20.12.7 + dev: true + /@types/hast@2.3.10: resolution: {integrity: sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==} dependencies: @@ -5892,6 +5910,10 @@ packages: resolution: {integrity: sha512-9Hp0ObzwwO57DpLFF0InUjUm/II8GmKAvzbefxQTihCb7KI6yc9yzf0nLc4mVdby5N4DRCgQM2wCup9KTieeww==} dev: false + /@types/signal-exit@3.0.4: + resolution: {integrity: sha512-e7EUPfU9afHyWc5CXtlqbvVHEshrb05uPlDCenWIbMgtWoFrTuTDVYNLKk6o4X2/4oHTfNqrJX/vaJ3uBhtXTg==} + dev: true + /@types/svg2ttf@5.0.3: resolution: {integrity: sha512-hL+/A4qMISvDbDTtdY73R0zuvsdc7YRYnV5FyAfKVGk8OsluXu/tCFxop7IB5Sgr+ZCS0hHtFxylD0REmm+abA==} dev: false diff --git a/vscode/e2e/utils/vscody/fixture.ts b/vscode/e2e/utils/vscody/fixture.ts index 959ff5d9ad2c..aa4e928f54c1 100644 --- a/vscode/e2e/utils/vscody/fixture.ts +++ b/vscode/e2e/utils/vscody/fixture.ts @@ -1,3 +1,6 @@ +// TODO/WARNING/APOLOGY: I know that this is an unreasonably large file right +// now. I'll refactor and cut it down this down once everything is working +// first. import { exec as _exec, spawn } from 'node:child_process' import type { Dirent } from 'node:fs' import fs from 'node:fs/promises' @@ -15,12 +18,14 @@ import { type EXPIRY_STRATEGY, type MODE, Polly } from '@pollyjs/core' import type { ArrayContainsAll } from '@sourcegraph/cody-shared/src/utils' import { ConsoleReporter, type ProgressReport, ProgressReportStage } from '@vscode/test-electron' import { downloadAndUnzipVSCode } from '@vscode/test-electron/out/download' -import chokidar from 'chokidar' import express from 'express' import { copy as copyExt } from 'fs-extra' import { createProxyMiddleware } from 'http-proxy-middleware' +import type { loggerPlugin as ProxyMiddlewarePlugin } from 'http-proxy-middleware' import zod from 'zod' +import { EventEmitter } from 'node:stream' +import { waitForLock } from '@sourcegraph/cody-shared/src/lockfile' import { CodyPersister } from '../../../src/testutils/CodyPersister' import { defaultMatchRequestsBy } from '../../../src/testutils/polly' import { retry, stretchTimeout } from '../helpers' @@ -223,9 +228,15 @@ const implFixture = _test.extend({ //config but it's not a super smooth experience yet. If you run into //this please give me a ping so we can brainstorm. 
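            // The proxy below targets the real Sourcegraph instance. Polly (set up
            // with the node-http adapter in the `polly` fixture) intercepts the
            // proxy's outgoing requests, so they are recorded on record runs and
            // served from the recording on replay.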
const target = 'https://sourcegraph.com' + const testFailureSignal = new EventEmitter<{ error: [Error] }>() + testFailureSignal.on('error', err => { + throw err + }) const middleware = createProxyMiddleware({ target, changeOrigin: true, + ejectPlugins: true, + plugins: [failOrRetryRecordingOnError(testFailureSignal)], }) app.use(middleware) let server: ReturnType = null as any @@ -252,13 +263,18 @@ const implFixture = _test.extend({ ], polly: [ async ({ validOptions, sourcegraphMitM }, use, testInfo) => { - const polly = new Polly(`${testInfo.project}`, { + const relativeTestPath = path.relative( + path.resolve(process.cwd(), testInfo.project.testDir), + testInfo.file + ) + const polly = new Polly(`${testInfo.project.name}/${relativeTestPath}/${testInfo.title}`, { flushRequestsOnStop: true, recordIfMissing: validOptions.recordIfMissing ?? validOptions.recordingMode === 'record', mode: validOptions.recordingMode, persister: 'fs', adapters: ['node-http'], recordFailedRequests: true, + logLevel: 'SILENT', matchRequestsBy: defaultMatchRequestsBy, persisterOptions: { keepUnusedRequests: validOptions.keepUnusedRecordings ?? true, @@ -300,7 +316,8 @@ const implFixture = _test.extend({ req => 'RecordTelemetryEvents' in req.query || 'LogEventMutation' in req.query ) .intercept((req, res, interceptor) => { - res.sendStatus(200) + res.send('{}') + res.status(200) }) polly.server.get('/healthz').intercept((req, res, interceptor) => { @@ -387,12 +404,21 @@ const implFixture = _test.extend({ if (validOptions.vscodeExtensions.length > 0) { //TODO(rnauta): Add lockfile wrapper to avoid race conditions const sharedCacheDir = path.resolve(process.cwd(), validOptions.vscodeExtensionCacheDir) - const args = [ - `--extensions-dir=${sharedCacheDir.replace(/ /g, '\\ ')}`, // cli doesn't handle quotes properly so just escape spaces, - '--install-extension', - ...validOptions.vscodeExtensions, - ] - await pspawn(vscodeExecutable, args) + await fs.mkdir(sharedCacheDir, { recursive: true }) + const releaseLock = await waitForLock(sharedCacheDir, { + lockfilePath: path.join(sharedCacheDir, '.lock'), + delay: 1000, + }) + try { + const args = [ + `--extensions-dir=${sharedCacheDir.replace(/ /g, '\\ ')}`, // cli doesn't handle quotes properly so just escape spaces, + '--install-extension', + ...validOptions.vscodeExtensions, + ] + await pspawn(vscodeExecutable, args) + } finally { + releaseLock() + } //we now read all the folders in the shared cache dir and //symlink the relevant ones to our isolated extension dir for (const sharedExtensionDir of await fs.readdir(sharedCacheDir)) { @@ -420,6 +446,8 @@ const implFixture = _test.extend({ ...process.env, //TODO: all env variables TESTING_DOTCOM_URL: sourcegraphMitM.endpoint, + CODY_TESTING_BFG_DIR: path.resolve(process.cwd(), validOptions.binaryTmpDir), + CODY_TESTING_SYMF_DIR: path.resolve(process.cwd(), validOptions.binaryTmpDir), } const codeProcess = spawn(vscodeExecutable, args, { env, @@ -536,72 +564,21 @@ async function downloadOrWaitForVSCode({ validOptions, }: Pick & { executableDir: string }) { let electronExecutable = '' - while (!electronExecutable) { - const downloadLockFilePath = path.join( - executableDir, - `${process.env.RUN_ID}.${validOptions.vscodeVersion}.lock` - ) - const createdLockFilePath = await createFileIfNotExists(downloadLockFilePath) - if (!createdLockFilePath) { - // Someone else is downloading, let's just wait for the file to no longer exist. 
- const watcher = chokidar.watch(downloadLockFilePath) - try { - await Promise.all([ - new Promise(resolve => { - watcher.on('unlink', resolve) - watcher.on('change', resolve) - }), - //the file might have been removed as we were starting the wathcer - fileExists(downloadLockFilePath).then(exists => { - if (!exists) { - throw new Error('Abort') - } - }), - ]) - } catch { - } finally { - await watcher.close() - } - continue - } - try { - electronExecutable = await downloadAndUnzipVSCode({ - cachePath: executableDir, - version: validOptions.vscodeVersion, - reporter: new CustomConsoleReporter(process.stdout.isTTY), - }) - } finally { - await fs.unlink(downloadLockFilePath) - } + const lockfilePath = path.join(executableDir, `${validOptions.vscodeVersion}.lock`) + const releaseLock = await waitForLock(executableDir, { lockfilePath, delay: 500 }) + + try { + electronExecutable = await downloadAndUnzipVSCode({ + cachePath: executableDir, + version: validOptions.vscodeVersion, + reporter: new CustomConsoleReporter(process.stdout.isTTY), + }) + } finally { + releaseLock() } return electronExecutable } -async function createFileIfNotExists(p: string): Promise { - const openFileHandle = await fs.open(p, 'wx').catch(err => { - if (err.code === 'EEXIST') { - return null - } - throw err - }) - await openFileHandle?.close() - return openFileHandle ? p : null -} - -function fileExists(p: string): Promise { - return fs - .stat(p) - .then(s => { - return s.isFile() - }) - .catch(err => { - if (err.code === 'ENOENT') { - return false - } - throw err - }) -} - async function getPortForPid(pid: number): Promise { const platform = process.platform let command: string @@ -656,6 +633,23 @@ async function getFilesRecursive(dir: string): Promise> { return files } +function failOrRetryRecordingOnError( + emitter: EventEmitter<{ error: [Error] }> +): typeof ProxyMiddlewarePlugin { + return (proxyServer, options) => { + proxyServer.on('error', (err, req, res) => { + if ( + err.name === 'PollyError' && + err.message.includes('Recording for the following request is not found') + ) { + //TODO: allow re-trying with recording temporarily enabled + err.message = `Polly recording missing for ${[req.method]}${req.url}` + } + emitter.emit('error', err) + }) + } +} + // A custom version of the VS Code download reporter that silences matching installation // notifications as these otherwise are emitted on every test run class CustomConsoleReporter extends ConsoleReporter { diff --git a/vscode/src/graph/bfg/bfg.test.ts b/vscode/src/graph/bfg/bfg.test.ts index c4bbfa40df11..8db09f23e0cc 100644 --- a/vscode/src/graph/bfg/bfg.test.ts +++ b/vscode/src/graph/bfg/bfg.test.ts @@ -6,8 +6,6 @@ import { downloadFile } from '../../local-context/utils' import { getOSArch } from '../../os' import { _config, _getNamesForPlatform, _upsertBfgForPlatform, defaultBfgVersion } from './download-bfg' -//@ts-ignore -_config.FILE_DOWNLOAD_LOCK_DURATION = 10 //@ts-ignore _config.FILE_LOCK_RETRY_DELAY = 1 diff --git a/vscode/src/graph/bfg/download-bfg.ts b/vscode/src/graph/bfg/download-bfg.ts index c12491500881..692dd1e265c2 100644 --- a/vscode/src/graph/bfg/download-bfg.ts +++ b/vscode/src/graph/bfg/download-bfg.ts @@ -1,11 +1,10 @@ import fs from 'node:fs/promises' import os from 'node:os' import path from 'node:path' +import { lockfile } from '@sourcegraph/cody-shared' import { SemverString } from '@sourcegraph/cody-shared/src/utils' -import { Mutex } from 'async-mutex' import * as vscode from 'vscode' -import { sleep } from 
'../../completions/utils' -import { downloadFile, fileExists, unzip, upsertFile } from '../../local-context/utils' +import { downloadFile, fileExists, unzip } from '../../local-context/utils' import { logDebug, logError } from '../../log' import { Arch, Platform, getOSArch } from '../../os' import { captureException } from '../../services/sentry/sentry' @@ -15,8 +14,8 @@ export type BfgVersionString = SemverString<''> export const defaultBfgVersion: BfgVersionString = '5.4.6040' export const _config = { - FILE_DOWNLOAD_LOCK_DURATION: 5000, - FILE_LOCK_RETRY_DELAY: 1000, + //delay before trying to re-lock a active file + FILE_LOCK_RETRY_DELAY: 500, } as const /** @@ -50,10 +49,6 @@ export async function getBfgPath(context: vscode.ExtensionContext): Promise { - try { - const wasDownloaded = await downloadBfgBinary({ - bfgPath, - bfgURL, - bfgFilename, - bfgUnzippedFilename, - }) - if (wasDownloaded) { - void removeOldBfgBinaries(containingDir, bfgFilename) - } - return bfgPath - } catch (error) { - captureException(error) - void vscode.window.showErrorMessage(`Failed to download bfg: ${error}`) - return null + try { + const wasDownloaded = await downloadBfgBinary({ + bfgPath, + bfgURL, + bfgFilename, + bfgUnzippedFilename, + }) + if (wasDownloaded) { + //TODO: we can't always assume that nobody is using these still + void removeOldBfgBinaries(containingDir, bfgFilename) } - }) + return bfgPath + } catch (error) { + captureException(error) + void vscode.window.showErrorMessage(`Failed to download bfg: ${error}`) + return null + } } export function _getNamesForPlatform( @@ -143,24 +137,30 @@ async function downloadBfgBinary({ cancellable: false, }, async (progress, cancel) => { - progress.report({ message: 'Downloading bfg' }) - while (!cancel.isCancellationRequested) { + progress.report({ message: 'Checking bfg status' }) + const abortController = new AbortController() + cancel.onCancellationRequested(() => abortController.abort()) + + const bfgDir = path.dirname(bfgPath) + await fs.mkdir(bfgDir, { recursive: true }) + const unlockFn = await lockfile.waitForLock(bfgDir, { + delay: _config.FILE_LOCK_RETRY_DELAY, + lockfilePath: `${bfgPath}.lock`, + }) + try { if (await fileExists(bfgPath)) { logDebug('CodyEngine', 'bfg already downloaded, reusing') return false } + + progress.report({ message: 'Downloading bfg' }) + const bfgTmpDir = `${bfgPath}.tmp` await fs.mkdir(bfgTmpDir, { recursive: true }) const bfgZipFile = path.join(bfgTmpDir, `${bfgFilename}.zip`) - // try and acquire a file lock, giving another process some grace to write data to it - const bfgZipFileLock = await upsertFile(bfgZipFile, _config.FILE_DOWNLOAD_LOCK_DURATION) - if (!bfgZipFileLock) { - logDebug('CodyEngine', 'Another process is already downloading bfg, waiting...') - await sleep(_config.FILE_DOWNLOAD_LOCK_DURATION) - continue - } - await downloadFile(bfgURL, bfgZipFile, cancel) + + await downloadFile(bfgURL, bfgZipFile, abortController.signal) progress.report({ message: 'Extracting bfg' }) await unzip(bfgZipFile, bfgTmpDir) logDebug('CodyEngine', `downloaded bfg to ${bfgTmpDir}`) @@ -172,8 +172,9 @@ async function downloadBfgBinary({ logDebug('CodyEngine', `extracted bfg to ${bfgPath}`) return true + } finally { + unlockFn?.() } - return false } ) } diff --git a/vscode/src/local-context/download-symf.ts b/vscode/src/local-context/download-symf.ts index 21f8ac29b490..c3d9ca56fd1c 100644 --- a/vscode/src/local-context/download-symf.ts +++ b/vscode/src/local-context/download-symf.ts @@ -1,23 +1,20 @@ import fs from 
'node:fs/promises' import os from 'node:os' import path from 'node:path' +import { lockfile } from '@sourcegraph/cody-shared' import type { SemverString } from '@sourcegraph/cody-shared/src/utils' -import { Mutex } from 'async-mutex' import * as vscode from 'vscode' -import { sleep } from '../completions/utils' import { logDebug, logError } from '../log' import { type Arch, Platform, getOSArch } from '../os' import { captureException } from '../services/sentry/sentry' -import { downloadFile, fileExists, unzip, upsertFile } from './utils' +import { downloadFile, fileExists, unzip } from './utils' export type SymfVersionString = SemverString<'v'> const symfVersion: SymfVersionString = 'v0.0.12' export const _config = { - //how long to consider a file "active" before we consider it "stale" - FILE_DOWNLOAD_LOCK_DURATION: 5000, //delay before trying to re-lock a active file - FILE_LOCK_RETRY_DELAY: 1000, + FILE_LOCK_RETRY_DELAY: 500, } as const /** @@ -48,10 +45,6 @@ export async function getSymfPath(context: vscode.ExtensionContext): Promise { - try { - const wasDownloaded = await downloadSymfBinary({ - symfPath, - symfURL, - symfFilename, - symfUnzippedFilename, - }) - if (wasDownloaded) { - void removeOldSymfBinaries(containingDir, symfFilename) - } - return symfPath - } catch (error) { - captureException(error) - void vscode.window.showErrorMessage(`Failed to download symf: ${error}`) - return null + try { + const wasDownloaded = await downloadSymfBinary({ + symfPath, + symfURL, + symfFilename, + symfUnzippedFilename, + }) + if (wasDownloaded) { + void removeOldSymfBinaries(containingDir, symfFilename) } - }) + return symfPath + } catch (error) { + captureException(error) + void vscode.window.showErrorMessage(`Failed to download symf: ${error}`) + return null + } } export function _getNamesForPlatform( @@ -139,27 +129,29 @@ async function downloadSymfBinary({ cancellable: false, }, async (progress, cancel) => { - progress.report({ message: 'Downloading symf' }) - while (!cancel.isCancellationRequested) { + progress.report({ message: 'Checking symf status' }) + const abortController = new AbortController() + cancel.onCancellationRequested(() => abortController.abort()) + + const symfDir = path.dirname(symfPath) + await fs.mkdir(symfDir, { recursive: true }) + const unlockFn = await lockfile.waitForLock(symfDir, { + delay: _config.FILE_LOCK_RETRY_DELAY, + lockfilePath: `${symfPath}.lock`, + }) + + try { if (await fileExists(symfPath)) { logDebug('symf', 'symf already downloaded, reusing') return false } + progress.report({ message: 'Downloading symf' }) + const symfTmpDir = `${symfPath}.tmp` await fs.mkdir(symfTmpDir, { recursive: true }) const symfZipFile = path.join(symfTmpDir, `${symfFilename}.zip`) - // try and acquire a file lock, giving another process some grace to write data to it - const symfZipFileLock = await upsertFile( - symfZipFile, - _config.FILE_DOWNLOAD_LOCK_DURATION - ) - if (!symfZipFileLock) { - logDebug('symf', 'Another process is already downloading symf, waiting...') - await sleep(_config.FILE_LOCK_RETRY_DELAY) - continue - } - await downloadFile(symfURL, symfZipFile, cancel) + await downloadFile(symfURL, symfZipFile, abortController.signal) progress.report({ message: 'Extracting symf' }) await unzip(symfZipFile, symfTmpDir) logDebug('symf', `downloaded symf to ${symfTmpDir}`) @@ -171,8 +163,9 @@ async function downloadSymfBinary({ logDebug('symf', `extracted symf to ${symfPath}`) return true + } finally { + unlockFn?.() } - return false } ) } diff --git 
a/vscode/src/local-context/symf.test.ts b/vscode/src/local-context/symf.test.ts index 4773fc8196d5..775de917bef1 100644 --- a/vscode/src/local-context/symf.test.ts +++ b/vscode/src/local-context/symf.test.ts @@ -6,8 +6,6 @@ import { getOSArch } from '../os' import { _config, _getNamesForPlatform, _upsertSymfForPlatform } from './download-symf' import { downloadFile } from './utils' -//@ts-ignore -_config.FILE_DOWNLOAD_LOCK_DURATION = 10 //@ts-ignore _config.FILE_LOCK_RETRY_DELAY = 1 diff --git a/vscode/src/local-context/utils.ts b/vscode/src/local-context/utils.ts index 756b1212afe9..f1d0e4384af6 100644 --- a/vscode/src/local-context/utils.ts +++ b/vscode/src/local-context/utils.ts @@ -3,8 +3,6 @@ import fs from 'node:fs/promises' import path from 'node:path' import axios from 'axios' import unzipper from 'unzipper' -import type * as vscode from 'vscode' -import { logDebug } from '../log' export async function pathExists(path: string): Promise { try { @@ -33,54 +31,6 @@ export async function fileExists(path: string): Promise { } } -/** - * Atomically creates the file if it does not exist but leaves it untouched otherwise. - * @param filePath the file to create/touch - * @param maxMtimeMs if the file hasn't been touched for maxMtimeMs, a new file will be created instead - * @returns True if a new file has been created. False if the existing file has been left in place - */ -export async function upsertFile( - filePath: string, - maxMtimeMs?: number, - cancellationToken?: vscode.CancellationToken -): Promise { - while (!cancellationToken?.isCancellationRequested) { - try { - const openFileHandle = await fs.open(filePath, 'wx') - try { - await openFileHandle.close() - } catch { - /*Ignore*/ - } - return true - } catch (error: any) { - if (error.code !== 'EEXIST') { - throw error - } - if (maxMtimeMs === undefined) { - return false - } - // We now know the file exists but we'll just check that someone has - // actually been writing to it within the maxAge time span. - // otherwise we assume it's abandoned and we'll give ourselves - - // Note: this could fail if the file has been deleted by another - // process right as we check this...I can live with that. - const fileStats = await fs.stat(filePath) - const age = Date.now() - fileStats.mtimeMs - if (age < maxMtimeMs) { - // this file has not been abandoned - return false - } - logDebug('symf', `file ${filePath} is abandoned, removing it`) - // we'll just remove the old file and retry. This way if another - // process was doing the same thing only one should win out - await fs.unlink(filePath) - } - } - return false -} - /** * This downloads a url to a specific location and overwrites the existing file * if it exists @@ -88,17 +38,14 @@ export async function upsertFile( export async function downloadFile( url: string, outputPath: string, - cancellationToken?: vscode.CancellationToken + signal?: AbortSignal ): Promise { - logDebug('Symf', `downloading from URL ${url}`) - const abort = !cancellationToken ? 
undefined : new AbortController() - cancellationToken?.onCancellationRequested(() => abort?.abort()) const response = await axios({ url, method: 'GET', responseType: 'stream', maxRedirects: 10, - signal: abort?.signal, + signal: signal, }) const stream = syncfs.createWriteStream(outputPath, { autoClose: true, flags: 'w' }) From 954514fb448055b78670aa0aedf37a1975def535 Mon Sep 17 00:00:00 2001 From: Rik Nauta Date: Mon, 1 Jul 2024 15:30:11 +0200 Subject: [PATCH 08/11] Wait for server download --- vscode/e2e/utils/vscody/fixture.ts | 34 +++++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/vscode/e2e/utils/vscody/fixture.ts b/vscode/e2e/utils/vscody/fixture.ts index aa4e928f54c1..0f85ba9a97d4 100644 --- a/vscode/e2e/utils/vscody/fixture.ts +++ b/vscode/e2e/utils/vscody/fixture.ts @@ -345,6 +345,11 @@ const implFixture = _test.extend({ testInfo, } ) + const serverExecutableDir = path.join( + executableDir, + path.relative(executableDir, electronExecutable).split(path.sep)[0], + 'server' + ) // The location of the executable is platform dependent, try the // first location that works. @@ -437,6 +442,7 @@ const implFixture = _test.extend({ 'serve-web', '--accept-server-license-terms', '--port=0', + `--cli-data-dir=${serverExecutableDir.replace(/ /g, '\\ ')}`, `--server-data-dir=${serverRootDir.replace(/ /g, '\\ ')}`, `--extensions-dir=${extensionsDir.replace(/ /g, '\\ ')}`, // cli doesn't handle quotes properly so just escape spaces, ] @@ -466,8 +472,34 @@ const implFixture = _test.extend({ // so we need to do some magic to get the actual port. // TODO: this might not be very cross-platform const port = await getPortForPid(codeProcess.pid) - const config = { url: `http://127.0.0.1:${port}/`, token: token } + + // we now need to wait for the server to be downloaded + const releaseServerDownloadLock = await waitForLock(serverExecutableDir, { + delay: 1000, + lockfilePath: path.join(serverExecutableDir, '.lock'), + }) + try { + stretchTimeout( + async () => { + while (true) { + try { + const res = await fetch(config.url) + if (res.status === 202) { + // we are still downloading here + } else if (res.status === 200 || res.status === 401) { + return + } + } catch {} + await new Promise(resolve => setTimeout(resolve, 1000)) + } + }, + { max: 60_000, testInfo } + ) + } finally { + releaseServerDownloadLock() + } + await use(config) // Turn of logging browser logging and navigate away from the UI From 08781ab55127162ebd9905ad9f3b90f60f5cd380 Mon Sep 17 00:00:00 2001 From: Rik Nauta Date: Mon, 1 Jul 2024 18:36:28 +0200 Subject: [PATCH 09/11] Fix lockfile downloads --- lib/shared/package.json | 4 -- lib/shared/src/index.ts | 2 - pnpm-lock.yaml | 24 +++---- vscode/e2e/utils/vscody/fixture.ts | 68 +++++++++++--------- vscode/package.json | 78 +++++++++++++++++++---- vscode/src/graph/bfg/download-bfg.ts | 4 +- vscode/src/local-context/download-symf.ts | 4 +- {lib/shared => vscode}/src/lockfile.ts | 0 8 files changed, 119 insertions(+), 65 deletions(-) rename {lib/shared => vscode}/src/lockfile.ts (100%) diff --git a/lib/shared/package.json b/lib/shared/package.json index 1a6348bdb373..bb5198339bf4 100644 --- a/lib/shared/package.json +++ b/lib/shared/package.json @@ -27,7 +27,6 @@ "dedent": "^0.7.0", "diff": "^5.2.0", "fast-xml-parser": "^4.3.2", - "graceful-fs": "^4.2.11", "isomorphic-fetch": "^3.0.0", "js-tiktoken": "^1.0.10", "lexical": "^0.16.0", @@ -37,7 +36,6 @@ "ollama": "^0.5.1", "re2js": "^0.4.1", "semver": "^7.5.4", - "signal-exit": "^4.1.0", "vscode-uri": 
"^3.0.7", "win-ca": "^3.5.1" }, @@ -46,12 +44,10 @@ "@types/crypto-js": "^4.2.2", "@types/dedent": "^0.7.0", "@types/diff": "^5.0.9", - "@types/graceful-fs": "^4.1.9", "@types/isomorphic-fetch": "^0.0.39", "@types/lodash": "^4.14.195", "@types/node-fetch": "^2.6.4", "@types/semver": "^7.5.0", - "@types/signal-exit": "^3.0.4", "@types/vscode": "^1.80.0" } } diff --git a/lib/shared/src/index.ts b/lib/shared/src/index.ts index 1490e7224f4b..7da79c446729 100644 --- a/lib/shared/src/index.ts +++ b/lib/shared/src/index.ts @@ -291,8 +291,6 @@ export * from './sourcegraph-api/utils' export * from './token' export * from './token/constants' export * from './configuration' -import * as lockfile from './lockfile' -export { lockfile } export { setOpenCtxClient, openCtx, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b67dd5d56e3e..1736fb237f08 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -280,9 +280,6 @@ importers: fast-xml-parser: specifier: ^4.3.2 version: 4.3.2 - graceful-fs: - specifier: ^4.2.11 - version: 4.2.11 isomorphic-fetch: specifier: ^3.0.0 version: 3.0.0 @@ -310,9 +307,6 @@ importers: semver: specifier: ^7.5.4 version: 7.5.4 - signal-exit: - specifier: ^4.1.0 - version: 4.1.0 vscode-uri: specifier: ^3.0.7 version: 3.0.7 @@ -332,9 +326,6 @@ importers: '@types/diff': specifier: ^5.0.9 version: 5.0.9 - '@types/graceful-fs': - specifier: ^4.1.9 - version: 4.1.9 '@types/isomorphic-fetch': specifier: ^0.0.39 version: 0.0.39 @@ -347,9 +338,6 @@ importers: '@types/semver': specifier: ^7.5.0 version: 7.5.8 - '@types/signal-exit': - specifier: ^3.0.4 - version: 3.0.4 '@types/vscode': specifier: ^1.80.0 version: 1.80.0 @@ -479,6 +467,9 @@ importers: glob: specifier: ^7.2.3 version: 7.2.3 + graceful-fs: + specifier: ^4.2.11 + version: 4.2.11 he: specifier: ^1.2.0 version: 1.2.0 @@ -548,6 +539,9 @@ importers: semver: specifier: ^7.5.4 version: 7.5.4 + signal-exit: + specifier: ^4.1.0 + version: 4.1.0 socks-proxy-agent: specifier: ^8.0.1 version: 8.0.1 @@ -621,6 +615,9 @@ importers: '@types/glob': specifier: ^8.0.0 version: 8.0.0 + '@types/graceful-fs': + specifier: ^4.1.9 + version: 4.1.9 '@types/ini': specifier: ^4.1.0 version: 4.1.0 @@ -651,6 +648,9 @@ importers: '@types/semver': specifier: ^7.5.0 version: 7.5.0 + '@types/signal-exit': + specifier: ^3.0.4 + version: 3.0.4 '@types/unzipper': specifier: ^0.10.7 version: 0.10.7 diff --git a/vscode/e2e/utils/vscody/fixture.ts b/vscode/e2e/utils/vscody/fixture.ts index 0f85ba9a97d4..5ed3b9fc4268 100644 --- a/vscode/e2e/utils/vscody/fixture.ts +++ b/vscode/e2e/utils/vscody/fixture.ts @@ -25,7 +25,7 @@ import type { loggerPlugin as ProxyMiddlewarePlugin } from 'http-proxy-middlewar import zod from 'zod' import { EventEmitter } from 'node:stream' -import { waitForLock } from '@sourcegraph/cody-shared/src/lockfile' +import { waitForLock } from '../../../src/lockfile' import { CodyPersister } from '../../../src/testutils/CodyPersister' import { defaultMatchRequestsBy } from '../../../src/testutils/polly' import { retry, stretchTimeout } from '../helpers' @@ -339,7 +339,7 @@ const implFixture = _test.extend({ // We nullify the time it takes to download VSCode as it can vary wildly! 
const electronExecutable = await stretchTimeout( - async () => downloadOrWaitForVSCode({ validOptions, executableDir }), + () => downloadOrWaitForVSCode({ validOptions, executableDir }), { max: DOWNLOAD_GRACE_TIME, testInfo, @@ -374,7 +374,6 @@ const implFixture = _test.extend({ `Could not find a vscode executable under ${path.dirname(electronExecutable)}` ) }) - // Machine settings should simply serve as a baseline to ensure // tests by default work smoothly. Any test specific preferences // should be set in workspace settings instead. @@ -449,7 +448,8 @@ const implFixture = _test.extend({ //TODO(rnauta): better typing const env = { // inherit environment - ...process.env, + // TODO: Check why this was necessary. Shouldn't be needed + // ...process.env, //TODO: all env variables TESTING_DOTCOM_URL: sourcegraphMitM.endpoint, CODY_TESTING_BFG_DIR: path.resolve(process.cwd(), validOptions.binaryTmpDir), @@ -474,31 +474,10 @@ const implFixture = _test.extend({ const port = await getPortForPid(codeProcess.pid) const config = { url: `http://127.0.0.1:${port}/`, token: token } - // we now need to wait for the server to be downloaded - const releaseServerDownloadLock = await waitForLock(serverExecutableDir, { - delay: 1000, - lockfilePath: path.join(serverExecutableDir, '.lock'), + await stretchTimeout(() => waitForVSCodeServer({ url: config.url, serverExecutableDir }), { + max: DOWNLOAD_GRACE_TIME, + testInfo, }) - try { - stretchTimeout( - async () => { - while (true) { - try { - const res = await fetch(config.url) - if (res.status === 202) { - // we are still downloading here - } else if (res.status === 200 || res.status === 401) { - return - } - } catch {} - await new Promise(resolve => setTimeout(resolve, 1000)) - } - }, - { max: 60_000, testInfo } - ) - } finally { - releaseServerDownloadLock() - } await use(config) @@ -523,7 +502,7 @@ const implFixture = _test.extend({ codeProcess.kill() await exitPromise }, - { scope: 'test', timeout: 15 * 1000 }, + { scope: 'test' }, ], // This exposes some bare-bones VSCode APIs in the browser context. 
You can // now simply execute a command from the chrome debugger which is a lot less @@ -570,6 +549,37 @@ fixture.beforeAll(async () => { }) }) +/** + * Waits for server components to be downloaded and that the server is ready to + * accept connections + */ +async function waitForVSCodeServer(config: { url: string; serverExecutableDir: string }) { + const releaseServerDownloadLock = await waitForLock(config.serverExecutableDir, { + delay: 1000, + lockfilePath: path.join(config.serverExecutableDir, '.lock'), + }) + try { + while (true) { + try { + const res = await fetch(config.url) + if (res.status === 202) { + // we are still downloading here + } else if (res.status === 200 || res.status === 403) { + // 403 simply means we haven't supplied the token + // 200 probably means we didn't require a token + // either way we are ready to accept connections + return + } else { + console.error(`Unexpected status code ${res.status}`) + } + } catch {} + await new Promise(resolve => setTimeout(resolve, 1000)) + } + } finally { + releaseServerDownloadLock() + } +} + function waitForVSCodeUI(stdout: NodeJS.ReadableStream): Promise { return new Promise((resolve, reject) => { const listener = (data: Buffer) => { diff --git a/vscode/package.json b/vscode/package.json index 9ae70cb56736..eb9b2ff18727 100644 --- a/vscode/package.json +++ b/vscode/package.json @@ -54,7 +54,12 @@ "version-bump:patch": "RELEASE_TYPE=patch ts-node-transpile-only ./scripts/version-bump.ts", "version-bump:dry-run": "RELEASE_TYPE=prerelease ts-node-transpile-only ./scripts/version-bump.ts" }, - "categories": ["Programming Languages", "Machine Learning", "Snippets", "Education"], + "categories": [ + "Programming Languages", + "Machine Learning", + "Snippets", + "Education" + ], "keywords": [ "ai", "openai", @@ -107,7 +112,11 @@ }, "main": "./dist/extension.node.js", "browser": "./dist/extension.web.js", - "activationEvents": ["onLanguage", "onStartupFinished", "onWebviewPanel:cody.chatPanel"], + "activationEvents": [ + "onLanguage", + "onStartupFinished", + "onWebviewPanel:cody.chatPanel" + ], "contributes": { "walkthroughs": [ { @@ -687,13 +696,17 @@ }, { "command": "cody.supercompletion.jumpTo", - "args": ["next"], + "args": [ + "next" + ], "key": "shift+ctrl+down", "when": "cody.activated && !editorReadonly && cody.hasActionableSupercompletion" }, { "command": "cody.supercompletion.jumpTo", - "args": ["previous"], + "args": [ + "previous" + ], "key": "shift+ctrl+up", "when": "cody.activated && !editorReadonly && cody.hasActionableSupercompletion" } @@ -973,12 +986,20 @@ "order": 2, "type": "string", "markdownDescription": "A Git repository URL to use instead of allowing Cody to infer the Git repository from the workspace.", - "examples": ["https://github.com/sourcegraph/cody", "ssh://git@github.com/sourcegraph/cody"] + "examples": [ + "https://github.com/sourcegraph/cody", + "ssh://git@github.com/sourcegraph/cody" + ] }, "cody.useContext": { "order": 99, "type": "string", - "enum": ["embeddings", "keyword", "blended", "none"], + "enum": [ + "embeddings", + "keyword", + "blended", + "none" + ], "default": "blended", "markdownDescription": "Controls which context providers Cody uses for chat, commands and inline edits. Use 'blended' for best results. For debugging other context sources, 'embeddings' will use an embeddings-based index if available. 'keyword' will use a search-based index. 'none' will not use embeddings or search-based indexes." 
}, @@ -1024,13 +1045,17 @@ "order": 6, "type": "string", "markdownDescription": "A custom instruction to be included at the start of all chat messages (e.g. \"Answer all my questions in Spanish.\")", - "examples": ["Answer all my questions in Spanish."] + "examples": [ + "Answer all my questions in Spanish." + ] }, "cody.edit.preInstruction": { "order": 7, "type": "string", "markdownDescription": "A custom instruction to be included at the end of all instructions for edit commands (e.g. \"Write all unit tests with Jest instead of detected framework.\")", - "examples": ["Write all unit tests with Jest instead of detected framework."] + "examples": [ + "Write all unit tests with Jest instead of detected framework." + ] }, "cody.codeActions.enabled": { "order": 11, @@ -1076,8 +1101,14 @@ "cody.telemetry.level": { "order": 99, "type": "string", - "enum": ["all", "off"], - "enumDescriptions": ["Sends usage data and errors.", "Disables all extension telemetry."], + "enum": [ + "all", + "off" + ], + "enumDescriptions": [ + "Sends usage data and errors.", + "Disables all extension telemetry." + ], "markdownDescription": "Controls the telemetry about Cody usage and errors. See [Cody usage and privacy notice](https://about.sourcegraph.com/terms/cody-notice).", "default": "all" }, @@ -1105,7 +1136,11 @@ "cody.autocomplete.advanced.model": { "type": "string", "default": null, - "enum": [null, "starcoder-16b", "starcoder-7b"], + "enum": [ + null, + "starcoder-16b", + "starcoder-7b" + ], "markdownDescription": "Overwrite the model used for code autocompletion inference. This is only supported with the `fireworks` provider" }, "cody.autocomplete.completeSuggestWidgetSelection": { @@ -1125,7 +1160,10 @@ }, "cody.experimental.foldingRanges": { "type": "string", - "enum": ["lsp", "indentation-based"], + "enum": [ + "lsp", + "indentation-based" + ], "enumDescriptions": [ "Use folding ranges that are enabled by default in VS Code, and are usually powered by LSP", "Use custom implementation of folding ranges that is indentation based. This is the implementation that is used by other Cody clients like the JetBrains plugin" @@ -1141,7 +1179,13 @@ "cody.autocomplete.experimental.graphContext": { "type": "string", "default": null, - "enum": [null, "bfg", "bfg-mixed", "tsc", "tsc-mixed"], + "enum": [ + null, + "bfg", + "bfg-mixed", + "tsc", + "tsc-mixed" + ], "markdownDescription": "Use the code graph to retrieve context for autocomplete requests." 
}, "cody.autocomplete.experimental.fireworksOptions": { @@ -1311,7 +1355,9 @@ "untrustedWorkspaces": { "supported": "limited", "description": "Cody only uses providers (configured in `openctx.providers`) from trusted workspaces because providers may execute arbitrary code.", - "restrictedConfigurations": ["openctx.providers"] + "restrictedConfigurations": [ + "openctx.providers" + ] } }, "dependencies": { @@ -1356,6 +1402,7 @@ "diff": "^5.2.0", "fast-xml-parser": "^4.3.2", "glob": "^7.2.3", + "graceful-fs": "^4.2.11", "he": "^1.2.0", "http-proxy-agent": "^7.0.2", "https-proxy-agent": "^7.0.4", @@ -1379,6 +1426,7 @@ "rehype-sanitize": "^6.0.0", "remark-gfm": "^4.0.0", "semver": "^7.5.4", + "signal-exit": "^4.1.0", "socks-proxy-agent": "^8.0.1", "tailwind-merge": "^2.3.0", "tailwindcss": "^3.4.3", @@ -1405,6 +1453,7 @@ "@types/express": "^4.17.17", "@types/fs-extra": "^11.0.4", "@types/glob": "^8.0.0", + "@types/graceful-fs": "^4.1.9", "@types/ini": "^4.1.0", "@types/isomorphic-fetch": "^0.0.39", "@types/js-levenshtein": "^1.1.1", @@ -1415,6 +1464,7 @@ "@types/pako": "^2.0.3", "@types/progress": "^2.0.5", "@types/semver": "^7.5.0", + "@types/signal-exit": "^3.0.4", "@types/unzipper": "^0.10.7", "@types/uuid": "^9.0.2", "@types/vscode": "^1.79.0", diff --git a/vscode/src/graph/bfg/download-bfg.ts b/vscode/src/graph/bfg/download-bfg.ts index 692dd1e265c2..a750e3526c23 100644 --- a/vscode/src/graph/bfg/download-bfg.ts +++ b/vscode/src/graph/bfg/download-bfg.ts @@ -1,10 +1,10 @@ import fs from 'node:fs/promises' import os from 'node:os' import path from 'node:path' -import { lockfile } from '@sourcegraph/cody-shared' import { SemverString } from '@sourcegraph/cody-shared/src/utils' import * as vscode from 'vscode' import { downloadFile, fileExists, unzip } from '../../local-context/utils' +import { waitForLock } from '../../lockfile' import { logDebug, logError } from '../../log' import { Arch, Platform, getOSArch } from '../../os' import { captureException } from '../../services/sentry/sentry' @@ -143,7 +143,7 @@ async function downloadBfgBinary({ const bfgDir = path.dirname(bfgPath) await fs.mkdir(bfgDir, { recursive: true }) - const unlockFn = await lockfile.waitForLock(bfgDir, { + const unlockFn = await waitForLock(bfgDir, { delay: _config.FILE_LOCK_RETRY_DELAY, lockfilePath: `${bfgPath}.lock`, }) diff --git a/vscode/src/local-context/download-symf.ts b/vscode/src/local-context/download-symf.ts index c3d9ca56fd1c..03d55fa856f2 100644 --- a/vscode/src/local-context/download-symf.ts +++ b/vscode/src/local-context/download-symf.ts @@ -1,9 +1,9 @@ import fs from 'node:fs/promises' import os from 'node:os' import path from 'node:path' -import { lockfile } from '@sourcegraph/cody-shared' import type { SemverString } from '@sourcegraph/cody-shared/src/utils' import * as vscode from 'vscode' +import { waitForLock } from '../lockfile' import { logDebug, logError } from '../log' import { type Arch, Platform, getOSArch } from '../os' import { captureException } from '../services/sentry/sentry' @@ -135,7 +135,7 @@ async function downloadSymfBinary({ const symfDir = path.dirname(symfPath) await fs.mkdir(symfDir, { recursive: true }) - const unlockFn = await lockfile.waitForLock(symfDir, { + const unlockFn = await waitForLock(symfDir, { delay: _config.FILE_LOCK_RETRY_DELAY, lockfilePath: `${symfPath}.lock`, }) diff --git a/lib/shared/src/lockfile.ts b/vscode/src/lockfile.ts similarity index 100% rename from lib/shared/src/lockfile.ts rename to vscode/src/lockfile.ts From 
e1c974853a83d3725b5babff46b1001049e4f320 Mon Sep 17 00:00:00 2001 From: Rik Nauta Date: Wed, 3 Jul 2024 13:32:24 +0200 Subject: [PATCH 10/11] Improve Fxitures --- pnpm-lock.yaml | 14 + .../recording.har.yaml | 929 ++++++++++++++++++ vscode/e2e/TODO.md | 6 +- vscode/e2e/utils/vscody/fixture.ts | 307 +++--- vscode/package.json | 3 +- vscode/playwright.v2.config.ts | 34 +- 6 files changed, 1141 insertions(+), 152 deletions(-) create mode 100644 recordings/vscode/e2e_2083974793/example-test-ts_2069432700/Show-off-v2-features_1731159314/recording.har.yaml diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1736fb237f08..16e69d348b09 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -732,6 +732,9 @@ importers: typescript-language-server: specifier: ^4.3.3 version: 4.3.3 + ulidx: + specifier: ^2.3.0 + version: 2.3.0 vite-plugin-svgr: specifier: ^4.2.0 version: 4.2.0(typescript@5.4.2)(vite@5.2.11) @@ -10413,6 +10416,10 @@ packages: - supports-color dev: true + /layerr@2.1.0: + resolution: {integrity: sha512-xDD9suWxfBYeXgqffRVH/Wqh+mqZrQcqPRn0I0ijl7iJQ7vu8gMGPt1Qop59pEW/jaIDNUN7+PX1Qk40+vuflg==} + dev: true + /lazy-universal-dotenv@4.0.0: resolution: {integrity: sha512-aXpZJRnTkpK6gQ/z4nk+ZBLd/Qdp118cvPruLSIQzQNRhKwEcdXCOzXuF55VDqIiuAaY3UGZ10DJtvZzDcvsxg==} engines: {node: '>=14.0.0'} @@ -14605,6 +14612,13 @@ packages: dev: true optional: true + /ulidx@2.3.0: + resolution: {integrity: sha512-36piWNqcdp9hKlQewyeehCaALy4lyx3FodsCxHuV6i0YdexSkjDOubwxEVr2yi4kh62L/0MgyrxqG4K+qtovnw==} + engines: {node: '>=16'} + dependencies: + layerr: 2.1.0 + dev: true + /underscore@1.13.6: resolution: {integrity: sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==} dev: true diff --git a/recordings/vscode/e2e_2083974793/example-test-ts_2069432700/Show-off-v2-features_1731159314/recording.har.yaml b/recordings/vscode/e2e_2083974793/example-test-ts_2069432700/Show-off-v2-features_1731159314/recording.har.yaml new file mode 100644 index 000000000000..9e5a07e5b58b --- /dev/null +++ b/recordings/vscode/e2e_2083974793/example-test-ts_2069432700/Show-off-v2-features_1731159314/recording.har.yaml @@ -0,0 +1,929 @@ +log: + _recordingName: e2e/example.test.ts/Show off v2 features + creator: + comment: persister:fs + name: Polly.JS + version: 6.0.6 + entries: + - _id: fd79a8c38d53d2648b1b68d15d1f77c6 + _order: 0 + cache: {} + request: + bodySize: 217 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "217" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 303 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |- + + query CodyConfigFeaturesResponse { + site { + codyConfigFeatures { + chat + autoComplete + commands + attribution + } + } + } + variables: {} + queryString: + - name: CodyConfigFeaturesResponse + value: null + url: https://sourcegraph.com/.api/graphql?CodyConfigFeaturesResponse + response: + bodySize: 152 + content: + encoding: base64 + mimeType: application/json + size: 152 + text: "[\"H4sIAAAAAAAAAzyLwQqAIBAF/2XPfYFXof/YdC0h3dDnIcR/Dws6DQwznTyDyXSqETLp1\ + N9Wc4j7KoxWpL72YJBBabIQN6jVdJ0yj885TYmzr38DlLg1RM1kAp9VxhjjAQAA//8D\ + AIfOLkJuAAAA\"]" + textDecoded: + data: + site: + codyConfigFeatures: + attribution: false + 
autoComplete: true + chat: true + commands: true + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:04 GMT + - name: content-type + value: application/json + - name: transfer-encoding + value: chunked + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + - name: content-encoding + value: gzip + headersSize: 1328 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:04.040Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 09b59ac55ce3c40d6f9ab8c79846a2c6 + _order: 0 + cache: {} + request: + bodySize: 144 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "144" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 291 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |- + + query ContextFilters { + site { + codyContextFilters(version: V1) { + raw + } + } + } + variables: {} + queryString: + - name: ContextFilters + value: null + url: https://sourcegraph.com/.api/graphql?ContextFilters + response: + bodySize: 111 + content: + encoding: base64 + mimeType: application/json + size: 111 + text: "[\"H4sIAAAAAAAAA6pWSkksSVSyqlY=\",\"Ks4sSQXRyfkplc75eSWpFSVumTklqUXFINGi\ + xHIlq7zSnJza2tpaAAAAAP//AwA2LshlNQAAAA==\"]" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:03 GMT + - name: content-type + value: application/json + - name: transfer-encoding + value: chunked + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + - name: content-encoding + value: gzip + headersSize: 1328 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:03.358Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 0b63afd2aa308ea31206484848de021f + _order: 0 + cache: {} + request: + bodySize: 183 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "183" + - name: accept + value: "*/*" + - name: content-type + value: application/json; 
charset=utf-8 + headersSize: 296 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |2 + + query EvaluateFeatureFlag($flagName: String!) { + evaluateFeatureFlag(flagName: $flagName) + } + variables: + flagName: cody-use-sourcegraph-embeddings + queryString: + - name: EvaluateFeatureFlag + value: null + url: https://sourcegraph.com/.api/graphql?EvaluateFeatureFlag + response: + bodySize: 37 + content: + mimeType: application/json + size: 37 + text: "{\"data\":{\"evaluateFeatureFlag\":null}}" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:02 GMT + - name: content-type + value: application/json + - name: content-length + value: "37" + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + headersSize: 1296 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:02.463Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 97bed95811980e1a8d1c114065f65cdf + _order: 0 + cache: {} + request: + bodySize: 177 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "177" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 296 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |2 + + query EvaluateFeatureFlag($flagName: String!) 
{ + evaluateFeatureFlag(flagName: $flagName) + } + variables: + flagName: cody-autocomplete-tracing + queryString: + - name: EvaluateFeatureFlag + value: null + url: https://sourcegraph.com/.api/graphql?EvaluateFeatureFlag + response: + bodySize: 37 + content: + mimeType: application/json + size: 37 + text: "{\"data\":{\"evaluateFeatureFlag\":null}}" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:02 GMT + - name: content-type + value: application/json + - name: content-length + value: "37" + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + headersSize: 1296 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:02.464Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: a3d3d0f7feb229a85bd641be136a8edd + _order: 0 + cache: {} + request: + bodySize: 181 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "181" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 296 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |2 + + query EvaluateFeatureFlag($flagName: String!) 
{ + evaluateFeatureFlag(flagName: $flagName) + } + variables: + flagName: cody-embeddings-auto-indexing + queryString: + - name: EvaluateFeatureFlag + value: null + url: https://sourcegraph.com/.api/graphql?EvaluateFeatureFlag + response: + bodySize: 37 + content: + mimeType: application/json + size: 37 + text: "{\"data\":{\"evaluateFeatureFlag\":null}}" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:03 GMT + - name: content-type + value: application/json + - name: content-length + value: "37" + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + headersSize: 1296 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:02.749Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 0b8e788d38a93fd0c53c921e768bff46 + _order: 0 + cache: {} + request: + bodySize: 177 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "177" + - name: accept + value: "*/*" + - name: traceparent + value: 00-f0577df6943a2bfaaffc899734ad881e-924cb5ce9934c0cc-01 + - name: content-type + value: application/json; charset=utf-8 + headersSize: 366 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |2 + + query EvaluateFeatureFlag($flagName: String!) 
{ + evaluateFeatureFlag(flagName: $flagName) + } + variables: + flagName: cody-interactive-tutorial + queryString: + - name: EvaluateFeatureFlag + value: null + url: https://sourcegraph.com/.api/graphql?EvaluateFeatureFlag + response: + bodySize: 37 + content: + mimeType: application/json + size: 37 + text: "{\"data\":{\"evaluateFeatureFlag\":null}}" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:03 GMT + - name: content-type + value: application/json + - name: content-length + value: "37" + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + headersSize: 1296 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:03.713Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 4ec84765b3d45b8a8f6392da0304fa77 + _order: 0 + cache: {} + request: + bodySize: 178 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "178" + - name: accept + value: "*/*" + - name: traceparent + value: 00-f3df72a135c611acb86086fc824e6e1f-b32eb6211074902f-01 + - name: content-type + value: application/json; charset=utf-8 + headersSize: 366 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |2 + + query EvaluateFeatureFlag($flagName: String!) 
{ + evaluateFeatureFlag(flagName: $flagName) + } + variables: + flagName: cody-chat-context-preamble + queryString: + - name: EvaluateFeatureFlag + value: null + url: https://sourcegraph.com/.api/graphql?EvaluateFeatureFlag + response: + bodySize: 37 + content: + mimeType: application/json + size: 37 + text: "{\"data\":{\"evaluateFeatureFlag\":null}}" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:03 GMT + - name: content-type + value: application/json + - name: content-length + value: "37" + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + headersSize: 1296 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:03.727Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 62498f2d11167bd2d5d002a799a49338 + _order: 0 + cache: {} + request: + bodySize: 147 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "147" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 289 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |2 + + query FeatureFlags { + evaluatedFeatureFlags() { + name + value + } + } + variables: {} + queryString: + - name: FeatureFlags + value: null + url: https://sourcegraph.com/.api/graphql?FeatureFlags + response: + bodySize: 37 + content: + mimeType: application/json + size: 37 + text: "{\"data\":{\"evaluatedFeatureFlags\":[]}}" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:02 GMT + - name: content-type + value: application/json + - name: content-length + value: "37" + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + headersSize: 1296 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:02.205Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 8d297306aeea324b87ef494954016fba + _order: 0 + cache: {} + request: + bodySize: 164 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "164" 
+ - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 295 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |- + + query SiteIdentification { + site { + siteID + productSubscription { + license { + hashedKey + } + } + } + } + variables: {} + queryString: + - name: SiteIdentification + value: null + url: https://sourcegraph.com/.api/graphql?SiteIdentification + response: + bodySize: 219 + content: + encoding: base64 + mimeType: application/json + size: 219 + text: "[\"H4sIAAAAAAAAAzTLsQ6CMBCA4Xc=\",\"udmF9q4FZhfjyOB87V2liQHSlsEQ391g4r/8\ + 03eAcGMYD6i56f+3K4wwrXuJ+iy8zQ8NcIGtrLLHNu2hxpK3ltflBK8cdak/O3OdVe7\ + 6hhGG6LvQGZv6JJoMYu9EGZWc86jRGiTjDUXqHAnZIIgpOI+G7cCcCD5nXwAAAP//Aw\ + AY9rt+oAAAAA==\"]" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:02 GMT + - name: content-type + value: application/json + - name: transfer-encoding + value: chunked + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + - name: content-encoding + value: gzip + headersSize: 1328 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:02.158Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 203c1896021c3a09dfe619120ea1b725 + _order: 0 + cache: {} + request: + bodySize: 101 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "101" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 368 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |- + + query SiteProductVersion { + site { + productVersion + } + } + variables: {} + queryString: + - name: SiteProductVersion + value: null + url: https://sourcegraph.com/.api/graphql?SiteProductVersion + response: + bodySize: 139 + content: + encoding: base64 + mimeType: application/json + size: 139 + text: "[\"H4sIAAAAAAAAA6pWSkksSVSyqlY=\",\"Ks4sSQXRBUX5KaXJJWGpRcWZ+XlKVkpGFgZm\ + pgbxRgZGJroG5roGRvGmeia6hkmWJqaWyYlJ5kbJSrW1tQAAAAD//wMA1rHtQ0kAAAA=\ + \"]" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:02 GMT + - name: content-type + value: application/json + - name: transfer-encoding + value: chunked + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: 
strict-transport-security + value: max-age=31536000; includeSubDomains; preload + - name: content-encoding + value: gzip + headersSize: 1328 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:02.161Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + pages: [] + version: "1.2" diff --git a/vscode/e2e/TODO.md b/vscode/e2e/TODO.md index 915bfa1e8840..54669d24059e 100644 --- a/vscode/e2e/TODO.md +++ b/vscode/e2e/TODO.md @@ -1,6 +1,10 @@ -- [ ] Fail test on proxy failure +- [x] Fail test on proxy failure - [ ] Credentials loading - [ ] Configurable endpoint proxies - [ ] Migrate existing e2e tests - [ ] Fixutres repo - [ ] Wait for Cody progress-bars +- [ ] Better DX around Telemetry evaluation +- [ ] Fuzzy testing with API delays +- [ ] Allow logging of requests + debugger modification (nullifying timeout) +- [ ] Copy kill-tree from vscode (https://www.npmjs.com/package/kill-sync) diff --git a/vscode/e2e/utils/vscody/fixture.ts b/vscode/e2e/utils/vscody/fixture.ts index 5ed3b9fc4268..74186ef62c3d 100644 --- a/vscode/e2e/utils/vscody/fixture.ts +++ b/vscode/e2e/utils/vscody/fixture.ts @@ -7,16 +7,21 @@ import fs from 'node:fs/promises' import 'node:http' import 'node:https' import type { AddressInfo } from 'node:net' -import os from 'node:os' import path from 'node:path' +import { EventEmitter } from 'node:stream' import { promisify } from 'node:util' import pspawn from '@npmcli/promise-spawn' -import { test as _test, expect, mergeTests } from '@playwright/test' +import { type TestInfo, test as _test, expect, mergeTests } from '@playwright/test' import NodeHttpAdapter from '@pollyjs/adapter-node-http' import { type EXPIRY_STRATEGY, type MODE, Polly } from '@pollyjs/core' import type { ArrayContainsAll } from '@sourcegraph/cody-shared/src/utils' -import { ConsoleReporter, type ProgressReport, ProgressReportStage } from '@vscode/test-electron' +import { + ConsoleReporter, + type ProgressReport, + ProgressReportStage, + resolveCliArgsFromVSCodeExecutablePath, +} from '@vscode/test-electron' import { downloadAndUnzipVSCode } from '@vscode/test-electron/out/download' import express from 'express' import { copy as copyExt } from 'fs-extra' @@ -24,7 +29,6 @@ import { createProxyMiddleware } from 'http-proxy-middleware' import type { loggerPlugin as ProxyMiddlewarePlugin } from 'http-proxy-middleware' import zod from 'zod' -import { EventEmitter } from 'node:stream' import { waitForLock } from '../../../src/lockfile' import { CodyPersister } from '../../../src/testutils/CodyPersister' import { defaultMatchRequestsBy } from '../../../src/testutils/polly' @@ -44,14 +48,19 @@ const workerOptionsSchema = zod.object({ 'DEPRECATED: The .git root of this project. 
Might still get used for some path defaults so must be set' ), vscodeExtensionCacheDir: zod.string(), + globalTmpDir: zod.string(), vscodeTmpDir: zod.string(), + vscodeServerTmpDir: zod.string(), binaryTmpDir: zod.string(), recordingDir: zod.string(), + vscodeServerPortRange: zod.tuple([zod.number(), zod.number()]).default([33100, 33200]), + keepRuntimeDirs: zod.enum(['all', 'failed', 'none']).default('none'), + allowGlobalVSCodeModification: zod.boolean().default(false), }) const testOptionsSchema = zod.object({ vscodeVersion: zod.string().default('stable'), - vscodeExtensions: zod.array(zod.string()).default([]), + vscodeExtensions: zod.array(zod.string().toLowerCase()).default([]), templateWorkspaceDir: zod.string(), recordingMode: zod.enum([ 'passthrough', @@ -106,7 +115,18 @@ const optionsFixture: ReturnType< ...schemaOptions(testOptionsSchema, 'test'), validWorkerOptions: [ async ( - { repoRootDir, binaryTmpDir, recordingDir, vscodeTmpDir, vscodeExtensionCacheDir }, + { + repoRootDir, + binaryTmpDir, + recordingDir, + globalTmpDir, + vscodeTmpDir, + vscodeServerTmpDir, + vscodeExtensionCacheDir, + keepRuntimeDirs, + vscodeServerPortRange, + allowGlobalVSCodeModification, + }, use ) => { const validOptionsWithDefaults = await workerOptionsSchema.safeParseAsync( @@ -114,8 +134,13 @@ const optionsFixture: ReturnType< repoRootDir, binaryTmpDir, recordingDir, + globalTmpDir, vscodeTmpDir, + vscodeServerTmpDir, vscodeExtensionCacheDir, + keepRuntimeDirs, + vscodeServerPortRange, + allowGlobalVSCodeModification, } satisfies { [key in keyof WorkerOptions]-?: WorkerOptions[key] }, {} ) @@ -177,9 +202,10 @@ const optionsFixture: ReturnType< const implFixture = _test.extend({ serverRootDir: [ - // biome-ignore lint/correctness/noEmptyPattern: - async ({}, use, testInfo) => { - const dir = await fs.mkdtemp(path.resolve(os.tmpdir(), 'test-vscode-server-')) + async ({ validWorkerOptions }, use, testInfo) => { + const dir = await fs.mkdtemp( + path.resolve(validWorkerOptions.globalTmpDir, 'test-vscode-server-') + ) await use(dir) const attachmentPromises = [] const logDir = path.join(dir, 'data/logs') @@ -196,13 +222,19 @@ const implFixture = _test.extend({ if (attachmentPromises.length > 0) { await Promise.allSettled(attachmentPromises) } - await retry(() => fs.rm(dir, { force: true, recursive: true }), 20, 500) + if ( + validWorkerOptions.keepRuntimeDirs === 'none' || + (validWorkerOptions.keepRuntimeDirs === 'failed' && + ['failed', 'timedOut'].includes(testInfo.status ?? 'unknown')) + ) { + await retry(() => fs.rm(logDir, { force: true, recursive: true }), 20, 500) + } }, { scope: 'test' }, ], workspaceDir: [ - async ({ validOptions }, use) => { - const dir = await fs.mkdtemp(path.resolve(os.tmpdir(), 'test-workspace-')) + async ({ validOptions }, use, testInfo) => { + const dir = await fs.mkdtemp(path.resolve(validOptions.globalTmpDir, 'test-workspace-')) await copyExt(path.resolve(process.cwd(), validOptions.templateWorkspaceDir), dir, { overwrite: true, @@ -210,7 +242,13 @@ const implFixture = _test.extend({ dereference: true, // we can't risk the test modifying the symlink }) await use(dir) - await retry(() => fs.rm(dir, { force: true, recursive: true }), 20, 500) + if ( + validOptions.keepRuntimeDirs === 'none' || + (validOptions.keepRuntimeDirs === 'failed' && + ['failed', 'timedOut'].includes(testInfo.status ?? 
'unknown')) + ) { + await retry(() => fs.rm(dir, { force: true, recursive: true }), 20, 500) + } }, { scope: 'test', @@ -219,7 +257,7 @@ const implFixture = _test.extend({ //#region Polly & Proxies sourcegraphMitM: [ // biome-ignore lint/correctness/noEmptyPattern: - async ({}, use) => { + async ({}, use, testInfo) => { const app = express() //TODO: Credentials & Configuration TODO: I can see a use-case where //you can switch endpoints dynamically. For instance wanting to try @@ -236,7 +274,7 @@ const implFixture = _test.extend({ target, changeOrigin: true, ejectPlugins: true, - plugins: [failOrRetryRecordingOnError(testFailureSignal)], + plugins: [failOrRetryRecordingOnError(testFailureSignal, testInfo)], }) app.use(middleware) let server: ReturnType = null as any @@ -333,47 +371,20 @@ const implFixture = _test.extend({ ], //#region vscode agent vscodeUI: [ - async ({ validOptions, serverRootDir, sourcegraphMitM, page, context }, use, testInfo) => { + async ({ validOptions, serverRootDir, sourcegraphMitM, page }, use, testInfo) => { const executableDir = path.resolve(process.cwd(), validOptions.vscodeTmpDir) await fs.mkdir(executableDir, { recursive: true }) - + const serverExecutableDir = path.resolve(process.cwd(), validOptions.vscodeServerTmpDir) + await fs.mkdir(serverExecutableDir, { recursive: true }) // We nullify the time it takes to download VSCode as it can vary wildly! - const electronExecutable = await stretchTimeout( + const [codeCliPath, codeTunnelCliPath] = await stretchTimeout( () => downloadOrWaitForVSCode({ validOptions, executableDir }), { max: DOWNLOAD_GRACE_TIME, testInfo, } ) - const serverExecutableDir = path.join( - executableDir, - path.relative(executableDir, electronExecutable).split(path.sep)[0], - 'server' - ) - // The location of the executable is platform dependent, try the - // first location that works. - const vscodeExecutable = await Promise.any( - [ - '../Resources/app/bin', // darwin - 'bin', // linux and windows - ].map(async binPath => { - const cliExecutableDir = path.resolve(path.dirname(electronExecutable), binPath) - - // find either a code or code.exe file - const vscodeExecutableName = (await fs.readdir(cliExecutableDir)).find( - file => file.endsWith('code-tunnel') || file.endsWith('code-tunnel.exe') - ) - if (!vscodeExecutableName) { - throw new Error(`Could not find a vscode executable in ${cliExecutableDir}`) - } - return path.join(cliExecutableDir, vscodeExecutableName) - }) - ).catch(async () => { - throw new Error( - `Could not find a vscode executable under ${path.dirname(electronExecutable)}` - ) - }) // Machine settings should simply serve as a baseline to ensure // tests by default work smoothly. Any test specific preferences // should be set in workspace settings instead. @@ -404,76 +415,97 @@ const implFixture = _test.extend({ // Here we install the extensions requested. To speed things up we make use of a shared extension cache that we symlink to. 
const extensionsDir = path.join(serverRootDir, 'extensions') await fs.mkdir(extensionsDir, { recursive: true }) - + const userDataDir = path.join(serverRootDir, 'data/User') + await fs.mkdir(userDataDir, { recursive: true }) if (validOptions.vscodeExtensions.length > 0) { //TODO(rnauta): Add lockfile wrapper to avoid race conditions - const sharedCacheDir = path.resolve(process.cwd(), validOptions.vscodeExtensionCacheDir) - await fs.mkdir(sharedCacheDir, { recursive: true }) - const releaseLock = await waitForLock(sharedCacheDir, { - lockfilePath: path.join(sharedCacheDir, '.lock'), + const sharedExtensionsDir = path.resolve( + process.cwd(), + validOptions.vscodeExtensionCacheDir + ) + if (!sharedExtensionsDir.endsWith('.vscode-server/extensions')) { + //right now there's no way of setting the extension installation directoy. Instead they are always install in ~/.vscode-server/extensions + throw new Error( + "Unfortunately VSCode doesn't provide a way yet to cache extensions isolated from a global installation. Please use ~/.code-server/extensions for now." + ) + } + await fs.mkdir(sharedExtensionsDir, { recursive: true }) + const releaseLock = await waitForLock(sharedExtensionsDir, { + lockfilePath: path.join(sharedExtensionsDir, '.lock'), delay: 1000, }) try { const args = [ - `--extensions-dir=${sharedCacheDir.replace(/ /g, '\\ ')}`, // cli doesn't handle quotes properly so just escape spaces, - '--install-extension', - ...validOptions.vscodeExtensions, + ...validOptions.vscodeExtensions.flatMap(v => ['--install-extension', v]), ] - await pspawn(vscodeExecutable, args) + const res = await pspawn(codeTunnelCliPath, args, { + env: { + ...process.env, + // VSCODE_EXTENSIONS: sharedExtensionsDir, This doesn't work either + }, + stdio: ['inherit', 'inherit', 'inherit'], + }) + } catch (e) { + console.log('I AM HERE') + if ( + typeof e === 'string' && + e.includes('code version use stable --install-dir /path/to/installation') + ) { + } + console.error(e) + throw e } finally { releaseLock() } //we now read all the folders in the shared cache dir and //symlink the relevant ones to our isolated extension dir - for (const sharedExtensionDir of await fs.readdir(sharedCacheDir)) { + for (const sharedExtensionDir of await fs.readdir(sharedExtensionsDir)) { const [_, extensionName] = /^(.*)-\d+\.\d+\.\d+$/.exec(sharedExtensionDir) ?? [] - if (!validOptions.vscodeExtensions.includes(extensionName)) { + if (!validOptions.vscodeExtensions.includes(extensionName?.toLowerCase())) { continue } - const sharedExtensionPath = path.join(sharedCacheDir, sharedExtensionDir) + const sharedExtensionPath = path.join(sharedExtensionsDir, sharedExtensionDir) const extensionPath = path.join(extensionsDir, sharedExtensionDir) - await fs.symlink(sharedExtensionPath, extensionPath, 'dir') + await fs.symlink(sharedExtensionPath, extensionPath) } } + //TODO: Fixed Port Ranges // We can now start the server + const connectionToken = '0000-0000' + const serverPort = validOptions.vscodeServerPortRange[0] + testInfo.parallelIndex + if (serverPort > validOptions.vscodeServerPortRange[1]) { + throw new Error( + 'Port range is exhausted. Either reduce the amount of workers or increase the port range.' 
+ ) + } const args = [ 'serve-web', + `--user-data-dir=${userDataDir}`, '--accept-server-license-terms', - '--port=0', - `--cli-data-dir=${serverExecutableDir.replace(/ /g, '\\ ')}`, - `--server-data-dir=${serverRootDir.replace(/ /g, '\\ ')}`, - `--extensions-dir=${extensionsDir.replace(/ /g, '\\ ')}`, // cli doesn't handle quotes properly so just escape spaces, + `--port=${serverPort}`, + `--connection-token=${connectionToken}`, + `--cli-data-dir=${serverExecutableDir}`, + `--server-data-dir=${serverRootDir}`, + `--extensions-dir=${extensionsDir}`, // cli doesn't handle quotes properly so just escape spaces, ] - //TODO(rnauta): better typing + const env = { - // inherit environment - // TODO: Check why this was necessary. Shouldn't be needed - // ...process.env, - //TODO: all env variables + ...process.env, + ...(['stable', 'insiders'].includes(validOptions.vscodeVersion) + ? { VSCODE_CLI_QUALITY: validOptions.vscodeVersion } + : { VSCODE_CLI_COMMIT: validOptions.vscodeVersion }), TESTING_DOTCOM_URL: sourcegraphMitM.endpoint, CODY_TESTING_BFG_DIR: path.resolve(process.cwd(), validOptions.binaryTmpDir), CODY_TESTING_SYMF_DIR: path.resolve(process.cwd(), validOptions.binaryTmpDir), } - const codeProcess = spawn(vscodeExecutable, args, { + const codeProcess = spawn(codeTunnelCliPath, args, { env, - stdio: ['inherit', 'pipe', 'pipe'], + stdio: ['inherit', 'ignore', 'inherit'], detached: false, }) - if (!codeProcess.pid) { - throw new Error('Could not start code process') - } - const token = await waitForVSCodeUI(codeProcess.stdout) - if (!token) { - throw new Error("VSCode did't provide an auth token") - } - // We started vscode with port 0 which means a random port was - // assigned. However VSCode still reports the port as 0 themselves, - // so we need to do some magic to get the actual port. - // TODO: this might not be very cross-platform - const port = await getPortForPid(codeProcess.pid) - const config = { url: `http://127.0.0.1:${port}/`, token: token } + const config = { url: `http://127.0.0.1:${serverPort}/`, token: connectionToken } await stretchTimeout(() => waitForVSCodeServer({ url: config.url, serverExecutableDir }), { max: DOWNLOAD_GRACE_TIME, testInfo, @@ -553,12 +585,17 @@ fixture.beforeAll(async () => { * Waits for server components to be downloaded and that the server is ready to * accept connections */ -async function waitForVSCodeServer(config: { url: string; serverExecutableDir: string }) { +async function waitForVSCodeServer(config: { + url: string + serverExecutableDir: string + maxConnectionRetries?: number +}) { const releaseServerDownloadLock = await waitForLock(config.serverExecutableDir, { delay: 1000, lockfilePath: path.join(config.serverExecutableDir, '.lock'), }) try { + let connectionIssueTries = config.maxConnectionRetries ?? 
5 while (true) { try { const res = await fetch(config.url) @@ -572,7 +609,12 @@ async function waitForVSCodeServer(config: { url: string; serverExecutableDir: s } else { console.error(`Unexpected status code ${res.status}`) } - } catch {} + } catch (err) { + connectionIssueTries-- + if (connectionIssueTries <= 0) { + throw err + } + } await new Promise(resolve => setTimeout(resolve, 1000)) } } finally { @@ -580,24 +622,6 @@ async function waitForVSCodeServer(config: { url: string; serverExecutableDir: s } } -function waitForVSCodeUI(stdout: NodeJS.ReadableStream): Promise { - return new Promise((resolve, reject) => { - const listener = (data: Buffer) => { - if (data.toString().includes('available at')) { - clearTimeout(timeout) - stdout.removeListener('data', listener) - const [_, token] = /\?tkn=([a-zA-Z0-9-]+)/.exec(data.toString()) ?? [] - resolve(token) - } - } - const timeout = setTimeout(() => { - stdout.removeListener('data', listener) - reject(new Error('Could not start code process')) - }, 10_000 /*TODO(rnauta): make this configurable*/) - stdout.on('data', listener) - }) -} - /** * This ensures only a single process is actually downloading VSCode */ @@ -605,50 +629,46 @@ async function downloadOrWaitForVSCode({ executableDir, validOptions, }: Pick & { executableDir: string }) { - let electronExecutable = '' - const lockfilePath = path.join(executableDir, `${validOptions.vscodeVersion}.lock`) + const lockfilePath = path.join(executableDir, '.lock') const releaseLock = await waitForLock(executableDir, { lockfilePath, delay: 500 }) try { - electronExecutable = await downloadAndUnzipVSCode({ + const electronPath = await downloadAndUnzipVSCode({ cachePath: executableDir, - version: validOptions.vscodeVersion, + version: 'stable', reporter: new CustomConsoleReporter(process.stdout.isTTY), }) + const installPath = path.join( + executableDir, + path.relative(executableDir, electronPath).split(path.sep)[0] + ) + const [cliPath] = resolveCliArgsFromVSCodeExecutablePath(electronPath) + //replce code with code-tunnel(.exe) either if the last binary or if code.exe + const tunnelPath = cliPath + .replace(/code$/, 'code-tunnel') + .replace(/code\.(?:exe|cmd)$/, 'code-tunnel.exe') + + // we need to make sure vscode has global configuration set + const res = await pspawn(tunnelPath, ['version', 'show'], { + stdio: ['inherit', 'pipe', 'inherit'], + }) + if (res.code !== 0 || res.stdout.includes('No existing installation found')) { + if (!validOptions.allowGlobalVSCodeModification) { + throw new Error('Global VSCode path modification is not allowed') + } + await pspawn(tunnelPath, ['version', 'use', 'stable', '--install-dir', installPath], { + stdio: ['inherit', 'inherit', 'inherit'], + }) + } else if (res.code !== 0) { + throw new Error(JSON.stringify(res)) + } + return [cliPath, tunnelPath] + //If this fails I assume we haven't configured VSCode globally. Since + //getting portable mode to work is annoying we just set this + //installation as the global one. 
} finally { releaseLock() } - return electronExecutable -} - -async function getPortForPid(pid: number): Promise { - const platform = process.platform - let command: string - - switch (platform) { - case 'win32': - command = `netstat -ano | findstr ${pid}` - break - case 'darwin': - // Use `lsof` with specific options for macOS - command = `lsof -nP -i4TCP -a -p ${pid} | grep LISTEN` - break - case 'linux': - command = `ss -tlnp | grep ${pid}` - break - default: - throw new Error(`Unsupported platform: ${platform}`) - } - - const { stdout } = await exec(command, { encoding: 'utf-8' }) - const lines = stdout.split('\n') - for (const line of lines) { - const match = line.match(/:(\d+)\s/) - if (match?.[1]) { - return Number.parseInt(match[1], 10) - } - } - throw new Error(`No listening port found for PID ${pid}`) } async function getFilesRecursive(dir: string): Promise> { @@ -676,17 +696,12 @@ async function getFilesRecursive(dir: string): Promise> { } function failOrRetryRecordingOnError( - emitter: EventEmitter<{ error: [Error] }> + emitter: EventEmitter<{ error: [Error] }>, + testInfo: TestInfo ): typeof ProxyMiddlewarePlugin { return (proxyServer, options) => { + //TODO(rnauta): retry with different settings if user accepts in cli prompt proxyServer.on('error', (err, req, res) => { - if ( - err.name === 'PollyError' && - err.message.includes('Recording for the following request is not found') - ) { - //TODO: allow re-trying with recording temporarily enabled - err.message = `Polly recording missing for ${[req.method]}${req.url}` - } emitter.emit('error', err) }) } diff --git a/vscode/package.json b/vscode/package.json index eb9b2ff18727..1c3e3b017b99 100644 --- a/vscode/package.json +++ b/vscode/package.json @@ -42,7 +42,7 @@ "test:e2e:run": "playwright test", "test:e2e2": "pnpm -s test:e2e2:deps && pnpm -s build:dev:desktop && pnpm -s test:e2e2:run", "test:e2e2:run": "playwright test -c playwright.v2.config.ts", - "test:e2e2:deps": "playwright install chromium", + "test:e2e2:deps": "playwright install chromium --with-deps", "test:integration": "tsc --build ./test/integration && pnpm run -s build:dev:desktop && node --inspect -r ts-node/register dist/tsc/test/integration/main.js", "test:unit": "vitest", "bench": "vitest bench", @@ -1492,6 +1492,7 @@ "progress": "^2.0.3", "react-head": "^3.4.2", "typescript-language-server": "^4.3.3", + "ulidx": "^2.3.0", "vite-plugin-svgr": "^4.2.0", "vscode-jsonrpc": "^8.2.0", "vscode-languageserver-protocol": "^3.17.5", diff --git a/vscode/playwright.v2.config.ts b/vscode/playwright.v2.config.ts index eb07cb9d3d31..ef88753fa122 100644 --- a/vscode/playwright.v2.config.ts +++ b/vscode/playwright.v2.config.ts @@ -1,12 +1,27 @@ +import { mkdirSync, readdirSync, rmSync } from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' import { type ReporterDescription, defineConfig } from '@playwright/test' +import { ulid } from 'ulidx' import type { SymlinkExtensions } from './e2e/utils/symlink-extensions.setup' import type { TestOptions, WorkerOptions } from './e2e/utils/vscody' + const isWin = process.platform.startsWith('win') const isCI = !!process.env.CI // This makes sure that each run gets a unique run id. This shouldn't really be // used other than to invalidate lockfiles etc. 
-process.env.RUN_ID = process.env.RUN_ID || new Date().toISOString() +process.env.RUN_ID = process.env.RUN_ID || ulid() + +const globalTmpDir = path.resolve(__dirname, `../.test/runs/${process.env.RUN_ID}/`) +mkdirSync(globalTmpDir, { recursive: true }) +// get previous runs and delete them +for (const run of readdirSync(path.resolve(__dirname, '../.test/runs/'))) { + if (run !== process.env.RUN_ID) { + console.log('clearing previous run', run) + rmSync(path.resolve(__dirname, `../.test/runs/${run}`), { force: true, recursive: true }) + } +} export default defineConfig({ workers: '50%', @@ -21,12 +36,15 @@ export default defineConfig({ // You can override options easily per project/worker/test so they are // unlikely to need to be modified here. These are just some sane // defaults + browserName: 'chromium', repoRootDir: '../', //deprecated vscodeExtensions: ['sourcegraph.cody-ai'], symlinkExtensions: ['.'], - vscodeVersion: 'stable', + globalTmpDir: `../.test/runs/${process.env.RUN_ID}/`, //os.tmpdir(), + vscodeVersion: 'stable', + vscodeTmpDir: '../.test/global/vscode', - vscodeExtensionCacheDir: '../.test/global/vscode-extensions', + vscodeExtensionCacheDir: `${os.homedir()}/.vscode-server/extensions`, + vscodeServerTmpDir: '../.test/global/vscode-server', binaryTmpDir: '../.test/global/bin', recordIfMissing: typeof process.env.CODY_RECORD_IF_MISSING === 'string' @@ -34,13 +52,15 @@ export default defineConfig({ : false, recordingMode: (process.env.CODY_RECORDING_MODE as any) ?? 'replay', recordingDir: '../recordings/vscode/', - + keepUnusedRecordings: true, bypassCSP: true, locale: 'en-US', timezoneId: 'America/Los_Angeles', permissions: ['clipboard-read', 'clipboard-write'], geolocation: { longitude: -122.40825783227943, latitude: 37.78124453182266 }, acceptDownloads: false, + keepRuntimeDirs: 'all', + allowGlobalVSCodeModification: isCI, trace: { mode: isCI ?
'retain-on-failure' : 'on', attachments: true, @@ -67,6 +87,9 @@ export default defineConfig({ testMatch: ['**/*.test.ts'], testIgnore: ['issues/**/*', 'utils/**/*'], dependencies: ['symlink-extensions'], + use: { + // recordIfMissing: true, //uncomment for quick manual override + }, }, { name: 'issues', @@ -74,6 +97,9 @@ export default defineConfig({ retries: 0, testMatch: ['**/*.test.ts'], dependencies: ['symlink-extensions'], + use: { + // recordIfMissing: true, //uncomment for quick manual override + }, }, ], reporter: [ From 77e6f2e0e7bab4c5add073ac3614597f1d2f9d8c Mon Sep 17 00:00:00 2001 From: Rik Nauta Date: Wed, 3 Jul 2024 13:53:38 +0200 Subject: [PATCH 11/11] Fix linting --- .../src/cli/cody-bench/strategy-unit-test.ts | 2 +- vscode/e2e/issues/CODY-2392.test.ts | 3 +- vscode/e2e/utils/vscody/fixture.ts | 9 +-- vscode/package.json | 74 ++++--------------- 4 files changed, 20 insertions(+), 68 deletions(-) diff --git a/agent/src/cli/cody-bench/strategy-unit-test.ts b/agent/src/cli/cody-bench/strategy-unit-test.ts index dbd5f80b4743..e2b0fa129e05 100644 --- a/agent/src/cli/cody-bench/strategy-unit-test.ts +++ b/agent/src/cli/cody-bench/strategy-unit-test.ts @@ -3,7 +3,7 @@ import _ from 'lodash' import * as vscode from 'vscode' import yaml from 'yaml' import type { RpcMessageHandler } from '../../../../vscode/src/jsonrpc/jsonrpc' -import { fileExists } from '../../../../vscode/src/local-context/download-symf' +import { fileExists } from '../../../../vscode/src/local-context/utils' import { redactAuthorizationHeader } from '../../../../vscode/src/testutils/CodyPersister' import { TestClient } from '../../TestClient' import { getLanguageForFileName } from '../../language' diff --git a/vscode/e2e/issues/CODY-2392.test.ts b/vscode/e2e/issues/CODY-2392.test.ts index e5d0a8c08cb8..387ee14e0222 100644 --- a/vscode/e2e/issues/CODY-2392.test.ts +++ b/vscode/e2e/issues/CODY-2392.test.ts @@ -1,6 +1,7 @@ -// // CTX(linear-issue): https://linear.app/sourcegraph/issue/CODY-2392 +// CTX(linear-issue): https://linear.app/sourcegraph/issue/CODY-2392 import { expect } from '@playwright/test' import { fixture as test } from '../utils/vscody' +//TODO(rnauta): wjow test.fixme('CODY-2392', () => { expect(true).toBeFalsy() diff --git a/vscode/e2e/utils/vscody/fixture.ts b/vscode/e2e/utils/vscody/fixture.ts index 74186ef62c3d..b2f871fd009b 100644 --- a/vscode/e2e/utils/vscody/fixture.ts +++ b/vscode/e2e/utils/vscody/fixture.ts @@ -1,7 +1,7 @@ // TODO/WARNING/APOLOGY: I know that this is an unreasonably large file right // now. I'll refactor and cut it down this down once everything is working // first. 
-import { exec as _exec, spawn } from 'node:child_process' +import { spawn } from 'node:child_process' import type { Dirent } from 'node:fs' import fs from 'node:fs/promises' import 'node:http' @@ -9,7 +9,6 @@ import 'node:https' import type { AddressInfo } from 'node:net' import path from 'node:path' import { EventEmitter } from 'node:stream' -import { promisify } from 'node:util' import pspawn from '@npmcli/promise-spawn' import { type TestInfo, test as _test, expect, mergeTests } from '@playwright/test' @@ -34,8 +33,6 @@ import { CodyPersister } from '../../../src/testutils/CodyPersister' import { defaultMatchRequestsBy } from '../../../src/testutils/polly' import { retry, stretchTimeout } from '../helpers' -const exec = promisify(_exec) - export type Directory = string const DOWNLOAD_GRACE_TIME = 5 * 60 * 1000 //5 minutes @@ -377,7 +374,7 @@ const implFixture = _test.extend({ const serverExecutableDir = path.resolve(process.cwd(), validOptions.vscodeServerTmpDir) await fs.mkdir(serverExecutableDir, { recursive: true }) // We nullify the time it takes to download VSCode as it can vary wildly! - const [codeCliPath, codeTunnelCliPath] = await stretchTimeout( + const [_, codeTunnelCliPath] = await stretchTimeout( () => downloadOrWaitForVSCode({ validOptions, executableDir }), { max: DOWNLOAD_GRACE_TIME, @@ -438,7 +435,7 @@ const implFixture = _test.extend({ const args = [ ...validOptions.vscodeExtensions.flatMap(v => ['--install-extension', v]), ] - const res = await pspawn(codeTunnelCliPath, args, { + await pspawn(codeTunnelCliPath, args, { env: { ...process.env, // VSCODE_EXTENSIONS: sharedExtensionsDir, This doesn't work either diff --git a/vscode/package.json b/vscode/package.json index 34e8ae974dd9..3c72b681b039 100644 --- a/vscode/package.json +++ b/vscode/package.json @@ -54,12 +54,7 @@ "version-bump:patch": "RELEASE_TYPE=patch ts-node-transpile-only ./scripts/version-bump.ts", "version-bump:dry-run": "RELEASE_TYPE=prerelease ts-node-transpile-only ./scripts/version-bump.ts" }, - "categories": [ - "Programming Languages", - "Machine Learning", - "Snippets", - "Education" - ], + "categories": ["Programming Languages", "Machine Learning", "Snippets", "Education"], "keywords": [ "ai", "openai", @@ -112,11 +107,7 @@ }, "main": "./dist/extension.node.js", "browser": "./dist/extension.web.js", - "activationEvents": [ - "onLanguage", - "onStartupFinished", - "onWebviewPanel:cody.chatPanel" - ], + "activationEvents": ["onLanguage", "onStartupFinished", "onWebviewPanel:cody.chatPanel"], "contributes": { "walkthroughs": [ { @@ -712,17 +703,13 @@ }, { "command": "cody.supercompletion.jumpTo", - "args": [ - "next" - ], + "args": ["next"], "key": "shift+ctrl+down", "when": "cody.activated && !editorReadonly && cody.hasActionableSupercompletion" }, { "command": "cody.supercompletion.jumpTo", - "args": [ - "previous" - ], + "args": ["previous"], "key": "shift+ctrl+up", "when": "cody.activated && !editorReadonly && cody.hasActionableSupercompletion" } @@ -1013,20 +1000,12 @@ "order": 2, "type": "string", "markdownDescription": "A Git repository URL to use instead of allowing Cody to infer the Git repository from the workspace.", - "examples": [ - "https://github.com/sourcegraph/cody", - "ssh://git@github.com/sourcegraph/cody" - ] + "examples": ["https://github.com/sourcegraph/cody", "ssh://git@github.com/sourcegraph/cody"] }, "cody.useContext": { "order": 99, "type": "string", - "enum": [ - "embeddings", - "keyword", - "blended", - "none" - ], + "enum": ["embeddings", "keyword", "blended", "none"], 
"default": "blended", "markdownDescription": "Controls which context providers Cody uses for chat, commands and inline edits. Use 'blended' for best results. For debugging other context sources, 'embeddings' will use an embeddings-based index if available. 'keyword' will use a search-based index. 'none' will not use embeddings or search-based indexes." }, @@ -1072,17 +1051,13 @@ "order": 6, "type": "string", "markdownDescription": "A custom instruction to be included at the start of all chat messages (e.g. \"Answer all my questions in Spanish.\")", - "examples": [ - "Answer all my questions in Spanish." - ] + "examples": ["Answer all my questions in Spanish."] }, "cody.edit.preInstruction": { "order": 7, "type": "string", "markdownDescription": "A custom instruction to be included at the end of all instructions for edit commands (e.g. \"Write all unit tests with Jest instead of detected framework.\")", - "examples": [ - "Write all unit tests with Jest instead of detected framework." - ] + "examples": ["Write all unit tests with Jest instead of detected framework."] }, "cody.codeActions.enabled": { "order": 11, @@ -1128,14 +1103,8 @@ "cody.telemetry.level": { "order": 99, "type": "string", - "enum": [ - "all", - "off" - ], - "enumDescriptions": [ - "Sends usage data and errors.", - "Disables all extension telemetry." - ], + "enum": ["all", "off"], + "enumDescriptions": ["Sends usage data and errors.", "Disables all extension telemetry."], "markdownDescription": "Controls the telemetry about Cody usage and errors. See [Cody usage and privacy notice](https://about.sourcegraph.com/terms/cody-notice).", "default": "all" }, @@ -1163,11 +1132,7 @@ "cody.autocomplete.advanced.model": { "type": "string", "default": null, - "enum": [ - null, - "starcoder-16b", - "starcoder-7b" - ], + "enum": [null, "starcoder-16b", "starcoder-7b"], "markdownDescription": "Overwrite the model used for code autocompletion inference. This is only supported with the `fireworks` provider" }, "cody.autocomplete.completeSuggestWidgetSelection": { @@ -1187,10 +1152,7 @@ }, "cody.experimental.foldingRanges": { "type": "string", - "enum": [ - "lsp", - "indentation-based" - ], + "enum": ["lsp", "indentation-based"], "enumDescriptions": [ "Use folding ranges that are enabled by default in VS Code, and are usually powered by LSP", "Use custom implementation of folding ranges that is indentation based. This is the implementation that is used by other Cody clients like the JetBrains plugin" @@ -1206,13 +1168,7 @@ "cody.autocomplete.experimental.graphContext": { "type": "string", "default": null, - "enum": [ - null, - "bfg", - "bfg-mixed", - "tsc", - "tsc-mixed" - ], + "enum": [null, "bfg", "bfg-mixed", "tsc", "tsc-mixed"], "markdownDescription": "Use the code graph to retrieve context for autocomplete requests." }, "cody.autocomplete.experimental.fireworksOptions": { @@ -1382,9 +1338,7 @@ "untrustedWorkspaces": { "supported": "limited", "description": "Cody only uses providers (configured in `openctx.providers`) from trusted workspaces because providers may execute arbitrary code.", - "restrictedConfigurations": [ - "openctx.providers" - ] + "restrictedConfigurations": ["openctx.providers"] } }, "dependencies": {