From b80e931b5c8b1d5c7ce44d3a0ad96e5884f4c422 Mon Sep 17 00:00:00 2001 From: Denys Kuchma Date: Wed, 4 Feb 2026 23:33:28 +0200 Subject: [PATCH 1/2] add aiTrace plugin for AI-assisted test debugging --- docs/aitrace.md | 279 +++++++++++++++++++++++++++ docs/plugins.md | 108 +++++++++++ examples/codecept.config.js | 8 + lib/plugin/aiTrace.js | 311 +++++++++++++++++++++++++++++++ test/unit/plugin/aiTrace_test.js | 277 +++++++++++++++++++++++++++ 5 files changed, 983 insertions(+) create mode 100644 docs/aitrace.md create mode 100644 lib/plugin/aiTrace.js create mode 100644 test/unit/plugin/aiTrace_test.js diff --git a/docs/aitrace.md b/docs/aitrace.md new file mode 100644 index 000000000..540f51dc0 --- /dev/null +++ b/docs/aitrace.md @@ -0,0 +1,279 @@ +--- +permalink: /aitrace +title: AI Trace Plugin +--- + +# AI Trace Plugin + +AI Trace Plugin generates AI-friendly trace files for debugging with AI agents like Claude Code. + +When a test fails, you need to understand what went wrong: what the page looked like, what elements were present, what errors occurred, and what led to the failure. This plugin automatically captures all that information and organizes it in a format optimized for AI analysis. + +## Quick Start + +Enable the plugin in your `codecept.conf.js`: + +```javascript +export const config = { + tests: './*_test.js', + output: './output', + helpers: { + Playwright: { + url: 'https://example.com', + // Optional: Enable HAR/trace for HTTP capture + recordHar: { + mode: 'minimal', + content: 'embed', + }, + trace: 'on', + keepTraceForPassedTests: true, + }, + }, + plugins: { + aiTrace: { + enabled: true, + }, + }, +} +``` + +Run tests: + +```bash +npx codeceptjs run +``` + +After test execution, trace files are created in `output/trace_*/trace.md`. 
+ +## Artifacts Created + +For each test, a `trace_` directory is created with the following files: + +**trace.md** - AI-friendly markdown file with test execution history + +**0000_screenshot.png** - Screenshot for each step + +**0000_page.html** - Full HTML of the page at each step + +**0000_aria.txt** - ARIA accessibility snapshot (AI-readable structure without HTML noise) + +**0000_console.json** - Browser console logs + +When HAR or trace recording is enabled in your helper config, links to those files are also included. + +## Trace File Format + +The `trace.md` file contains a structured execution log with links to all artifacts: + +```markdown +file: /path/to/test.js +name: My test scenario +time: 3.45s +--- + +I am on page "https://example.com" + > navigated to https://example.com/ + > [HTML](./0000_page.html) + > [ARIA Snapshot](./0000_aria.txt) + > [Screenshot](./0000_screenshot.png) + > [Browser Logs](0000_console.json) (7 entries) + > HTTP: see [HAR file](../har/...) for network requests + +I see "Welcome" + > navigated to https://example.com/ + > [HTML](./0001_page.html) + > [ARIA Snapshot](./0001_aria.txt) + > [Screenshot](./0001_screenshot.png) + > [Browser Logs](0001_console.json) (0 entries) +``` + +## Configuration + +### Basic Configuration + +```javascript +plugins: { + aiTrace: { + enabled: true, + } +} +``` + +### Advanced Configuration + +```javascript +plugins: { + aiTrace: { + enabled: true, + + // Artifact capture options + captureHTML: true, // Save HTML for each step + captureARIA: true, // Save ARIA snapshots + captureBrowserLogs: true, // Save console logs + captureHTTP: true, // Links to HAR/trace files + captureDebugOutput: true, // CodeceptJS debug messages + + // Screenshot options + fullPageScreenshots: false, // Full page or viewport only + + // Output options + output: './output', // Where to save traces + deleteSuccessful: false, // Delete traces for passed tests + + // Step filtering + ignoreSteps: [ + /^grab/, // Ignore all 
grab* steps
      /^wait/, // Ignore all wait* steps
    ],
  }
}
```

### Configuration Options

| Option | Type | Default | Description |
|--------|------|---------|-------------|
| `enabled` | boolean | `false` | Enable/disable the plugin |
| `captureHTML` | boolean | `true` | Capture HTML for each step |
| `captureARIA` | boolean | `true` | Capture ARIA snapshots |
| `captureBrowserLogs` | boolean | `true` | Capture browser console logs |
| `captureHTTP` | boolean | `true` | Capture HTTP requests (requires HAR/trace) |
| `captureDebugOutput` | boolean | `true` | Capture CodeceptJS debug output |
| `fullPageScreenshots` | boolean | `false` | Use full page screenshots |
| `output` | string | `'./output'` | Directory for trace files |
| `deleteSuccessful` | boolean | `false` | Delete traces for passed tests |
| `ignoreSteps` | array | `[]` | Steps to ignore (regex patterns) |

## Best Practices

### Optimize for Failing Tests

Save disk space by only keeping traces for failed tests:

```javascript
plugins: {
  aiTrace: {
    enabled: true,
    deleteSuccessful: true, // Only keep failing tests
  }
}
```

### Ignore Noise

Don't capture logs for `grab` and `wait` steps:

```javascript
plugins: {
  aiTrace: {
    enabled: true,
    ignoreSteps: [/^grab/, /^wait/],
  }
}
```

### Selective Artifact Capture

Capture only what you need to reduce file sizes:

```javascript
plugins: {
  aiTrace: {
    enabled: true,
    captureHTML: false, // Skip HTML (saves ~500KB per step)
    captureARIA: true, // Keep ARIA (only ~16KB)
    captureBrowserLogs: false, // Skip console logs
  }
}
```

### Enable HTTP Capture

For network debugging, enable HAR/trace in your helper (note: `plugins` is a top-level config key, a sibling of `helpers`):

```javascript
helpers: {
  Playwright: {
    recordHar: {
      mode: 'minimal',
      content: 'embed',
    },
    trace: 'on',
    keepTraceForPassedTests: true,
  },
},
plugins: {
  aiTrace: {
    enabled: true,
    captureHTTP: true, // Links to HAR/trace files
  },
},
```

## Using with 
AI Agents + +The trace format is optimized for AI agents like Claude Code. When debugging a failing test: + +1. Open the generated `trace.md` file +2. Copy its contents along with relevant artifact files (ARIA snapshots, console logs, etc.) +3. Provide to the AI agent with context about the failure + +Example prompt: +``` +I have a failing test. Here's the AI trace: + +[paste trace.md contents] + +[paste relevant ARIA snapshots] + +[paste console logs] + +Analyze this and explain why the test failed and how to fix it. +``` + +The AI agent can analyze all artifacts together - screenshots, HTML structure, console errors, and network requests - to provide comprehensive debugging insights. + +## Troubleshooting + +### No trace files created + +**Possible causes:** +1. Plugin not enabled +2. No steps executed +3. Tests skipped + +**Solution:** +```bash +# Check if plugin is enabled +grep -A 5 "aiTrace" codecept.conf.js + +# Run with verbose output +npx codeceptjs run --verbose +``` + +### ARIA snapshots missing + +**Possible cause:** Helper doesn't support `grabAriaSnapshot` + +**Solution:** Use Playwright or update to latest CodeceptJS + +### HAR files missing + +**Possible cause:** HAR/trace not enabled in helper config + +**Solution:** +```javascript +helpers: { + Playwright: { + recordHar: { mode: 'minimal' }, + trace: 'on', + }, +} +``` + +## Related + +- [AI Features](/ai) - AI-powered testing features +- [Plugins](/plugins) - All available plugins +- [Configuration](/configuration) - General configuration +- [Playwright Helper](/playwright) - Playwright-specific configuration diff --git a/docs/plugins.md b/docs/plugins.md index 1a279cd37..a6a6f7ef5 100644 --- a/docs/plugins.md +++ b/docs/plugins.md @@ -63,6 +63,114 @@ exports.config = { Returns **void** +## aiTrace + +Generates AI-friendly trace files for debugging with AI agents like Claude Code. + +When a test fails, you need to understand what went wrong. 
This plugin automatically captures comprehensive information about test execution - screenshots, HTML, ARIA snapshots, console logs, and HTTP requests - and organizes it in a format optimized for AI analysis. + +The generated trace files are structured markdown documents that AI agents can easily parse to understand test context and provide debugging insights. + +#### Usage + +Enable this plugin in your config: + +```js +// in codecept.conf.js +exports.config = { + plugins: { + aiTrace: { + enabled: true + } + } +} +``` + +#### Configuration + +* `deleteSuccessful` (boolean) - delete traces for successfully executed tests. Default: false. +* `fullPageScreenshots` (boolean) - should full page screenshots be used. Default: false. +* `output` (string) - a directory where traces should be stored. Default: `output`. +* `captureHTML` (boolean) - capture HTML for each step. Default: true. +* `captureARIA` (boolean) - capture ARIA snapshot for each step. Default: true. +* `captureBrowserLogs` (boolean) - capture browser console logs. Default: true. +* `captureHTTP` (boolean) - capture HTTP requests (requires `trace` or `recordHar` enabled in helper config). Default: true. +* `captureDebugOutput` (boolean) - capture CodeceptJS debug output. Default: true. +* `ignoreSteps` (array) - steps to ignore in trace. Array of RegExps is expected. Default: []. + +#### Artifacts Created + +For each test, a `trace_` directory is created with: + +* **trace.md** - AI-friendly markdown file with test execution history +* **0000_screenshot.png** - screenshot for each step +* **0000_page.html** - full HTML of the page at each step +* **0000_aria.txt** - ARIA accessibility snapshot (AI-readable structure) +* **0000_console.json** - browser console logs + +When HAR or trace recording is enabled in your helper config, links to those files are also included. 
+ +#### Example Output + +```markdown +file: /path/to/test.js +name: My test scenario +time: 3.45s +--- + +I am on page "https://example.com" + > navigated to https://example.com/ + > [HTML](./0000_page.html) + > [ARIA Snapshot](./0000_aria.txt) + > [Screenshot](./0000_screenshot.png) + > [Browser Logs](0000_console.json) (7 entries) + > HTTP: see [HAR file](../har/...) for network requests + +I see "Welcome" + > navigated to https://example.com/ + > [HTML](./0001_page.html) + > [ARIA Snapshot](./0001_aria.txt) + > [Screenshot](./0001_screenshot.png) + > [Browser Logs](0001_console.json) (0 entries) +``` + +#### Best Practices + +**Save disk space** - Only keep traces for failed tests: + +```js +aiTrace: { + enabled: true, + deleteSuccessful: true +} +``` + +**Ignore noise** - Don't capture logs for `grab` and `wait` steps: + +```js +aiTrace: { + enabled: true, + ignoreSteps: [/^grab/, /^wait/] +} +``` + +**Reduce file sizes** - Capture only what you need: + +```js +aiTrace: { + enabled: true, + captureHTML: false, // Skip HTML (saves ~500KB per step) + captureARIA: true, // Keep ARIA (only ~16KB) + captureBrowserLogs: false // Skip console logs +} +``` + +### Parameters + +* `config` **[Object][1]** Plugin configuration (optional, default `{}`) + +Returns **void** + ## auth Logs user in for the first test and reuses session for next tests. 
diff --git a/examples/codecept.config.js b/examples/codecept.config.js index 399e6a248..c59e980be 100644 --- a/examples/codecept.config.js +++ b/examples/codecept.config.js @@ -60,6 +60,14 @@ export const config = { subtitles: { enabled: true, }, + aiTrace: { + enabled: true, + captureHTML: true, + captureARIA: true, + captureBrowserLogs: true, + captureHTTP: true, + ignoreSteps: [/^grab/, /^wait/], + }, }, tests: './*_test.js', diff --git a/lib/plugin/aiTrace.js b/lib/plugin/aiTrace.js new file mode 100644 index 000000000..7ca64312d --- /dev/null +++ b/lib/plugin/aiTrace.js @@ -0,0 +1,311 @@ +import crypto from 'crypto' +import fs from 'fs' +import { mkdirp } from 'mkdirp' +import path from 'path' +import { fileURLToPath } from 'url' + +import Container from '../container.js' +import recorder from '../recorder.js' +import event from '../event.js' +import output from '../output.js' +import { deleteDir } from '../utils.js' + +const supportedHelpers = Container.STANDARD_ACTING_HELPERS + +const defaultConfig = { + deleteSuccessful: false, + fullPageScreenshots: false, + output: global.output_dir, + captureHTML: true, + captureARIA: true, + captureBrowserLogs: true, + captureHTTP: true, + captureDebugOutput: true, + ignoreSteps: [], +} + +/** + * + * Generates AI-friendly trace files for debugging with AI agents. + * This plugin creates a markdown file with test execution logs and links to all artifacts + * (screenshots, HTML, ARIA snapshots, browser logs, HTTP requests) for each step. + * + * #### Configuration + * + * ```js + * "plugins": { + * "aiTrace": { + * "enabled": true + * } + * } + * ``` + * + * Possible config options: + * + * * `deleteSuccessful`: delete traces for successfully executed tests. Default: false. + * * `fullPageScreenshots`: should full page screenshots be used. Default: false. + * * `output`: a directory where traces should be stored. Default: `output`. + * * `captureHTML`: capture HTML for each step. Default: true. 
+ * * `captureARIA`: capture ARIA snapshot for each step. Default: true. + * * `captureBrowserLogs`: capture browser console logs. Default: true. + * * `captureHTTP`: capture HTTP requests (requires `trace` or `recordHar` enabled in helper config). Default: true. + * * `captureDebugOutput`: capture CodeceptJS debug output. Default: true. + * * `ignoreSteps`: steps to ignore in trace. Array of RegExps is expected. + * + * @param {*} config + */ +export default function (config) { + const helpers = Container.helpers() + let helper + + config = Object.assign(defaultConfig, config) + + for (const helperName of supportedHelpers) { + if (Object.keys(helpers).indexOf(helperName) > -1) { + helper = helpers[helperName] + } + } + + if (!helper) { + output.warn('aiTrace plugin: No supported helper found (Playwright, Puppeteer, WebDriver). Plugin disabled.') + return + } + + let dir + let stepNum + let steps = [] + let debugOutput = [] + let error + let savedSteps = new Set() + let currentTest = null + let testStartTime + let currentUrl = null + + const reportDir = config.output ? 
path.resolve(global.codecept_dir, config.output) : defaultConfig.output + + if (config.captureDebugOutput) { + const originalDebug = output.debug + output.debug = function (...args) { + debugOutput.push(args.join(' ')) + originalDebug.apply(output, args) + } + } + + event.dispatcher.on(event.suite.before, suite => { + stepNum = -1 + }) + + event.dispatcher.on(event.test.before, test => { + const sha256hash = crypto + .createHash('sha256') + .update(test.file + test.title) + .digest('hex') + dir = path.join(reportDir, `trace_${sha256hash}`) + mkdirp.sync(dir) + stepNum = 0 + error = null + steps = [] + debugOutput = [] + savedSteps.clear() + currentTest = test + testStartTime = Date.now() + currentUrl = null + }) + + event.dispatcher.on(event.step.after, step => { + if (!currentTest) return + recorder.add('save ai trace step', async () => persistStep(step), true) + }) + + event.dispatcher.on(event.step.failed, step => { + if (!currentTest) return + recorder.add('save ai trace failed step', async () => persistStep(step), true) + }) + + event.dispatcher.on(event.test.passed, test => { + if (config.deleteSuccessful) { + deleteDir(dir) + return + } + persist(test, 'passed') + }) + + event.dispatcher.on(event.test.failed, (test, _err, hookName) => { + if (hookName === 'BeforeSuite' || hookName === 'AfterSuite') { + return + } + persist(test, 'failed') + }) + + async function persistStep(step) { + if (stepNum === -1) return + if (isStepIgnored(step)) return + if (step.metaStep && step.metaStep.name === 'BeforeSuite') return + + const stepKey = step.toString() + if (savedSteps.has(stepKey)) { + const existingStep = steps.find(s => s.step === stepKey) + if (existingStep && step.status === 'failed') { + existingStep.status = 'failed' + } + return + } + savedSteps.add(stepKey) + + const stepPrefix = `${String(stepNum).padStart(4, '0')}` + stepNum++ + + const stepData = { + step: step.toString(), + status: step.status, + prefix: stepPrefix, + artifacts: {}, + meta: {}, + } + + 
try { + if (helper.grabCurrentUrl) { + try { + const url = await helper.grabCurrentUrl() + stepData.meta.url = url + currentUrl = url + } catch (err) { + // Ignore URL capture errors + } + } + + // Save screenshot + const screenshotFile = `${stepPrefix}_screenshot.png` + await helper.saveScreenshot(path.join(dir, screenshotFile), config.fullPageScreenshots) + stepData.artifacts.screenshot = screenshotFile + + // Save HTML + if (config.captureHTML && helper.grabSource) { + try { + const html = await helper.grabSource() + const htmlFile = `${stepPrefix}_page.html` + fs.writeFileSync(path.join(dir, htmlFile), html) + stepData.artifacts.html = htmlFile + } catch (err) { + output.debug(`aiTrace: Could not capture HTML: ${err.message}`) + } + } + + // Save ARIA snapshot + if (config.captureARIA && helper.grabAriaSnapshot) { + try { + const aria = await helper.grabAriaSnapshot() + const ariaFile = `${stepPrefix}_aria.txt` + fs.writeFileSync(path.join(dir, ariaFile), aria) + stepData.artifacts.aria = ariaFile + } catch (err) { + output.debug(`aiTrace: Could not capture ARIA snapshot: ${err.message}`) + } + } + + // Save browser logs + if (config.captureBrowserLogs && helper.grabBrowserLogs) { + try { + const logs = await helper.grabBrowserLogs() + const logsFile = `${stepPrefix}_console.json` + fs.writeFileSync(path.join(dir, logsFile), JSON.stringify(logs || [], null, 2)) + stepData.artifacts.console = logsFile + stepData.meta.consoleCount = logs ? 
logs.length : 0 + } catch (err) { + output.debug(`aiTrace: Could not capture browser logs: ${err.message}`) + } + } + } catch (err) { + output.plugin(`aiTrace: Can't save step artifacts: ${err}`) + } + + steps.push(stepData) + } + + function persist(test, status) { + if (!steps.length) { + output.debug('aiTrace: No steps to save in trace') + return + } + + const testDuration = ((Date.now() - testStartTime) / 1000).toFixed(2) + + let markdown = `file: ${test.file || 'unknown'}\n` + markdown += `name: ${test.title}\n` + markdown += `time: ${testDuration}s\n` + markdown += `---\n\n` + + if (status === 'failed') { + if (test.art && test.art.message) { + markdown += `Error: ${test.art.message}\n\n` + } + if (test.art && test.art.stack) { + markdown += `${test.art.stack}\n\n` + } + markdown += `---\n\n` + } + + if (config.captureDebugOutput && debugOutput.length > 0) { + markdown += `CodeceptJS Debug Output:\n\n` + debugOutput.forEach(line => { + markdown += `> ${line}\n` + }) + markdown += `\n---\n\n` + } + + steps.forEach((stepData, index) => { + markdown += `${stepData.step}\n` + + if (stepData.meta.url) { + markdown += ` > navigated to ${stepData.meta.url}\n` + } + + if (stepData.artifacts.html) { + markdown += ` > [HTML](./${stepData.artifacts.html})\n` + } + + if (stepData.artifacts.aria) { + markdown += ` > [ARIA Snapshot](./${stepData.artifacts.aria})\n` + } + + if (stepData.artifacts.screenshot) { + markdown += ` > [Screenshot](./${stepData.artifacts.screenshot})\n` + } + + if (stepData.artifacts.console) { + const count = stepData.meta.consoleCount || 0 + markdown += ` > [Browser Logs](${stepData.artifacts.console}) (${count} entries)\n` + } + + if (config.captureHTTP) { + if (test.artifacts && test.artifacts.har) { + const harPath = path.relative(reportDir, test.artifacts.har) + markdown += ` > HTTP: see [HAR file](../${harPath}) for network requests\n` + } else if (test.artifacts && test.artifacts.trace) { + const tracePath = path.relative(reportDir, 
test.artifacts.trace) + markdown += ` > HTTP: see [Playwright trace](../${tracePath}) for network requests\n` + } + } + + markdown += `\n` + }) + + const traceFile = path.join(dir, 'trace.md') + fs.writeFileSync(traceFile, markdown) + + output.print(`🤖 AI Trace: ${colors.white.bold(`file://${traceFile}`)}`) + + if (!test.artifacts) test.artifacts = {} + test.artifacts.aiTrace = traceFile + } + + function isStepIgnored(step) { + if (!config.ignoreSteps) return false + for (const pattern of config.ignoreSteps || []) { + if (step.name.match(pattern)) return true + } + return false + } +} + +import colors from 'chalk' diff --git a/test/unit/plugin/aiTrace_test.js b/test/unit/plugin/aiTrace_test.js new file mode 100644 index 000000000..a2f2cfc3f --- /dev/null +++ b/test/unit/plugin/aiTrace_test.js @@ -0,0 +1,277 @@ +import { expect } from 'chai' +import sinon from 'sinon' +import aiTrace from '../../../lib/plugin/aiTrace.js' +import container from '../../../lib/container.js' +import event from '../../../lib/event.js' +import recorder from '../../../lib/recorder.js' +import output from '../../../lib/output.js' +import { createTest } from '../../../lib/mocha/test.js' +import path from 'path' + +const testsDir = path.join(process.cwd(), 'test/output') + +describe('aiTrace plugin', () => { + let helperStub + + beforeEach(() => { + recorder.reset() + + helperStub = { + saveScreenshot: sinon.stub().resolves(), + grabSource: sinon.stub().resolves(''), + grabAriaSnapshot: sinon.stub().resolves('- region\n- text: Test'), + grabBrowserLogs: sinon.stub().resolves([]), + } + + container.clear({ + Playwright: helperStub, + }) + + sinon.stub(output, 'print') + }) + + afterEach(() => { + sinon.restore() + event.dispatcher.removeAllListeners(event.test.before) + event.dispatcher.removeAllListeners(event.test.after) + event.dispatcher.removeAllListeners(event.test.passed) + event.dispatcher.removeAllListeners(event.test.failed) + event.dispatcher.removeAllListeners(event.step.after) + 
}) + + it('should save artifacts for each step', async () => { + aiTrace({ + enabled: true, + output: testsDir, + }) + + const test = createTest('test one') + event.dispatcher.emit(event.test.before, test) + await recorder.promise() + + const step = { + name: 'amOnPage', + toString: () => 'I am on page', + meta: { url: 'https://example.com' }, + status: 'success', + } + + event.dispatcher.emit(event.step.after, step) + await recorder.promise() + + expect(helperStub.saveScreenshot.calledOnce).to.be.true + expect(helperStub.grabSource.calledOnce).to.be.true + expect(helperStub.grabAriaSnapshot.calledOnce).to.be.true + expect(helperStub.grabBrowserLogs.calledOnce).to.be.true + }) + + it('should generate trace on test passed', async () => { + aiTrace({ + enabled: true, + output: testsDir, + }) + + const test = createTest('test one') + test.art = {} + + event.dispatcher.emit(event.test.before, test) + await recorder.promise() + + const step = { + name: 'see', + toString: () => 'I see test', + status: 'success', + } + + event.dispatcher.emit(event.step.after, step) + await recorder.promise() + + event.dispatcher.emit(event.test.passed, test) + await recorder.promise() + + expect(test.artifacts.aiTrace).to.be.ok + expect(test.artifacts.aiTrace).to.include('trace.md') + }) + + it('should generate trace on test failed', async () => { + aiTrace({ + enabled: true, + output: testsDir, + }) + + const test = createTest('test one') + test.art = { + message: 'Element not found', + stack: 'Error', + } + + event.dispatcher.emit(event.test.before, test) + await recorder.promise() + + const step = { + name: 'see', + toString: () => 'I see test', + status: 'failed', + } + + event.dispatcher.emit(event.step.after, step) + await recorder.promise() + + event.dispatcher.emit(event.test.failed, test) + await recorder.promise() + + expect(test.artifacts.aiTrace).to.be.ok + }) + + it('should ignore steps matching ignoreSteps pattern', async () => { + aiTrace({ + enabled: true, + output: 
testsDir, + ignoreSteps: [/^grab/], + }) + + const test = createTest('test one') + event.dispatcher.emit(event.test.before, test) + await recorder.promise() + + const step = { + name: 'grabText', + toString: () => 'I grab text', + status: 'success', + } + event.dispatcher.emit(event.step.after, step) + await recorder.promise() + + expect(helperStub.saveScreenshot.called).to.be.false + }) + + it('should not save duplicate steps', async () => { + aiTrace({ + enabled: true, + output: testsDir, + }) + + const test = createTest('test one') + event.dispatcher.emit(event.test.before, test) + await recorder.promise() + + const step = { + name: 'see', + toString: () => 'I see test', + status: 'success', + } + + event.dispatcher.emit(event.step.after, step) + await recorder.promise() + + event.dispatcher.emit(event.step.after, step) + await recorder.promise() + + expect(helperStub.saveScreenshot.calledOnce).to.be.true + }) + + it('should not create trace for BeforeSuite failures', async () => { + aiTrace({ + enabled: true, + output: testsDir, + }) + + const test = createTest('test one') + test.artifacts = {} + event.dispatcher.emit(event.test.before, test) + await recorder.promise() + + event.dispatcher.emit(event.test.failed, test, null, 'BeforeSuite') + await recorder.promise() + + expect(test.artifacts.aiTrace).to.be.undefined + }) + + it('should not create trace for AfterSuite failures', async () => { + aiTrace({ + enabled: true, + output: testsDir, + }) + + const test = createTest('test one') + test.artifacts = {} + event.dispatcher.emit(event.test.before, test) + await recorder.promise() + + event.dispatcher.emit(event.test.failed, test, null, 'AfterSuite') + await recorder.promise() + + expect(test.artifacts.aiTrace).to.be.undefined + }) + + describe('Artifact capture options', () => { + it('should not capture HTML when captureHTML is false', async () => { + aiTrace({ + enabled: true, + output: testsDir, + captureHTML: false, + }) + + const test = createTest('test 
one') + event.dispatcher.emit(event.test.before, test) + await recorder.promise() + + const step = { + name: 'amOnPage', + toString: () => 'I am on page', + status: 'success', + } + + event.dispatcher.emit(event.step.after, step) + await recorder.promise() + + expect(helperStub.grabSource.called).to.be.false + }) + + it('should not capture ARIA when captureARIA is false', async () => { + aiTrace({ + enabled: true, + output: testsDir, + captureARIA: false, + }) + + const test = createTest('test one') + event.dispatcher.emit(event.test.before, test) + await recorder.promise() + + const step = { + name: 'amOnPage', + toString: () => 'I am on page', + status: 'success', + } + + event.dispatcher.emit(event.step.after, step) + await recorder.promise() + + expect(helperStub.grabAriaSnapshot.called).to.be.false + }) + + it('should not capture browser logs when captureBrowserLogs is false', async () => { + aiTrace({ + enabled: true, + output: testsDir, + captureBrowserLogs: false, + }) + + const test = createTest('test one') + event.dispatcher.emit(event.test.before, test) + await recorder.promise() + + const step = { + name: 'amOnPage', + toString: () => 'I am on page', + status: 'success', + } + + event.dispatcher.emit(event.step.after, step) + await recorder.promise() + + expect(helperStub.grabBrowserLogs.called).to.be.false + }) + }) +}) From bc430210904b1c54788e0da82c0d2c1a0e4a7063 Mon Sep 17 00:00:00 2001 From: Denys Kuchma Date: Thu, 5 Feb 2026 14:22:14 +0200 Subject: [PATCH 2/2] add path and little fix --- docs/aitrace.md | 21 +-------------------- lib/plugin/aiTrace.js | 4 ++-- 2 files changed, 3 insertions(+), 22 deletions(-) diff --git a/docs/aitrace.md b/docs/aitrace.md index 540f51dc0..c3832509c 100644 --- a/docs/aitrace.md +++ b/docs/aitrace.md @@ -212,26 +212,7 @@ helpers: { ## Using with AI Agents -The trace format is optimized for AI agents like Claude Code. When debugging a failing test: - -1. Open the generated `trace.md` file -2. 
Copy its contents along with relevant artifact files (ARIA snapshots, console logs, etc.) -3. Provide to the AI agent with context about the failure - -Example prompt: -``` -I have a failing test. Here's the AI trace: - -[paste trace.md contents] - -[paste relevant ARIA snapshots] - -[paste console logs] - -Analyze this and explain why the test failed and how to fix it. -``` - -The AI agent can analyze all artifacts together - screenshots, HTML structure, console errors, and network requests - to provide comprehensive debugging insights. +The trace format is optimized for AI agents like Claude Code. When debugging a failing test, just point the AI agent to the `trace.md` file - it will read the file and all linked artifacts automatically to analyze the failure. ## Troubleshooting diff --git a/lib/plugin/aiTrace.js b/lib/plugin/aiTrace.js index 7ca64312d..c7c14233f 100644 --- a/lib/plugin/aiTrace.js +++ b/lib/plugin/aiTrace.js @@ -9,6 +9,7 @@ import recorder from '../recorder.js' import event from '../event.js' import output from '../output.js' import { deleteDir } from '../utils.js' +import colors from 'chalk' const supportedHelpers = Container.STANDARD_ACTING_HELPERS @@ -274,7 +275,7 @@ export default function (config) { if (stepData.artifacts.console) { const count = stepData.meta.consoleCount || 0 - markdown += ` > [Browser Logs](${stepData.artifacts.console}) (${count} entries)\n` + markdown += ` > [Browser Logs](./${stepData.artifacts.console}) (${count} entries)\n` } if (config.captureHTTP) { @@ -308,4 +309,3 @@ export default function (config) { } } -import colors from 'chalk'