test: add error only reporter for node:test
This commit introduces a node:test reporter to the common utils. This
reporter can be used to silence output other than errors from node:test.
This is useful because in Node's own test suite, the output of node:test
is included in the output of the Python test runner.

Refs: https://github.com/nodejs/node/issues/49120
PR-URL: https://github.com/nodejs/node/pull/56438
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Pietro Marchini <pietro.marchini94@gmail.com>
Parent: ad68d088a3
Commit: 0576deb4e5

12 changed files with 93 additions and 8 deletions
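The workflows below load the new reporter by path via --test-reporter. Outside CI it can also be driven through node:test's programmatic run() API; a minimal sketch (not part of this commit, assumes execution from the repository root):

// Sketch only: stream node:test events through the error-only reporter.
// Paths are assumed relative to the Node.js repository root.
const { run } = require('node:test');
const errorReporter = require('./test/common/test-error-reporter.js');

run({ files: ['test/fixtures/test-runner/error-reporter-fail-fast/a.mjs'] })
  .compose(errorReporter) // only 'test:fail' events produce output
  .pipe(process.stdout);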
.github/workflows/build-tarball.yml (2 changed lines)

@@ -105,4 +105,4 @@ jobs:
       - name: Test
         run: |
           cd $TAR_DIR
-          make run-ci -j4 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9"
+          make run-ci -j4 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=./test/common/test-error-reporter.js' --measure-flakiness 9"
.github/workflows/coverage-linux-without-intl.yml (2 changed lines)

@@ -68,7 +68,7 @@ jobs:
       # TODO(bcoe): fix the couple tests that fail with the inspector enabled.
       # The cause is most likely coverage's use of the inspector.
       - name: Test
-        run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j4 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" || exit 0
+        run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j4 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=./test/common/test-error-reporter.js' --measure-flakiness 9" || exit 0
       - name: Report JS
         run: npx c8 report --check-coverage
         env:
.github/workflows/coverage-linux.yml (2 changed lines)

@@ -68,7 +68,7 @@ jobs:
       # TODO(bcoe): fix the couple tests that fail with the inspector enabled.
       # The cause is most likely coverage's use of the inspector.
       - name: Test
-        run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j4 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=spec' --measure-flakiness 9" || exit 0
+        run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j4 V=1 TEST_CI_ARGS="-p dots --node-args='--test-reporter=./test/common/test-error-reporter.js' --measure-flakiness 9" || exit 0
       - name: Report JS
         run: npx c8 report --check-coverage
         env:
.github/workflows/doc.yml (2 changed lines)

@@ -40,4 +40,4 @@ jobs:
           name: docs
           path: out/doc
       - name: Test
-        run: NODE=$(command -v node) make test-doc-ci TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9"
+        run: NODE=$(command -v node) make test-doc-ci TEST_CI_ARGS="-p actions --node-args='--test-reporter=./test/common/test-error-reporter.js' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9"
.github/workflows/test-asan.yml (2 changed lines)

@@ -63,4 +63,4 @@ jobs:
       - name: Build
         run: make build-ci -j4 V=1
       - name: Test
-        run: make run-ci -j4 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' -t 300 --measure-flakiness 9"
+        run: make run-ci -j4 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=./test/common/test-error-reporter.js' --node-args='--test-reporter-destination=stdout' -t 300 --measure-flakiness 9"
.github/workflows/test-linux.yml (2 changed lines)

@@ -54,7 +54,7 @@ jobs:
       - name: Build
         run: make -C node build-ci -j4 V=1 CONFIG_FLAGS="--error-on-warn"
       - name: Test
-        run: make -C node run-ci -j4 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9"
+        run: make -C node run-ci -j4 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=./test/common/test-error-reporter.js' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9"
       - name: Re-run test in a folder whose name contains unusual chars
         run: |
           mv node "$DIR"
.github/workflows/test-macos.yml (2 changed lines)

@@ -89,7 +89,7 @@ jobs:
       - name: Free Space After Build
         run: df -h
       - name: Test
-        run: make -C node run-ci -j$(getconf _NPROCESSORS_ONLN) V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9"
+        run: make -C node run-ci -j$(getconf _NPROCESSORS_ONLN) V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=./test/common/test-error-reporter.js' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9"
       - name: Re-run test in a folder whose name contains unusual chars
         run: |
           mv node "$DIR"
.github/workflows/test-ubsan.yml (2 changed lines)

@@ -64,4 +64,4 @@ jobs:
       - name: Build
         run: make build-ci -j2 V=1
       - name: Test
-        run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' -t 300 --measure-flakiness 9"
+        run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=./test/common/test-error-reporter.js' --node-args='--test-reporter-destination=stdout' -t 300 --measure-flakiness 9"
test/common/test-error-reporter.js (new file, 41 lines)

@@ -0,0 +1,41 @@
+'use strict';
+const { relative } = require('node:path');
+const { inspect } = require('node:util');
+const cwd = process.cwd();
+
+module.exports = async function* errorReporter(source) {
+  for await (const event of source) {
+    if (event.type === 'test:fail') {
+      const { name, details, line, column, file } = event.data;
+      let { error } = details;
+
+      if (error?.failureType === 'subtestsFailed') {
+        // In the interest of keeping things concise, skip failures that are
+        // only due to nested failures.
+        continue;
+      }
+
+      if (error?.code === 'ERR_TEST_FAILURE') {
+        error = error.cause;
+      }
+
+      const output = [
+        `Test failure: '${name}'`,
+      ];
+
+      if (file) {
+        output.push(`Location: ${relative(cwd, file)}:${line}:${column}`);
+      }
+
+      output.push(inspect(error));
+      output.push('\n');
+      yield output.join('\n');
+
+      if (process.env.FAIL_FAST) {
+        yield `\nBailing on failed test: ${event.data.name}\n`;
+        process.exitCode = 1;
+        process.emit('SIGINT');
+      }
+    }
+  }
+};
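The reporter is an async generator that consumes the runner's event stream and only reacts to test:fail events. A rough, self-contained way to see its output shape, using a hand-built event whose fields mirror the ones destructured above (values are made up; run from test/common/ so the require path resolves):

// Feed the reporter a synthetic event stream: a passing event is ignored,
// a failing event produces one formatted block.
const errorReporter = require('./test-error-reporter.js');

async function* fakeSource() {
  yield { type: 'test:pass', data: { name: 'ok' } }; // silenced
  yield {
    type: 'test:fail',
    data: {
      name: 'fails on purpose',
      file: `${process.cwd()}/example.test.js`,
      line: 10,
      column: 3,
      details: { error: new Error('boom') },
    },
  };
}

(async () => {
  for await (const chunk of errorReporter(fakeSource())) {
    // Prints "Test failure: 'fails on purpose'", a Location line, and the
    // inspected error.
    process.stdout.write(chunk);
  }
})();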
test/fixtures/test-runner/error-reporter-fail-fast/a.mjs (new file, 6 lines)

@@ -0,0 +1,6 @@
+import assert from 'node:assert';
+import { test } from 'node:test';
+
+test('fail', () => {
+  assert.fail('a.mjs fail');
+});
test/fixtures/test-runner/error-reporter-fail-fast/b.mjs (new file, 6 lines)

@@ -0,0 +1,6 @@
+import assert from 'node:assert';
+import { test } from 'node:test';
+
+test('fail', () => {
+  assert.fail('b.mjs fail');
+});
test/parallel/test-runner-error-reporter.js (new file, 32 lines)

@@ -0,0 +1,32 @@
+'use strict';
+
+require('../common');
+const fixtures = require('../common/fixtures');
+const assert = require('node:assert');
+const { spawnSync } = require('node:child_process');
+const { test } = require('node:test');
+const cwd = fixtures.path('test-runner', 'error-reporter-fail-fast');
+
+test('all tests failures reported without FAIL_FAST flag', async () => {
+  const args = [
+    '--test-reporter=./test/common/test-error-reporter.js',
+    '--test-concurrency=1',
+    '--test',
+    `${cwd}/*.mjs`,
+  ];
+  const cp = spawnSync(process.execPath, args);
+  const failureCount = (cp.stdout.toString().match(/Test failure:/g) || []).length;
+  assert.strictEqual(failureCount, 2);
+});
+
+test('FAIL_FAST stops test execution after first failure', async () => {
+  const args = [
+    '--test-reporter=./test/common/test-error-reporter.js',
+    '--test-concurrency=1',
+    '--test',
+    `${cwd}/*.mjs`,
+  ];
+  const cp = spawnSync(process.execPath, args, { env: { ...process.env, FAIL_FAST: 'true' } });
+  const failureCount = (cp.stdout.toString().match(/Test failure:/g) || []).length;
+  assert.strictEqual(failureCount, 1);
+});
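As the second test asserts, setting FAIL_FAST makes the reporter set a failing exit code and emit SIGINT after the first failure, so the runner stops before later files execute. An illustrative direct invocation of the fixtures (assumes the repository root as working directory):

// Illustration only: with FAIL_FAST set, expect a single "Test failure:"
// block followed by the "Bailing on failed test" notice.
const { spawnSync } = require('node:child_process');

const result = spawnSync(process.execPath, [
  '--test-reporter=./test/common/test-error-reporter.js',
  '--test-concurrency=1',
  '--test',
  'test/fixtures/test-runner/error-reporter-fail-fast/a.mjs',
  'test/fixtures/test-runner/error-reporter-fail-fast/b.mjs',
], { env: { ...process.env, FAIL_FAST: 'true' } });

console.log(result.stdout.toString());
console.log('exit status:', result.status); // non-zero: at least one test failed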