Skip to content

Commit

Permalink
chore: fix Mocha test runner suggestion when hooks fail (#9750)
Browse files Browse the repository at this point in the history
  • Loading branch information
Lightning00Blade committed Feb 28, 2023
1 parent 4a365a4 commit 232873a
Show file tree
Hide file tree
Showing 7 changed files with 73 additions and 59 deletions.
2 changes: 1 addition & 1 deletion test/README.md
Expand Up @@ -25,7 +25,7 @@ The best place to look is an existing test to see how they use the helpers.

## Skipping tests in specific conditions

To skip tests edit the [TestExpecations](https://github.com/puppeteer/puppeteer/blob/main/test/TestExpectations.json) file. See [test runner documentation](https://github.com/puppeteer/puppeteer/tree/main/tools/mochaRunner) for more details.
To skip tests edit the [TestExpectations](https://github.com/puppeteer/puppeteer/blob/main/test/TestExpectations.json) file. See [test runner documentation](https://github.com/puppeteer/puppeteer/tree/main/tools/mochaRunner) for more details.

## Running tests

Expand Down
2 changes: 1 addition & 1 deletion test/TestSuites.json
Expand Up @@ -43,7 +43,7 @@
"expectedLineCoverage": 56
}
],
"parameterDefinitons": {
"parameterDefinitions": {
"chrome": {
"PUPPETEER_PRODUCT": "chrome"
},
Expand Down
1 change: 1 addition & 0 deletions test/package.json
Expand Up @@ -15,6 +15,7 @@
"../packages/testserver:build"
],
"files": [
"../tools/mochaRunner/**",
"src/**"
],
"output": [
Expand Down
2 changes: 1 addition & 1 deletion tools/mochaRunner/README.md
Expand Up @@ -24,7 +24,7 @@ npm run build && npm run test -- --test-suite chrome-headless
## TestSuites.json

Define test suites via the `testSuites` attribute. `parameters` can be used in the `TestExpectations.json` to disable tests
based on parameters. The meaning for parameters is defined in `parameterDefinitons` which tell what env object corresponds
based on parameters. The meaning for parameters is defined in `parameterDefinitions` which tell what env object corresponds
to the given parameter.

## TestExpectations.json
Expand Down
58 changes: 17 additions & 41 deletions tools/mochaRunner/src/main.ts
Expand Up @@ -31,10 +31,10 @@ import {
import {
extendProcessEnv,
filterByPlatform,
prettyPrintJSON,
readJSON,
filterByParameters,
getExpectationUpdates,
printSuggestions,
} from './utils.js';

function getApplicableTestSuites(
Expand Down Expand Up @@ -109,7 +109,7 @@ async function main() {

const env = extendProcessEnv([
...parameters.map(param => {
return parsedSuitesFile.parameterDefinitons[param];
return parsedSuitesFile.parameterDefinitions[param];
}),
{
PUPPETEER_SKIPPED_TEST_CONFIG: JSON.stringify(
Expand Down Expand Up @@ -211,45 +211,21 @@ async function main() {
console.error(err);
} finally {
if (!noSuggestions) {
const toAdd = recommendations.filter(item => {
return item.action === 'add';
});
if (toAdd.length) {
console.log(
'Add the following to TestExpectations.json to ignore the error:'
);
prettyPrintJSON(
toAdd.map(item => {
return item.expectation;
})
);
}
const toRemove = recommendations.filter(item => {
return item.action === 'remove';
});
if (toRemove.length) {
console.log(
'Remove the following from the TestExpectations.json to ignore the error:'
);
prettyPrintJSON(
toRemove.map(item => {
return item.expectation;
})
);
}
const toUpdate = recommendations.filter(item => {
return item.action === 'update';
});
if (toUpdate.length) {
console.log(
'Update the following expectations in the TestExpecations.json to ignore the error:'
);
prettyPrintJSON(
toUpdate.map(item => {
return item.expectation;
})
);
}
printSuggestions(
recommendations,
'add',
'Add the following to TestExpectations.json to ignore the error:'
);
printSuggestions(
recommendations,
'remove',
'Remove the following from the TestExpectations.json to ignore the error:'
);
printSuggestions(
recommendations,
'update',
'Update the following expectations in the TestExpectations.json to ignore the error:'
);
}
process.exit(fail ? 1 : 0);
}
Expand Down
2 changes: 1 addition & 1 deletion tools/mochaRunner/src/types.ts
Expand Up @@ -31,7 +31,7 @@ export type TestSuite = z.infer<typeof zTestSuite>;

export const zTestSuiteFile = z.object({
testSuites: z.array(zTestSuite),
parameterDefinitons: z.record(z.any()),
parameterDefinitions: z.record(z.any()),
});

export type TestSuiteFile = z.infer<typeof zTestSuiteFile>;
Expand Down
65 changes: 51 additions & 14 deletions tools/mochaRunner/src/utils.ts
Expand Up @@ -57,6 +57,24 @@ export function prettyPrintJSON(json: unknown): void {
console.log(JSON.stringify(json, null, 2));
}

/**
 * Logs `message` followed by the pretty-printed expectation objects of
 * every recommendation whose `action` matches the requested one.
 * Prints nothing at all when no recommendation matches.
 */
export function printSuggestions(
  recommendations: RecommendedExpectation[],
  action: RecommendedExpectation['action'],
  message: string
): void {
  const matching = recommendations.filter(entry => {
    return entry.action === action;
  });
  if (!matching.length) {
    return;
  }
  console.log(message);
  prettyPrintJSON(
    matching.map(entry => {
      return entry.expectation;
    })
  );
}

export function filterByParameters(
expectations: TestExpectation[],
parameters: string[]
Expand Down Expand Up @@ -88,9 +106,8 @@ export function findEffectiveExpectationForTest(
.pop();
}

type RecommendedExpecation = {
type RecommendedExpectation = {
expectation: TestExpectation;
test: MochaTestResult;
action: 'remove' | 'add' | 'update';
};

Expand All @@ -104,28 +121,42 @@ export function isWildCardPattern(testIdPattern: string): boolean {

export function getExpectationUpdates(
results: MochaResults,
expecations: TestExpectation[],
expectations: TestExpectation[],
context: {
platforms: NodeJS.Platform[];
parameters: string[];
}
): RecommendedExpecation[] {
const output: RecommendedExpecation[] = [];
): RecommendedExpectation[] {
const output: Map<string, RecommendedExpectation> = new Map();

for (const pass of results.passes) {
const expectationEntry = findEffectiveExpectationForTest(expecations, pass);
// If an error occurs during a hook
// the error does not have a file associated with it
if (!pass.file) {
continue;
}

const expectationEntry = findEffectiveExpectationForTest(
expectations,
pass
);
if (expectationEntry && !expectationEntry.expectations.includes('PASS')) {
output.push({
addEntry({
expectation: expectationEntry,
test: pass,
action: 'remove',
});
}
}

for (const failure of results.failures) {
// If an error occurs during a hook
// the error does not have a file associated with it
if (!failure.file) {
continue;
}

const expectationEntry = findEffectiveExpectationForTest(
expecations,
expectations,
failure
);
// If the effective explanation is a wildcard, we recommend adding a new
Expand All @@ -140,32 +171,38 @@ export function getExpectationUpdates(
getTestResultForFailure(failure)
)
) {
output.push({
addEntry({
expectation: {
...expectationEntry,
expectations: [
...expectationEntry.expectations,
getTestResultForFailure(failure),
],
},
test: failure,
action: 'update',
});
}
} else {
output.push({
addEntry({
expectation: {
testIdPattern: getTestId(failure.file, failure.fullTitle),
platforms: context.platforms,
parameters: context.parameters,
expectations: [getTestResultForFailure(failure)],
},
test: failure,
action: 'add',
});
}
}
return output;

function addEntry(value: RecommendedExpectation) {
const key = JSON.stringify(value);
if (!output.has(key)) {
output.set(key, value);
}
}

return [...output.values()];
}

export function getTestResultForFailure(
Expand Down

0 comments on commit 232873a

Please sign in to comment.