
Commit 5e8ae53

Merge pull request #51248 from margelo/e2e/do-not-fail-exceution-if-one-test-failed
[NoQA] e2e: allow tests failure
2 parents 93e4449 + d254f8f commit 5e8ae53

6 files changed (+162, -82 lines changed)

.github/workflows/e2ePerformanceTests.yml

Lines changed: 30 additions & 0 deletions
```diff
@@ -229,6 +229,36 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ github.token }}
 
+      - name: Check if test has skipped tests
+        id: checkIfSkippedTestsDetected
+        run: |
+          if grep -q '⚠️' "./Host_Machine_Files/\$WORKING_DIRECTORY/output.md"; then
+            # Create an output to the GH action that the tests were skipped:
+            echo "skippedTestsDetected=true" >> "$GITHUB_OUTPUT"
+          else
+            echo "skippedTestsDetected=false" >> "$GITHUB_OUTPUT"
+            echo '✅ no skipped tests detected'
+          fi
+        env:
+          GITHUB_TOKEN: ${{ github.token }}
+
+      - name: 'Announce skipped tests in Slack'
+        if: ${{ steps.checkIfSkippedTestsDetected.outputs.skippedTestsDetected == 'true' }}
+        uses: 8398a7/action-slack@v3
+        with:
+          status: custom
+          custom_payload: |
+            {
+              channel: '#e2e-announce',
+              attachments: [{
+                color: 'danger',
+                text: `⚠️ ${process.env.AS_REPO} Some of E2E tests were skipped on <https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ github.workflow }}> workflow ⚠️`,
+              }]
+            }
+        env:
+          GITHUB_TOKEN: ${{ github.token }}
+          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }}
+
       - name: 'Announce regression in Slack'
         if: ${{ steps.checkIfRegressionDetected.outputs.performanceRegressionDetected == 'true' }}
         uses: 8398a7/action-slack@v3
```

tests/e2e/compare/compare.ts

Lines changed: 10 additions & 3 deletions
```diff
@@ -91,16 +91,23 @@ function compareResults(baselineEntries: Metric | string, compareEntries: Metric
     };
 }
 
-export default (main: Metric | string, delta: Metric | string, outputFile: string, outputFormat = 'all', metricForTest = {}) => {
+type Options = {
+    outputFile: string;
+    outputFormat: 'console' | 'markdown' | 'all';
+    metricForTest: Record<string, Unit>;
+    skippedTests: string[];
+};
+
+export default (main: Metric | string, delta: Metric | string, {outputFile, outputFormat = 'all', metricForTest = {}, skippedTests}: Options) => {
     // IMPORTANT NOTE: make sure you are passing the main/baseline results first, then the delta/compare results:
     const outputData = compareResults(main, delta, metricForTest);
 
     if (outputFormat === 'console' || outputFormat === 'all') {
-        printToConsole(outputData);
+        printToConsole(outputData, skippedTests);
    }
 
     if (outputFormat === 'markdown' || outputFormat === 'all') {
-        return writeToMarkdown(outputFile, outputData);
+        return writeToMarkdown(outputFile, outputData, skippedTests);
     }
 };
 export {compareResults};
```
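
The default export of compare.ts now takes a single Options object instead of trailing positional arguments. A minimal usage sketch, assuming a relative import path; the output path and the skipped test name are illustrative:

```ts
import compare from './compare';

// Forwards results to the new options-based API (sketch only; values are illustrative).
async function writeComparisonReport(main: Parameters<typeof compare>[0], delta: Parameters<typeof compare>[1]) {
    await compare(main, delta, {
        outputFile: './output.md', // hypothetical output path
        outputFormat: 'all', // 'console' | 'markdown' | 'all'
        metricForTest: {}, // per-metric units, left empty here
        skippedTests: ['Report typing'], // hypothetical name of a test whose results were dropped
    });
}
```

Making `skippedTests` a required field means every caller has to state explicitly which tests, if any, were dropped from the results.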

tests/e2e/compare/output/console.ts

Lines changed: 5 additions & 1 deletion
```diff
@@ -26,7 +26,7 @@ const printRegularLine = (entry: Entry) => {
 /**
  * Prints the result simply to console.
  */
-export default (data: Data) => {
+export default (data: Data, skippedTests: string[]) => {
     // No need to log errors or warnings as these were be logged on the fly
     console.debug('');
     console.debug('❇️ Performance comparison results:');
@@ -38,6 +38,10 @@ export default (data: Data) => {
     data.meaningless.forEach(printRegularLine);
 
     console.debug('');
+
+    if (skippedTests.length > 0) {
+        console.debug(`⚠️ Some tests did not pass successfully, so some results are omitted from final report: ${skippedTests.join(', ')}`);
+    }
 };
 
 export type {Data, Entry};
```

tests/e2e/compare/output/markdown.ts

Lines changed: 8 additions & 3 deletions
```diff
@@ -67,7 +67,7 @@ const buildSummaryTable = (entries: Entry[], collapse = false) => {
     return collapse ? collapsibleSection('Show entries', content) : content;
 };
 
-const buildMarkdown = (data: Data) => {
+const buildMarkdown = (data: Data, skippedTests: string[]) => {
     let result = '## Performance Comparison Report 📊';
 
     if (data.errors?.length) {
@@ -92,6 +92,10 @@ const buildMarkdown = (data: Data) => {
     result += `\n${buildDetailsTable(data.meaningless)}`;
     result += '\n';
 
+    if (skippedTests.length > 0) {
+        result += `⚠️ Some tests did not pass successfully, so some results are omitted from final report: ${skippedTests.join(', ')}`;
+    }
+
     return result;
 };
 
@@ -109,8 +113,9 @@ const writeToFile = (filePath: string, content: string) =>
         throw error;
     });
 
-const writeToMarkdown = (filePath: string, data: Data) => {
-    const markdown = buildMarkdown(data);
+const writeToMarkdown = (filePath: string, data: Data, skippedTests: string[]) => {
+    const markdown = buildMarkdown(data, skippedTests);
+    Logger.info('Markdown was built successfully, writing to file...', markdown);
     return writeToFile(filePath, markdown).catch((error) => {
         console.error(error);
         throw error;
```
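
When `skippedTests` is non-empty, `buildMarkdown` appends a warning line to the report. A small sketch of the exact string that gets appended, using hypothetical test names; the ⚠️ marker in this line is also what the new workflow step greps for in output.md before announcing skipped tests in Slack:

```ts
// Sketch: the warning appended to the markdown report (test names are hypothetical).
const skippedTests = ['Open chat', 'Report typing'];
const note = `⚠️ Some tests did not pass successfully, so some results are omitted from final report: ${skippedTests.join(', ')}`;
console.log(note);
// ⚠️ Some tests did not pass successfully, so some results are omitted from final report: Open chat, Report typing
```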

tests/e2e/testRunner.ts

Lines changed: 108 additions & 74 deletions
```diff
@@ -123,6 +123,20 @@ const runTests = async (): Promise<void> => {
         }
     };
 
+    const skippedTests: string[] = [];
+    const clearTestResults = (test: TestConfig) => {
+        skippedTests.push(test.name);
+
+        Object.keys(results).forEach((branch: string) => {
+            Object.keys(results[branch]).forEach((metric: string) => {
+                if (!metric.startsWith(test.name)) {
+                    return;
+                }
+                delete results[branch][metric];
+            });
+        });
+    };
+
     // Collect results while tests are being executed
     server.addTestResultListener((testResult) => {
         const {isCritical = true} = testResult;
@@ -151,7 +165,7 @@
         await launchApp('android', appPackage, config.ACTIVITY_PATH, launchArgs);
 
         const {promise, resetTimeout} = withFailTimeout(
-            new Promise<void>((resolve) => {
+            new Promise<void>((resolve, reject) => {
                 const removeListener = server.addTestDoneListener(() => {
                     Logger.success(iterationText);
 
@@ -201,9 +215,14 @@
                     removeListener();
                     // something went wrong, let's wait a little bit and try again
                     await sleep(5000);
-                    // simply restart the test
-                    await runTestIteration(appPackage, iterationText, branch, launchArgs);
-                    resolve();
+                    try {
+                        // simply restart the test
+                        await runTestIteration(appPackage, iterationText, branch, launchArgs);
+                        resolve();
+                    } catch (e) {
+                        // okay, give up and throw the exception further
+                        reject(e);
+                    }
                 },
             });
         }),
@@ -244,88 +263,103 @@
         server.setTestConfig(test as TestConfig);
         server.setReadyToAcceptTestResults(false);
 
-        const warmupText = `Warmup for test '${test?.name}' [${testIndex + 1}/${tests.length}]`;
-
-        // For each warmup we allow the warmup to fail three times before we stop the warmup run:
-        const errorCountWarmupRef = {
-            errorCount: 0,
-            allowedExceptions: 3,
-        };
-
-        // by default we do 2 warmups:
-        // - first warmup to pass a login flow
-        // - second warmup to pass an actual flow and cache network requests
-        const iterations = 2;
-        for (let i = 0; i < iterations; i++) {
-            try {
-                // Warmup the main app:
-                await runTestIteration(config.MAIN_APP_PACKAGE, `[MAIN] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_MAIN);
-
-                // Warmup the delta app:
-                await runTestIteration(config.DELTA_APP_PACKAGE, `[DELTA] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_DELTA);
-            } catch (e) {
-                // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-                Logger.error(`Warmup failed with error: ${e}`);
-
-                errorCountWarmupRef.errorCount++;
-                i--; // repeat warmup again
-
-                if (errorCountWarmupRef.errorCount === errorCountWarmupRef.allowedExceptions) {
-                    Logger.error("There was an error running the warmup and we've reached the maximum number of allowed exceptions. Stopping the test run.");
-                    throw e;
-                }
-            }
-        }
-
-        server.setReadyToAcceptTestResults(true);
-
-        // For each test case we allow the test to fail three times before we stop the test run:
-        const errorCountRef = {
-            errorCount: 0,
-            allowedExceptions: 3,
-        };
-
-        // We run each test multiple time to average out the results
-        for (let testIteration = 0; testIteration < config.RUNS; testIteration++) {
-            const onError = (e: Error) => {
-                // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-                Logger.error(`Unexpected error during test execution: ${e}. `);
-                MeasureUtils.stop('error');
-                server.clearAllTestDoneListeners();
-                errorCountRef.errorCount += 1;
-                if (testIteration === 0 || errorCountRef.errorCount === errorCountRef.allowedExceptions) {
-                    Logger.error("There was an error running the test and we've reached the maximum number of allowed exceptions. Stopping the test run.");
-                    // If the error happened on the first test run, the test is broken
-                    // and we should not continue running it. Or if we have reached the
-                    // maximum number of allowed exceptions, we should stop the test run.
-                    throw e;
-                }
-                // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-                Logger.warn(`There was an error running the test. Continuing the test run. Error: ${e}`);
-            };
-
-            const launchArgs = {
-                mockNetwork: true,
-            };
-
-            const iterationText = `Test '${test?.name}' [${testIndex + 1}/${tests.length}], iteration [${testIteration + 1}/${config.RUNS}]`;
-            const mainIterationText = `[MAIN] ${iterationText}`;
-            const deltaIterationText = `[DELTA] ${iterationText}`;
-            try {
-                // Run the test on the main app:
-                await runTestIteration(config.MAIN_APP_PACKAGE, mainIterationText, config.BRANCH_MAIN, launchArgs);
-
-                // Run the test on the delta app:
-                await runTestIteration(config.DELTA_APP_PACKAGE, deltaIterationText, config.BRANCH_DELTA, launchArgs);
-            } catch (e) {
-                onError(e as Error);
-            }
-        }
+        try {
+            const warmupText = `Warmup for test '${test?.name}' [${testIndex + 1}/${tests.length}]`;
+
+            // For each warmup we allow the warmup to fail three times before we stop the warmup run:
+            const errorCountWarmupRef = {
+                errorCount: 0,
+                allowedExceptions: 3,
+            };
+
+            // by default we do 2 warmups:
+            // - first warmup to pass a login flow
+            // - second warmup to pass an actual flow and cache network requests
+            const iterations = 2;
+            for (let i = 0; i < iterations; i++) {
+                try {
+                    // Warmup the main app:
+                    await runTestIteration(config.MAIN_APP_PACKAGE, `[MAIN] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_MAIN);
+
+                    // Warmup the delta app:
+                    await runTestIteration(config.DELTA_APP_PACKAGE, `[DELTA] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_DELTA);
+                } catch (e) {
+                    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+                    Logger.error(`Warmup failed with error: ${e}`);
+
+                    MeasureUtils.stop('error-warmup');
+                    server.clearAllTestDoneListeners();
+
+                    errorCountWarmupRef.errorCount++;
+                    i--; // repeat warmup again
+
+                    if (errorCountWarmupRef.errorCount === errorCountWarmupRef.allowedExceptions) {
+                        Logger.error("There was an error running the warmup and we've reached the maximum number of allowed exceptions. Stopping the test run.");
+                        throw e;
+                    }
+                }
+            }
+
+            server.setReadyToAcceptTestResults(true);
+
+            // For each test case we allow the test to fail three times before we stop the test run:
+            const errorCountRef = {
+                errorCount: 0,
+                allowedExceptions: 3,
+            };
+
+            // We run each test multiple time to average out the results
+            for (let testIteration = 0; testIteration < config.RUNS; testIteration++) {
+                const onError = (e: Error) => {
+                    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+                    Logger.error(`Unexpected error during test execution: ${e}. `);
+                    MeasureUtils.stop('error');
+                    server.clearAllTestDoneListeners();
+                    errorCountRef.errorCount += 1;
+                    if (testIteration === 0 || errorCountRef.errorCount === errorCountRef.allowedExceptions) {
+                        Logger.error("There was an error running the test and we've reached the maximum number of allowed exceptions. Stopping the test run.");
+                        // If the error happened on the first test run, the test is broken
+                        // and we should not continue running it. Or if we have reached the
+                        // maximum number of allowed exceptions, we should stop the test run.
+                        throw e;
+                    }
+                    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+                    Logger.warn(`There was an error running the test. Continuing the test run. Error: ${e}`);
+                };
+
+                const launchArgs = {
+                    mockNetwork: true,
+                };
+
+                const iterationText = `Test '${test?.name}' [${testIndex + 1}/${tests.length}], iteration [${testIteration + 1}/${config.RUNS}]`;
+                const mainIterationText = `[MAIN] ${iterationText}`;
+                const deltaIterationText = `[DELTA] ${iterationText}`;
+                try {
+                    // Run the test on the main app:
+                    await runTestIteration(config.MAIN_APP_PACKAGE, mainIterationText, config.BRANCH_MAIN, launchArgs);
+
+                    // Run the test on the delta app:
+                    await runTestIteration(config.DELTA_APP_PACKAGE, deltaIterationText, config.BRANCH_DELTA, launchArgs);
+                } catch (e) {
+                    onError(e as Error);
+                }
+            }
+        } catch (exception) {
+            // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+            Logger.warn(`Test ${test?.name} can not be finished due to error: ${exception}`);
+            clearTestResults(test as TestConfig);
+        }
     }
 
     // Calculate statistics and write them to our work file
     Logger.info('Calculating statics and writing results');
-    compare(results.main, results.delta, `${config.OUTPUT_DIR}/output.md`, 'all', metricForTest);
+    await compare(results.main, results.delta, {
+        outputFile: `${config.OUTPUT_DIR}/output.md`,
+        outputFormat: 'all',
+        metricForTest,
+        skippedTests,
+    });
+    Logger.info('Finished calculating statics and writing results, stopping the test server');
 
     await server.stop();
 };
```
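
`clearTestResults` records the failed test's name and deletes every collected metric whose key starts with that name, for each branch in `results`, so partial measurements never reach the final comparison. A standalone sketch of that filtering with a hypothetical results shape and metric names:

```ts
// Standalone sketch of the clearTestResults filtering; the results shape and metric names are hypothetical.
type Results = Record<string, Record<string, number[]>>;

const results: Results = {
    main: {'Open chat (TTI)': [1200, 1180], 'Report typing (TTI)': [300, 310]},
    delta: {'Open chat (TTI)': [1150, 1170], 'Report typing (TTI)': [305, 295]},
};
const skippedTests: string[] = [];

function clearTestResults(testName: string) {
    skippedTests.push(testName);
    Object.keys(results).forEach((branch) => {
        Object.keys(results[branch]).forEach((metric) => {
            // Only drop metrics that belong to the failed test
            if (!metric.startsWith(testName)) {
                return;
            }
            delete results[branch][metric];
        });
    });
}

clearTestResults('Report typing');
console.log(results); // only the 'Open chat (TTI)' entries remain in both branches
console.log(skippedTests); // ['Report typing']
```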

tests/unit/E2EMarkdownTest.ts

Lines changed: 1 addition & 1 deletion
```diff
@@ -13,6 +13,6 @@ const results = {
 describe('markdown formatter', () => {
     it('should format significant changes properly', () => {
         const data = compareResults(results.main, results.delta, {commentLinking: 'ms'});
-        expect(buildMarkdown(data)).toMatchSnapshot();
+        expect(buildMarkdown(data, [])).toMatchSnapshot();
     });
 });
```
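
Since `buildMarkdown` now requires a second argument, the snapshot test passes an empty array. A hedged sketch of an additional test (not part of this commit) that could cover the skipped-tests note, reusing the file's existing `results` fixture; the test name passed in is hypothetical:

```ts
it('should mention skipped tests when some are provided', () => {
    const data = compareResults(results.main, results.delta, {commentLinking: 'ms'});
    const markdown = buildMarkdown(data, ['Report typing']); // hypothetical skipped test name
    expect(markdown).toContain('⚠️ Some tests did not pass successfully');
});
```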
