Mirror of https://github.com/mattermost/mattermost.git (synced 2026-02-18 18:18:23 -05:00)
E2E/Test: Increase parallel tests and remove smoke tests (#35271)
* test: increase parallel tests and remove smoke tests in both Cypress and Playwright
* add duration and retest info
* indicate overall, first-pass, and re-run run and test durations
Parent: 53aa05d8c6
Commit: 0ec4a474d5
21 changed files with 356 additions and 111 deletions
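For orientation, the duration figures this commit adds to the report fall into three buckets: the overall run duration, a first-pass/re-run breakdown, and a wall-clock test duration taken from the spec results themselves. A minimal TypeScript sketch of how the run-duration breakdown relates to the timestamps the workflows record (names are illustrative, not the workflows' API; the workflows do the same arithmetic in shell on epoch seconds from `date +%s`):

// Illustrative sketch only, not the workflow code.
interface RunTimestamps {
    startTime: number;     // recorded when workers are generated
    firstPassEnd: number;  // recorded after the first calculate-results pass
    now: number;           // taken when the report is built
}

function runDurationBreakdown(t: RunTimestamps, retestRan: boolean) {
    return {
        overallSeconds: t.now - t.startTime,                  // reported as "Xm Ys"
        firstPassSeconds: t.firstPassEnd - t.startTime,       // "first-pass" portion
        rerunSeconds: retestRan ? t.now - t.firstPassEnd : 0, // "re-run" portion, when failed specs were retried
    };
}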
@@ -42,6 +42,8 @@ outputs:
description: Pass rate percentage (e.g., "100.00")
color:
description: Color for webhook based on pass rate (green=100%, yellow=99%+, orange=98%+, red=<98%)
test_duration:
description: Wall-clock test duration (earliest start to latest end across all specs, formatted as "Xm Ys")

runs:
using: node24
@@ -19082,6 +19082,12 @@ function getColor(passRate) {
return "#F44336";
}
}
function formatDuration(ms) {
const totalSeconds = Math.round(ms / 1e3);
const minutes = Math.floor(totalSeconds / 60);
const seconds = totalSeconds % 60;
return `${minutes}m ${seconds}s`;
}
function calculateResultsFromSpecs(specs) {
let passed = 0;
let failed = 0;

@@ -19105,6 +19111,25 @@ function calculateResultsFromSpecs(specs) {
}
}
}
let earliestStart = null;
let latestEnd = null;
for (const spec of specs) {
const { start, end } = spec.result.stats;
if (start) {
const startMs = new Date(start).getTime();
if (earliestStart === null || startMs < earliestStart) {
earliestStart = startMs;
}
}
if (end) {
const endMs = new Date(end).getTime();
if (latestEnd === null || endMs > latestEnd) {
latestEnd = endMs;
}
}
}
const testDurationMs = earliestStart !== null && latestEnd !== null ? latestEnd - earliestStart : 0;
const testDuration = formatDuration(testDurationMs);
const totalSpecs = specs.length;
const failedSpecs = Array.from(failedSpecsSet).join(",");
const failedSpecsCount = failedSpecsSet.size;

@@ -19146,7 +19171,8 @@ function calculateResultsFromSpecs(specs) {
failedTests,
total,
passRate,
color
color,
testDuration
};
}
async function loadSpecFiles(resultsPath) {

@@ -19292,6 +19318,7 @@ async function run() {
info(`Failed Specs Count: ${calc.failedSpecsCount}`);
info(`Commit Status Message: ${calc.commitStatusMessage}`);
info(`Failed Specs: ${calc.failedSpecs || "none"}`);
info(`Test Duration: ${calc.testDuration}`);
endGroup();
setOutput("merged", merged.toString());
setOutput("passed", calc.passed);

@@ -19305,6 +19332,7 @@ async function run() {
setOutput("total", calc.total);
setOutput("pass_rate", calc.passRate);
setOutput("color", calc.color);
setOutput("test_duration", calc.testDuration);
}

// src/index.ts
@@ -81,6 +81,7 @@ export async function run(): Promise<void> {
core.info(`Failed Specs Count: ${calc.failedSpecsCount}`);
core.info(`Commit Status Message: ${calc.commitStatusMessage}`);
core.info(`Failed Specs: ${calc.failedSpecs || "none"}`);
core.info(`Test Duration: ${calc.testDuration}`);
core.endGroup();

// Set all outputs

@@ -96,4 +97,5 @@ export async function run(): Promise<void> {
core.setOutput("total", calc.total);
core.setOutput("pass_rate", calc.passRate);
core.setOutput("color", calc.color);
core.setOutput("test_duration", calc.testDuration);
}
@@ -97,6 +97,16 @@ function getColor(passRate: number): string {
/**
* Calculate results from parsed spec files
*/
/**
* Format milliseconds as "Xm Ys"
*/
function formatDuration(ms: number): string {
const totalSeconds = Math.round(ms / 1000);
const minutes = Math.floor(totalSeconds / 60);
const seconds = totalSeconds % 60;
return `${minutes}m ${seconds}s`;
}

export function calculateResultsFromSpecs(
specs: ParsedSpecFile[],
): CalculationResult {

@@ -125,6 +135,30 @@ export function calculateResultsFromSpecs(
}
}

// Compute test duration from earliest start to latest end across all specs
let earliestStart: number | null = null;
let latestEnd: number | null = null;
for (const spec of specs) {
const { start, end } = spec.result.stats;
if (start) {
const startMs = new Date(start).getTime();
if (earliestStart === null || startMs < earliestStart) {
earliestStart = startMs;
}
}
if (end) {
const endMs = new Date(end).getTime();
if (latestEnd === null || endMs > latestEnd) {
latestEnd = endMs;
}
}
}
const testDurationMs =
earliestStart !== null && latestEnd !== null
? latestEnd - earliestStart
: 0;
const testDuration = formatDuration(testDurationMs);

const totalSpecs = specs.length;
const failedSpecs = Array.from(failedSpecsSet).join(",");
const failedSpecsCount = failedSpecsSet.size;

@@ -185,6 +219,7 @@ export function calculateResultsFromSpecs(
total,
passRate,
color,
testDuration,
};
}
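As a usage note on the formatDuration helper added above: it rounds to the nearest second before splitting into minutes and seconds, so sub-second inputs collapse to "0m 0s". A small self-contained TypeScript check (the example values are assumptions, not from the repository's tests):

// Mirrors the helper in this diff; example values are assumptions.
function formatDuration(ms: number): string {
    const totalSeconds = Math.round(ms / 1000);
    return `${Math.floor(totalSeconds / 60)}m ${totalSeconds % 60}s`;
}

console.log(formatDuration(125_000)); // "2m 5s"
console.log(formatDuration(89_500));  // "1m 30s" (89.5 s rounds up to 90 s)
console.log(formatDuration(400));     // "0m 0s"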
@@ -130,6 +130,7 @@ export interface CalculationResult {
total: number;
passRate: string;
color: string;
testDuration: string;
}

export interface FailedTest {
@@ -45,6 +45,8 @@ outputs:
description: Number of passing tests (passed + flaky)
color:
description: Color for webhook based on pass rate (green=100%, yellow=99%+, orange=98%+, red=<98%)
test_duration:
description: Test execution duration from stats (formatted as "Xm Ys")

runs:
using: node24
@@ -19106,6 +19106,12 @@ function computeStats(suites, originalStats, retestStats) {
flaky
};
}
function formatDuration(ms) {
const totalSeconds = Math.round(ms / 1e3);
const minutes = Math.floor(totalSeconds / 60);
const seconds = totalSeconds % 60;
return `${minutes}m ${seconds}s`;
}
function getColor(passRate) {
if (passRate === 100) {
return "#43A047";

@@ -19177,6 +19183,7 @@ function calculateResults(results) {
const rateStr = rate === 100 ? "100%" : `${rate.toFixed(1)}%`;
const specSuffix = totalSpecs > 0 ? `, ${totalSpecs} specs` : "";
const commitStatusMessage = rate === 100 ? `${rateStr} passed (${passing})${specSuffix}` : `${rateStr} passed (${passing}/${total}), ${failed} failed${specSuffix}`;
const testDuration = formatDuration(stats.duration || 0);
return {
passed,
failed,

@@ -19190,7 +19197,8 @@ function calculateResults(results) {
total,
passRate,
passing,
color
color,
testDuration
};
}
function mergeResults(original, retest) {

@@ -19284,6 +19292,7 @@ async function run() {
info(`Failed Specs Count: ${calc.failedSpecsCount}`);
info(`Commit Status Message: ${calc.commitStatusMessage}`);
info(`Failed Specs: ${calc.failedSpecs || "none"}`);
info(`Test Duration: ${calc.testDuration}`);
endGroup();
setOutput("merged", merged.toString());
setOutput("passed", calc.passed);

@@ -19299,6 +19308,7 @@ async function run() {
setOutput("pass_rate", calc.passRate);
setOutput("passing", calc.passing);
setOutput("color", calc.color);
setOutput("test_duration", calc.testDuration);
}

// src/index.ts
@@ -101,6 +101,7 @@ export async function run(): Promise<void> {
core.info(`Failed Specs Count: ${calc.failedSpecsCount}`);
core.info(`Commit Status Message: ${calc.commitStatusMessage}`);
core.info(`Failed Specs: ${calc.failedSpecs || "none"}`);
core.info(`Test Duration: ${calc.testDuration}`);
core.endGroup();

// Set all outputs

@@ -118,4 +119,5 @@ export async function run(): Promise<void> {
core.setOutput("pass_rate", calc.passRate);
core.setOutput("passing", calc.passing);
core.setOutput("color", calc.color);
core.setOutput("test_duration", calc.testDuration);
}
@@ -130,6 +130,16 @@ export function computeStats(
};
}

/**
* Format milliseconds as "Xm Ys"
*/
function formatDuration(ms: number): string {
const totalSeconds = Math.round(ms / 1000);
const minutes = Math.floor(totalSeconds / 60);
const seconds = totalSeconds % 60;
return `${minutes}m ${seconds}s`;
}

/**
* Get color based on pass rate
*/

@@ -236,6 +246,8 @@ export function calculateResults(
? `${rateStr} passed (${passing})${specSuffix}`
: `${rateStr} passed (${passing}/${total}), ${failed} failed${specSuffix}`;

const testDuration = formatDuration(stats.duration || 0);

return {
passed,
failed,

@@ -250,6 +262,7 @@ export function calculateResults(
passRate,
passing,
color,
testDuration,
};
}
@@ -80,6 +80,7 @@ export interface CalculationResult {
passRate: string;
passing: number;
color: string;
testDuration: string;
}

export interface FailedTest {
@@ -73,7 +73,8 @@ runs:
while IFS= read -r file; do
[ -z "$file" ] && continue
if [[ ! "$file" =~ ^e2e-tests/ ]] && \
[[ ! "$file" =~ ^\.github/workflows/e2e- ]]; then
[[ ! "$file" =~ ^\.github/workflows/e2e- ]] && \
[[ ! "$file" =~ ^\.github/actions/ ]]; then
echo "Non-E2E file found: $file"
E2E_TEST_ONLY="false"
break
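For clarity, the updated shell condition above treats a change set as E2E-only when every touched file matches one of three path prefixes (now including .github/actions/). A hedged TypeScript equivalent of that predicate, for illustration only:

// Illustrative equivalent of the shell filter; not part of the action itself.
const E2E_PREFIXES = ["e2e-tests/", ".github/workflows/e2e-", ".github/actions/"];

function isE2eOnlyChange(changedFiles: string[]): boolean {
    // The shell loop flips E2E_TEST_ONLY to "false" on the first non-matching file.
    return changedFiles.every((file) =>
        E2E_PREFIXES.some((prefix) => file.startsWith(prefix)),
    );
}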
.github/workflows/e2e-tests-ci.yml (2 changed lines)

@@ -1,5 +1,5 @@
---
name: E2E Tests (smoke-then-full)
name: E2E Tests (full)
on:
# Argo Events Trigger (automated):
# - Triggered by: Enterprise CI/docker-image status check (success)
.github/workflows/e2e-tests-cypress-template.yml (94 changed lines)

@@ -17,11 +17,6 @@ on:
type: number
required: false
default: 1
timeout_minutes:
description: "Job timeout in minutes"
type: number
required: false
default: 30
enabled_docker_services:
description: "Space-separated list of docker services to enable"
type: string
@@ -132,11 +127,13 @@ jobs:
outputs:
status_check_url: "${{ steps.generate-cycle.outputs.status_check_url }}"
workers: "${{ steps.generate-workers.outputs.workers }}"
start_time: "${{ steps.generate-workers.outputs.start_time }}"
steps:
- name: ci/generate-workers
id: generate-workers
run: |
echo "workers=$(jq -nc '[range(${{ inputs.workers }})]')" >> $GITHUB_OUTPUT
echo "start_time=$(date +%s)" >> $GITHUB_OUTPUT

- name: ci/checkout-repo
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
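The jq expression above only builds a zero-based index list that the run-tests matrix fans out over, one entry per Cypress worker. A small TypeScript sketch of the equivalent, assuming the indices are used purely as matrix labels:

// Equivalent of `jq -nc '[range(workers)]'`: zero-based worker indexes.
function cypressWorkerIndexes(workers: number): number[] {
    return Array.from({length: workers}, (_, i) => i); // [0, 1, ..., workers - 1]
}
// e.g. cypressWorkerIndexes(40) fans the matrix out over indexes 0..39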
@@ -172,7 +169,7 @@ jobs:

run-tests:
runs-on: ubuntu-24.04
timeout-minutes: ${{ fromJSON(inputs.timeout_minutes) }}
timeout-minutes: 30
continue-on-error: ${{ inputs.workers > 1 }}
needs:
- generate-test-cycle

@@ -245,6 +242,8 @@ jobs:
total: ${{ steps.calculate.outputs.total }}
pass_rate: ${{ steps.calculate.outputs.pass_rate }}
color: ${{ steps.calculate.outputs.color }}
test_duration: ${{ steps.calculate.outputs.test_duration }}
end_time: ${{ steps.record-end-time.outputs.end_time }}
steps:
- name: ci/checkout-repo
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

@@ -259,10 +258,13 @@ jobs:
uses: ./.github/actions/calculate-cypress-results
with:
original-results-path: e2e-tests/cypress/results
- name: ci/record-end-time
id: record-end-time
run: echo "end_time=$(date +%s)" >> $GITHUB_OUTPUT

run-failed-tests:
runs-on: ubuntu-24.04
timeout-minutes: ${{ fromJSON(inputs.timeout_minutes) }}
timeout-minutes: 30
needs:
- generate-test-cycle
- run-tests

@@ -331,6 +333,9 @@ jobs:
passed: "${{ steps.final-results.outputs.passed }}"
failed: "${{ steps.final-results.outputs.failed }}"
commit_status_message: "${{ steps.final-results.outputs.commit_status_message }}"
duration: "${{ steps.duration.outputs.duration }}"
duration_display: "${{ steps.duration.outputs.duration_display }}"
retest_display: "${{ steps.duration.outputs.retest_display }}"
defaults:
run:
working-directory: e2e-tests

@@ -366,6 +371,7 @@ jobs:
echo "total=${{ needs.calculate-results.outputs.total }}" >> $GITHUB_OUTPUT
echo "pass_rate=${{ needs.calculate-results.outputs.pass_rate }}" >> $GITHUB_OUTPUT
echo "color=${{ needs.calculate-results.outputs.color }}" >> $GITHUB_OUTPUT
echo "test_duration=${{ needs.calculate-results.outputs.test_duration }}" >> $GITHUB_OUTPUT
{
echo "failed_tests<<EOF"
echo "${{ needs.calculate-results.outputs.failed_tests }}"

@@ -412,6 +418,7 @@ jobs:
echo "total=${{ steps.use-previous.outputs.total }}" >> $GITHUB_OUTPUT
echo "pass_rate=${{ steps.use-previous.outputs.pass_rate }}" >> $GITHUB_OUTPUT
echo "color=${{ steps.use-previous.outputs.color }}" >> $GITHUB_OUTPUT
echo "test_duration=${{ steps.use-previous.outputs.test_duration }}" >> $GITHUB_OUTPUT
{
echo "failed_tests<<EOF"
echo "$USE_PREVIOUS_FAILED_TESTS"

@@ -428,6 +435,7 @@ jobs:
echo "total=${{ steps.recalculate.outputs.total }}" >> $GITHUB_OUTPUT
echo "pass_rate=${{ steps.recalculate.outputs.pass_rate }}" >> $GITHUB_OUTPUT
echo "color=${{ steps.recalculate.outputs.color }}" >> $GITHUB_OUTPUT
echo "test_duration=${{ steps.recalculate.outputs.test_duration }}" >> $GITHUB_OUTPUT
{
echo "failed_tests<<EOF"
echo "$RECALCULATE_FAILED_TESTS"
@@ -435,6 +443,56 @@ jobs:
} >> $GITHUB_OUTPUT
fi

- name: ci/compute-duration
id: duration
env:
START_TIME: ${{ needs.generate-test-cycle.outputs.start_time }}
FIRST_PASS_END_TIME: ${{ needs.calculate-results.outputs.end_time }}
RETEST_RESULT: ${{ needs.run-failed-tests.result }}
RETEST_SPEC_COUNT: ${{ needs.calculate-results.outputs.failed_specs_count }}
TEST_DURATION: ${{ steps.final-results.outputs.test_duration }}
run: |
NOW=$(date +%s)
ELAPSED=$((NOW - START_TIME))
MINUTES=$((ELAPSED / 60))
SECONDS=$((ELAPSED % 60))
DURATION="${MINUTES}m ${SECONDS}s"

# Compute first-pass and re-run durations
FIRST_PASS_ELAPSED=$((FIRST_PASS_END_TIME - START_TIME))
FP_MIN=$((FIRST_PASS_ELAPSED / 60))
FP_SEC=$((FIRST_PASS_ELAPSED % 60))
FIRST_PASS="${FP_MIN}m ${FP_SEC}s"

if [ "$RETEST_RESULT" != "skipped" ]; then
RERUN_ELAPSED=$((NOW - FIRST_PASS_END_TIME))
RR_MIN=$((RERUN_ELAPSED / 60))
RR_SEC=$((RERUN_ELAPSED % 60))
RUN_BREAKDOWN=" (first-pass: ${FIRST_PASS}, re-run: ${RR_MIN}m ${RR_SEC}s)"
else
RUN_BREAKDOWN=""
fi

# Duration icons: >20m high alert, >15m warning, otherwise clock
if [ "$MINUTES" -ge 20 ]; then
DURATION_DISPLAY=":rotating_light: ${DURATION}${RUN_BREAKDOWN} | test: ${TEST_DURATION}"
elif [ "$MINUTES" -ge 15 ]; then
DURATION_DISPLAY=":warning: ${DURATION}${RUN_BREAKDOWN} | test: ${TEST_DURATION}"
else
DURATION_DISPLAY=":clock3: ${DURATION}${RUN_BREAKDOWN} | test: ${TEST_DURATION}"
fi

# Retest indicator with spec count
if [ "$RETEST_RESULT" != "skipped" ]; then
RETEST_DISPLAY=":repeat: re-run ${RETEST_SPEC_COUNT} spec(s)"
else
RETEST_DISPLAY=""
fi

echo "duration=${DURATION}" >> $GITHUB_OUTPUT
echo "duration_display=${DURATION_DISPLAY}" >> $GITHUB_OUTPUT
echo "retest_display=${RETEST_DISPLAY}" >> $GITHUB_OUTPUT

- name: ci/upload-combined-results
if: inputs.workers > 1
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
@@ -455,6 +513,8 @@ jobs:
COMMIT_SHA: ${{ inputs.commit_sha }}
REF_BRANCH: ${{ inputs.ref_branch }}
PR_NUMBER: ${{ inputs.pr_number }}
DURATION_DISPLAY: ${{ steps.duration.outputs.duration_display }}
RETEST_DISPLAY: ${{ steps.duration.outputs.retest_display }}
run: |
# Capitalize test type
TEST_TYPE_CAP=$(echo "$TEST_TYPE" | sed 's/.*/\u&/')

@@ -470,6 +530,12 @@ jobs:
SOURCE_LINE=":open-pull-request: [mattermost-pr-${PR_NUMBER}](https://github.com/${{ github.repository }}/pull/${PR_NUMBER})"
fi

# Build retest part for message
RETEST_PART=""
if [ -n "$RETEST_DISPLAY" ]; then
RETEST_PART=" | ${RETEST_DISPLAY}"
fi

# Build payload with attachments
PAYLOAD=$(cat <<EOF
{

@@ -477,7 +543,7 @@ jobs:
"icon_url": "https://mattermost.com/wp-content/uploads/2022/02/icon_WS.png",
"attachments": [{
"color": "${COLOR}",
"text": "**Results - Cypress ${TEST_TYPE_CAP} Tests**\n\n${SOURCE_LINE}\n:docker: \`${{ env.SERVER_IMAGE }}\`\n${COMMIT_STATUS_MESSAGE} | [full report](${REPORT_URL})"
"text": "**Results - Cypress ${TEST_TYPE_CAP} Tests**\n\n${SOURCE_LINE}\n:docker: \`${{ env.SERVER_IMAGE }}\`\n${COMMIT_STATUS_MESSAGE}${RETEST_PART} | [full report](${REPORT_URL})\n${DURATION_DISPLAY}"
}]
}
EOF

@@ -498,6 +564,8 @@ jobs:
FAILED_SPECS: ${{ steps.final-results.outputs.failed_specs }}
COMMIT_STATUS_MESSAGE: ${{ steps.final-results.outputs.commit_status_message }}
FAILED_TESTS: ${{ steps.final-results.outputs.failed_tests }}
DURATION_DISPLAY: ${{ steps.duration.outputs.duration_display }}
RETEST_RESULT: ${{ needs.run-failed-tests.result }}
run: |
{
echo "## E2E Test Results - Cypress ${TEST_TYPE}"

@@ -527,6 +595,12 @@ jobs:
echo "| failed_specs_count | ${FAILED_SPECS_COUNT} |"
echo "| commit_status_message | ${COMMIT_STATUS_MESSAGE} |"
echo "| failed_specs | ${FAILED_SPECS:-none} |"
echo "| duration | ${DURATION_DISPLAY} |"
if [ "$RETEST_RESULT" != "skipped" ]; then
echo "| retested | Yes |"
else
echo "| retested | No |"
fi

echo ""
echo "---"

@@ -551,7 +625,7 @@ jobs:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha }}
context: ${{ inputs.context_name }}
description: "${{ needs.report.outputs.commit_status_message }}, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
description: "${{ needs.report.outputs.commit_status_message }}, ${{ needs.report.outputs.duration }}, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
status: success
target_url: ${{ needs.generate-test-cycle.outputs.status_check_url }}

@@ -570,6 +644,6 @@ jobs:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha }}
context: ${{ inputs.context_name }}
description: "${{ needs.report.outputs.commit_status_message }}, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
description: "${{ needs.report.outputs.commit_status_message }}, ${{ needs.report.outputs.duration }}, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
status: failure
target_url: ${{ needs.generate-test-cycle.outputs.status_check_url }}
.github/workflows/e2e-tests-cypress.yml (49 changed lines)

@@ -33,11 +33,6 @@ on:
required: false
default: mattermostdevelopment
description: "Docker registry: mattermostdevelopment (default) or mattermost"
skip_smoke:
type: boolean
required: false
default: false
description: "Skip smoke tests and run full tests directly"
server_image_aliases:
type: string
required: false

@@ -70,6 +65,7 @@ jobs:
build_id: "${{ steps.build-vars.outputs.build_id }}"
server_image_tag: "${{ steps.build-vars.outputs.server_image_tag }}"
server_image: "${{ steps.build-vars.outputs.server_image }}"
context_suffix: "${{ steps.build-vars.outputs.context_suffix }}"
steps:
- name: ci/generate-build-variables
id: build-vars
@@ -130,46 +126,23 @@ jobs:
echo "build_id=${RUN_ID}_${RUN_ATTEMPT}-${SERVER_IMAGE_TAG}-cypress-onprem-ent" >> $GITHUB_OUTPUT
fi

cypress-smoke:
if: ${{ !inputs.skip_smoke }}
needs:
- generate-build-variables
uses: ./.github/workflows/e2e-tests-cypress-template.yml
with:
test_type: smoke
test_filter: "--stage=@prod --group=@smoke"
workers: 1
timeout_minutes: 30
enabled_docker_services: "postgres inbucket"
commit_sha: ${{ inputs.commit_sha }}
branch: ${{ needs.generate-build-variables.outputs.branch }}
build_id: ${{ needs.generate-build-variables.outputs.build_id }}
server_image_tag: ${{ needs.generate-build-variables.outputs.server_image_tag }}
server_edition: ${{ inputs.server_edition }}
server_image_repo: ${{ inputs.server_image_repo }}
server_image_aliases: ${{ inputs.server_image_aliases }}
server: ${{ inputs.server }}
context_name: "e2e-test/cypress-smoke/${{ inputs.server_edition || 'enterprise' }}"
secrets:
MM_LICENSE: ${{ secrets.MM_LICENSE }}
AUTOMATION_DASHBOARD_URL: ${{ secrets.AUTOMATION_DASHBOARD_URL }}
AUTOMATION_DASHBOARD_TOKEN: ${{ secrets.AUTOMATION_DASHBOARD_TOKEN }}
PUSH_NOTIFICATION_SERVER: ${{ secrets.PUSH_NOTIFICATION_SERVER }}
CWS_URL: ${{ secrets.CWS_URL }}
CWS_EXTRA_HTTP_HEADERS: ${{ secrets.CWS_EXTRA_HTTP_HEADERS }}
# Generate context name suffix based on report type
REPORT_TYPE="${{ inputs.report_type }}"
case "$REPORT_TYPE" in
MASTER) echo "context_suffix=/master" >> $GITHUB_OUTPUT ;;
RELEASE) echo "context_suffix=/release" >> $GITHUB_OUTPUT ;;
RELEASE_CUT) echo "context_suffix=/release-cut" >> $GITHUB_OUTPUT ;;
*) echo "context_suffix=" >> $GITHUB_OUTPUT ;;
esac

# Full Tests (runs if smoke passed or skipped)
cypress-full:
needs:
- cypress-smoke
- generate-build-variables
if: always() && (needs.cypress-smoke.result == 'skipped' || needs.cypress-smoke.outputs.failed == '0')
uses: ./.github/workflows/e2e-tests-cypress-template.yml
with:
test_type: full
test_filter: '--stage="@prod" --excludeGroup="@te_only,@cloud_only,@high_availability" --sortFirst="@compliance_export,@elasticsearch,@ldap_group,@ldap" --sortLast="@saml,@keycloak,@plugin,@plugins_uninstall,@mfa,@license_removal"'
workers: 20
timeout_minutes: 60
workers: 40
enabled_docker_services: "postgres inbucket minio openldap elasticsearch keycloak"
commit_sha: ${{ inputs.commit_sha }}
branch: ${{ needs.generate-build-variables.outputs.branch }}

@@ -183,7 +156,7 @@ jobs:
report_type: ${{ inputs.report_type }}
ref_branch: ${{ inputs.ref_branch }}
pr_number: ${{ inputs.pr_number }}
context_name: "e2e-test/cypress-full/${{ inputs.server_edition || 'enterprise' }}"
context_name: "e2e-test/cypress-full/${{ inputs.server_edition || 'enterprise' }}${{ needs.generate-build-variables.outputs.context_suffix }}"
secrets:
MM_LICENSE: ${{ secrets.MM_LICENSE }}
AUTOMATION_DASHBOARD_URL: ${{ secrets.AUTOMATION_DASHBOARD_URL }}
.github/workflows/e2e-tests-on-merge.yml (4 changed lines)

@@ -62,7 +62,6 @@ jobs:
with:
commit_sha: ${{ inputs.commit_sha }}
server_image_tag: ${{ inputs.server_image_tag }}
skip_smoke: true
server: onprem
enable_reporting: true
report_type: ${{ needs.generate-build-variables.outputs.report_type }}

@@ -82,7 +81,6 @@ jobs:
with:
commit_sha: ${{ inputs.commit_sha }}
server_image_tag: ${{ inputs.server_image_tag }}
skip_smoke: true
server: onprem
enable_reporting: true
report_type: ${{ needs.generate-build-variables.outputs.report_type }}

@@ -101,7 +99,6 @@ jobs:
commit_sha: ${{ inputs.commit_sha }}
server_image_tag: ${{ inputs.server_image_tag }}
server_edition: fips
skip_smoke: true
server: onprem
enable_reporting: true
report_type: ${{ needs.generate-build-variables.outputs.report_type }}

@@ -122,7 +119,6 @@ jobs:
commit_sha: ${{ inputs.commit_sha }}
server_image_tag: ${{ inputs.server_image_tag }}
server_edition: fips
skip_smoke: true
server: onprem
enable_reporting: true
report_type: ${{ needs.generate-build-variables.outputs.report_type }}
.github/workflows/e2e-tests-on-release.yml (4 changed lines)

@@ -59,7 +59,6 @@ jobs:
server_image_tag: ${{ inputs.server_image_tag }}
server_image_repo: mattermost
server_image_aliases: ${{ inputs.server_image_aliases }}
skip_smoke: true
server: onprem
enable_reporting: true
report_type: RELEASE_CUT

@@ -81,7 +80,6 @@ jobs:
server_image_tag: ${{ inputs.server_image_tag }}
server_image_repo: mattermost
server_image_aliases: ${{ inputs.server_image_aliases }}
skip_smoke: true
server: onprem
enable_reporting: true
report_type: RELEASE_CUT

@@ -102,7 +100,6 @@ jobs:
server_edition: fips
server_image_repo: mattermost
server_image_aliases: ${{ inputs.server_image_aliases }}
skip_smoke: true
server: onprem
enable_reporting: true
report_type: RELEASE_CUT

@@ -125,7 +122,6 @@ jobs:
server_edition: fips
server_image_repo: mattermost
server_image_aliases: ${{ inputs.server_image_aliases }}
skip_smoke: true
server: onprem
enable_reporting: true
report_type: RELEASE_CUT
.github/workflows/e2e-tests-playwright-template.yml (150 changed lines)

@@ -12,11 +12,11 @@ on:
description: "Test filter arguments (e.g., --grep @smoke)"
type: string
required: true
timeout_minutes:
description: "Job timeout in minutes"
workers:
description: "Number of parallel shards"
type: number
required: false
default: 60
default: 2
enabled_docker_services:
description: "Space-separated list of docker services to enable"
type: string
@@ -117,9 +117,29 @@ jobs:
description: "tests running, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
status: pending

generate-test-variables:
runs-on: ubuntu-24.04
outputs:
workers: "${{ steps.generate-workers.outputs.workers }}"
start_time: "${{ steps.generate-workers.outputs.start_time }}"
steps:
- name: ci/generate-workers
id: generate-workers
run: |
echo "workers=$(jq -nc '[range(1; ${{ inputs.workers }} + 1)]')" >> $GITHUB_OUTPUT
echo "start_time=$(date +%s)" >> $GITHUB_OUTPUT

run-tests:
runs-on: ubuntu-24.04
timeout-minutes: ${{ fromJSON(inputs.timeout_minutes) }}
timeout-minutes: 30
continue-on-error: true
needs:
- generate-test-variables
if: needs.generate-test-variables.result == 'success'
strategy:
fail-fast: false
matrix:
worker_index: ${{ fromJSON(needs.generate-test-variables.outputs.workers) }}
defaults:
run:
working-directory: e2e-tests
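Unlike the Cypress template, the jq expression here produces a one-based list, since each entry later becomes Playwright's --shard={index}/{total} argument and Playwright numbers shards from 1. A TypeScript sketch of the equivalent, for illustration:

// Equivalent of `jq -nc '[range(1; workers + 1)]'`: one-based shard indexes.
function playwrightShardIndexes(workers: number): number[] {
    return Array.from({length: workers}, (_, i) => i + 1); // [1, 2, ..., workers]
}
// e.g. with workers = 2, the matrix runs --shard=1/2 and --shard=2/2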
@@ -129,8 +149,10 @@ jobs:
ENABLED_DOCKER_SERVICES: "${{ inputs.enabled_docker_services }}"
TEST: playwright
TEST_FILTER: "${{ inputs.test_filter }}"
PW_SHARD: "${{ format('--shard={0}/{1}', matrix.worker_index, inputs.workers) }}"
BRANCH: "${{ inputs.branch }}-${{ inputs.test_type }}"
BUILD_ID: "${{ inputs.build_id }}"
CI_BASE_URL: "${{ inputs.test_type }}-test-${{ matrix.worker_index }}"
steps:
- name: ci/checkout-repo
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

@@ -157,7 +179,7 @@ jobs:
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: playwright-${{ inputs.test_type }}-${{ inputs.server_edition }}-results
name: playwright-${{ inputs.test_type }}-${{ inputs.server_edition }}-results-${{ matrix.worker_index }}
path: |
e2e-tests/playwright/logs/
e2e-tests/playwright/results/

@@ -166,8 +188,9 @@ jobs:
calculate-results:
runs-on: ubuntu-24.04
needs:
- generate-test-variables
- run-tests
if: always()
if: always() && needs.generate-test-variables.result == 'success'
outputs:
passed: ${{ steps.calculate.outputs.passed }}
failed: ${{ steps.calculate.outputs.failed }}
@@ -182,23 +205,49 @@ jobs:
pass_rate: ${{ steps.calculate.outputs.pass_rate }}
passing: ${{ steps.calculate.outputs.passing }}
color: ${{ steps.calculate.outputs.color }}
test_duration: ${{ steps.calculate.outputs.test_duration }}
end_time: ${{ steps.record-end-time.outputs.end_time }}
steps:
- name: ci/checkout-repo
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: ci/download-results
- name: ci/setup-node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: ".nvmrc"
cache: npm
cache-dependency-path: "e2e-tests/playwright/package-lock.json"
- name: ci/download-shard-results
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: playwright-${{ inputs.test_type }}-${{ inputs.server_edition }}-results
path: e2e-tests/playwright/
pattern: playwright-${{ inputs.test_type }}-${{ inputs.server_edition }}-results-*
path: e2e-tests/playwright/shard-results/
merge-multiple: true
- name: ci/merge-shard-results
working-directory: e2e-tests/playwright
run: |
mkdir -p results/reporter

# Merge blob reports using Playwright merge-reports (per docs)
npm install --no-save @playwright/test
npx playwright merge-reports --config merge.config.mjs ./shard-results/results/blob-report/
- name: ci/calculate
id: calculate
uses: ./.github/actions/calculate-playwright-results
with:
original-results-path: e2e-tests/playwright/results/reporter/results.json
- name: ci/upload-merged-results
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: playwright-${{ inputs.test_type }}-${{ inputs.server_edition }}-results
path: e2e-tests/playwright/results/
retention-days: 5
- name: ci/record-end-time
id: record-end-time
run: echo "end_time=$(date +%s)" >> $GITHUB_OUTPUT

run-failed-tests:
runs-on: ubuntu-24.04
timeout-minutes: ${{ fromJSON(inputs.timeout_minutes) }}
timeout-minutes: 30
needs:
- run-tests
- calculate-results
@@ -255,6 +304,7 @@ jobs:
report:
runs-on: ubuntu-24.04
needs:
- generate-test-variables
- run-tests
- calculate-results
- run-failed-tests

@@ -264,6 +314,9 @@ jobs:
failed: "${{ steps.final-results.outputs.failed }}"
commit_status_message: "${{ steps.final-results.outputs.commit_status_message }}"
report_url: "${{ steps.upload-to-s3.outputs.report_url }}"
duration: "${{ steps.duration.outputs.duration }}"
duration_display: "${{ steps.duration.outputs.duration_display }}"
retest_display: "${{ steps.duration.outputs.retest_display }}"
defaults:
run:
working-directory: e2e-tests

@@ -277,12 +330,12 @@ jobs:
cache: npm
cache-dependency-path: "e2e-tests/playwright/package-lock.json"

# Download original results (always needed)
# Download merged results (uploaded by calculate-results)
- name: ci/download-results
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: playwright-${{ inputs.test_type }}-${{ inputs.server_edition }}-results
path: e2e-tests/playwright/
path: e2e-tests/playwright/results/

# Download retest results (only if retest ran)
- name: ci/download-retest-results

@@ -317,7 +370,6 @@ jobs:
TEST_TYPE: "${{ inputs.test_type }}"
run: |
LOCAL_RESULTS_PATH="playwright/results/"
LOCAL_LOGS_PATH="playwright/logs/"

# Use PR number if available, otherwise use commit SHA prefix
if [ -n "$PR_NUMBER" ]; then
@@ -333,6 +385,56 @@ jobs:

REPORT_URL="https://${AWS_S3_BUCKET}.s3.amazonaws.com/${S3_PATH}/results/reporter/index.html"
echo "report_url=$REPORT_URL" >> "$GITHUB_OUTPUT"
- name: ci/compute-duration
id: duration
env:
START_TIME: ${{ needs.generate-test-variables.outputs.start_time }}
FIRST_PASS_END_TIME: ${{ needs.calculate-results.outputs.end_time }}
RETEST_RESULT: ${{ needs.run-failed-tests.result }}
RETEST_SPEC_COUNT: ${{ needs.calculate-results.outputs.failed_specs_count }}
TEST_DURATION: ${{ steps.final-results.outputs.test_duration }}
run: |
NOW=$(date +%s)
ELAPSED=$((NOW - START_TIME))
MINUTES=$((ELAPSED / 60))
SECONDS=$((ELAPSED % 60))
DURATION="${MINUTES}m ${SECONDS}s"

# Compute first-pass and re-run durations
FIRST_PASS_ELAPSED=$((FIRST_PASS_END_TIME - START_TIME))
FP_MIN=$((FIRST_PASS_ELAPSED / 60))
FP_SEC=$((FIRST_PASS_ELAPSED % 60))
FIRST_PASS="${FP_MIN}m ${FP_SEC}s"

if [ "$RETEST_RESULT" != "skipped" ]; then
RERUN_ELAPSED=$((NOW - FIRST_PASS_END_TIME))
RR_MIN=$((RERUN_ELAPSED / 60))
RR_SEC=$((RERUN_ELAPSED % 60))
RUN_BREAKDOWN=" (first-pass: ${FIRST_PASS}, re-run: ${RR_MIN}m ${RR_SEC}s)"
else
RUN_BREAKDOWN=""
fi

# Duration icons: >20m high alert, >15m warning, otherwise clock
if [ "$MINUTES" -ge 20 ]; then
DURATION_DISPLAY=":rotating_light: ${DURATION}${RUN_BREAKDOWN} | test: ${TEST_DURATION}"
elif [ "$MINUTES" -ge 15 ]; then
DURATION_DISPLAY=":warning: ${DURATION}${RUN_BREAKDOWN} | test: ${TEST_DURATION}"
else
DURATION_DISPLAY=":clock3: ${DURATION}${RUN_BREAKDOWN} | test: ${TEST_DURATION}"
fi

# Retest indicator with spec count
if [ "$RETEST_RESULT" != "skipped" ]; then
RETEST_DISPLAY=":repeat: re-run ${RETEST_SPEC_COUNT} spec(s)"
else
RETEST_DISPLAY=""
fi

echo "duration=${DURATION}" >> $GITHUB_OUTPUT
echo "duration_display=${DURATION_DISPLAY}" >> $GITHUB_OUTPUT
echo "retest_display=${RETEST_DISPLAY}" >> $GITHUB_OUTPUT

- name: ci/publish-report
if: inputs.enable_reporting && env.REPORT_WEBHOOK_URL != ''
env:
@@ -345,6 +447,8 @@ jobs:
COMMIT_SHA: ${{ inputs.commit_sha }}
REF_BRANCH: ${{ inputs.ref_branch }}
PR_NUMBER: ${{ inputs.pr_number }}
DURATION_DISPLAY: ${{ steps.duration.outputs.duration_display }}
RETEST_DISPLAY: ${{ steps.duration.outputs.retest_display }}
run: |
# Capitalize test type
TEST_TYPE_CAP=$(echo "$TEST_TYPE" | sed 's/.*/\u&/')

@@ -360,6 +464,12 @@ jobs:
SOURCE_LINE=":open-pull-request: [mattermost-pr-${PR_NUMBER}](https://github.com/${{ github.repository }}/pull/${PR_NUMBER})"
fi

# Build retest part for message
RETEST_PART=""
if [ -n "$RETEST_DISPLAY" ]; then
RETEST_PART=" | ${RETEST_DISPLAY}"
fi

# Build payload with attachments
PAYLOAD=$(cat <<EOF
{

@@ -367,7 +477,7 @@ jobs:
"icon_url": "https://mattermost.com/wp-content/uploads/2022/02/icon_WS.png",
"attachments": [{
"color": "${COLOR}",
"text": "**Results - Playwright ${TEST_TYPE_CAP} Tests**\n\n${SOURCE_LINE}\n:docker: \`${{ env.SERVER_IMAGE }}\`\n${COMMIT_STATUS_MESSAGE} | [full report](${REPORT_URL})"
"text": "**Results - Playwright ${TEST_TYPE_CAP} Tests**\n\n${SOURCE_LINE}\n:docker: \`${{ env.SERVER_IMAGE }}\`\n${COMMIT_STATUS_MESSAGE}${RETEST_PART} | [full report](${REPORT_URL})\n${DURATION_DISPLAY}"
}]
}
EOF

@@ -389,6 +499,8 @@ jobs:
FAILED_SPECS: ${{ steps.final-results.outputs.failed_specs }}
COMMIT_STATUS_MESSAGE: ${{ steps.final-results.outputs.commit_status_message }}
FAILED_TESTS: ${{ steps.final-results.outputs.failed_tests }}
DURATION_DISPLAY: ${{ steps.duration.outputs.duration_display }}
RETEST_RESULT: ${{ needs.run-failed-tests.result }}
run: |
{
echo "## E2E Test Results - Playwright ${TEST_TYPE}"

@@ -419,6 +531,12 @@ jobs:
echo "| failed_specs_count | ${FAILED_SPECS_COUNT} |"
echo "| commit_status_message | ${COMMIT_STATUS_MESSAGE} |"
echo "| failed_specs | ${FAILED_SPECS:-none} |"
echo "| duration | ${DURATION_DISPLAY} |"
if [ "$RETEST_RESULT" != "skipped" ]; then
echo "| retested | Yes |"
else
echo "| retested | No |"
fi

echo ""
echo "---"

@@ -442,7 +560,7 @@ jobs:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha }}
context: ${{ inputs.context_name }}
description: "${{ needs.report.outputs.commit_status_message }}, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
description: "${{ needs.report.outputs.commit_status_message }}, ${{ needs.report.outputs.duration }}, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
status: success
target_url: ${{ needs.report.outputs.report_url }}

@@ -460,6 +578,6 @@ jobs:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha }}
context: ${{ inputs.context_name }}
description: "${{ needs.report.outputs.commit_status_message }}, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
description: "${{ needs.report.outputs.commit_status_message }}, ${{ needs.report.outputs.duration }}, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
status: failure
target_url: ${{ needs.report.outputs.report_url }}
.github/workflows/e2e-tests-playwright.yml (45 changed lines)

@@ -33,11 +33,6 @@ on:
required: false
default: mattermostdevelopment
description: "Docker registry: mattermostdevelopment (default) or mattermost"
skip_smoke:
type: boolean
required: false
default: false
description: "Skip smoke tests and run full tests directly"
server_image_aliases:
type: string
required: false

@@ -64,6 +59,7 @@ jobs:
build_id: "${{ steps.build-vars.outputs.build_id }}"
server_image_tag: "${{ steps.build-vars.outputs.server_image_tag }}"
server_image: "${{ steps.build-vars.outputs.server_image }}"
context_suffix: "${{ steps.build-vars.outputs.context_suffix }}"
steps:
- name: ci/generate-build-variables
id: build-vars
@@ -124,42 +120,23 @@ jobs:
echo "build_id=${RUN_ID}_${RUN_ATTEMPT}-${SERVER_IMAGE_TAG}-playwright-onprem-ent" >> $GITHUB_OUTPUT
fi

playwright-smoke:
if: ${{ !inputs.skip_smoke }}
needs:
- generate-build-variables
uses: ./.github/workflows/e2e-tests-playwright-template.yml
with:
test_type: smoke
test_filter: "--grep @smoke"
timeout_minutes: 30
enabled_docker_services: "postgres inbucket"
commit_sha: ${{ inputs.commit_sha }}
branch: ${{ needs.generate-build-variables.outputs.branch }}
build_id: ${{ needs.generate-build-variables.outputs.build_id }}
server_image_tag: ${{ needs.generate-build-variables.outputs.server_image_tag }}
server_edition: ${{ inputs.server_edition }}
server_image_repo: ${{ inputs.server_image_repo }}
server_image_aliases: ${{ inputs.server_image_aliases }}
server: ${{ inputs.server }}
context_name: "e2e-test/playwright-smoke/${{ inputs.server_edition || 'enterprise' }}"
pr_number: ${{ inputs.pr_number }}
secrets:
MM_LICENSE: ${{ secrets.MM_LICENSE }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# Generate context name suffix based on report type
REPORT_TYPE="${{ inputs.report_type }}"
case "$REPORT_TYPE" in
MASTER) echo "context_suffix=/master" >> $GITHUB_OUTPUT ;;
RELEASE) echo "context_suffix=/release" >> $GITHUB_OUTPUT ;;
RELEASE_CUT) echo "context_suffix=/release-cut" >> $GITHUB_OUTPUT ;;
*) echo "context_suffix=" >> $GITHUB_OUTPUT ;;
esac

# Full Tests (runs if smoke passed or skipped)
playwright-full:
needs:
- playwright-smoke
- generate-build-variables
if: always() && (needs.playwright-smoke.result == 'skipped' || needs.playwright-smoke.outputs.failed == '0')
uses: ./.github/workflows/e2e-tests-playwright-template.yml
with:
test_type: full
test_filter: '--grep-invert "@visual"'
timeout_minutes: 120
workers: 4
enabled_docker_services: "postgres inbucket minio openldap elasticsearch keycloak"
commit_sha: ${{ inputs.commit_sha }}
branch: ${{ needs.generate-build-variables.outputs.branch }}

@@ -173,7 +150,7 @@ jobs:
report_type: ${{ inputs.report_type }}
ref_branch: ${{ inputs.ref_branch }}
pr_number: ${{ inputs.pr_number }}
context_name: "e2e-test/playwright-full/${{ inputs.server_edition || 'enterprise' }}"
context_name: "e2e-test/playwright-full/${{ inputs.server_edition || 'enterprise' }}${{ needs.generate-build-variables.outputs.context_suffix }}"
secrets:
MM_LICENSE: ${{ secrets.MM_LICENSE }}
REPORT_WEBHOOK_URL: ${{ secrets.REPORT_WEBHOOK_URL }}
@@ -37,7 +37,7 @@ EOF

# Run Playwright test
# NB: do not exit the script if some testcases fail
${MME2E_DC_SERVER} exec -i -u "$MME2E_UID" -- playwright bash -c "cd e2e-tests/playwright && npm run test:ci -- ${TEST_FILTER}" | tee ../playwright/logs/playwright.log || true
${MME2E_DC_SERVER} exec -i -u "$MME2E_UID" -- playwright bash -c "cd e2e-tests/playwright && npm run test:ci -- ${TEST_FILTER} ${PW_SHARD:-}" | tee ../playwright/logs/playwright.log || true

# Collect run results
# Documentation on the results.json file: https://playwright.dev/docs/api/class-testcase#test-case-expected-status
e2e-tests/playwright/merge.config.mjs (new file, 13 lines)

@@ -0,0 +1,13 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.

// Configuration for merging sharded blob reports via:
// npx playwright merge-reports --config merge.config.mjs ./all-blob-reports/

export default {
reporter: [
['html', {open: 'never', outputFolder: './results/reporter'}],
['json', {outputFile: './results/reporter/results.json'}],
['junit', {outputFile: './results/reporter/results.xml'}],
],
};
@@ -75,6 +75,7 @@ export default defineConfig({
},
],
reporter: [
...(testConfig.isCI ? [['blob', {outputDir: './results/blob-report'}] as const] : []),
['html', {open: 'never', outputFolder: './results/reporter'}],
['json', {outputFile: './results/reporter/results.json'}],
['junit', {outputFile: './results/reporter/results.xml'}],