diff --git a/.github/scripts/js/e2e/report/cluster-report.js b/.github/scripts/js/e2e/report/cluster-report.js
new file mode 100644
index 0000000000..3261caf602
--- /dev/null
+++ b/.github/scripts/js/e2e/report/cluster-report.js
@@ -0,0 +1,361 @@
+// Copyright 2026 Flant JSC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+const fs = require('fs');
+const path = require('path');
+const {XMLParser} = require('fast-xml-parser');
+
+const stageLabels = {
+ bootstrap: 'BOOTSTRAP CLUSTER',
+ 'configure-sdn': 'CONFIGURE SDN',
+ 'storage-setup': 'STORAGE SETUP',
+ 'virtualization-setup': 'VIRTUALIZATION SETUP',
+ 'e2e-test': 'E2E TEST',
+ success: 'SUCCESS',
+ 'artifact-missing': 'TEST REPORTS NOT FOUND',
+};
+
+const preE2EStages = new Set([
+ 'bootstrap',
+ 'configure-sdn',
+ 'storage-setup',
+ 'virtualization-setup',
+]);
+
+const junitXmlParser = new XMLParser({
+ ignoreAttributes: false,
+ attributeNamePrefix: '',
+ parseTagValue: false,
+ parseAttributeValue: false,
+ trimValues: false,
+ processEntities: true,
+});
+
+function escapeRegExp(value) {
+ return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
+}
+
+function listMatchingFiles(dirPath, filePattern, files = []) {
+ if (!fs.existsSync(dirPath)) {
+ return files;
+ }
+
+ const entries = fs.readdirSync(dirPath, {withFileTypes: true})
+ .sort((left, right) => left.name.localeCompare(right.name));
+ for (const entry of entries) {
+ const fullPath = path.join(dirPath, entry.name);
+ if (entry.isDirectory()) {
+ listMatchingFiles(fullPath, filePattern, files);
+ continue;
+ }
+
+ if (filePattern.test(entry.name)) {
+ files.push(fullPath);
+ }
+ }
+
+ return files;
+}
+
+function pickLatestMatchingFile(dirPath, filePattern, core) {
+ const matchingFiles = listMatchingFiles(dirPath, filePattern);
+ if (matchingFiles.length === 0) {
+ return null;
+ }
+
+ const rankedFiles = matchingFiles
+ .map((filePath) => ({
+ filePath,
+ mtimeMs: fs.statSync(filePath).mtimeMs,
+ }))
+ .sort((left, right) => {
+ if (right.mtimeMs !== left.mtimeMs) {
+ return right.mtimeMs - left.mtimeMs;
+ }
+
+ return right.filePath.localeCompare(left.filePath);
+ });
+
+ if (rankedFiles.length > 1) {
+ core.warning(
+ `Found multiple JUnit reports for the cluster; using the newest file: ${rankedFiles[0].filePath}`
+ );
+ }
+
+ return rankedFiles[0].filePath;
+}
+
+function toArray(value) {
+ if (!value) {
+ return [];
+ }
+
+ return Array.isArray(value) ? value : [value];
+}
+
+function toInteger(value) {
+ const parsed = Number.parseInt(value || '0', 10);
+ return Number.isNaN(parsed) ? 0 : parsed;
+}
+
+function zeroMetrics() {
+ return {
+ passed: 0,
+ failed: 0,
+ errors: 0,
+ skipped: 0,
+ total: 0,
+ successRate: 0,
+ };
+}
+
+function hasOwnProperty(object, key) {
+ return Boolean(object) && Object.prototype.hasOwnProperty.call(object, key);
+}
+
+function hasMetricAttributes(node) {
+ return ['tests', 'failures', 'errors', 'skipped', 'disabled']
+ .some((attributeName) => hasOwnProperty(node, attributeName));
+}
+
+function readMetricsFromNode(node) {
+ return {
+ total: toInteger(node && node.tests),
+ failed: toInteger(node && node.failures),
+ errors: toInteger(node && node.errors),
+ skipped: toInteger((node && (node.skipped || node.disabled)) || 0),
+ };
+}
+
+function collectSuites(suites, collectedSuites = []) {
+ for (const suite of suites) {
+ collectedSuites.push(suite);
+ collectSuites(toArray(suite.testsuite), collectedSuites);
+ }
+
+ return collectedSuites;
+}
+
+function collectMetricSuites(suites, collectedSuites = []) {
+ for (const suite of suites) {
+ const nestedSuites = toArray(suite.testsuite);
+ const hasNestedSuites = nestedSuites.length > 0;
+ const hasTestcases = toArray(suite.testcase).length > 0;
+
+ if (hasTestcases || !hasNestedSuites) {
+ collectedSuites.push(suite);
+ }
+
+ if (hasNestedSuites) {
+ collectMetricSuites(nestedSuites, collectedSuites);
+ }
+ }
+
+ return collectedSuites;
+}
+
+function parseJUnitReport(xmlContent) {
+ const parsedXml = junitXmlParser.parse(xmlContent);
+ const testsuitesNode = parsedXml.testsuites || null;
+ const topLevelSuites = testsuitesNode
+ ? toArray(testsuitesNode.testsuite)
+ : toArray(parsedXml.testsuite);
+ const allSuites = collectSuites(topLevelSuites);
+ const metricSuites = collectMetricSuites(topLevelSuites);
+ const aggregateSource = hasMetricAttributes(testsuitesNode)
+ ? testsuitesNode
+ : topLevelSuites.length === 1 && hasMetricAttributes(topLevelSuites[0])
+ ? topLevelSuites[0]
+ : null;
+
+ let total = 0;
+ let failed = 0;
+ let errors = 0;
+ let skipped = 0;
+
+ if (aggregateSource) {
+ ({total, failed, errors, skipped} = readMetricsFromNode(aggregateSource));
+ } else {
+ for (const suite of metricSuites) {
+ const suiteMetrics = readMetricsFromNode(suite);
+ total += suiteMetrics.total;
+ failed += suiteMetrics.failed;
+ errors += suiteMetrics.errors;
+ skipped += suiteMetrics.skipped;
+ }
+ }
+
+ const passed = Math.max(total - failed - errors - skipped, 0);
+ const successRate = total > 0 ? Number(((passed / total) * 100).toFixed(2)) : 0;
+ const failedTests = [];
+
+ for (const suite of allSuites) {
+ for (const testcase of toArray(suite.testcase)) {
+ const testcaseStatus = String(testcase.status || '').toLowerCase();
+ const hasFailure = testcase.failure !== undefined;
+ const hasError = testcase.error !== undefined;
+
+ if (hasFailure || hasError || testcaseStatus === 'failed' || testcaseStatus === 'error') {
+ const testcaseName = String(testcase.name || '').trim();
+ if (testcaseName) {
+ failedTests.push(testcaseName);
+ }
+ }
+ }
+ }
+
+ const startedAt = allSuites.find((suite) => suite.timestamp)?.timestamp || null;
+
+ return {
+ metrics: {
+ passed,
+ failed,
+ errors,
+ skipped,
+ total,
+ successRate,
+ },
+ failedTests: Array.from(new Set(failedTests)),
+ startedAt,
+ };
+}
+
+function getStageDescriptor(storageType, stageName, resultValue) {
+ const result = (resultValue || '').trim();
+ const stageLabel = stageLabels[stageName] || stageName;
+ const reportKind = preE2EStages.has(stageName) ? 'stage-failure' : 'tests';
+
+ if (result === 'cancelled') {
+ return {
+ failedStage: stageName,
+ failedStageLabel: stageLabel,
+ failedJobName: `${stageLabel} (${storageType})`,
+ reportKind,
+ status: 'cancelled',
+ statusMessage: `⚠️ ${stageLabel} CANCELLED`,
+ };
+ }
+
+ return {
+ failedStage: stageName,
+ failedStageLabel: stageLabel,
+ failedJobName: `${stageLabel} (${storageType})`,
+ reportKind,
+ status: 'failure',
+ statusMessage: `❌ ${stageLabel} FAILED`,
+ };
+}
+
+function determineStage(storageType) {
+ const orderedStages = [
+ ['bootstrap', process.env.BOOTSTRAP_RESULT],
+ ['configure-sdn', process.env.CONFIGURE_SDN_RESULT],
+ ['storage-setup', process.env.CONFIGURE_STORAGE_RESULT],
+ ['virtualization-setup', process.env.CONFIGURE_VIRTUALIZATION_RESULT],
+ ['e2e-test', process.env.E2E_TEST_RESULT],
+ ];
+
+ for (const [stageName, resultValue] of orderedStages) {
+ if ((resultValue || 'success') !== 'success') {
+ return getStageDescriptor(storageType, stageName, resultValue);
+ }
+ }
+
+ return {
+ failedStage: 'success',
+ failedStageLabel: stageLabels.success,
+ failedJobName: `E2E test (${storageType})`,
+ reportKind: 'tests',
+ status: 'success',
+ statusMessage: '✅ SUCCESS',
+ };
+}
+
+function buildArtifactMissingDescriptor(storageType) {
+ const stageLabel = stageLabels['artifact-missing'];
+ return {
+ failedStage: 'artifact-missing',
+ failedStageLabel: stageLabel,
+ failedJobName: `E2E test (${storageType})`,
+ reportKind: 'artifact-missing',
+ status: 'missing',
+ statusMessage: `⚠️ ${stageLabel}`,
+ };
+}
+
+async function buildClusterReport({core, context}) {
+ const storageType = process.env.STORAGE_TYPE;
+ const reportsDir = process.env.E2E_REPORT_DIR || 'test/e2e';
+ const reportFile = process.env.REPORT_FILE || `e2e_report_${storageType}.json`;
+ const workflowRunUrl = process.env.WORKFLOW_RUN_URL
+ || `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`;
+ const branchName = process.env.BRANCH_NAME
+ || String(context.ref || '').replace(/^refs\/heads\//, '');
+ const junitPattern = new RegExp(`^e2e_summary_${escapeRegExp(storageType)}_.*\\.xml$`);
+ const junitReportPath = pickLatestMatchingFile(reportsDir, junitPattern, core);
+ const stageInfo = determineStage(storageType);
+
+ let parsedReport = {
+ metrics: zeroMetrics(),
+ failedTests: [],
+ startedAt: null,
+ source: 'empty',
+ };
+
+ if (junitReportPath) {
+ core.info(`Found JUnit report: ${junitReportPath}`);
+ parsedReport = {
+ ...parseJUnitReport(fs.readFileSync(junitReportPath, 'utf8')),
+ source: 'junit',
+ };
+ } else {
+ core.warning(`JUnit report was not found for ${storageType} under ${reportsDir}`);
+ }
+
+ const effectiveStageInfo = (
+ stageInfo.reportKind === 'tests' && parsedReport.source === 'empty'
+ ? buildArtifactMissingDescriptor(storageType)
+ : stageInfo
+ );
+
+ const report = {
+ cluster: storageType,
+ storageType,
+ reportKind: effectiveStageInfo.reportKind,
+ status: effectiveStageInfo.status,
+ statusMessage: effectiveStageInfo.statusMessage,
+ failedStage: effectiveStageInfo.failedStage,
+ failedStageLabel: effectiveStageInfo.failedStageLabel,
+ failedJobName: effectiveStageInfo.failedJobName,
+ workflowRunId: String(context.runId),
+ workflowRunUrl,
+ branch: branchName,
+ startedAt: parsedReport.startedAt,
+ metrics: parsedReport.metrics,
+ failedTests: parsedReport.failedTests,
+ sourceJUnitReport: junitReportPath,
+ reportSource: parsedReport.source,
+ };
+
+ fs.writeFileSync(reportFile, `${JSON.stringify(report, null, 2)}\n`);
+
+ core.setOutput('report_file', reportFile);
+ core.info(`Created report file: ${reportFile}`);
+ core.info(JSON.stringify(report, null, 2));
+
+ return report;
+}
+
+module.exports = buildClusterReport;
+module.exports.determineStage = determineStage;
+module.exports.parseJUnitReport = parseJUnitReport;
+module.exports.buildArtifactMissingDescriptor = buildArtifactMissingDescriptor;
diff --git a/.github/scripts/js/e2e/report/cluster-report.test.js b/.github/scripts/js/e2e/report/cluster-report.test.js
new file mode 100644
index 0000000000..174805bf32
--- /dev/null
+++ b/.github/scripts/js/e2e/report/cluster-report.test.js
@@ -0,0 +1,226 @@
+const fs = require('fs');
+const os = require('os');
+const path = require('path');
+
+const buildClusterReport = require('./cluster-report');
+const {parseJUnitReport} = require('./cluster-report');
+
+function createCore() {
+ return {
+ info: jest.fn(),
+ warning: jest.fn(),
+ debug: jest.fn(),
+ setOutput: jest.fn(),
+ };
+}
+
+function createContext() {
+ return {
+ serverUrl: 'https://github.com',
+ repo: {owner: 'test', repo: 'repo'},
+ runId: '12345',
+ ref: 'refs/heads/main',
+ };
+}
+
+async function withTempDir(testFn) {
+ const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'cluster-report-test-'));
+ try {
+ return await testFn(tempDir);
+ } finally {
+ fs.rmSync(tempDir, {recursive: true, force: true});
+ }
+}
+
+function setStageEnv(overrides = {}) {
+ process.env.STORAGE_TYPE = 'replicated';
+ process.env.BOOTSTRAP_RESULT = 'success';
+ process.env.CONFIGURE_SDN_RESULT = 'success';
+ process.env.CONFIGURE_STORAGE_RESULT = 'success';
+ process.env.CONFIGURE_VIRTUALIZATION_RESULT = 'success';
+ process.env.E2E_TEST_RESULT = 'success';
+ Object.assign(process.env, overrides);
+}
+
+describe('cluster-report', () => {
+ afterEach(() => {
+ delete process.env.STORAGE_TYPE;
+ delete process.env.E2E_REPORT_DIR;
+ delete process.env.REPORT_FILE;
+ delete process.env.BRANCH_NAME;
+ delete process.env.WORKFLOW_RUN_URL;
+ delete process.env.BOOTSTRAP_RESULT;
+ delete process.env.CONFIGURE_SDN_RESULT;
+ delete process.env.CONFIGURE_STORAGE_RESULT;
+ delete process.env.CONFIGURE_VIRTUALIZATION_RESULT;
+ delete process.env.E2E_TEST_RESULT;
+ });
+
+ test('renders test report from JUnit when E2E succeeded', async () => withTempDir(async (tempDir) => {
+ const xmlPath = path.join(tempDir, 'e2e_summary_replicated_2026-04-15.xml');
+ fs.writeFileSync(xmlPath, `
+
+
+
+ boom
+ broken
+
+
+
+`);
+
+ const reportFile = path.join(tempDir, 'report.json');
+ setStageEnv({
+ E2E_REPORT_DIR: tempDir,
+ REPORT_FILE: reportFile,
+ });
+
+ const report = await buildClusterReport({core: createCore(), context: createContext()});
+
+ expect(report.reportKind).toBe('tests');
+ expect(report.failedStage).toBe('success');
+ expect(report.metrics).toEqual({
+ passed: 1,
+ failed: 1,
+ errors: 1,
+ skipped: 1,
+ total: 4,
+ successRate: 25,
+ });
+ expect(report.failedTests).toEqual([
+ '[It] fails & burns',
+ '[It] errors ',
+ ]);
+ expect(report.reportSource).toBe('junit');
+ expect(JSON.parse(fs.readFileSync(reportFile, 'utf8')).reportKind).toBe('tests');
+ }));
+
+ test('selects the newest matching JUnit report when multiple files exist', async () => withTempDir(async (tempDir) => {
+ const olderXmlPath = path.join(tempDir, 'nested', 'e2e_summary_replicated_2026-04-15.xml');
+ const newerXmlPath = path.join(tempDir, 'e2e_summary_replicated_2026-04-16.xml');
+ fs.mkdirSync(path.dirname(olderXmlPath), {recursive: true});
+
+ fs.writeFileSync(olderXmlPath, `
+
+
+
+ boom
+
+
+`);
+ fs.writeFileSync(newerXmlPath, `
+
+
+
+`);
+ fs.utimesSync(olderXmlPath, new Date('2026-04-15T09:30:44Z'), new Date('2026-04-15T09:30:44Z'));
+ fs.utimesSync(newerXmlPath, new Date('2026-04-16T09:30:44Z'), new Date('2026-04-16T09:30:44Z'));
+
+ const reportFile = path.join(tempDir, 'report.json');
+ const core = createCore();
+ setStageEnv({
+ E2E_REPORT_DIR: tempDir,
+ REPORT_FILE: reportFile,
+ });
+
+ const report = await buildClusterReport({core, context: createContext()});
+
+ expect(report.sourceJUnitReport).toBe(newerXmlPath);
+ expect(report.metrics).toEqual({
+ passed: 1,
+ failed: 0,
+ errors: 0,
+ skipped: 0,
+ total: 1,
+ successRate: 100,
+ });
+ expect(report.failedTests).toEqual([]);
+ expect(core.warning).toHaveBeenCalledWith(
+ expect.stringContaining('Found multiple JUnit reports for the cluster; using the newest file')
+ );
+ }));
+
+ test('parses current replicated fixture report', () => {
+ const fixturePath = path.resolve(
+ __dirname,
+ '../../../../../tmp/test-ci/report/e2e_summary_replicated_2026-04-20.xml'
+ );
+ const parsed = parseJUnitReport(fs.readFileSync(fixturePath, 'utf8'));
+
+ expect(parsed.metrics).toEqual({
+ passed: 117,
+ failed: 11,
+ errors: 0,
+ skipped: 4,
+ total: 132,
+ successRate: 88.64,
+ });
+ expect(parsed.startedAt).toBe('2026-04-20T12:48:10');
+ expect(parsed.failedTests).toHaveLength(11);
+ expect(parsed.failedTests).toContain(
+ '[It] VirtualMachineAdditionalNetworkInterfaces verifies additional network interfaces and connectivity before and after migration Main + additional network'
+ );
+ expect(parsed.failedTests).toContain(
+ '[It] VirtualMachineOperationRestore restores a virtual machine from a snapshot BestEffort restore mode; automatic restart approval mode; always on unless stopped manually run policy [Slow]'
+ );
+ });
+
+ test('parses current nfs fixture report', () => {
+ const fixturePath = path.resolve(
+ __dirname,
+ '../../../../../tmp/test-ci/report/e2e_summary_nfs_2026-04-20.xml'
+ );
+ const parsed = parseJUnitReport(fs.readFileSync(fixturePath, 'utf8'));
+
+ expect(parsed.metrics).toEqual({
+ passed: 93,
+ failed: 8,
+ errors: 0,
+ skipped: 31,
+ total: 132,
+ successRate: 70.45,
+ });
+ expect(parsed.startedAt).toBe('2026-04-20T12:38:34');
+ expect(parsed.failedTests).toHaveLength(8);
+ expect(parsed.failedTests).toContain(
+ '[It] RWOVirtualDiskMigration should be successful two migrations in a row'
+ );
+ expect(parsed.failedTests).toContain(
+ '[It] VirtualMachineOperationRestore restores a virtual machine from a snapshot BestEffort restore mode; automatic restart approval mode; manual run policy [Slow]'
+ );
+ });
+
+ test('reports configure-sdn as the failed pre-E2E phase', async () => withTempDir(async (tempDir) => {
+ const reportFile = path.join(tempDir, 'report.json');
+ setStageEnv({
+ E2E_REPORT_DIR: tempDir,
+ REPORT_FILE: reportFile,
+ CONFIGURE_SDN_RESULT: 'failure',
+ CONFIGURE_STORAGE_RESULT: 'skipped',
+ CONFIGURE_VIRTUALIZATION_RESULT: 'skipped',
+ E2E_TEST_RESULT: 'skipped',
+ });
+
+ const report = await buildClusterReport({core: createCore(), context: createContext()});
+
+ expect(report.reportKind).toBe('stage-failure');
+ expect(report.failedStage).toBe('configure-sdn');
+ expect(report.failedStageLabel).toBe('CONFIGURE SDN');
+ expect(report.status).toBe('failure');
+ }));
+
+ test('marks missing artifacts when test stage is successful but no reports were found', async () => withTempDir(async (tempDir) => {
+ const reportFile = path.join(tempDir, 'report.json');
+ setStageEnv({
+ E2E_REPORT_DIR: tempDir,
+ REPORT_FILE: reportFile,
+ });
+
+ const report = await buildClusterReport({core: createCore(), context: createContext()});
+
+ expect(report.reportKind).toBe('artifact-missing');
+ expect(report.failedStage).toBe('artifact-missing');
+ expect(report.failedStageLabel).toBe('TEST REPORTS NOT FOUND');
+ expect(report.status).toBe('missing');
+ }));
+});
diff --git a/.github/scripts/js/e2e/report/messenger-report.js b/.github/scripts/js/e2e/report/messenger-report.js
new file mode 100644
index 0000000000..0bb8c7ca77
--- /dev/null
+++ b/.github/scripts/js/e2e/report/messenger-report.js
@@ -0,0 +1,319 @@
+// Copyright 2026 Flant JSC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+const fs = require('fs');
+const path = require('path');
+
+function readMatchingFiles(dirPath, filePattern, files = []) {
+ if (!fs.existsSync(dirPath)) {
+ return files;
+ }
+
+ const entries = fs.readdirSync(dirPath, {withFileTypes: true}).sort((left, right) => left.name.localeCompare(right.name));
+ for (const entry of entries) {
+ const fullPath = path.join(dirPath, entry.name);
+ if (entry.isDirectory()) {
+ readMatchingFiles(fullPath, filePattern, files);
+ continue;
+ }
+
+ if (filePattern.test(entry.name)) {
+ files.push(fullPath);
+ }
+ }
+
+ return files;
+}
+
+function createMissingReport(clusterName) {
+ return {
+ cluster: clusterName,
+ storageType: clusterName,
+ reportKind: 'artifact-missing',
+ status: 'missing',
+ statusMessage: '⚠️ TEST REPORTS NOT FOUND',
+ failedStage: 'artifact-missing',
+ failedStageLabel: 'TEST REPORTS NOT FOUND',
+ branch: '',
+ workflowRunUrl: '',
+ metrics: {
+ passed: 0,
+ failed: 0,
+ errors: 0,
+ total: 0,
+ successRate: 0,
+ },
+ failedTests: [],
+ };
+}
+
+function sanitizeCell(value) {
+ return String(value || '—')
+ .replace(/\|/g, '\\|')
+ .replace(/\r?\n/g, ' ')
+ .trim();
+}
+
+function sanitizeListItem(value) {
+ return String(value || '')
+ .replace(/\r?\n/g, ' ')
+ .trim();
+}
+
+function formatRate(value) {
+ const rate = Number(value || 0);
+ return `${Number.isFinite(rate) ? rate.toFixed(2) : '0.00'}%`;
+}
+
+function getReportDate(reports) {
+ const datedReport = reports.find((report) => report.startedAt);
+ if (!datedReport) {
+ return new Date().toISOString().slice(0, 10);
+ }
+
+ return String(datedReport.startedAt).slice(0, 10);
+}
+
+function sortReports(reports, preferredOrder) {
+ const orderMap = new Map(preferredOrder.map((name, index) => [name, index]));
+
+ return [...reports].sort((left, right) => {
+ const leftKey = left.storageType || left.cluster;
+ const rightKey = right.storageType || right.cluster;
+ const leftOrder = orderMap.has(leftKey) ? orderMap.get(leftKey) : Number.MAX_SAFE_INTEGER;
+ const rightOrder = orderMap.has(rightKey) ? orderMap.get(rightKey) : Number.MAX_SAFE_INTEGER;
+
+ if (leftOrder !== rightOrder) {
+ return leftOrder - rightOrder;
+ }
+
+ return String(left.cluster || left.storageType).localeCompare(String(right.cluster || right.storageType));
+ });
+}
+
+function formatClusterLink(report) {
+ const clusterName = sanitizeCell(report.cluster || report.storageType);
+ return report.workflowRunUrl ? `[${clusterName}](${report.workflowRunUrl})` : clusterName;
+}
+
+function normalizeLoopApiBaseUrl(value) {
+ const trimmedValue = String(value || '').trim().replace(/\/+$/, '');
+
+ if (!trimmedValue) {
+ return '';
+ }
+
+ if (trimmedValue.endsWith('/api/v4/posts')) {
+ return trimmedValue;
+ }
+
+ if (trimmedValue.endsWith('/api/v4')) {
+ return `${trimmedValue}/posts`;
+ }
+
+ return `${trimmedValue}/api/v4/posts`;
+}
+
+function getLoopPostsApiUrl() {
+ return normalizeLoopApiBaseUrl(process.env.LOOP_API_BASE_URL);
+}
+
+async function postToLoopApi({apiUrl, channelId, token, message, rootId}, core) {
+ const response = await fetch(apiUrl, {
+ method: 'POST',
+ headers: {
+ Authorization: `Bearer ${token}`,
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ channel_id: channelId,
+ message,
+ ...(rootId ? {root_id: rootId} : {}),
+ }),
+ });
+ const responseText = await response.text();
+
+ if (!response.ok) {
+ throw new Error(`Loop API request failed with status ${response.status}: ${responseText}`);
+ }
+
+ const payload = responseText ? JSON.parse(responseText) : {};
+ core.info(`Loop API accepted report with status ${response.status}`);
+ return payload;
+}
+
+function readReports(reportsDir, configuredClusters, core) {
+ const reportFiles = readMatchingFiles(reportsDir, /^e2e_report_.*\.json$/);
+ const reports = [];
+
+ for (const reportFile of reportFiles) {
+ try {
+ reports.push(JSON.parse(fs.readFileSync(reportFile, 'utf8')));
+ } catch (error) {
+ core.warning(`Unable to parse ${reportFile}: ${error.message}`);
+ }
+ }
+
+ const reportsByCluster = new Map();
+ for (const report of reports) {
+ const clusterName = report.storageType || report.cluster;
+ reportsByCluster.set(clusterName, report);
+ }
+
+ for (const clusterName of configuredClusters) {
+ if (!reportsByCluster.has(clusterName)) {
+ reportsByCluster.set(clusterName, createMissingReport(clusterName));
+ }
+ }
+
+ const orderedReports = sortReports(Array.from(reportsByCluster.values()), configuredClusters);
+ return orderedReports;
+}
+
+function buildMainMessage(orderedReports) {
+ const reportDate = getReportDate(orderedReports);
+ const branches = Array.from(new Set(orderedReports.map((report) => report.branch).filter(Boolean)));
+ const lines = [`## DVP | E2E on nested clusters | ${reportDate}`, ''];
+
+ if (branches.length === 1) {
+ lines.push(`Branch: \`${branches[0]}\``);
+ lines.push('');
+ }
+
+ const testsReports = orderedReports.filter((report) => report.reportKind === 'tests');
+ const nonTestReports = orderedReports.filter((report) => report.reportKind !== 'tests');
+
+ if (testsReports.length > 0) {
+ lines.push('### Test results');
+ lines.push('');
+ lines.push('| Cluster | ✅ Passed | ❌ Failed | ⚠️ Errors | Total | Success Rate |');
+ lines.push('|---|---:|---:|---:|---:|---:|');
+
+ for (const report of testsReports) {
+ const metrics = report.metrics || {};
+ lines.push(
+ `| ${formatClusterLink(report)} | ${metrics.passed || 0} | ${metrics.failed || 0} | ${metrics.errors || 0} | ${metrics.total || 0} | ${formatRate(metrics.successRate)} |`
+ );
+ }
+
+ lines.push('');
+ }
+
+ if (nonTestReports.length > 0) {
+ lines.push('### Cluster failures');
+ lines.push('');
+
+ for (const report of nonTestReports) {
+ lines.push(`- ${formatClusterLink(report)}: ${sanitizeListItem(report.failedStageLabel || report.statusMessage || report.failedStage)}`);
+ }
+
+ lines.push('');
+ }
+
+ return lines.join('\n').trim();
+}
+
+function buildThreadMessage(orderedReports) {
+ const testsReports = orderedReports.filter((report) => report.reportKind === 'tests');
+ const failedTestReports = testsReports.filter((report) => {
+ if (Array.isArray(report.failedTests) && report.failedTests.length > 0) {
+ return true;
+ }
+
+ return Boolean((report.metrics && report.metrics.failed) || (report.metrics && report.metrics.errors));
+ });
+
+ if (failedTestReports.length === 0) {
+ return '';
+ }
+
+ const lines = ['### Failed tests', ''];
+
+ for (const report of failedTestReports) {
+ const clusterName = sanitizeListItem(report.cluster || report.storageType);
+ lines.push(`**${clusterName}**`);
+
+ if (Array.isArray(report.failedTests) && report.failedTests.length > 0) {
+ for (const testName of report.failedTests) {
+ lines.push(`- ${sanitizeListItem(testName)}`);
+ }
+ } else {
+ lines.push('- No testcase-level failures were collected, but the E2E stage reported failures.');
+ }
+
+ lines.push('');
+ }
+
+ return lines.join('\n').trim();
+}
+
+function buildMessengerMessages({reportsDir, configuredClusters, core}) {
+ const orderedReports = readReports(reportsDir, configuredClusters, core);
+ return {
+ message: buildMainMessage(orderedReports),
+ threadMessage: buildThreadMessage(orderedReports),
+ };
+}
+
+async function renderMessengerReport({core}) {
+ const reportsDir = process.env.REPORTS_DIR || 'downloaded-artifacts';
+ const configuredClusters = JSON.parse(process.env.STORAGE_TYPES || '[]');
+ const {message, threadMessage} = buildMessengerMessages({reportsDir, configuredClusters, core});
+
+ core.info(message);
+ core.setOutput('message', message);
+ core.setOutput('thread_message', threadMessage);
+
+ const loopPostsApiUrl = getLoopPostsApiUrl();
+ const loopChannelId = String(process.env.LOOP_CHANNEL_ID || '').trim();
+ const loopToken = String(process.env.LOOP_TOKEN || '').trim();
+
+ if (loopPostsApiUrl || loopChannelId || loopToken) {
+ try {
+ if (!loopPostsApiUrl || !loopChannelId || !loopToken) {
+ throw new Error('LOOP_CHANNEL_ID, LOOP_TOKEN, and LOOP_API_BASE_URL are required');
+ }
+
+ const rootPost = await postToLoopApi({
+ apiUrl: loopPostsApiUrl,
+ channelId: loopChannelId,
+ token: loopToken,
+ message,
+ }, core);
+
+ if (threadMessage) {
+ const replyPost = await postToLoopApi({
+ apiUrl: loopPostsApiUrl,
+ channelId: loopChannelId,
+ token: loopToken,
+ message: threadMessage,
+ rootId: rootPost.id,
+ }, core);
+
+ core.setOutput('root_post_id', rootPost.id || '');
+ core.setOutput('thread_post_id', replyPost.id || '');
+ } else {
+ core.setOutput('root_post_id', rootPost.id || '');
+ core.setOutput('thread_post_id', '');
+ }
+ } catch (error) {
+ core.warning(`Unable to deliver report to Loop API: ${error.message}`);
+ }
+ }
+
+ return {message, threadMessage};
+}
+
+module.exports = renderMessengerReport;
+module.exports.createMissingReport = createMissingReport;
+module.exports.buildMessengerMessages = buildMessengerMessages;
+module.exports.getLoopPostsApiUrl = getLoopPostsApiUrl;
diff --git a/.github/scripts/js/e2e/report/messenger-report.test.js b/.github/scripts/js/e2e/report/messenger-report.test.js
new file mode 100644
index 0000000000..07646b028e
--- /dev/null
+++ b/.github/scripts/js/e2e/report/messenger-report.test.js
@@ -0,0 +1,158 @@
+const fs = require('fs');
+const os = require('os');
+const path = require('path');
+
+const renderMessengerReport = require('./messenger-report');
+
+function createCore() {
+ return {
+ info: jest.fn(),
+ warning: jest.fn(),
+ debug: jest.fn(),
+ setOutput: jest.fn(),
+ };
+}
+
+async function withTempDir(testFn) {
+ const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'messenger-report-test-'));
+ try {
+ return await testFn(tempDir);
+ } finally {
+ fs.rmSync(tempDir, {recursive: true, force: true});
+ }
+}
+
+describe('messenger-report', () => {
+ afterEach(() => {
+ delete process.env.REPORTS_DIR;
+ delete process.env.STORAGE_TYPES;
+ delete process.env.LOOP_API_BASE_URL;
+ delete process.env.LOOP_CHANNEL_ID;
+ delete process.env.LOOP_TOKEN;
+ delete global.fetch;
+ });
+
+ test('renders test results and stage failures in separate sections', async () => withTempDir(async (tempDir) => {
+ fs.writeFileSync(path.join(tempDir, 'e2e_report_replicated.json'), JSON.stringify({
+ cluster: 'replicated',
+ storageType: 'replicated',
+ reportKind: 'tests',
+ branch: 'main',
+ workflowRunUrl: 'https://example.invalid/replicated',
+ startedAt: '2026-04-15T09:30:44',
+ metrics: {
+ passed: 12,
+ failed: 1,
+ errors: 0,
+ total: 13,
+ successRate: 92.31,
+ },
+ failedTests: ['[It] fails'],
+ }));
+
+ fs.writeFileSync(path.join(tempDir, 'e2e_report_nfs.json'), JSON.stringify({
+ cluster: 'nfs',
+ storageType: 'nfs',
+ reportKind: 'stage-failure',
+ branch: 'main',
+ workflowRunUrl: 'https://example.invalid/nfs',
+ failedStage: 'configure-sdn',
+ failedStageLabel: 'CONFIGURE SDN',
+ metrics: {
+ passed: 0,
+ failed: 0,
+ errors: 0,
+ total: 0,
+ successRate: 0,
+ },
+ failedTests: [],
+ }));
+
+ process.env.REPORTS_DIR = tempDir;
+ process.env.STORAGE_TYPES = '["replicated","nfs"]';
+
+ const result = await renderMessengerReport({core: createCore()});
+
+ expect(result.message).toContain('### Test results');
+ expect(result.message).toContain('| [replicated](https://example.invalid/replicated) | 12 | 1 | 0 | 13 | 92.31% |');
+ expect(result.message).toContain('### Cluster failures');
+ expect(result.message).toContain('- [nfs](https://example.invalid/nfs): CONFIGURE SDN');
+ expect(result.message).not.toContain('### Failed tests');
+ expect(result.threadMessage).toContain('### Failed tests');
+ expect(result.threadMessage).toContain('**replicated**');
+ expect(result.threadMessage).toContain('- [It] fails');
+ expect(result.threadMessage).not.toContain('**nfs**');
+ }));
+
+ test('creates artifact-missing entry for absent cluster report', async () => withTempDir(async (tempDir) => {
+ process.env.REPORTS_DIR = tempDir;
+ process.env.STORAGE_TYPES = '["replicated"]';
+
+ const result = await renderMessengerReport({core: createCore()});
+
+ expect(result.message).toContain('### Cluster failures');
+ expect(result.message).toContain('- replicated: TEST REPORTS NOT FOUND');
+ expect(result.threadMessage).toBe('');
+ }));
+
+ test('posts main report and failed tests thread via Loop API', async () => withTempDir(async (tempDir) => {
+ fs.writeFileSync(path.join(tempDir, 'e2e_report_replicated.json'), JSON.stringify({
+ cluster: 'replicated',
+ storageType: 'replicated',
+ reportKind: 'tests',
+ branch: 'main',
+ workflowRunUrl: 'https://example.invalid/replicated',
+ startedAt: '2026-04-15T09:30:44',
+ metrics: {
+ passed: 10,
+ failed: 1,
+ errors: 0,
+ total: 11,
+ successRate: 90.91,
+ },
+ failedTests: ['[It] fails'],
+ }));
+
+ process.env.REPORTS_DIR = tempDir;
+ process.env.STORAGE_TYPES = '["replicated"]';
+ process.env.LOOP_API_BASE_URL = 'https://loop.example.invalid';
+ process.env.LOOP_CHANNEL_ID = 'channel-id';
+ process.env.LOOP_TOKEN = 'loop-token';
+
+ global.fetch = jest.fn()
+ .mockResolvedValueOnce({
+ ok: true,
+ status: 201,
+ text: async () => JSON.stringify({id: 'root-post-id'}),
+ })
+ .mockResolvedValueOnce({
+ ok: true,
+ status: 201,
+ text: async () => JSON.stringify({id: 'thread-post-id'}),
+ });
+
+ const result = await renderMessengerReport({core: createCore()});
+
+ expect(global.fetch).toHaveBeenCalledTimes(2);
+ expect(global.fetch).toHaveBeenNthCalledWith(
+ 1,
+ 'https://loop.example.invalid/api/v4/posts',
+ expect.objectContaining({
+ method: 'POST',
+ headers: expect.objectContaining({
+ Authorization: 'Bearer loop-token',
+ 'Content-Type': 'application/json',
+ }),
+ }),
+ );
+ expect(JSON.parse(global.fetch.mock.calls[0][1].body)).toEqual({
+ channel_id: 'channel-id',
+ message: result.message,
+ });
+ expect(JSON.parse(global.fetch.mock.calls[1][1].body)).toEqual({
+ channel_id: 'channel-id',
+ message: result.threadMessage,
+ root_id: 'root-post-id',
+ });
+ }));
+});
diff --git a/.github/scripts/js/package.json b/.github/scripts/js/package.json
index 6a8471c4e1..6dbb3922a1 100644
--- a/.github/scripts/js/package.json
+++ b/.github/scripts/js/package.json
@@ -17,5 +17,8 @@
"@types/node": "^16.11.11",
"jest": "28.1.2",
"prettier": "^2.5.0"
+ },
+ "dependencies": {
+ "fast-xml-parser": "^5.7.1"
}
}
diff --git a/.github/workflows/e2e-matrix.yml b/.github/workflows/e2e-matrix.yml
index 055952a5b6..ca74ac51ec 100644
--- a/.github/workflows/e2e-matrix.yml
+++ b/.github/workflows/e2e-matrix.yml
@@ -399,254 +399,15 @@ jobs:
merge-multiple: false
- name: Send results to channel
- run: |
- # Map storage types to CSI names
- get_csi_name() {
- local storage_type=$1
- case "$storage_type" in
- "replicated")
- echo "replicated.csi.storage.deckhouse.io"
- ;;
- "nfs")
- echo "nfs.csi.storage.deckhouse.io"
- ;;
- *)
- echo "$storage_type"
- ;;
- esac
- }
-
- # Function to load and parse report from artifact
- # Outputs: file content to stdout, debug messages to stderr
- # Works with pattern-based artifact download (e2e-report-*)
- # Artifacts are organized as: downloaded-artifacts/e2e-report-<storage_type>-<run_id>/e2e_report_<storage_type>.json
- load_report_from_artifact() {
- local storage_type=$1
- local base_path="downloaded-artifacts/"
-
- echo "[INFO] Searching for report for storage type: $storage_type" >&2
- echo "[DEBUG] Base path: $base_path" >&2
-
- if [ ! -d "$base_path" ]; then
- echo "[WARN] Base path does not exist: $base_path" >&2
- return 1
- fi
-
- local report_file=""
-
- # First, search in artifact directories matching pattern: e2e-report-<storage_type>-*
- # Pattern downloads create subdirectories named after the artifact
- # e.g., downloaded-artifacts/e2e-report-replicated-<run_id>/e2e_report_replicated.json
- echo "[DEBUG] Searching in artifact directories matching pattern: e2e-report-${storage_type}-*" >&2
- local artifact_dir=$(find "$base_path" -type d -name "e2e-report-${storage_type}-*" 2>/dev/null | head -1)
- if [ -n "$artifact_dir" ]; then
- echo "[DEBUG] Found artifact dir: $artifact_dir" >&2
- report_file=$(find "$artifact_dir" -name "e2e_report_*.json" -type f 2>/dev/null | head -1)
- if [ -n "$report_file" ] && [ -f "$report_file" ]; then
- echo "[INFO] Found report file in artifact dir: $report_file" >&2
- cat "$report_file"
- return 0
- fi
- fi
-
- # Fallback: search for file by name pattern anywhere in base_path
- echo "[DEBUG] Searching for file: e2e_report_${storage_type}.json" >&2
- report_file=$(find "$base_path" -type f -name "e2e_report_${storage_type}.json" 2>/dev/null | head -1)
- if [ -n "$report_file" ] && [ -f "$report_file" ]; then
- echo "[INFO] Found report file by name: $report_file" >&2
- cat "$report_file"
- return 0
- fi
-
- echo "[WARN] Could not load report artifact for $storage_type" >&2
- return 1
- }
-
- # Function to create failure summary JSON (fallback)
- create_failure_summary() {
- local storage_type=$1
- local stage=$2
- local run_id=$3
- local csi=$(get_csi_name "$storage_type")
- local date=$(date +"%Y-%m-%d")
- local time=$(date +"%H:%M:%S")
- local branch="${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}"
- local link="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${run_id:-${GITHUB_RUN_ID}}"
-
- # Map stage to status message
- local status_msg
- case "$stage" in
- "bootstrap")
- status_msg=":x: BOOTSTRAP CLUSTER FAILED"
- ;;
- "storage-setup")
- status_msg=":x: STORAGE SETUP FAILED"
- ;;
- "virtualization-setup")
- status_msg=":x: VIRTUALIZATION SETUP FAILED"
- ;;
- "e2e-test")
- status_msg=":x: E2E TEST FAILED"
- ;;
- *)
- status_msg=":question: UNKNOWN"
- ;;
- esac
-
- jq -n \
- --arg csi "$csi" \
- --arg date "$date" \
- --arg time "$time" \
- --arg branch "$branch" \
- --arg status "$status_msg" \
- --arg link "$link" \
- '{CSI: $csi, Date: $date, StartTime: $time, Branch: $branch, Status: $status, Passed: 0, Failed: 0, Pending: 0, Skipped: 0, Link: $link}'
- }
-
-
- # Parse summary JSON and add to table
- parse_summary() {
- local summary_json=$1
- local storage_type=$2
-
- if [ -z "$summary_json" ] || [ "$summary_json" == "null" ] || [ "$summary_json" == "" ]; then
- echo "Warning: Empty summary for $storage_type"
- return
- fi
-
- # Try to parse as JSON (handle both JSON string and already parsed JSON)
- if ! echo "$summary_json" | jq empty 2>/dev/null; then
- echo "Warning: Invalid JSON for $storage_type: $summary_json"
- echo "[DEBUG] json: $summary_json"
- return
- fi
-
- # Parse JSON fields
- csi_raw=$(echo "$summary_json" | jq -r '.CSI // empty' 2>/dev/null)
- if [ -z "$csi_raw" ] || [ "$csi_raw" == "null" ] || [ "$csi_raw" == "" ]; then
- csi=$(get_csi_name "$storage_type")
- else
- csi="$csi_raw"
- fi
-
- date=$(echo "$summary_json" | jq -r '.Date // ""' 2>/dev/null)
- time=$(echo "$summary_json" | jq -r '.StartTime // ""' 2>/dev/null)
- branch=$(echo "$summary_json" | jq -r '.Branch // ""' 2>/dev/null)
- status=$(echo "$summary_json" | jq -r '.Status // ":question: UNKNOWN"' 2>/dev/null)
- passed=$(echo "$summary_json" | jq -r '.Passed // 0' 2>/dev/null)
- failed=$(echo "$summary_json" | jq -r '.Failed // 0' 2>/dev/null)
- pending=$(echo "$summary_json" | jq -r '.Pending // 0' 2>/dev/null)
- skipped=$(echo "$summary_json" | jq -r '.Skipped // 0' 2>/dev/null)
- link=$(echo "$summary_json" | jq -r '.Link // ""' 2>/dev/null)
-
- # Set defaults if empty
- [ -z "$passed" ] && passed=0
- [ -z "$failed" ] && failed=0
- [ -z "$pending" ] && pending=0
- [ -z "$skipped" ] && skipped=0
- [ -z "$status" ] && status=":question: UNKNOWN"
-
- # Format link - use CSI name as fallback if link is empty
- if [ -z "$link" ] || [ "$link" == "" ]; then
- link_text="$csi"
- else
- link_text="[:link: $csi]($link)"
- fi
-
- # Add row to table
- markdown_table+="| $link_text | $status | $passed | $failed | $pending | $skipped | $date | $time | $branch |\n"
- }
-
- # Initialize markdown table
- echo "[INFO] Generate markdown table"
- markdown_table=""
- header="| CSI | Status | Passed | Failed | Pending | Skipped | Date | Time | Branch|\n"
- separator="|---|---|---|---|---|---|---|---|---|\n"
- markdown_table+="$header"
- markdown_table+="$separator"
-
- # Get current date for header
- DATE=$(date +"%Y-%m-%d")
- COMBINED_SUMMARY="## :dvp: **DVP | E2E on a nested cluster | $DATE**\n\n"
-
- echo "[INFO] Get storage types"
- readarray -t storage_types < <(echo "$STORAGE_TYPES" | jq -r '.[]')
- echo "[INFO] Storage types: " "${storage_types[@]}"
-
- echo "[INFO] Generate summary for each storage type"
- for storage in "${storage_types[@]}"; do
- echo "[INFO] Processing $storage"
-
- # Try to load report from artifact
- # Debug messages go to stderr (visible in logs), JSON content goes to stdout
- echo "[INFO] Attempting to load report for $storage"
- structured_report=$(load_report_from_artifact "$storage" || true)
-
- if [ -n "$structured_report" ]; then
- # Check if it's valid JSON
- if echo "$structured_report" | jq empty 2>/dev/null; then
- echo "[INFO] Report is valid JSON for $storage"
- else
- echo "[WARN] Report is not valid JSON for $storage"
- echo "[DEBUG] Raw report content (first 200 chars):"
- echo "$structured_report" | head -c 200
- echo ""
- structured_report=""
- fi
- fi
-
- if [ -n "$structured_report" ] && echo "$structured_report" | jq empty 2>/dev/null; then
- # Extract report data from structured file
- report_json=$(echo "$structured_report" | jq -c '.report // empty')
- failed_stage=$(echo "$structured_report" | jq -r '.failed_stage // empty')
- workflow_run_id=$(echo "$structured_report" | jq -r '.workflow_run_id // empty')
-
- echo "[INFO] Loaded report for $storage (failed_stage: ${failed_stage}, run_id: ${workflow_run_id})"
-
- # Validate and parse report
- if [ -n "$report_json" ] && [ "$report_json" != "" ] && [ "$report_json" != "null" ]; then
- if echo "$report_json" | jq empty 2>/dev/null; then
- echo "[INFO] Found valid report for $storage"
- parse_summary "$report_json" "$storage"
- else
- echo "[WARN] Invalid report JSON for $storage, using failed stage info"
- # Fallback to failed stage
- if [ -n "$failed_stage" ] && [ "$failed_stage" != "" ] && [ "$failed_stage" != "success" ]; then
- failed_summary=$(create_failure_summary "$storage" "$failed_stage" "$workflow_run_id")
- parse_summary "$failed_summary" "$storage"
- else
- csi=$(get_csi_name "$storage")
- markdown_table+="| $csi | :warning: INVALID REPORT | 0 | 0 | 0 | 0 | — | — | — |\n"
- fi
- fi
- else
- # No report in structured file, use failed stage
- if [ -n "$failed_stage" ] && [ "$failed_stage" != "" ] && [ "$failed_stage" != "success" ]; then
- echo "[INFO] Stage '$failed_stage' failed for $storage"
- failed_summary=$(create_failure_summary "$storage" "$failed_stage" "$workflow_run_id")
- parse_summary "$failed_summary" "$storage"
- else
- csi=$(get_csi_name "$storage")
- markdown_table+="| $csi | :warning: NO REPORT | 0 | 0 | 0 | 0 | — | — | — |\n"
- fi
- fi
- else
- # Artifact not found or invalid, show warning
- echo "[WARN] Could not load report artifact for $storage"
- csi=$(get_csi_name "$storage")
- markdown_table+="| $csi | :warning: ARTIFACT NOT FOUND | 0 | 0 | 0 | 0 | — | — | — |\n"
- fi
- done
-
- echo "[INFO] Combined summary"
- COMBINED_SUMMARY+="${markdown_table}\n"
-
- echo -e "$COMBINED_SUMMARY"
-
- # Send to channel if webhook is configured
- echo "[INFO] Send to webhook"
- if [ -n "$LOOP_WEBHOOK_URL" ]; then
- curl --request POST --header 'Content-Type: application/json' --data "{\"text\": \"${COMBINED_SUMMARY}\"}" "$LOOP_WEBHOOK_URL"
- fi
+ id: render-report
+ uses: actions/github-script@v7
env:
- LOOP_WEBHOOK_URL: ${{ secrets.LOOP_WEBHOOK_URL }}
+ REPORTS_DIR: downloaded-artifacts/
+ STORAGE_TYPES: ${{ env.STORAGE_TYPES }}
+ LOOP_API_BASE_URL: ${{ secrets.LOOP_API_BASE_URL }}
+ LOOP_CHANNEL_ID: ${{ secrets.LOOP_CHANNEL_ID }}
+ LOOP_TOKEN: ${{ secrets.LOOP_TOKEN }}
+ with:
+ script: |
+ const renderMessengerReport = require('./.github/scripts/js/e2e/report/messenger-report');
+ await renderMessengerReport({core});
diff --git a/.github/workflows/e2e-reusable-pipeline.yml b/.github/workflows/e2e-reusable-pipeline.yml
index 8f88acf108..be8969bd79 100644
--- a/.github/workflows/e2e-reusable-pipeline.yml
+++ b/.github/workflows/e2e-reusable-pipeline.yml
@@ -1247,11 +1247,8 @@ jobs:
STORAGE_CLASS_NAME: ${{ inputs.nested_storageclass_name }}
working-directory: ./test/e2e/
run: |
- GINKGO_RESULT=$(mktemp -p $RUNNER_TEMP)
DATE=$(date +"%Y-%m-%d")
- START_TIME=$(date +"%H:%M:%S")
summary_file_name_junit="e2e_summary_${CSI}_${DATE}.xml"
- summary_file_name_json="e2e_summary_${CSI}_${DATE}.json"
cp -a legacy/testdata /tmp/testdata
@@ -1261,58 +1258,15 @@ jobs:
go tool ginkgo \
--focus="$FOCUS" \
-v --race --timeout=$TIMEOUT \
- --junit-report=$summary_file_name_junit | tee $GINKGO_RESULT
+ --junit-report=$summary_file_name_junit
else
go tool ginkgo \
-v --race --timeout=$TIMEOUT \
- --junit-report=$summary_file_name_junit | tee $GINKGO_RESULT
+ --junit-report=$summary_file_name_junit
fi
GINKGO_EXIT_CODE=$?
set -e
- RESULT=$(sed -e "s/\x1b\[[0-9;]*m//g" $GINKGO_RESULT | grep --color=never -E "FAIL!|SUCCESS!")
- if [[ $RESULT == FAIL!* ]]; then
- RESULT_STATUS=":x: FAIL!"
- elif [[ $RESULT == SUCCESS!* ]]; then
- RESULT_STATUS=":white_check_mark: SUCCESS!"
- else
- RESULT_STATUS=":question: UNKNOWN"
- fi
-
- PASSED=$(echo "$RESULT" | grep -oP "\d+(?= Passed)")
- FAILED=$(echo "$RESULT" | grep -oP "\d+(?= Failed)")
- PENDING=$(echo "$RESULT" | grep -oP "\d+(?= Pending)")
- SKIPPED=$(echo "$RESULT" | grep -oP "\d+(?= Skipped)")
-
- SUMMARY=$(jq -n \
- --arg csi "$CSI" \
- --arg date "$DATE" \
- --arg startTime "$START_TIME" \
- --arg branch "${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" \
- --arg status "$RESULT_STATUS" \
- --argjson passed "$PASSED" \
- --argjson failed "$FAILED" \
- --argjson pending "$PENDING" \
- --argjson skipped "$SKIPPED" \
- --arg link "$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" \
- '{
- CSI: $csi,
- Date: $date,
- StartTime: $startTime,
- Branch: $branch,
- Status: $status,
- Passed: $passed,
- Failed: $failed,
- Pending: $pending,
- Skipped: $skipped,
- Link: $link
- }'
- )
-
- echo "$SUMMARY"
- echo "summary=$(echo "$SUMMARY" | jq -c .)" >> $GITHUB_OUTPUT
- echo $SUMMARY > "${summary_file_name_json}"
-
echo "[INFO] Exit code: $GINKGO_EXIT_CODE"
exit $GINKGO_EXIT_CODE
- name: Upload summary test results (junit/xml)
@@ -1322,7 +1276,6 @@ jobs:
with:
name: e2e-test-results-${{ inputs.storage_type }}-${{ github.run_id }}-${{ steps.vars.outputs.e2e-start-time }}
path: |
- test/e2e/e2e_summary_*.json
test/e2e/e2e_summary_*.xml
test/e2e/*junit*.xml
if-no-files-found: ignore
@@ -1342,6 +1295,7 @@ jobs:
runs-on: ubuntu-latest
needs:
- bootstrap
+ - configure-sdn
- configure-storage
- configure-virtualization
- e2e-test
@@ -1360,134 +1314,22 @@ jobs:
- name: Determine failed stage and prepare report
id: determine-stage
- run: |
- # Get branch name
- BRANCH_NAME="${{ github.head_ref || github.ref_name }}"
- if [ -z "$BRANCH_NAME" ] || [ "$BRANCH_NAME" == "refs/heads/" ]; then
- BRANCH_NAME="${{ github.ref_name }}"
- fi
-
- # Function to create failure summary JSON with proper job URL
- create_failure_summary() {
- local stage=$1
- local status_msg=$2
- local job_name=$3
- local csi="${{ inputs.storage_type }}"
- local date=$(date +"%Y-%m-%d")
- local start_time=$(date +"%H:%M:%S")
- local branch="$BRANCH_NAME"
- # Create URL pointing to the failed job in the workflow run
- # Format: https://github.com/{owner}/{repo}/actions/runs/{run_id}
- # The job name will be visible in the workflow run view
- local link="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
-
- jq -n \
- --arg csi "$csi" \
- --arg date "$date" \
- --arg startTime "$start_time" \
- --arg branch "$branch" \
- --arg status "$status_msg" \
- --arg link "$link" \
- '{
- CSI: $csi,
- Date: $date,
- StartTime: $startTime,
- Branch: $branch,
- Status: $status,
- Passed: 0,
- Failed: 0,
- Pending: 0,
- Skipped: 0,
- Link: $link
- }'
- }
-
- # Try to find and load E2E test report
- E2E_REPORT_FILE=""
- REPORT_JSON=""
-
- # Search for report file in test/e2e directory
- E2E_REPORT_FILE=$(find test/e2e -name "e2e_summary_${{ inputs.storage_type }}_*.json" -type f 2>/dev/null | head -1)
-
- if [ -n "$E2E_REPORT_FILE" ] && [ -f "$E2E_REPORT_FILE" ]; then
- echo "[INFO] Found E2E report file: $E2E_REPORT_FILE"
- REPORT_JSON=$(cat "$E2E_REPORT_FILE" | jq -c .)
- echo "[INFO] Loaded report from file"
- echo "$REPORT_JSON" | jq .
- fi
-
- # Function to process a stage
- process_stage() {
- local result_value="$1"
- local stage_name="$2"
- local status_msg="$3"
- local job_name="$4"
- local is_e2e_test="${5:-false}"
-
- if [ "$result_value" != "success" ]; then
- FAILED_STAGE="$stage_name"
- FAILED_JOB_NAME="$job_name (${{ inputs.storage_type }})"
-
- if [ -z "$REPORT_JSON" ] || [ "$REPORT_JSON" == "" ]; then
- REPORT_JSON=$(create_failure_summary "$stage_name" "$status_msg" "$FAILED_JOB_NAME")
- elif [ "$is_e2e_test" == "true" ]; then
- # Special handling for e2e-test: update status if needed
- CURRENT_STATUS=$(echo "$REPORT_JSON" | jq -r '.Status // ""')
- if [[ "$CURRENT_STATUS" != *"FAIL"* ]] && [[ "$CURRENT_STATUS" != *"SUCCESS"* ]]; then
- REPORT_JSON=$(echo "$REPORT_JSON" | jq -c '.Status = ":x: E2E TEST FAILED"')
- fi
- fi
- return 0 # Stage failed
- fi
- return 1 # Stage succeeded
- }
-
- # Determine which stage failed and prepare report
- FAILED_STAGE=""
- FAILED_JOB_NAME=""
-
- if process_stage "${{ needs.bootstrap.result }}" "bootstrap" ":x: BOOTSTRAP CLUSTER FAILED" "Bootstrap cluster"; then
- : # Stage failed, handled in function
- elif process_stage "${{ needs.configure-storage.result }}" "storage-setup" ":x: STORAGE SETUP FAILED" "Configure storage"; then
- : # Stage failed, handled in function
- elif process_stage "${{ needs.configure-virtualization.result }}" "virtualization-setup" ":x: VIRTUALIZATION SETUP FAILED" "Configure Virtualization"; then
- : # Stage failed, handled in function
- elif process_stage "${{ needs.e2e-test.result }}" "e2e-test" ":x: E2E TEST FAILED" "E2E test" "true"; then
- : # Stage failed, handled in function
- else
- # All stages succeeded
- FAILED_STAGE="success"
- FAILED_JOB_NAME="E2E test (${{ inputs.storage_type }})"
- if [ -z "$REPORT_JSON" ] || [ "$REPORT_JSON" == "" ]; then
- REPORT_JSON=$(create_failure_summary "success" ":white_check_mark: SUCCESS!" "$FAILED_JOB_NAME")
- fi
- fi
-
- # Create structured report file with metadata
- REPORT_FILE="e2e_report_${{ inputs.storage_type }}.json"
- # Parse REPORT_JSON to ensure it's valid JSON before using it
- REPORT_JSON_PARSED=$(echo "$REPORT_JSON" | jq -c .)
- jq -n \
- --argjson report "$REPORT_JSON_PARSED" \
- --arg storage_type "${{ inputs.storage_type }}" \
- --arg failed_stage "$FAILED_STAGE" \
- --arg failed_job_name "$FAILED_JOB_NAME" \
- --arg workflow_run_id "${{ github.run_id }}" \
- --arg workflow_run_url "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
- '{
- storage_type: $storage_type,
- failed_stage: $failed_stage,
- failed_job_name: $failed_job_name,
- workflow_run_id: $workflow_run_id,
- workflow_run_url: $workflow_run_url,
- report: $report
- }' > "$REPORT_FILE"
-
- echo "report_file=$REPORT_FILE" >> $GITHUB_OUTPUT
- echo "[INFO] Created report file: $REPORT_FILE"
- echo "[INFO] Failed stage: $FAILED_STAGE"
- echo "[INFO] Failed job: $FAILED_JOB_NAME"
- cat "$REPORT_FILE" | jq .
+ uses: actions/github-script@v7
+ env:
+ STORAGE_TYPE: ${{ inputs.storage_type }}
+ E2E_REPORT_DIR: test/e2e
+ REPORT_FILE: e2e_report_${{ inputs.storage_type }}.json
+ BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
+ BOOTSTRAP_RESULT: ${{ needs.bootstrap.result }}
+ CONFIGURE_SDN_RESULT: ${{ needs.configure-sdn.result }}
+ CONFIGURE_STORAGE_RESULT: ${{ needs.configure-storage.result }}
+ CONFIGURE_VIRTUALIZATION_RESULT: ${{ needs.configure-virtualization.result }}
+ E2E_TEST_RESULT: ${{ needs.e2e-test.result }}
+ WORKFLOW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+ with:
+ script: |
+ const buildClusterReport = require('./.github/scripts/js/e2e/report/cluster-report');
+ await buildClusterReport({core, context});
- name: Upload E2E report artifact
id: upload-artifact