From 41a3b2583ce1014ccd04f935c5c8b87831bee2e7 Mon Sep 17 00:00:00 2001 From: Matt Carvin <90224411+mcarvin8@users.noreply.github.com> Date: Mon, 16 Dec 2024 17:00:10 -0500 Subject: [PATCH] feat: add support for cobertura format --- .husky/commit-msg | 3 - .husky/pre-commit | 3 - .husky/pre-push | 3 - README.md | 118 ++++++++++++-- messages/transformer.transform.md | 4 + package.json | 5 +- src/commands/acc-transformer/transform.ts | 13 +- src/helpers/setCoveredLinesCobertura.ts | 45 ++++++ ...overedLines.ts => setCoveredLinesSonar.ts} | 2 +- src/helpers/transformDeployCoverageReport.ts | 149 +++++++++++++---- src/helpers/transformTestCoverageReport.ts | 153 ++++++++++++++---- src/helpers/types.ts | 47 ++++++ src/hooks/postrun.ts | 3 + .../commands/acc-transformer/transform.nut.ts | 68 +++++--- .../acc-transformer/transform.test.ts | 108 ++++++++++--- 15 files changed, 595 insertions(+), 129 deletions(-) create mode 100644 src/helpers/setCoveredLinesCobertura.ts rename src/helpers/{setCoveredLines.ts => setCoveredLinesSonar.ts} (96%) diff --git a/.husky/commit-msg b/.husky/commit-msg index a770f4b..564449e 100644 --- a/.husky/commit-msg +++ b/.husky/commit-msg @@ -1,4 +1 @@ -#!/bin/sh -. "$(dirname "$0")/_/husky.sh" - yarn commitlint --edit \ No newline at end of file diff --git a/.husky/pre-commit b/.husky/pre-commit index ffafe6a..2919583 100644 --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -1,4 +1 @@ -#!/bin/sh -. "$(dirname "$0")/_/husky.sh" - yarn lint && yarn pretty-quick --staged && yarn build \ No newline at end of file diff --git a/.husky/pre-push b/.husky/pre-push index 09c3243..15ec4f6 100644 --- a/.husky/pre-push +++ b/.husky/pre-push @@ -1,4 +1 @@ -#!/bin/sh -. "$(dirname "$0")/_/husky.sh" - yarn build \ No newline at end of file diff --git a/README.md b/README.md index 031173f..e7c4066 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ - [License](#license) -A Salesforce CLI plugin to transform the Apex code coverage JSON files created during deployments and test runs into the generic test coverage format (XML) accepted by SonarQube. +A Salesforce CLI plugin to transform the Apex code coverage JSON files created during deployments and test runs into SonarQube format or Cobertura format. ## Install @@ -28,14 +28,16 @@ sf plugins install apex-code-coverage-transformer@x.y.z ## Who is the Plugin For? -This plugin is intended for users who deploy their Apex codebase (Apex classes and triggers) from any Salesforce DX repository (`sfdx-project.json` file), not just git-based ones. You should be running this plugin somewhere inside your Salesforce DX repository (root folder preferred). This plugin searches for your repository's `sfdx-project.json` file to know which package directories to search into. Since SonarQube relies on file-paths to map code coverage to the files in their explorer interface, the Apex files must be found in one of your package directories. +This plugin is intended for users who deploy their Apex codebase (Apex classes and triggers) from any Salesforce DX repository (`sfdx-project.json` file), not just git-based ones. You should be running this plugin somewhere inside your Salesforce DX repository (root folder preferred). This plugin searches for your repository's `sfdx-project.json` file to know which package directories to search into. The Apex files must be found in one of your package directories. -This plugin will work if you run local tests or run all tests in an org, including tests that originate from installed managed and unlocked packages. 
Since files from managed and unlocked packages aren't retrieved into Salesforce DX repositories, these files cannot be included in your SonarQube scans.
+This plugin will work if you run local tests or run all tests in an org, including tests that originate from installed managed and unlocked packages. Since files from managed and unlocked packages aren't retrieved into Salesforce DX repositories, these files cannot be included in your code coverage reports.

-When the plugin is unable to find the Apex file from the coverage report in your repository, it will print a warning and not add that file's coverage data to the coverage XML created by this plugin. A warning will be printed for each file not found in a package directory in your repository. See [Errors and Warnings](https://github.com/mcarvin8/apex-code-coverage-transformer?tab=readme-ov-file#errors-and-warnings) for more information.
+When the plugin is unable to find the Apex file from the Salesforce CLI coverage report in your repository, it will print a warning and not add that file's coverage data to the coverage XML created by this plugin. A warning will be printed for each file not found in a package directory in your repository. See [Errors and Warnings](https://github.com/mcarvin8/apex-code-coverage-transformer?tab=readme-ov-file#errors-and-warnings) for more information.

## Creating Code Coverage Files with the Salesforce CLI

+**This tool only supports the JSON coverage format from the Salesforce CLI. Do not use "json-summary" or Salesforce's cobertura output.**
+
To create the code coverage JSON during a Salesforce CLI deployment/validation, append `--coverage-formatters json --results-dir "coverage"` to the `sf project deploy` command. This will create a coverage JSON in this relative path - `coverage/coverage/coverage.json`.

```
@@ -49,7 +51,7 @@ sf apex run test --code-coverage --result-format json --output-dir "coverage"
sf apex get test --test-run-id --code-coverage --result-format json --output-dir "coverage"
```

-The code coverage JSONs created by the Salesforce CLI aren't accepted by SonarQube automatically for Salesforce DX repositories and needs to be converted using this plugin.
+The code coverage JSONs created by the Salesforce CLI aren't accepted as-is by tools like SonarQube and need to be converted using this plugin.

**Disclaimer**: Due to existing bugs with how the Salesforce CLI reports covered lines during deployments (see [5511](https://github.com/forcedotcom/salesforcedx-vscode/issues/5511) and [1568](https://github.com/forcedotcom/cli/issues/1568)), to add support for covered lines in this plugin for deployment coverage files, I had to add a function to re-number out-of-range covered lines the CLI may report (ex: line 100 in a 98-line Apex Class is reported back as covered by the Salesforce CLI deploy command). Salesforce's coverage result may also include extra lines as covered (ex: 120 lines are included in the coverage report for a 100 line file), so the coverage percentage may vary based on how many lines the API returns in the coverage report. Once Salesforce fixes the API to correctly return covered lines in the deploy command, this function will be removed.

@@ -63,20 +65,22 @@ The `apex-code-coverage-transformer` has 1 command:

```
USAGE
- $ sf acc-transformer transform -j -x [--json]
+ $ sf acc-transformer transform -j -x -f [--json]

FLAGS
-j, --coverage-json= Path to the code coverage JSON file created by the Salesforce CLI deployment or test command.
- -x, --xml= [default: "coverage.xml"] Path to code coverage XML file that will be created by this plugin. + -x, --xml= [default: "coverage.xml"] Path to code coverage XML file that will be created by this plugin. + -f, --format= [default: "sonar"] Output format for the code coverage format. + Valid options are "sonar" or "cobertura". GLOBAL FLAGS --json Format output as json. DESCRIPTION - This plugin will convert the code coverage JSON file created by the Salesforce CLI during Apex deployments and test runs into an XML accepted by tools like SonarQube. + This plugin will convert the code coverage JSON file created by the Salesforce CLI during Apex deployments and test runs into SonarQube or Cobertura format. EXAMPLES - $ sf acc-transformer transform -j "coverage.json" -x "coverage.xml" + $ sf acc-transformer transform -j "coverage.json" -x "coverage.xml" -f "sonar" ``` ## Hook @@ -93,13 +97,15 @@ The `.apexcodecovtransformer.config.json` should look like this: { "deployCoverageJsonPath": "coverage/coverage/coverage.json", "testCoverageJsonPath": "coverage/test-coverage.json", - "coverageXmlPath": "coverage.xml" + "coverageXmlPath": "coverage.xml", + "format": "sonar" } ``` - `deployCoverageJsonPath` is required to use the hook after deployments and should be the path to the code coverage JSON created by the Salesforce CLI deployment command. Recommend using a relative path. - `testCoverageJsonPath` is required to use the hook after test runs and should be the path to the code coverage JSON created by the Salesforce CLI test command. Recommend using a relative path. - `coverageXmlPath` is optional and should be the path to the code coverage XML created by this plugin. Recommend using a relative path. If this isn't provided, it will default to `coverage.xml` in the working directory. +- `format` is optional and should be the intended output format for the code coverage XML created by this plugin. Options are "sonar" or "cobertura". If this isn't provided, it will default to "sonar". If the `.apexcodecovtransformer.config.json` file isn't found, the hook will be skipped. @@ -141,7 +147,7 @@ Error (1): ENOENT: no such file or directory: {packageDirPath} ## Example -This [code coverage JSON file](https://raw.githubusercontent.com/mcarvin8/apex-code-coverage-transformer/main/test/deploy_coverage_no_file_exts.json) created during a Salesforce CLI deployment will be transformed into: +This [code coverage JSON file](https://raw.githubusercontent.com/mcarvin8/apex-code-coverage-transformer/main/test/deploy_coverage_no_file_exts.json) created during a Salesforce CLI deployment will be transformed into this format for SonarQube: ```xml @@ -215,6 +221,96 @@ This [code coverage JSON file](https://raw.githubusercontent.com/mcarvin8/apex-c ``` +and this format for Cobertura: + +```xml + + + + + . + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +``` + ## Issues If you encounter any issues, please create an issue in the repository's [issue tracker](https://github.com/mcarvin8/apex-code-coverage-transformer/issues). Please also create issues to suggest any new features. 
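For readers who want to drive the new `--format` flag from a script rather than the shell, the command can also be invoked programmatically, exactly as the `postrun` hook and the unit tests later in this patch do. A minimal sketch follows; the import path is an assumption based on this repository's layout, and the coverage paths are illustrative:

```typescript
// Sketch: invoking the transform command programmatically with the new --format flag,
// mirroring the arguments the postrun hook and the new unit tests pass to run().
// The import path is an assumption based on this repository's source layout.
import TransformerTransform from './src/commands/acc-transformer/transform.js';

async function transformToCobertura(): Promise<void> {
  // Illustrative paths: point --coverage-json at the JSON produced by
  // `sf project deploy ... --coverage-formatters json --results-dir "coverage"`.
  await TransformerTransform.run([
    '--coverage-json', 'coverage/coverage/coverage.json',
    '--xml', 'coverage.xml',
    '--format', 'cobertura', // defaults to 'sonar' when omitted
  ]);
}

transformToCobertura().catch((err) => {
  console.error(err);
  process.exit(1);
});
```

Omitting `--format` keeps the previous behavior, since the flag defaults to `sonar`.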
diff --git a/messages/transformer.transform.md b/messages/transformer.transform.md index 2ac2bbe..239eeef 100644 --- a/messages/transformer.transform.md +++ b/messages/transformer.transform.md @@ -17,3 +17,7 @@ Path to the code coverage JSON file created by the Salesforce CLI deployment or # flags.xml.summary Path to code coverage XML file that will be created by this plugin. + +# flags.format.summary + +Output format for the coverage report. diff --git a/package.json b/package.json index 5963760..79bde0d 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "apex-code-coverage-transformer", - "description": "Transforms the Apex code coverage JSON created during Salesforce deployments and test runs into the Generic Test Coverage Format (XML) for SonarQube.", + "description": "Transforms the Apex code coverage JSON created during Salesforce deployments and test runs into SonarQube or Cobertura format.", "version": "2.2.1", "dependencies": { "@oclif/core": "^4.0.37", @@ -45,7 +45,8 @@ "sonarqube", "apex", "coverage", - "git" + "git", + "cobertura" ], "license": "MIT", "oclif": { diff --git a/src/commands/acc-transformer/transform.ts b/src/commands/acc-transformer/transform.ts index ee1fff0..d8ce949 100644 --- a/src/commands/acc-transformer/transform.ts +++ b/src/commands/acc-transformer/transform.ts @@ -32,12 +32,21 @@ export default class TransformerTransform extends SfCommand { const { flags } = await this.parse(TransformerTransform); const jsonFilePath = resolve(flags['coverage-json']); const xmlFilePath = resolve(flags['xml']); + const format = flags['format']; const jsonData = await readFile(jsonFilePath, 'utf-8'); let xmlData: string; @@ -48,12 +57,12 @@ export default class TransformerTransform extends SfCommand { + const randomLines: number[] = []; + const totalLines = await getTotalLines(join(repoRoot, filePath)); + + for (const coveredLine of coveredLines) { + if (coveredLine > totalLines) { + for (let randomLineNumber = 1; randomLineNumber <= totalLines; randomLineNumber++) { + if ( + !uncoveredLines.includes(randomLineNumber) && + !coveredLines.includes(randomLineNumber) && + !randomLines.includes(randomLineNumber) + ) { + const randomLine: CoberturaLine = { + '@number': randomLineNumber, + '@hits': 1, + '@branch': 'false', + }; + classObj.lines.line.push(randomLine); + randomLines.push(randomLineNumber); + break; + } + } + } else { + const coveredLineObj: CoberturaLine = { + '@number': coveredLine, + '@hits': 1, + '@branch': 'false', + }; + classObj.lines.line.push(coveredLineObj); + } + } +} diff --git a/src/helpers/setCoveredLines.ts b/src/helpers/setCoveredLinesSonar.ts similarity index 96% rename from src/helpers/setCoveredLines.ts rename to src/helpers/setCoveredLinesSonar.ts index 66821c3..088cf6c 100644 --- a/src/helpers/setCoveredLines.ts +++ b/src/helpers/setCoveredLinesSonar.ts @@ -5,7 +5,7 @@ import { join } from 'node:path'; import { getTotalLines } from './getTotalLines.js'; import { FileObject } from './types.js'; -export async function setCoveredLines( +export async function setCoveredLinesSonar( coveredLines: number[], uncoveredLines: number[], repoRoot: string, diff --git a/src/helpers/transformDeployCoverageReport.ts b/src/helpers/transformDeployCoverageReport.ts index 8dacf35..1345455 100644 --- a/src/helpers/transformDeployCoverageReport.ts +++ b/src/helpers/transformDeployCoverageReport.ts @@ -2,50 +2,135 @@ /* eslint-disable no-await-in-loop */ import { create } from 'xmlbuilder2'; - -import { DeployCoverageData, CoverageObject, FileObject 
} from './types.js'; +import { DeployCoverageData, CoverageObject, CoberturaCoverageObject, FileObject, CoberturaClass } from './types.js'; import { getPackageDirectories } from './getPackageDirectories.js'; import { findFilePath } from './findFilePath.js'; -import { setCoveredLines } from './setCoveredLines.js'; +import { setCoveredLinesSonar } from './setCoveredLinesSonar.js'; +import { setCoveredLinesCobertura } from './setCoveredLinesCobertura.js'; import { normalizePathToUnix } from './normalizePathToUnix.js'; export async function transformDeployCoverageReport( - data: DeployCoverageData + data: DeployCoverageData, + format: string ): Promise<{ xml: string; warnings: string[]; filesProcessed: number }> { - const coverageObj: CoverageObject = { coverage: { '@version': '1', file: [] } }; const warnings: string[] = []; let filesProcessed: number = 0; const { repoRoot, packageDirectories } = await getPackageDirectories(); - for (const fileName in data) { - if (!Object.hasOwn(data, fileName)) continue; - const fileInfo = data[fileName]; - const formattedFileName = fileName.replace(/no-map[\\/]+/, ''); - const relativeFilePath = await findFilePath(formattedFileName, packageDirectories, repoRoot); - if (relativeFilePath === undefined) { - warnings.push(`The file name ${formattedFileName} was not found in any package directory.`); - continue; + if (format === 'sonar') { + const coverageObj: CoverageObject = { coverage: { '@version': '1', file: [] } }; + + for (const fileName in data) { + if (!Object.hasOwn(data, fileName)) continue; + const fileInfo = data[fileName]; + const formattedFileName = fileName.replace(/no-map[\\/]+/, ''); + const relativeFilePath = await findFilePath(formattedFileName, packageDirectories, repoRoot); + if (relativeFilePath === undefined) { + warnings.push(`The file name ${formattedFileName} was not found in any package directory.`); + continue; + } + const uncoveredLines = Object.keys(fileInfo.s) + .filter((lineNumber) => fileInfo.s[lineNumber] === 0) + .map(Number); + const coveredLines = Object.keys(fileInfo.s) + .filter((lineNumber) => fileInfo.s[lineNumber] === 1) + .map(Number); + + const fileObj: FileObject = { + '@path': normalizePathToUnix(relativeFilePath), + lineToCover: uncoveredLines.map((lineNumber: number) => ({ + '@lineNumber': lineNumber, + '@covered': 'false', + })), + }; + + await setCoveredLinesSonar(coveredLines, uncoveredLines, repoRoot, relativeFilePath, fileObj); + filesProcessed++; + coverageObj.coverage.file.push(fileObj); } - const uncoveredLines = Object.keys(fileInfo.s) - .filter((lineNumber) => fileInfo.s[lineNumber] === 0) - .map(Number); - const coveredLines = Object.keys(fileInfo.s) - .filter((lineNumber) => fileInfo.s[lineNumber] === 1) - .map(Number); - - const fileObj: FileObject = { - '@path': normalizePathToUnix(relativeFilePath), - lineToCover: uncoveredLines.map((lineNumber: number) => ({ - '@lineNumber': lineNumber, - '@covered': 'false', - })), + const xml = create(coverageObj).end({ prettyPrint: true, indent: ' ' }); + return { xml, warnings, filesProcessed }; + } else if (format === 'cobertura') { + const coberturaObj: CoberturaCoverageObject = { + coverage: { + '@lines-valid': 0, + '@lines-covered': 0, + '@line-rate': 0, + '@branches-valid': 0, + '@branches-covered': 0, + '@branch-rate': 1, + '@timestamp': Date.now(), + '@complexity': 0, + '@version': '0.1', + sources: { source: ['.'] }, + packages: { package: [] }, + }, }; - // this function is only needed until Salesforce fixes the API to correctly return covered lines - 
await setCoveredLines(coveredLines, uncoveredLines, repoRoot, relativeFilePath, fileObj); - filesProcessed++; - coverageObj.coverage.file.push(fileObj); + // Single package for all classes + const packageObj = { + '@name': 'main', + '@line-rate': 0, + '@branch-rate': 1, + classes: { class: [] as CoberturaClass[] }, + }; + coberturaObj.coverage.packages.package.push(packageObj); + + for (const fileName in data) { + if (!Object.hasOwn(data, fileName)) continue; + const fileInfo = data[fileName]; + const formattedFileName = fileName.replace(/no-map[\\/]+/, ''); + const relativeFilePath = await findFilePath(formattedFileName, packageDirectories, repoRoot); + if (relativeFilePath === undefined) { + warnings.push(`The file name ${formattedFileName} was not found in any package directory.`); + continue; + } + const uncoveredLines = Object.keys(fileInfo.s) + .filter((lineNumber) => fileInfo.s[lineNumber] === 0) + .map(Number); + const coveredLines = Object.keys(fileInfo.s) + .filter((lineNumber) => fileInfo.s[lineNumber] === 1) + .map(Number); + + const classObj: CoberturaClass = { + '@name': formattedFileName, + '@filename': normalizePathToUnix(relativeFilePath), + '@line-rate': (coveredLines.length / (coveredLines.length + uncoveredLines.length)).toFixed(4), + '@branch-rate': '1', + methods: {}, + lines: { + line: [ + ...uncoveredLines.map((lineNumber) => ({ + '@number': lineNumber, + '@hits': 0, + '@branch': 'false', + })), + ], + }, + }; + + await setCoveredLinesCobertura(coveredLines, uncoveredLines, repoRoot, relativeFilePath, classObj); + + // Update package and overall coverage metrics + coberturaObj.coverage['@lines-valid'] += uncoveredLines.length + coveredLines.length; + coberturaObj.coverage['@lines-covered'] += coveredLines.length; + + packageObj.classes.class.push(classObj); + filesProcessed++; + } + + // Update overall line-rate for the package + packageObj['@line-rate'] = Number( + (coberturaObj.coverage['@lines-covered'] / coberturaObj.coverage['@lines-valid']).toFixed(4) + ); + coberturaObj.coverage['@line-rate'] = packageObj['@line-rate']; + + let xml = create(coberturaObj).end({ prettyPrint: true, indent: ' ', headless: true }); + + // Add DOCTYPE declaration at the beginning of the XML + xml = `\n\n${xml}`; + return { xml, warnings, filesProcessed }; } - const xml = create(coverageObj).end({ prettyPrint: true, indent: ' ' }); - return { xml, warnings, filesProcessed }; + + throw new Error(`Unsupported format: ${format}`); } diff --git a/src/helpers/transformTestCoverageReport.ts b/src/helpers/transformTestCoverageReport.ts index 21ede9f..4d5409d 100644 --- a/src/helpers/transformTestCoverageReport.ts +++ b/src/helpers/transformTestCoverageReport.ts @@ -2,51 +2,144 @@ /* eslint-disable no-await-in-loop */ import { create } from 'xmlbuilder2'; - -import { TestCoverageData, CoverageObject, FileObject } from './types.js'; +import { TestCoverageData, CoverageObject, FileObject, CoberturaCoverageObject, CoberturaClass } from './types.js'; import { getPackageDirectories } from './getPackageDirectories.js'; import { findFilePath } from './findFilePath.js'; import { normalizePathToUnix } from './normalizePathToUnix.js'; export async function transformTestCoverageReport( - testCoverageData: TestCoverageData[] + testCoverageData: TestCoverageData[], + format: string ): Promise<{ xml: string; warnings: string[]; filesProcessed: number }> { - const coverageObj: CoverageObject = { coverage: { '@version': '1', file: [] } }; const warnings: string[] = []; let filesProcessed: number = 0; const 
{ repoRoot, packageDirectories } = await getPackageDirectories(); - - if (!Array.isArray(testCoverageData)) { - testCoverageData = [testCoverageData]; + let coverageData = testCoverageData; + if (!Array.isArray(coverageData)) { + coverageData = [coverageData]; } - for (const data of testCoverageData) { - const name = data?.name; - const lines = data?.lines; + if (format === 'sonar') { + const coverageObj: CoverageObject = { coverage: { '@version': '1', file: [] } }; - if (!name || !lines) { - continue; - } - const formattedFileName = name.replace(/no-map[\\/]+/, ''); - const relativeFilePath = await findFilePath(formattedFileName, packageDirectories, repoRoot); - if (relativeFilePath === undefined) { - warnings.push(`The file name ${formattedFileName} was not found in any package directory.`); - continue; + for (const data of coverageData) { + const name = data?.name; + const lines = data?.lines; + + if (!name || !lines) continue; + + const formattedFileName = name.replace(/no-map[\\/]+/, ''); + const relativeFilePath = await findFilePath(formattedFileName, packageDirectories, repoRoot); + if (relativeFilePath === undefined) { + warnings.push(`The file name ${formattedFileName} was not found in any package directory.`); + continue; + } + + const fileObj: FileObject = { + '@path': normalizePathToUnix(relativeFilePath), + lineToCover: [], + }; + + for (const [lineNumber, isCovered] of Object.entries(lines)) { + fileObj.lineToCover.push({ + '@lineNumber': Number(lineNumber), + '@covered': `${isCovered === 1}`, + }); + } + filesProcessed++; + coverageObj.coverage.file.push(fileObj); } - const fileObj: FileObject = { - '@path': normalizePathToUnix(relativeFilePath), - lineToCover: [], + + const xml = create(coverageObj).end({ prettyPrint: true, indent: ' ' }); + return { xml, warnings, filesProcessed }; + } else if (format === 'cobertura') { + const coberturaObj: CoberturaCoverageObject = { + coverage: { + '@lines-valid': 0, + '@lines-covered': 0, + '@line-rate': 0, + '@branches-valid': 0, + '@branches-covered': 0, + '@branch-rate': 1, + '@timestamp': Date.now(), + '@complexity': 0, + '@version': '0.1', + sources: { source: ['.'] }, + packages: { package: [] }, + }, + }; + + // Single package for all classes + const packageObj = { + '@name': 'main', + '@line-rate': 0, + '@branch-rate': 1, + classes: { class: [] as CoberturaClass[] }, }; + coberturaObj.coverage.packages.package.push(packageObj); - for (const [lineNumber, isCovered] of Object.entries(lines)) { - fileObj.lineToCover.push({ - '@lineNumber': Number(lineNumber), - '@covered': `${isCovered === 1}`, - }); + for (const data of coverageData) { + const name = data?.name; + const lines = data?.lines; + + if (!name || !lines) continue; + + const formattedFileName = name.replace(/no-map[\\/]+/, ''); + const relativeFilePath = await findFilePath(formattedFileName, packageDirectories, repoRoot); + if (relativeFilePath === undefined) { + warnings.push(`The file name ${formattedFileName} was not found in any package directory.`); + continue; + } + + const uncoveredLines = Object.entries(lines) + .filter(([, isCovered]) => isCovered === 0) + .map(([lineNumber]) => Number(lineNumber)); + const coveredLines = Object.entries(lines) + .filter(([, isCovered]) => isCovered === 1) + .map(([lineNumber]) => Number(lineNumber)); + + const classObj: CoberturaClass = { + '@name': formattedFileName, + '@filename': normalizePathToUnix(relativeFilePath), + '@line-rate': (coveredLines.length / (coveredLines.length + uncoveredLines.length)).toFixed(4), + 
'@branch-rate': '1', + methods: {}, + lines: { + line: [ + ...uncoveredLines.map((lineNumber) => ({ + '@number': lineNumber, + '@hits': 0, + '@branch': 'false', + })), + ...coveredLines.map((lineNumber) => ({ + '@number': lineNumber, + '@hits': 1, + '@branch': 'false', + })), + ], + }, + }; + + // Update package and overall coverage metrics + coberturaObj.coverage['@lines-valid'] += uncoveredLines.length + coveredLines.length; + coberturaObj.coverage['@lines-covered'] += coveredLines.length; + + packageObj.classes.class.push(classObj); + filesProcessed++; } - filesProcessed++; - coverageObj.coverage.file.push(fileObj); + + // Update overall line-rate for the package + packageObj['@line-rate'] = parseFloat( + (coberturaObj.coverage['@lines-covered'] / coberturaObj.coverage['@lines-valid']).toFixed(4) + ); + coberturaObj.coverage['@line-rate'] = packageObj['@line-rate']; + + let xml = create(coberturaObj).end({ prettyPrint: true, indent: ' ', headless: true }); + + // Add DOCTYPE declaration at the beginning of the XML + xml = `\n\n${xml}`; + return { xml, warnings, filesProcessed }; } - const xml = create(coverageObj).end({ prettyPrint: true, indent: ' ' }); - return { xml, warnings, filesProcessed }; + + throw new Error(`Unsupported format: ${format}`); } diff --git a/src/helpers/types.ts b/src/helpers/types.ts index b788b68..5e6ed3b 100644 --- a/src/helpers/types.ts +++ b/src/helpers/types.ts @@ -56,4 +56,51 @@ export type ConfigFile = { deployCoverageJsonPath: string; testCoverageJsonPath: string; coverageXmlPath: string; + format: string; +}; + +export type CoberturaLine = { + '@number': number; + '@hits': number; + '@branch': string; +}; + +export type CoberturaClass = { + '@name': string; + '@filename': string; + '@line-rate': string; + '@branch-rate': string; + methods: Record; + lines: { + line: CoberturaLine[]; + }; +}; + +type CoberturaPackage = { + '@name': string; + '@line-rate': number; + '@branch-rate': number; + classes: { + class: CoberturaClass[]; + }; +}; + +export type CoberturaCoverageObject = { + coverage: { + '@lines-valid': number; + '@lines-covered': number; + '@line-rate': number; + '@branches-valid': number; + '@branches-covered': number; + '@branch-rate': number | string; + '@timestamp': number; + '@complexity': number; + '@version': string; + sources: { + source: string[]; + }; + packages: { + package: CoberturaPackage[]; + }; + }; }; diff --git a/src/hooks/postrun.ts b/src/hooks/postrun.ts index b4de146..c29c88a 100644 --- a/src/hooks/postrun.ts +++ b/src/hooks/postrun.ts @@ -38,6 +38,7 @@ export const postrun: Hook<'postrun'> = async function (options) { } const coverageXml: string = configFile.coverageXmlPath || 'coverage.xml'; + const coverageFormat: string = configFile.format || 'sonar'; if (commandType === 'deploy') { coverageJson = configFile.deployCoverageJsonPath || '.'; @@ -61,5 +62,7 @@ export const postrun: Hook<'postrun'> = async function (options) { commandArgs.push(coverageJsonPath); commandArgs.push('--xml'); commandArgs.push(coverageXmlPath); + commandArgs.push('--format'); + commandArgs.push(coverageFormat); await TransformerTransform.run(commandArgs); }; diff --git a/test/commands/acc-transformer/transform.nut.ts b/test/commands/acc-transformer/transform.nut.ts index 6b6cdb4..1181d29 100644 --- a/test/commands/acc-transformer/transform.nut.ts +++ b/test/commands/acc-transformer/transform.nut.ts @@ -17,9 +17,12 @@ describe('acc-transformer transform NUTs', () => { const invalidJson = resolve('test/invalid.json'); const deployBaselineXmlPath = 
resolve('test/deploy_coverage_baseline.xml'); const testBaselineXmlPath = resolve('test/test_coverage_baseline.xml'); - const coverageXmlPath1 = resolve('coverage1.xml'); - const coverageXmlPath2 = resolve('coverage2.xml'); - const coverageXmlPath3 = resolve('coverage3.xml'); + const sonarXmlPath1 = resolve('sonar1.xml'); + const sonarXmlPath2 = resolve('sonar2.xml'); + const sonarXmlPath3 = resolve('sonar3.xml'); + const coberturaXmlPath1 = resolve('cobertura1.xml'); + const coberturaXmlPath2 = resolve('cobertura2.xml'); + const coberturaXmlPath3 = resolve('cobertura3.xml'); const sfdxConfigFile = resolve('sfdx-project.json'); const configFile = { @@ -46,30 +49,33 @@ describe('acc-transformer transform NUTs', () => { await rm('packaged/triggers/AccountTrigger.trigger'); await rm('force-app', { recursive: true }); await rm('packaged', { recursive: true }); - await rm(coverageXmlPath1); - await rm(coverageXmlPath2); - await rm(coverageXmlPath3); + await rm(sonarXmlPath1); + await rm(sonarXmlPath2); + await rm(sonarXmlPath3); + await rm(coberturaXmlPath1); + await rm(coberturaXmlPath2); + await rm(coberturaXmlPath3); }); - it('runs transform on the deploy coverage file without file extensions.', async () => { - const command = `acc-transformer transform --coverage-json "${deployCoverageNoExts}" --xml "${coverageXmlPath1}"`; + it('runs transform on the deploy coverage file without file extensions in Sonar format.', async () => { + const command = `acc-transformer transform --coverage-json "${deployCoverageNoExts}" --xml "${sonarXmlPath1}"`; const output = execCmd(command, { ensureExitCode: 0 }).shellOutput.stdout; - expect(output.replace('\n', '')).to.equal(`The coverage XML has been written to ${coverageXmlPath1}`); + expect(output.replace('\n', '')).to.equal(`The coverage XML has been written to ${sonarXmlPath1}`); }); - it('runs transform on the deploy coverage file with file extensions.', async () => { - const command = `acc-transformer transform --coverage-json "${deployCoverageWithExts}" --xml "${coverageXmlPath2}"`; + it('runs transform on the deploy coverage file with file extensions in Sonar format.', async () => { + const command = `acc-transformer transform --coverage-json "${deployCoverageWithExts}" --xml "${sonarXmlPath2}"`; const output = execCmd(command, { ensureExitCode: 0 }).shellOutput.stdout; - expect(output.replace('\n', '')).to.equal(`The coverage XML has been written to ${coverageXmlPath2}`); + expect(output.replace('\n', '')).to.equal(`The coverage XML has been written to ${sonarXmlPath2}`); }); - it('runs transform on the test coverage file.', async () => { - const command = `acc-transformer transform --coverage-json "${testCoverage}" --xml "${coverageXmlPath3}"`; + it('runs transform on the test coverage file in Sonar format.', async () => { + const command = `acc-transformer transform --coverage-json "${testCoverage}" --xml "${sonarXmlPath3}"`; const output = execCmd(command, { ensureExitCode: 0 }).shellOutput.stdout; - expect(output.replace('\n', '')).to.equal(`The coverage XML has been written to ${coverageXmlPath3}`); + expect(output.replace('\n', '')).to.equal(`The coverage XML has been written to ${sonarXmlPath3}`); }); it('confirms a failure on an invalid JSON file.', async () => { const command = `acc-transformer transform --coverage-json "${invalidJson}"`; @@ -81,25 +87,45 @@ describe('acc-transformer transform NUTs', () => { }); it('confirm the XML files created are the same as the baselines.', async () => { - const deployXml1 = await 
readFile(coverageXmlPath1, 'utf-8'); - const deployXml2 = await readFile(coverageXmlPath2, 'utf-8'); - const testXml = await readFile(coverageXmlPath3, 'utf-8'); + const deployXml1 = await readFile(sonarXmlPath1, 'utf-8'); + const deployXml2 = await readFile(sonarXmlPath2, 'utf-8'); + const testXml = await readFile(sonarXmlPath3, 'utf-8'); const deployBaselineXmlContent = await readFile(deployBaselineXmlPath, 'utf-8'); const testBaselineXmlContent = await readFile(testBaselineXmlPath, 'utf-8'); strictEqual( deployXml1, deployBaselineXmlContent, - `File content is different between ${coverageXmlPath1} and ${deployBaselineXmlPath}` + `File content is different between ${sonarXmlPath1} and ${deployBaselineXmlPath}` ); strictEqual( deployXml2, deployBaselineXmlContent, - `File content is different between ${coverageXmlPath2} and ${deployBaselineXmlPath}` + `File content is different between ${sonarXmlPath2} and ${deployBaselineXmlPath}` ); strictEqual( testXml, testBaselineXmlContent, - `File content is different between ${coverageXmlPath3} and ${testBaselineXmlPath}` + `File content is different between ${sonarXmlPath3} and ${testBaselineXmlPath}` ); }); + it('runs transform on the deploy coverage file without file extensions in Cobertura format.', async () => { + const command = `acc-transformer transform --coverage-json "${deployCoverageNoExts}" --xml "${coberturaXmlPath1}" --format cobertura`; + const output = execCmd(command, { ensureExitCode: 0 }).shellOutput.stdout; + + expect(output.replace('\n', '')).to.equal(`The coverage XML has been written to ${coberturaXmlPath1}`); + }); + + it('runs transform on the deploy coverage file with file extensions in Cobertura format.', async () => { + const command = `acc-transformer transform --coverage-json "${deployCoverageWithExts}" --xml "${coberturaXmlPath2}" --format cobertura`; + const output = execCmd(command, { ensureExitCode: 0 }).shellOutput.stdout; + + expect(output.replace('\n', '')).to.equal(`The coverage XML has been written to ${coberturaXmlPath2}`); + }); + + it('runs transform on the test coverage file in Cobertura format.', async () => { + const command = `acc-transformer transform --coverage-json "${testCoverage}" --xml "${coberturaXmlPath3}" --format cobertura`; + const output = execCmd(command, { ensureExitCode: 0 }).shellOutput.stdout; + + expect(output.replace('\n', '')).to.equal(`The coverage XML has been written to ${coberturaXmlPath3}`); + }); }); diff --git a/test/commands/acc-transformer/transform.test.ts b/test/commands/acc-transformer/transform.test.ts index a374074..95cf9d8 100644 --- a/test/commands/acc-transformer/transform.test.ts +++ b/test/commands/acc-transformer/transform.test.ts @@ -20,9 +20,12 @@ describe('main', () => { const invalidJson = resolve('test/invalid.json'); const deployBaselineXmlPath = resolve('test/deploy_coverage_baseline.xml'); const testBaselineXmlPath = resolve('test/test_coverage_baseline.xml'); - const coverageXmlPath1 = resolve('coverage1.xml'); - const coverageXmlPath2 = resolve('coverage2.xml'); - const coverageXmlPath3 = resolve('coverage3.xml'); + const sonarXmlPath1 = resolve('sonar1.xml'); + const sonarXmlPath2 = resolve('sonar2.xml'); + const sonarXmlPath3 = resolve('sonar3.xml'); + const coberturaXmlPath1 = resolve('cobertura1.xml'); + const coberturaXmlPath2 = resolve('cobertura2.xml'); + const coberturaXmlPath3 = resolve('cobertura3.xml'); const sfdxConfigFile = resolve('sfdx-project.json'); const configFile = { @@ -55,44 +58,47 @@ describe('main', () => { await 
rm('packaged/triggers/AccountTrigger.trigger'); await rm('force-app', { recursive: true }); await rm('packaged', { recursive: true }); - await rm(coverageXmlPath1); - await rm(coverageXmlPath2); - await rm(coverageXmlPath3); + await rm(sonarXmlPath1); + await rm(sonarXmlPath2); + await rm(sonarXmlPath3); + await rm(coberturaXmlPath1); + await rm(coberturaXmlPath2); + await rm(coberturaXmlPath3); }); - it('transform the test JSON file without file extensions into the generic test coverage format without any warnings.', async () => { - await TransformerTransform.run(['--coverage-json', deployCoverageNoExts, '--xml', coverageXmlPath1]); + it('transform the test JSON file without file extensions into Sonar format without any warnings.', async () => { + await TransformerTransform.run(['--coverage-json', deployCoverageNoExts, '--xml', sonarXmlPath1]); const output = sfCommandStubs.log .getCalls() .flatMap((c) => c.args) .join('\n'); - expect(output).to.include(`The coverage XML has been written to ${coverageXmlPath1}`); + expect(output).to.include(`The coverage XML has been written to ${sonarXmlPath1}`); const warnings = sfCommandStubs.warn .getCalls() .flatMap((c) => c.args) .join('\n'); expect(warnings).to.include(''); }); - it('transform the test JSON file with file extensions into the generic test coverage format without any warnings.', async () => { - await TransformerTransform.run(['--coverage-json', deployCoverageWithExts, '--xml', coverageXmlPath2]); + it('transform the test JSON file with file extensions into Sonar format without any warnings.', async () => { + await TransformerTransform.run(['--coverage-json', deployCoverageWithExts, '--xml', sonarXmlPath2]); const output = sfCommandStubs.log .getCalls() .flatMap((c) => c.args) .join('\n'); - expect(output).to.include(`The coverage XML has been written to ${coverageXmlPath2}`); + expect(output).to.include(`The coverage XML has been written to ${sonarXmlPath2}`); const warnings = sfCommandStubs.warn .getCalls() .flatMap((c) => c.args) .join('\n'); expect(warnings).to.include(''); }); - it('transform the JSON file from a test command into the generic test coverage format without any warnings.', async () => { - await TransformerTransform.run(['--coverage-json', testCoverage, '--xml', coverageXmlPath3]); + it('transform the JSON file from a test command into Sonar format without any warnings.', async () => { + await TransformerTransform.run(['--coverage-json', testCoverage, '--xml', sonarXmlPath3]); const output = sfCommandStubs.log .getCalls() .flatMap((c) => c.args) .join('\n'); - expect(output).to.include(`The coverage XML has been written to ${coverageXmlPath3}`); + expect(output).to.include(`The coverage XML has been written to ${sonarXmlPath3}`); const warnings = sfCommandStubs.warn .getCalls() .flatMap((c) => c.args) @@ -114,25 +120,85 @@ describe('main', () => { } }); it('confirm the XML files created are the same as the baselines.', async () => { - const deployXml1 = await readFile(coverageXmlPath1, 'utf-8'); - const deployXml2 = await readFile(coverageXmlPath2, 'utf-8'); - const testXml = await readFile(coverageXmlPath3, 'utf-8'); + const deployXml1 = await readFile(sonarXmlPath1, 'utf-8'); + const deployXml2 = await readFile(sonarXmlPath2, 'utf-8'); + const testXml = await readFile(sonarXmlPath3, 'utf-8'); const deployBaselineXmlContent = await readFile(deployBaselineXmlPath, 'utf-8'); const testBaselineXmlContent = await readFile(testBaselineXmlPath, 'utf-8'); strictEqual( deployXml1, deployBaselineXmlContent, - `File content 
is different between ${coverageXmlPath1} and ${deployBaselineXmlPath}` + `File content is different between ${sonarXmlPath1} and ${deployBaselineXmlPath}` ); strictEqual( deployXml2, deployBaselineXmlContent, - `File content is different between ${coverageXmlPath2} and ${deployBaselineXmlPath}` + `File content is different between ${sonarXmlPath2} and ${deployBaselineXmlPath}` ); strictEqual( testXml, testBaselineXmlContent, - `File content is different between ${coverageXmlPath3} and ${testBaselineXmlPath}` + `File content is different between ${sonarXmlPath3} and ${testBaselineXmlPath}` ); }); + it('transform the test JSON file without file extensions into Cobertura format without any warnings.', async () => { + await TransformerTransform.run([ + '--coverage-json', + deployCoverageNoExts, + '--xml', + coberturaXmlPath1, + '--format', + 'cobertura', + ]); + const output = sfCommandStubs.log + .getCalls() + .flatMap((c) => c.args) + .join('\n'); + expect(output).to.include(`The coverage XML has been written to ${coberturaXmlPath1}`); + const warnings = sfCommandStubs.warn + .getCalls() + .flatMap((c) => c.args) + .join('\n'); + expect(warnings).to.include(''); + }); + it('transform the test JSON file with file extensions into Cobertura format without any warnings.', async () => { + await TransformerTransform.run([ + '--coverage-json', + deployCoverageWithExts, + '--xml', + coberturaXmlPath2, + '--format', + 'cobertura', + ]); + const output = sfCommandStubs.log + .getCalls() + .flatMap((c) => c.args) + .join('\n'); + expect(output).to.include(`The coverage XML has been written to ${coberturaXmlPath2}`); + const warnings = sfCommandStubs.warn + .getCalls() + .flatMap((c) => c.args) + .join('\n'); + expect(warnings).to.include(''); + }); + it('transform the JSON file from a test command into Cobertura format without any warnings.', async () => { + await TransformerTransform.run([ + '--coverage-json', + testCoverage, + '--xml', + coberturaXmlPath3, + '--format', + 'cobertura', + ]); + const output = sfCommandStubs.log + .getCalls() + .flatMap((c) => c.args) + .join('\n'); + expect(output).to.include(`The coverage XML has been written to ${coberturaXmlPath3}`); + const warnings = sfCommandStubs.warn + .getCalls() + .flatMap((c) => c.args) + .join('\n'); + expect(warnings).to.include(''); + }); });
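A closing note for reviewers: both the deploy and test transformers compute their Cobertura totals with the same arithmetic (a per-class `line-rate`, then a package-wide rate recomputed from the summed counts rather than averaged). A standalone sketch of that math, with illustrative names and an added zero-guard that the patch itself does not include:

```typescript
// Illustrative sketch of the Cobertura line-rate math used in this patch:
// per-class rate = covered / (covered + uncovered), rounded to 4 decimals,
// and the package rate recomputed from the summed counts.
type ClassCounts = { covered: number; uncovered: number };

function lineRate(covered: number, valid: number): number {
  // The patch uses toFixed(4) and converts back to a number; the zero-guard
  // here is an addition for empty inputs.
  return valid === 0 ? 0 : Number((covered / valid).toFixed(4));
}

function packageLineRate(classes: ClassCounts[]): number {
  const covered = classes.reduce((sum, c) => sum + c.covered, 0);
  const valid = classes.reduce((sum, c) => sum + c.covered + c.uncovered, 0);
  return lineRate(covered, valid);
}

// Example: classes covering 8/10 and 5/10 lines give a package rate of 13/20 = 0.65.
console.log(packageLineRate([{ covered: 8, uncovered: 2 }, { covered: 5, uncovered: 5 }]));
```

Recomputing the package rate from the totals keeps large classes weighted proportionally instead of averaging per-class rates.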