SIGINT-864: Support async mode (#244)
maksudur-rahman-maruf authored Sep 6, 2024
1 parent b91ebf8 commit 64c63a5
Showing 12 changed files with 215 additions and 75 deletions.
12 changes: 12 additions & 0 deletions action.yml
@@ -41,6 +41,9 @@ inputs:
coverity_prComment_enabled:
description: 'Flag to enable pull request comments for new issues found in the Coverity scan'
required: false
coverity_waitForScan:
description: 'It specifies whether the workflow should wait for the analysis to complete or not'
required: false
coverity_build_command:
description: 'Build command for Coverity'
required: false
@@ -107,6 +110,9 @@ inputs:
polaris_upload_sarif_report:
description: 'Flag to enable/disable uploading of Polaris SARIF report to GitHub Advanced Security'
required: false
polaris_waitForScan:
description: 'It specifies whether the workflow should wait for the analysis to complete or not'
required: false
polaris_assessment_mode:
description: 'The test mode type of this scan'
required: false
@@ -179,6 +185,9 @@ inputs:
blackduck_upload_sarif_report:
description: 'Flag to enable/disable uploading of Black Duck SARIF report to GitHub Advanced Security'
required: false
blackduck_waitForScan:
description: 'It specifies whether the workflow should wait for the analysis to complete or not'
required: false
blackduck_search_depth:
description: 'Number indicating the search depth in the source directory'
required: false
@@ -215,6 +224,9 @@ inputs:
srm_branch_parent:
description: 'SRM branch parent'
required: false
srm_waitForScan:
description: 'It specifies whether the workflow should wait for the analysis to complete or not'
required: false
coverity_execution_path:
description: 'Coverity execution path'
required: false
160 changes: 90 additions & 70 deletions dist/index.js

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion dist/index.js.map

Large diffs are not rendered by default.

4 changes: 4 additions & 0 deletions src/application-constants.ts
@@ -32,6 +32,7 @@ export const SRM_PROJECT_NAME_KEY = 'srm_project_name'
export const SRM_PROJECT_ID_KEY = 'srm_project_id'
export const SRM_BRANCH_NAME_KEY = 'srm_branch_name'
export const SRM_BRANCH_PARENT_KEY = 'srm_branch_parent'
export const SRM_WAITFORSCAN_KEY = 'srm_waitForScan'
export const COVERITY_EXECUTION_PATH_KEY = 'coverity_execution_path'
export const BLACKDUCK_EXECUTION_PATH_KEY = 'blackduck_execution_path'

@@ -45,6 +46,7 @@ export const COVERITY_INSTALL_DIRECTORY_KEY = 'coverity_install_directory'
export const COVERITY_POLICY_VIEW_KEY = 'coverity_policy_view'
export const COVERITY_REPOSITORY_NAME_KEY = 'coverity_repository_name'
export const COVERITY_BRANCH_NAME_KEY = 'coverity_branch_name'
export const COVERITY_WAITFORSCAN_KEY = 'coverity_waitForScan'
export const COVERITY_BUILD_COMMAND_KEY = 'coverity_build_command'
export const COVERITY_CLEAN_COMMAND_KEY = 'coverity_clean_command'
export const COVERITY_CONFIG_PATH_KEY = 'coverity_config_path'
@@ -84,6 +86,7 @@ export const POLARIS_REPORTS_SARIF_SEVERITIES_KEY = 'polaris_reports_sarif_sever
export const POLARIS_REPORTS_SARIF_GROUP_SCA_ISSUES_KEY = 'polaris_reports_sarif_groupSCAIssues'
export const POLARIS_REPORTS_SARIF_ISSUE_TYPES_KEY = 'polaris_reports_sarif_issue_types'
export const POLARIS_UPLOAD_SARIF_REPORT_KEY = 'polaris_upload_sarif_report'
export const POLARIS_WAITFORSCAN_KEY = 'polaris_waitForScan'
export const POLARIS_ASSESSMENT_MODE_KEY = 'polaris_assessment_mode'
export const PROJECT_SOURCE_ARCHIVE_KEY = 'project_source_archive'
export const PROJECT_SOURCE_PRESERVESYMLINKS_KEY = 'project_source_preserveSymLinks'
@@ -113,6 +116,7 @@ export const BLACKDUCK_REPORTS_SARIF_FILE_PATH_KEY = 'blackduck_reports_sarif_fi
export const BLACKDUCK_REPORTS_SARIF_SEVERITIES_KEY = 'blackduck_reports_sarif_severities'
export const BLACKDUCK_REPORTS_SARIF_GROUP_SCA_ISSUES_KEY = 'blackduck_reports_sarif_groupSCAIssues'
export const BLACKDUCK_UPLOAD_SARIF_REPORT_KEY = 'blackduck_upload_sarif_report'
export const BLACKDUCK_WAITFORSCAN_KEY = 'blackduck_waitForScan'
export const BLACKDUCK_SEARCH_DEPTH_KEY = 'blackduck_search_depth'
export const BLACKDUCK_CONFIG_PATH_KEY = 'blackduck_config_path'
export const BLACKDUCK_ARGS_KEY = 'blackduck_args'
3 changes: 3 additions & 0 deletions src/synopsys-action/input-data/async-mode.ts
@@ -0,0 +1,3 @@
export interface AsyncMode {
waitForScan?: boolean
}
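
The new AsyncMode interface acts as a small mixin: rather than redeclaring an optional waitForScan flag on every product's data shape, PolarisData, CoverityConnect, BlackduckData and SRMData (below) simply extend it. A minimal TypeScript sketch of the pattern; the ExampleToolData type and URL are hypothetical, used only for illustration:

export interface AsyncMode {
  waitForScan?: boolean
}

// Hypothetical tool payload; the real shapes (PolarisData, SRMData, ...) carry many more fields.
interface ExampleToolData extends AsyncMode {
  url: string
}

// When the flag is set it is serialized into the bridge input JSON; when left
// undefined the property is omitted entirely and the downstream default applies.
const explicitSync: ExampleToolData = {url: 'https://example.invalid', waitForScan: false}
const useDefault: ExampleToolData = {url: 'https://example.invalid'}

console.log(JSON.stringify(explicitSync)) // {"url":"https://example.invalid","waitForScan":false}
console.log(JSON.stringify(useDefault))   // {"url":"https://example.invalid"}
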
3 changes: 2 additions & 1 deletion src/synopsys-action/input-data/blackduck.ts
@@ -1,4 +1,5 @@
import {Reports} from './reports'
import {AsyncMode} from './async-mode'

export enum BLACKDUCK_SCAN_FAILURE_SEVERITIES {
ALL = 'ALL',
@@ -19,7 +20,7 @@ export interface Blackduck {
network?: NetworkAirGap
}

export interface BlackduckData extends BlackDuckArbitrary {
export interface BlackduckData extends BlackDuckArbitrary, AsyncMode {
url: string
token: string
install?: {directory: string}
3 changes: 2 additions & 1 deletion src/synopsys-action/input-data/coverity.ts
@@ -1,4 +1,5 @@
import {GithubData} from './github'
import {AsyncMode} from './async-mode'

export interface Coverity {
coverity: CoverityConnect
@@ -17,7 +18,7 @@ export interface AutomationData {
prcomment?: boolean
}

export interface CoverityConnect extends CoverityArbitrary {
export interface CoverityConnect extends CoverityArbitrary, AsyncMode {
connect: CoverityData
install?: {directory: string}
automation?: AutomationData
3 changes: 2 additions & 1 deletion src/synopsys-action/input-data/polaris.ts
@@ -2,6 +2,7 @@ import {BlackDuckArbitrary} from './blackduck'
import {CoverityArbitrary} from './coverity'
import {GithubData} from './github'
import {Reports} from './reports'
import {AsyncMode} from './async-mode'

export interface Polaris {
polaris: PolarisData
@@ -11,7 +12,7 @@ export interface Polaris {
blackduck?: BlackDuckArbitrary
}

export interface PolarisData {
export interface PolarisData extends AsyncMode {
triage?: string
accesstoken: string
serverUrl: string
3 changes: 2 additions & 1 deletion src/synopsys-action/input-data/srm.ts
@@ -1,5 +1,6 @@
import {BlackDuckArbitrary} from './blackduck'
import {CoverityArbitrary} from './coverity'
import {AsyncMode} from './async-mode'

export interface SRM {
srm: SRMData
@@ -8,7 +9,7 @@ export interface SRM {
blackduck?: BlackduckData
}

export interface SRMData {
export interface SRMData extends AsyncMode {
url: string
apikey: string
project?: {id?: string; name?: string}
4 changes: 4 additions & 0 deletions src/synopsys-action/inputs.ts
@@ -16,6 +16,7 @@ export const SRM_PROJECT_NAME = getInput(constants.SRM_PROJECT_NAME_KEY)?.trim()
export const SRM_PROJECT_ID = getInput(constants.SRM_PROJECT_ID_KEY)?.trim() || ''
export const SRM_BRANCH_NAME = getInput(constants.SRM_BRANCH_NAME_KEY)?.trim() || ''
export const SRM_BRANCH_PARENT = getInput(constants.SRM_BRANCH_PARENT_KEY)?.trim() || ''
export const SRM_WAITFORSCAN = getInput(constants.SRM_WAITFORSCAN_KEY)?.trim() || ''
export const COVERITY_EXECUTION_PATH = getInput(constants.COVERITY_EXECUTION_PATH_KEY)?.trim() || ''
export const BLACKDUCK_EXECUTION_PATH = getInput(constants.BLACKDUCK_EXECUTION_PATH_KEY)?.trim() || ''

@@ -37,6 +38,7 @@ export const POLARIS_REPORTS_SARIF_SEVERITIES = getInput(constants.POLARIS_REPOR
export const POLARIS_REPORTS_SARIF_GROUP_SCA_ISSUES = getInput(constants.POLARIS_REPORTS_SARIF_GROUP_SCA_ISSUES_KEY)?.trim() || ''
export const POLARIS_REPORTS_SARIF_ISSUE_TYPES = getInput(constants.POLARIS_REPORTS_SARIF_ISSUE_TYPES_KEY)?.trim() || ''
export const POLARIS_UPLOAD_SARIF_REPORT = getInput(constants.POLARIS_UPLOAD_SARIF_REPORT_KEY)?.trim() || ''
export const POLARIS_WAITFORSCAN = getInput(constants.POLARIS_WAITFORSCAN_KEY)?.trim() || ''
export const POLARIS_ASSESSMENT_MODE = getInput(constants.POLARIS_ASSESSMENT_MODE_KEY)?.trim() || ''
export const PROJECT_DIRECTORY = getInput(constants.PROJECT_DIRECTORY_KEY)?.trim() || ''
export const PROJECT_SOURCE_ARCHIVE = getInput(constants.PROJECT_SOURCE_ARCHIVE_KEY)?.trim() || ''
@@ -56,6 +58,7 @@ export const COVERITY_BRANCH_NAME = getInput(constants.COVERITY_BRANCH_NAME_KEY)
export const COVERITY_PRCOMMENT_ENABLED = getInput(constants.COVERITY_AUTOMATION_PRCOMMENT_KEY)?.trim() || getInput(constants.COVERITY_PRCOMMENT_ENABLED_KEY)?.trim() || ''
export const COVERITY_LOCAL = getInput(constants.COVERITY_LOCAL_KEY)?.trim() === 'true' || false
export const COVERITY_VERSION = getInput(constants.COVERITY_VERSION_KEY)?.trim() || getInput(constants.BRIDGE_COVERITY_VERSION_KEY)?.trim() || ''
export const COVERITY_WAITFORSCAN = getInput(constants.COVERITY_WAITFORSCAN_KEY)?.trim() || ''
export const COVERITY_BUILD_COMMAND = getInput(constants.COVERITY_BUILD_COMMAND_KEY)?.trim() || ''
export const COVERITY_CLEAN_COMMAND = getInput(constants.COVERITY_CLEAN_COMMAND_KEY)?.trim() || ''
export const COVERITY_CONFIG_PATH = getInput(constants.COVERITY_CONFIG_PATH_KEY)?.trim() || ''
@@ -78,6 +81,7 @@ export const BLACKDUCK_REPORTS_SARIF_FILE_PATH = getInput(constants.BLACKDUCK_RE
export const BLACKDUCK_REPORTS_SARIF_SEVERITIES = getInput(constants.BLACKDUCK_REPORTS_SARIF_SEVERITIES_KEY)?.trim() || ''
export const BLACKDUCK_REPORTS_SARIF_GROUP_SCA_ISSUES = getInput(constants.BLACKDUCK_REPORTS_SARIF_GROUP_SCA_ISSUES_KEY)?.trim() || ''
export const BLACKDUCK_UPLOAD_SARIF_REPORT = getInput(constants.BLACKDUCK_UPLOAD_SARIF_REPORT_KEY)?.trim() || ''
export const BLACKDUCK_WAITFORSCAN = getInput(constants.BLACKDUCK_WAITFORSCAN_KEY)?.trim() || ''
export const BLACKDUCK_SEARCH_DEPTH = getInput(constants.BLACKDUCK_SEARCH_DEPTH_KEY)?.trim() || ''
export const BLACKDUCK_CONFIG_PATH = getInput(constants.BLACKDUCK_CONFIG_PATH_KEY)?.trim() || ''
export const BLACKDUCK_ARGS = getInput(constants.BLACKDUCK_ARGS_KEY)?.trim() || ''
16 changes: 16 additions & 0 deletions src/synopsys-action/tools-parameter.ts
@@ -92,6 +92,10 @@ export class SynopsysToolsParameter {
}
}

if (isBoolean(inputs.POLARIS_WAITFORSCAN)) {
polData.data.polaris.waitForScan = parseToBoolean(inputs.POLARIS_WAITFORSCAN)
}

if (inputs.PROJECT_DIRECTORY || inputs.PROJECT_SOURCE_ARCHIVE || inputs.PROJECT_SOURCE_EXCLUDES || inputs.PROJECT_SOURCE_PRESERVESYMLINKS) {
polData.data.project = {}

@@ -263,6 +267,10 @@
covData.data.coverity.connect.policy = {view: inputs.COVERITY_POLICY_VIEW}
}

if (isBoolean(inputs.COVERITY_WAITFORSCAN)) {
covData.data.coverity.waitForScan = parseToBoolean(inputs.COVERITY_WAITFORSCAN)
}

if (inputs.COVERITY_REPOSITORY_NAME || inputs.COVERITY_BRANCH_NAME || inputs.PROJECT_DIRECTORY) {
covData.data.project = {
...(inputs.COVERITY_REPOSITORY_NAME && {
@@ -374,6 +382,10 @@
}
}

if (isBoolean(inputs.BLACKDUCK_WAITFORSCAN)) {
blackduckData.data.blackduck.waitForScan = parseToBoolean(inputs.BLACKDUCK_WAITFORSCAN)
}

if (inputs.PROJECT_DIRECTORY) {
blackduckData.data.project = {
directory: inputs.PROJECT_DIRECTORY
@@ -523,6 +535,10 @@
}
}

if (isBoolean(inputs.SRM_WAITFORSCAN)) {
srmData.data.srm.waitForScan = parseToBoolean(inputs.SRM_WAITFORSCAN)
}

if (inputs.PROJECT_DIRECTORY) {
srmData.data.project = {
directory: inputs.PROJECT_DIRECTORY
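
Each of the four blocks above follows the same guard: the raw string input is only converted and written into the payload when it is an explicit boolean value, so an unset input (which inputs.ts normalizes to an empty string) never emits waitForScan at all. A minimal TypeScript sketch of that pattern; the isBoolean and parseToBoolean functions here are illustrative re-implementations of what the repository's utility helpers are assumed to do, not the actual ones:

// Assumed semantics: true only for an explicit 'true'/'false' (string or boolean), case-insensitive.
function isBoolean(value: string | boolean): boolean {
  const v = value.toString().toLowerCase()
  return v === 'true' || v === 'false'
}

// Assumed semantics: 'true' (case-insensitive) or true maps to true, everything else to false.
function parseToBoolean(value: string | boolean): boolean {
  return value.toString().toLowerCase() === 'true'
}

interface ScanPayload {
  waitForScan?: boolean
}

// Guard pattern used for Polaris, Coverity, Black Duck and SRM above.
function applyWaitForScan(rawInput: string, payload: ScanPayload): ScanPayload {
  if (isBoolean(rawInput)) {
    payload.waitForScan = parseToBoolean(rawInput)
  }
  return payload
}

console.log(applyWaitForScan('false', {})) // { waitForScan: false }
console.log(applyWaitForScan('', {}))      // {}  (unset input, flag not emitted)

In this sketch an explicit 'FALSE' is still accepted, while anything else, including the empty default, is ignored rather than treated as false, leaving the decision to the downstream default.
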
77 changes: 77 additions & 0 deletions test/unit/synopsys-action/tools-parameter.test.ts
@@ -771,6 +771,28 @@ test('Test getFormattedCommandForBlackduck with sarif params', () => {
expect(resp).toContain('--stage blackduck')
})

it('should pass polaris fields and wait for scan field to bridge', () => {
Object.defineProperty(inputs, 'POLARIS_SERVER_URL', {value: 'server_url'})
Object.defineProperty(inputs, 'POLARIS_ACCESS_TOKEN', {value: 'access_token'})
Object.defineProperty(inputs, 'POLARIS_APPLICATION_NAME', {value: 'POLARIS_APPLICATION_NAME'})
Object.defineProperty(inputs, 'POLARIS_PROJECT_NAME', {value: 'POLARIS_PROJECT_NAME'})
Object.defineProperty(inputs, 'POLARIS_ASSESSMENT_TYPES', {value: 'SCA, SAST'})
Object.defineProperty(inputs, 'POLARIS_WAITFORSCAN', {value: true})
const stp: SynopsysToolsParameter = new SynopsysToolsParameter(tempPath)
const resp = stp.getFormattedCommandForPolaris('synopsys-action')

const jsonString = fs.readFileSync(tempPath.concat(polaris_input_file), 'utf-8')
const jsonData = JSON.parse(jsonString)
expect(resp).not.toBeNull()
expect(resp).toContain('--stage polaris')
expect(jsonData.data.polaris.serverUrl).toContain('server_url')
expect(jsonData.data.polaris.accesstoken).toContain('access_token')
expect(jsonData.data.polaris.application.name).toContain('POLARIS_APPLICATION_NAME')
expect(jsonData.data.polaris.project.name).toContain('POLARIS_PROJECT_NAME')
expect(jsonData.data.polaris.assessment.types).toEqual(['SCA', 'SAST'])
expect(jsonData.data.polaris.waitForScan).toBe(true)
})

it('should pass polaris source upload fields to bridge', () => {
Object.defineProperty(inputs, 'POLARIS_SERVER_URL', {value: 'server_url'})
Object.defineProperty(inputs, 'POLARIS_ACCESS_TOKEN', {value: 'access_token'})
@@ -840,6 +862,23 @@ it('should pass polaris SCA and SAST arbitrary fields to bridge', () => {
expect(jsonData.data.blackduck.args).toBe('BLACKDUCK_ARGS')
})

it('should pass black duck fields and wait for scan field to bridge', () => {
Object.defineProperty(inputs, 'BLACKDUCK_URL', {value: 'BLACKDUCK_URL'})
Object.defineProperty(inputs, 'BLACKDUCK_API_TOKEN', {value: 'BLACKDUCK_API_TOKEN'})
Object.defineProperty(inputs, 'BLACKDUCK_WAITFORSCAN', {value: true})

const stp: SynopsysToolsParameter = new SynopsysToolsParameter(tempPath)
const resp = stp.getFormattedCommandForBlackduck()

const jsonString = fs.readFileSync(tempPath.concat(blackduck_input_file), 'utf-8')
const jsonData = JSON.parse(jsonString)
expect(resp).not.toBeNull()
expect(resp).toContain('--stage blackduck')
expect(jsonData.data.blackduck.url).toBe('BLACKDUCK_URL')
expect(jsonData.data.blackduck.token).toBe('BLACKDUCK_API_TOKEN')
expect(jsonData.data.blackduck.waitForScan).toBe(true)
})

it('should pass black duck fields and project directory field to bridge', () => {
Object.defineProperty(inputs, 'BLACKDUCK_URL', {value: 'BLACKDUCK_URL'})
Object.defineProperty(inputs, 'BLACKDUCK_API_TOKEN', {value: 'BLACKDUCK_API_TOKEN'})
@@ -878,6 +917,25 @@ it('should pass blackduck arbitrary fields to bridge', () => {
expect(jsonData.data.blackduck.args).toBe('BLACKDUCK_ARGS')
})

it('should pass coverity fields and wait for scan field to bridge', () => {
Object.defineProperty(inputs, 'COVERITY_URL', {value: 'COVERITY_URL'})
Object.defineProperty(inputs, 'COVERITY_USER', {value: 'COVERITY_USER'})
Object.defineProperty(inputs, 'COVERITY_PASSPHRASE', {value: 'COVERITY_PASSPHRASE'})
Object.defineProperty(inputs, 'COVERITY_WAITFORSCAN', {value: true})

const stp: SynopsysToolsParameter = new SynopsysToolsParameter(tempPath)
const resp = stp.getFormattedCommandForCoverity('synopsys-action')

const jsonString = fs.readFileSync(tempPath.concat(coverity_input_file), 'utf-8')
const jsonData = JSON.parse(jsonString)
expect(resp).not.toBeNull()
expect(resp).toContain('--stage connect')
expect(jsonData.data.coverity.connect.url).toBe('COVERITY_URL')
expect(jsonData.data.coverity.connect.user.name).toBe('COVERITY_USER')
expect(jsonData.data.coverity.connect.user.password).toBe('COVERITY_PASSPHRASE')
expect(jsonData.data.coverity.waitForScan).toBe(true)
})

it('should pass coverity fields and project directory field to bridge', () => {
Object.defineProperty(inputs, 'COVERITY_URL', {value: 'COVERITY_URL'})
Object.defineProperty(inputs, 'COVERITY_USER', {value: 'COVERITY_USER'})
@@ -1196,6 +1254,25 @@ it('should pass SRM SCA and SAST arbitrary fields to bridge', () => {
expect(jsonData.data.blackduck.args).toBe('BLACKDUCK_ARGS')
})

it('should pass SRM fields and wait for scan field to bridge', () => {
Object.defineProperty(inputs, 'SRM_URL', {value: 'srm_url'})
Object.defineProperty(inputs, 'SRM_API_KEY', {value: 'api_key'})
Object.defineProperty(inputs, 'SRM_ASSESSMENT_TYPES', {value: 'SCA,SAST'})
Object.defineProperty(inputs, 'SRM_WAITFORSCAN', {value: true})

const stp: SynopsysToolsParameter = new SynopsysToolsParameter(tempPath)
const resp = stp.getFormattedCommandForSRM('synopsys-action')

const jsonString = fs.readFileSync(tempPath.concat(srm_input_file), 'utf-8')
const jsonData = JSON.parse(jsonString)
expect(resp).not.toBeNull()
expect(resp).toContain('--stage srm')
expect(jsonData.data.srm.url).toContain('srm_url')
expect(jsonData.data.srm.apikey).toContain('api_key')
expect(jsonData.data.srm.assessment.types).toEqual(['SCA', 'SAST'])
expect(jsonData.data.srm.waitForScan).toBe(true)
})

it('should pass SRM fields and project directory field to bridge', () => {
Object.defineProperty(inputs, 'SRM_URL', {value: 'srm_url'})
Object.defineProperty(inputs, 'SRM_API_KEY', {value: 'api_key'})
