From e2a348a7f123630e939348f145de3a8180bbe048 Mon Sep 17 00:00:00 2001
From: Joyce Quach
Date: Fri, 27 Sep 2024 16:26:10 -0400
Subject: [PATCH] Run linter

Signed-off-by: Joyce Quach
---
 src/commands/convert/anchoregrype2hdf.ts | 30 +--
 src/commands/convert/asff2hdf.ts | 220 ++++++++--------
 src/commands/convert/aws_config2hdf.ts | 94 +++----
 src/commands/convert/burpsuite2hdf.ts | 30 +--
 src/commands/convert/ckl2POAM.ts | 238 ++++++++---------
 src/commands/convert/ckl2hdf.ts | 32 +--
 src/commands/convert/conveyor2hdf.ts | 34 +--
 src/commands/convert/cyclonedx_sbom2hdf.ts | 32 +--
 src/commands/convert/dbprotect2hdf.ts | 32 +--
 src/commands/convert/fortify2hdf.ts | 32 +--
 src/commands/convert/hdf2asff.ts | 146 +++++------
 src/commands/convert/hdf2caat.ts | 30 +--
 src/commands/convert/hdf2ckl.ts | 168 ++++++------
 src/commands/convert/hdf2condensed.ts | 62 ++---
 src/commands/convert/hdf2csv.ts | 66 ++---
 src/commands/convert/hdf2splunk.ts | 60 ++---
 src/commands/convert/hdf2xccdf.ts | 24 +-
 src/commands/convert/index.ts | 286 +++++++++++----------
 src/commands/convert/ionchannel2hdf.ts | 132 +++++-----
 src/commands/convert/jfrog_xray2hdf.ts | 32 +--
 src/commands/convert/msft_secure2hdf.ts | 140 +++++-----
 src/commands/convert/nessus2hdf.ts | 40 +--
 src/commands/convert/netsparker2hdf.ts | 32 +--
 src/commands/convert/nikto2hdf.ts | 30 +--
 src/commands/convert/prisma2hdf.ts | 36 +--
 src/commands/convert/prowler2hdf.ts | 40 +--
 src/commands/convert/sarif2hdf.ts | 30 +--
 src/commands/convert/scoutsuite2hdf.ts | 32 +--
 src/commands/convert/snyk2hdf.ts | 38 +--
 src/commands/convert/sonarqube2hdf.ts | 34 +--
 src/commands/convert/splunk2hdf.ts | 116 ++++-----
 src/commands/convert/trivy2hdf.ts | 40 +--
 src/commands/convert/trufflehog2hdf.ts | 32 +--
 src/commands/convert/twistlock2hdf.ts | 32 +--
 src/commands/convert/veracode2hdf.ts | 28 +-
 src/commands/convert/xccdf_results2hdf.ts | 32 +--
 src/commands/convert/zap2hdf.ts | 34 +--
 37 files changed, 1274 insertions(+), 1272 deletions(-)

diff --git a/src/commands/convert/anchoregrype2hdf.ts b/src/commands/convert/anchoregrype2hdf.ts
index 7edc6ecd8..6e0ab7a05 100644
--- a/src/commands/convert/anchoregrype2hdf.ts
+++ b/src/commands/convert/anchoregrype2hdf.ts
@@ -1,7 +1,7 @@
-import {Command, Flags} from '@oclif/core';
-import fs from 'fs';
-import {AnchoreGrypeMapper as Mapper} from '@mitre/hdf-converters';
-import {checkInput, checkSuffix} from '../../utils/global';
+import {Command, Flags} from '@oclif/core'
+import fs from 'fs'
+import {AnchoreGrypeMapper as Mapper} from '@mitre/hdf-converters'
+import {checkInput, checkSuffix} from '../../utils/global'
 
 export default class AnchoreGrype2HDF extends Command {
   static readonly usage =
@@ -11,7 +11,7 @@
     'Translate a Anchore Grype output file into an HDF results set';
 
   static readonly examples = [
-    'saf convert anchoregrype2hdf -i anchoregrype.json -o output-hdf-name.json'
+    'saf convert anchoregrype2hdf -i anchoregrype.json -o output-hdf-name.json',
   ];
 
   static readonly flags = {
@@ -19,29 +19,29 @@
     input: Flags.string({
       char: 'i',
       required: true,
-      description: 'Input Anchore Grype file'
+      description: 'Input Anchore Grype file',
     }),
     output: Flags.string({
      char: 'o',
      required: true,
-      description: 'Output HDF file'
+      description: 'Output HDF file',
    }),
-    'with-raw': Flags.boolean({char: 'w', required: false})
+    'with-raw': Flags.boolean({char: 'w', required: false}),
   };
 
   async run() {
-    const
{flags} = await this.parse(AnchoreGrype2HDF); - const input = fs.readFileSync(flags.input, 'utf8'); + const {flags} = await this.parse(AnchoreGrype2HDF) + const input = fs.readFileSync(flags.input, 'utf8') checkInput( {data: input, filename: flags.input}, 'grype', - 'Anchore Grype JSON results file' - ); + 'Anchore Grype JSON results file', + ) - const converter = new Mapper(input, flags['with-raw']); + const converter = new Mapper(input, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/asff2hdf.ts b/src/commands/convert/asff2hdf.ts index 832d728c5..e031eda05 100644 --- a/src/commands/convert/asff2hdf.ts +++ b/src/commands/convert/asff2hdf.ts @@ -1,9 +1,9 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {ASFFResults as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; -import _ from 'lodash'; -import path from 'path'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {ASFFResults as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' +import _ from 'lodash' +import path from 'path' import { AwsSecurityFindingFilters, DescribeStandardsControlsCommandOutput, @@ -11,14 +11,14 @@ import { SecurityHub, SecurityHubClientConfig, StandardsControl, - StandardsSubscription -} from '@aws-sdk/client-securityhub'; -import {NodeHttpHandler} from '@smithy/node-http-handler'; -import https from 'https'; -import {createWinstonLogger} from '../../utils/logging'; + StandardsSubscription, +} from '@aws-sdk/client-securityhub' +import {NodeHttpHandler} from '@smithy/node-http-handler' +import https from 'https' +import {createWinstonLogger} from '../../utils/logging' // Should be no more than 100 -const API_MAX_RESULTS = 100; +const API_MAX_RESULTS = 100 export default class ASFF2HDF extends Command { static readonly usage = @@ -30,7 +30,7 @@ export default class ASFF2HDF extends Command { static readonly examples = [ 'saf convert asff2hdf -i asff-findings.json -o output-folder-name', 'saf convert asff2hdf -i asff-findings.json --securityhub standard-1.json standard-2.json -o output-folder-name', - 'saf convert asff2hdf --aws -o out -r us-west-2 --target rhel7' + 'saf convert asff2hdf --aws -o out -r us-west-2 --target rhel7', ]; static readonly flags = { @@ -39,50 +39,50 @@ export default class ASFF2HDF extends Command { char: 'i', required: false, description: 'Input ASFF JSON file', - exclusive: ['aws', 'region', 'insecure', 'certificate', 'target'] + exclusive: ['aws', 'region', 'insecure', 'certificate', 'target'], }), aws: Flags.boolean({ char: 'a', required: false, description: 'Pull findings from AWS Security Hub', exclusive: ['input'], - dependsOn: ['region'] + dependsOn: ['region'], }), region: Flags.string({ char: 'r', required: false, description: 'Security Hub region to pull findings from', - exclusive: ['input'] + exclusive: ['input'], }), insecure: Flags.boolean({ char: 'I', required: false, default: false, description: 'Disable SSL verification, this is insecure.', - exclusive: ['input', 'certificate'] + exclusive: ['input', 'certificate'], }), securityhub: Flags.string({ required: false, multiple: true, description: - 'Additional input files to provide context that an ASFF file needs such as the CIS AWS Foundations or AWS Foundational Security Best Practices documents (in ASFF compliant JSON form)' + 
'Additional input files to provide context that an ASFF file needs such as the CIS AWS Foundations or AWS Foundational Security Best Practices documents (in ASFF compliant JSON form)', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON folder' + description: 'Output HDF JSON folder', }), certificate: Flags.string({ char: 'C', required: false, description: 'Trusted signing certificate file', - exclusive: ['input', 'insecure'] + exclusive: ['input', 'insecure'], }), logLevel: Flags.string({ char: 'L', required: false, default: 'info', - options: ['info', 'warn', 'debug', 'verbose'] + options: ['info', 'warn', 'debug', 'verbose'], }), target: Flags.string({ char: 't', @@ -90,71 +90,71 @@ export default class ASFF2HDF extends Command { multiple: true, description: 'Target ID(s) to pull from Security Hub (maximum 10), leave blank for non-HDF findings', - exclusive: ['input'] - }) + exclusive: ['input'], + }), }; async run() { - const {flags} = await this.parse(ASFF2HDF); - const logger = createWinstonLogger('asff2hdf', flags.logLevel); - let securityhub; + const {flags} = await this.parse(ASFF2HDF) + const logger = createWinstonLogger('asff2hdf', flags.logLevel) + let securityhub // Check if output folder already exists if (fs.existsSync(flags.output)) { - throw new Error(`Output folder ${flags.output} already exists`); + throw new Error(`Output folder ${flags.output} already exists`) } - const findings: string[] = []; + const findings: string[] = [] // If we've been passed an input file if (flags.input) { - const data = fs.readFileSync(flags.input, 'utf8'); + const data = fs.readFileSync(flags.input, 'utf8') // Attempt to convert to one finding per line try { - const convertedJson = JSON.parse(data); + const convertedJson = JSON.parse(data) if (Array.isArray(convertedJson)) { findings.push( - ...convertedJson.map((finding) => JSON.stringify(finding)) - ); + ...convertedJson.map(finding => JSON.stringify(finding)), + ) } else if ('Findings' in convertedJson) { findings.push( ...convertedJson.Findings.map((finding: Record) => - JSON.stringify(finding) - ) - ); + JSON.stringify(finding), + ), + ) } else if ('Controls' in convertedJson) { throw new Error( - 'Invalid ASFF findings format - a standards standards was passed to --input instead of --securityhub' - ); + 'Invalid ASFF findings format - a standards standards was passed to --input instead of --securityhub', + ) } else { checkInput( {data: data, filename: flags.input}, 'asff', - 'AWS Security Finding Format JSON' - ); + 'AWS Security Finding Format JSON', + ) } } catch (error) { - const splitLines = data.split('\n'); + const splitLines = data.split('\n') if (splitLines.length === 0) { - logger.error('Invalid ASFF findings format - no lines found'); - throw error; + logger.error('Invalid ASFF findings format - no lines found') + throw error } try { findings.push( - ...splitLines.map((finding) => JSON.stringify(JSON.parse(finding))) - ); + ...splitLines.map(finding => JSON.stringify(JSON.parse(finding))), + ) } catch (error) { - logger.error('Invalid ASFF findings format - unable to parse JSON'); - throw error; + logger.error('Invalid ASFF findings format - unable to parse JSON') + throw error } } // If we've been passed any Security Standards JSONs if (flags.securityhub) { securityhub = flags.securityhub.map((file: string) => - fs.readFileSync(file, 'utf8') - ); + fs.readFileSync(file, 'utf8'), + ) } } else if (flags.aws) { // Flag to pull findings from AWS Security Hub @@ -165,129 +165,129 @@ export 
default class ASFF2HDF extends Command { // Disable HTTPS verification if requested rejectUnauthorized: !flags.insecure, // Pass an SSL certificate to trust - ca: flags.certificate - ? fs.readFileSync(flags.certificate, 'utf8') - : undefined - }) - }) - }; + ca: flags.certificate ? + fs.readFileSync(flags.certificate, 'utf8') : + undefined, + }), + }), + } // Create our SecurityHub client - const client = new SecurityHub(clientOptions); + const client = new SecurityHub(clientOptions) // Pagination - let nextToken; - let first = true; - let filters: AwsSecurityFindingFilters = {}; + let nextToken + let first = true + let filters: AwsSecurityFindingFilters = {} // Filter by target name if (flags.target) { filters = { Id: flags.target.map((target: string) => { - return {Value: target, Comparison: 'PREFIX'}; - }) - }; + return {Value: target, Comparison: 'PREFIX'} + }), + } } - logger.info('Starting collection of Findings'); + logger.info('Starting collection of Findings') let queryParams: Record = { Filters: filters, - MaxResults: API_MAX_RESULTS - }; + MaxResults: API_MAX_RESULTS, + } // Get findings while (first || nextToken !== undefined) { - first = false; - logger.debug(`Querying for NextToken: ${nextToken}`); - _.set(queryParams, 'NextToken', nextToken); + first = false + logger.debug(`Querying for NextToken: ${nextToken}`) + _.set(queryParams, 'NextToken', nextToken) - const getFindingsResult = await client.getFindings(queryParams); + const getFindingsResult = await client.getFindings(queryParams) logger.debug( - `Received: ${getFindingsResult.Findings?.length} findings` - ); + `Received: ${getFindingsResult.Findings?.length} findings`, + ) if (getFindingsResult.Findings) { findings.push( - ...getFindingsResult.Findings.map((finding) => - JSON.stringify(finding) - ) - ); + ...getFindingsResult.Findings.map(finding => + JSON.stringify(finding), + ), + ) } - nextToken = getFindingsResult.NextToken; + nextToken = getFindingsResult.NextToken } - nextToken = undefined; - first = true; + nextToken = undefined + first = true - logger.info('Starting collection of enabled security standards'); - const enabledStandards: StandardsSubscription[] = []; + logger.info('Starting collection of enabled security standards') + const enabledStandards: StandardsSubscription[] = [] - queryParams = _.omit(queryParams, ['Filters']); + queryParams = _.omit(queryParams, ['Filters']) // Get active security standards subscriptions (enabled standards) while (first || nextToken !== undefined) { - first = false; - logger.debug(`Querying for NextToken: ${nextToken}`); + first = false + logger.debug(`Querying for NextToken: ${nextToken}`) // type system seems to think that this call / the result is from the callback variant of the function instead of the promise based one and throwing fits const getEnabledStandardsResult: GetEnabledStandardsCommandOutput = (await client.getEnabledStandards({ - NextToken: nextToken - })) as unknown as GetEnabledStandardsCommandOutput; + NextToken: nextToken, + })) as unknown as GetEnabledStandardsCommandOutput logger.debug( - `Received: ${getEnabledStandardsResult.StandardsSubscriptions?.length} standards` - ); + `Received: ${getEnabledStandardsResult.StandardsSubscriptions?.length} standards`, + ) if (getEnabledStandardsResult.StandardsSubscriptions) { enabledStandards.push( - ...getEnabledStandardsResult.StandardsSubscriptions - ); + ...getEnabledStandardsResult.StandardsSubscriptions, + ) } - nextToken = getEnabledStandardsResult.NextToken; + nextToken = 
getEnabledStandardsResult.NextToken } - securityhub = []; + securityhub = [] // Describe the controls to give context to the mapper for (const standard of enabledStandards) { - nextToken = undefined; - first = true; - const standardsControls: StandardsControl[] = []; + nextToken = undefined + first = true + const standardsControls: StandardsControl[] = [] while (nextToken !== undefined) { - first = false; - logger.debug(`Querying for NextToken: ${nextToken}`); + first = false + logger.debug(`Querying for NextToken: ${nextToken}`) const getEnabledStandardsResult: DescribeStandardsControlsCommandOutput = await client.describeStandardsControls({ StandardsSubscriptionArn: standard.StandardsSubscriptionArn, - NextToken: nextToken || '' - }); + NextToken: nextToken || '', + }) logger.info( - `Received: ${getEnabledStandardsResult.Controls?.length} Controls` - ); + `Received: ${getEnabledStandardsResult.Controls?.length} Controls`, + ) if (getEnabledStandardsResult.Controls) { - standardsControls.push(...getEnabledStandardsResult.Controls); + standardsControls.push(...getEnabledStandardsResult.Controls) } - nextToken = getEnabledStandardsResult.NextToken; + nextToken = getEnabledStandardsResult.NextToken } - securityhub.push(JSON.stringify({Controls: standardsControls})); + securityhub.push(JSON.stringify({Controls: standardsControls})) } } else { throw new Error( - 'Please select an input file or --aws to pull findings from AWS' - ); + 'Please select an input file or --aws to pull findings from AWS', + ) } - const converter = new Mapper(findings.join('\n'), securityhub); + const converter = new Mapper(findings.join('\n'), securityhub) - const results = converter.toHdf(); + const results = converter.toHdf() - fs.mkdirSync(flags.output); + fs.mkdirSync(flags.output) _.forOwn(results, (result, filename) => { fs.writeFileSync( path.join(flags.output, checkSuffix(filename)), - JSON.stringify(result, null, 2) - ); - }); + JSON.stringify(result, null, 2), + ) + }) } } diff --git a/src/commands/convert/aws_config2hdf.ts b/src/commands/convert/aws_config2hdf.ts index 6affe92a8..b490cc9b1 100644 --- a/src/commands/convert/aws_config2hdf.ts +++ b/src/commands/convert/aws_config2hdf.ts @@ -1,8 +1,8 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {AwsConfigMapper as Mapper} from '@mitre/hdf-converters'; -import {ExecJSON} from 'inspecjs'; -import {checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {AwsConfigMapper as Mapper} from '@mitre/hdf-converters' +import {ExecJSON} from 'inspecjs' +import {checkSuffix} from '../../utils/global' export default class AWSConfig2HDF extends Command { static readonly usage = @@ -12,7 +12,7 @@ export default class AWSConfig2HDF extends Command { 'Pull Configuration findings from AWS Config and convert into a Heimdall Data Format JSON file'; static readonly examples = [ - 'saf convert aws_config2hdf -a ABCDEFGHIJKLMNOPQRSTUV -s +4NOT39A48REAL93SECRET934 -r us-east-1 -o output-hdf-name.json' + 'saf convert aws_config2hdf -a ABCDEFGHIJKLMNOPQRSTUV -s +4NOT39A48REAL93SECRET934 -r us-east-1 -o output-hdf-name.json', ]; static readonly flags = { @@ -20,96 +20,96 @@ export default class AWSConfig2HDF extends Command { accessKeyId: Flags.string({ char: 'a', required: false, - description: 'Access key ID' + description: 'Access key ID', }), secretAccessKey: Flags.string({ char: 's', required: false, - description: 'Secret access key' + description: 'Secret access key', }), sessionToken: 
Flags.string({ char: 't', required: false, - description: 'Session token' + description: 'Session token', }), region: Flags.string({ char: 'r', required: true, - description: 'Region to pull findings from' + description: 'Region to pull findings from', }), insecure: Flags.boolean({ char: 'i', required: false, default: false, description: 'Disable SSL verification, this is insecure.', - exclusive: ['certificate'] + exclusive: ['certificate'], }), certificate: Flags.string({ char: 'C', required: false, description: 'Trusted signing certificate file', - exclusive: ['insecure'] + exclusive: ['insecure'], }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' - }) + description: 'Output HDF JSON File', + }), }; // Refs may not be defined if no resources were found ensureRefs(output: ExecJSON.Execution): ExecJSON.Execution { return { ...output, - profiles: output.profiles.map((profile) => { + profiles: output.profiles.map(profile => { return { ...profile, - controls: profile.controls.map((control) => { + controls: profile.controls.map(control => { if (!control.refs || !control.results) { return { ...control, refs: [], - results: [] - }; + results: [], + } } - return control; - }) - }; - }) - }; + return control + }), + } + }), + } } async run() { - const {flags} = await this.parse(AWSConfig2HDF); + const {flags} = await this.parse(AWSConfig2HDF) const converter = - flags.accessKeyId && flags.secretAccessKey - ? new Mapper( - { - credentials: { - accessKeyId: flags.accessKeyId || '', - secretAccessKey: flags.secretAccessKey || '', - sessionToken: flags.sessionToken - }, - region: flags.region + flags.accessKeyId && flags.secretAccessKey ? + new Mapper( + { + credentials: { + accessKeyId: flags.accessKeyId || '', + secretAccessKey: flags.secretAccessKey || '', + sessionToken: flags.sessionToken, }, - !flags.insecure, - flags.certificate - ? fs.readFileSync(flags.certificate, 'utf8') - : undefined - ) - : new Mapper( - {region: flags.region}, - !flags.insecure, - flags.certificate - ? fs.readFileSync(flags.certificate, 'utf8') - : undefined - ); + region: flags.region, + }, + !flags.insecure, + flags.certificate ? + fs.readFileSync(flags.certificate, 'utf8') : + undefined, + ) : + new Mapper( + {region: flags.region}, + !flags.insecure, + flags.certificate ? 
+ fs.readFileSync(flags.certificate, 'utf8') : + undefined, + ) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(this.ensureRefs(await converter.toHdf()), null, 2) - ); + JSON.stringify(this.ensureRefs(await converter.toHdf()), null, 2), + ) } } diff --git a/src/commands/convert/burpsuite2hdf.ts b/src/commands/convert/burpsuite2hdf.ts index 11b5365d2..67532574d 100644 --- a/src/commands/convert/burpsuite2hdf.ts +++ b/src/commands/convert/burpsuite2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {BurpSuiteMapper as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {BurpSuiteMapper as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class Burpsuite2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class Burpsuite2HDF extends Command { 'Translate a BurpSuite Pro XML file into a Heimdall Data Format JSON file'; static readonly examples = [ - 'saf convert burpsuite2hdf -i burpsuite_results.xml -o output-hdf-name.json' + 'saf convert burpsuite2hdf -i burpsuite_results.xml -o output-hdf-name.json', ]; static readonly flags = { @@ -19,31 +19,31 @@ export default class Burpsuite2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input Burpsuite Pro XML File' + description: 'Input Burpsuite Pro XML File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(Burpsuite2HDF); + const {flags} = await this.parse(Burpsuite2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); - checkInput({data, filename: flags.input}, 'burp', 'BurpSuite Pro XML'); + const data = fs.readFileSync(flags.input, 'utf8') + checkInput({data, filename: flags.input}, 'burp', 'BurpSuite Pro XML') - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/ckl2POAM.ts b/src/commands/convert/ckl2POAM.ts index 27a7780d3..39417f778 100644 --- a/src/commands/convert/ckl2POAM.ts +++ b/src/commands/convert/ckl2POAM.ts @@ -1,13 +1,13 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import path from 'path'; -import _ from 'lodash'; -import {createLogger, format, transports} from 'winston'; -import xml2js from 'xml2js'; -import {STIG, Vulnerability, STIGHolder} from '../../types/STIG'; -import promptSync from 'prompt-sync'; -import XlsxPopulate from 'xlsx-populate'; -import moment from 'moment'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import path from 'path' +import _ from 'lodash' +import {createLogger, format, transports} from 'winston' +import xml2js from 'xml2js' +import {STIG, Vulnerability, STIGHolder} from '../../types/STIG' +import promptSync from 'prompt-sync' +import XlsxPopulate from 'xlsx-populate' +import moment from 'moment' import { cci2nist, cklSeverityToImpact, @@ -21,24 +21,24 @@ import { 
createCVD, extractSolution, extractSTIGUrl, - replaceSpecialCharacters -} from '../../utils/ckl2poam'; -import {default as files} from '../../resources/files.json'; -import {convertFullPathToFilename, dataURLtoU8Array} from '../../utils/global'; + replaceSpecialCharacters, +} from '../../utils/ckl2poam' +import {default as files} from '../../resources/files.json' +import {convertFullPathToFilename, dataURLtoU8Array} from '../../utils/global' -const prompt = promptSync(); -const {printf} = format; +const prompt = promptSync() +const {printf} = format const fmt = printf(({level, file, message}) => { - return `${level.toUpperCase()}: ${file}: ${message}`; -}); + return `${level.toUpperCase()}: ${file}: ${message}` +}) const logger = createLogger({ format: fmt, - transports: [new transports.Console()] -}); + transports: [new transports.Console()], +}) -const STARTING_ROW = 8; // The row we start inserting controls into +const STARTING_ROW = 8 // The row we start inserting controls into export default class CKL2POAM extends Command { static readonly usage = @@ -50,7 +50,7 @@ export default class CKL2POAM extends Command { static aliases = ['convert:ckl2poam']; static readonly examples = [ - 'saf convert ckl2POAM -i checklist_file.ckl -o output-folder -d abcdefg -s 2' + 'saf convert ckl2POAM -i checklist_file.ckl -o output-folder -d abcdefg -s 2', ]; static readonly flags = { @@ -59,60 +59,60 @@ export default class CKL2POAM extends Command { char: 'i', required: true, multiple: true, - description: 'Path to the DISA Checklist File(s)' + description: 'Path to the DISA Checklist File(s)', }), officeOrg: Flags.string({ char: 'O', required: false, default: '', description: - 'Default value for Office/org (prompts for each file if not set)' + 'Default value for Office/org (prompts for each file if not set)', }), deviceName: Flags.string({ char: 'd', required: false, default: '', - description: 'Name of target device (prompts for each file if not set)' + description: 'Name of target device (prompts for each file if not set)', }), rowsToSkip: Flags.integer({ char: 's', required: false, default: 4, - description: 'Rows to leave between POA&M Items for milestones' + description: 'Rows to leave between POA&M Items for milestones', }), output: Flags.string({ char: 'o', required: true, - description: 'Path to output PO&M File(s)' - }) + description: 'Path to output PO&M File(s)', + }), }; async run() { - const {flags} = await this.parse(CKL2POAM); + const {flags} = await this.parse(CKL2POAM) // Create output folder if it doesn't exist already if (!fs.existsSync(flags.output)) { - fs.mkdirSync(flags.output); + fs.mkdirSync(flags.output) } flags.input.forEach((fileName: string) => { // Ignore files that start with . 
(e.g .gitignore) if (fileName.startsWith('.')) { - return; + return } logger.log({ level: 'info', file: fileName, - message: 'Opening file' - }); - const parser = new xml2js.Parser(); + message: 'Opening file', + }) + const parser = new xml2js.Parser() fs.readFile(fileName, function (readFileError, data) { if (readFileError) { logger.log({ level: 'error', file: fileName, - message: `An error occurred opening the file ${fileName}: ${readFileError}` - }); + message: `An error occurred opening the file ${fileName}: ${readFileError}`, + }) } // Parse the XML to a javascript object @@ -121,116 +121,116 @@ export default class CKL2POAM extends Command { logger.log({ level: 'error', file: fileName, - message: `An error occurred parsing the file: ${readFileError}` - }); + message: `An error occurred parsing the file: ${readFileError}`, + }) } else { - const infos: Record = {}; - let vulnerabilities: Vulnerability[] = []; - const iStigs: STIGHolder[] = []; - const stigs = result.CHECKLIST.STIGS; + const infos: Record = {} + let vulnerabilities: Vulnerability[] = [] + const iStigs: STIGHolder[] = [] + const stigs = result.CHECKLIST.STIGS logger.log({ level: 'info', file: fileName, - message: `Found ${stigs?.length} STIGs` - }); + message: `Found ${stigs?.length} STIGs`, + }) // Get nested iSTIGs - stigs?.forEach((stig) => { - stig.iSTIG?.forEach((iStig) => { - iStigs.push(iStig); - }); - }); + stigs?.forEach(stig => { + stig.iSTIG?.forEach(iStig => { + iStigs.push(iStig) + }) + }) logger.log({ level: 'info', file: fileName, - message: `Found ${iStigs.length} iSTIGs` - }); + message: `Found ${iStigs.length} iSTIGs`, + }) // Get the controls/vulnerabilities from each stig - iStigs.forEach((iSTIG) => { - iSTIG.STIG_INFO?.forEach((info) => { - info.SI_DATA?.forEach((data) => { + iStigs.forEach(iSTIG => { + iSTIG.STIG_INFO?.forEach(info => { + info.SI_DATA?.forEach(data => { if (data.SID_DATA) { - infos[data.SID_NAME[0]] = data.SID_DATA[0]; + infos[data.SID_NAME[0]] = data.SID_DATA[0] } - }); - }); + }) + }) if (iSTIG.VULN) { vulnerabilities = [ ...vulnerabilities, - ...iSTIG.VULN.map((vulnerability) => { - const values: Record = {}; + ...iSTIG.VULN.map(vulnerability => { + const values: Record = {} // Extract STIG_DATA - vulnerability.STIG_DATA?.reverse().forEach((data) => { - values[data.VULN_ATTRIBUTE[0]] = data.ATTRIBUTE_DATA[0]; - }); + vulnerability.STIG_DATA?.reverse().forEach(data => { + values[data.VULN_ATTRIBUTE[0]] = data.ATTRIBUTE_DATA[0] + }) // Extract remaining fields (status, finding details, comments, security override, and severity justification) Object.entries(vulnerability).forEach(([key, value]) => { - values[key] = value[0]; - }); - return values; - }) - ]; + values[key] = value[0] + }) + return values + }), + ] } - }); + }) logger.log({ level: 'info', file: fileName, - message: `Found ${vulnerabilities.length} vulnerabilities` - }); + message: `Found ${vulnerabilities.length} vulnerabilities`, + }) const officeOrg = flags.officeOrg || - prompt('What should the default value be for Office/org? '); + prompt('What should the default value be for Office/org? ') const host = - flags.deviceName || prompt('What is the device name? '); + flags.deviceName || prompt('What is the device name? 
') // Read our template XlsxPopulate.fromDataAsync( - dataURLtoU8Array(files.POAMTemplate.data) + dataURLtoU8Array(files.POAMTemplate.data), ).then((workBook: any) => { // eMASS reads the first sheet in the notebook - const sheet = workBook.sheet(0); + const sheet = workBook.sheet(0) // The current row we are on - let currentRow = STARTING_ROW; + let currentRow = STARTING_ROW // The scheduled completion date, default of one year from today const aYearFromNow = moment( - new Date(new Date().setFullYear(new Date().getFullYear() + 1)) - ).format('M/DD/YYYY'); + new Date(new Date().setFullYear(new Date().getFullYear() + 1)), + ).format('M/DD/YYYY') // For each vulnerability - vulnerabilities.forEach((vulnerability) => { + vulnerabilities.forEach(vulnerability => { if ( vulnerability.STATUS !== 'NotAFinding' && vulnerability.STATUS !== 'Not_Reviewed' ) { // Control Vulnerability Description if (vulnerability.STATUS === 'Not_Applicable') { - sheet.cell(`C${currentRow}`).value('Not Applicable'); + sheet.cell(`C${currentRow}`).value('Not Applicable') } else { sheet .cell(`C${currentRow}`) .value( - replaceSpecialCharacters(createCVD(vulnerability)) - ); + replaceSpecialCharacters(createCVD(vulnerability)), + ) } // Security Control Number sheet .cell(`D${currentRow}`) - .value(cci2nist(vulnerability.CCI_REF || '')); + .value(cci2nist(vulnerability.CCI_REF || '')) // Office/org - sheet.cell(`E${currentRow}`).value(officeOrg); + sheet.cell(`E${currentRow}`).value(officeOrg) // Security Checks sheet .cell(`F${currentRow}`) - .value(vulnerability.Rule_ID?.split(',')[0]); + .value(vulnerability.Rule_ID?.split(',')[0]) // Resources Required - sheet.cell(`G${currentRow}`).value('NA'); + sheet.cell(`G${currentRow}`).value('NA') // Scheduled Completion Date // Default is one year from today - sheet.cell(`H${currentRow}`).value(aYearFromNow); + sheet.cell(`H${currentRow}`).value(aYearFromNow) // Source Identifying Vulnerability - sheet.cell(`K${currentRow}`).value(infos.title || ''); + sheet.cell(`K${currentRow}`).value(infos.title || '') // Status sheet .cell(`L${currentRow}`) - .value(cleanStatus(vulnerability.STATUS || '')); + .value(cleanStatus(vulnerability.STATUS || '')) // Comments if ( vulnerability.STATUS === 'Open' || @@ -242,58 +242,58 @@ export default class CKL2POAM extends Command { .value( combineComments( vulnerability, - extractSTIGUrl(vulnerability.FINDING_DETAILS || '') - ) - ); + extractSTIGUrl(vulnerability.FINDING_DETAILS || ''), + ), + ) } else { sheet .cell(`M${currentRow}`) - .value(combineComments(vulnerability, host)); + .value(combineComments(vulnerability, host)) } } // Raw Severity sheet .cell(`N${currentRow}`) - .value(convertToRawSeverity(vulnerability.Severity || '')); + .value(convertToRawSeverity(vulnerability.Severity || '')) // Severity sheet .cell(`P${currentRow}`) .value( - cklSeverityToPOAMSeverity(vulnerability.Severity || '') - ); + cklSeverityToPOAMSeverity(vulnerability.Severity || ''), + ) // Relevance of Threat sheet .cell(`Q${currentRow}`) .value( cklSeverityToRelevanceOfThreat( - vulnerability.Severity || '' - ) - ); + vulnerability.Severity || '', + ), + ) // Likelihood sheet .cell(`R${currentRow}`) .value( - cklSeverityToLikelihood(vulnerability.Severity || '') - ); + cklSeverityToLikelihood(vulnerability.Severity || ''), + ) // Impact sheet .cell(`S${currentRow}`) - .value(cklSeverityToImpact(vulnerability.Severity || '')); + .value(cklSeverityToImpact(vulnerability.Severity || '')) // Residual Risk Level sheet .cell(`U${currentRow}`) .value( 
cklSeverityToResidualRiskLevel( - vulnerability.Severity || '' - ) - ); + vulnerability.Severity || '', + ), + ) // Impact Description sheet .cell(`T${currentRow}`) .value( - replaceSpecialCharacters(vulnerability.Vuln_Discuss || '') - ); + replaceSpecialCharacters(vulnerability.Vuln_Discuss || ''), + ) // Recommendations sheet .cell(`V${currentRow}`) @@ -301,25 +301,25 @@ export default class CKL2POAM extends Command { replaceSpecialCharacters( vulnerability.Fix_Text || extractSolution( - vulnerability.FINDING_DETAILS || '' + vulnerability.FINDING_DETAILS || '', ) || - '' - ) - ); + '', + ), + ) // Go to the next row - currentRow += flags.rowsToSkip + 1; + currentRow += flags.rowsToSkip + 1 } - }); + }) return workBook.toFileAsync( path.join( flags.output, - `${convertFullPathToFilename(fileName)}-${moment(new Date()).format('YYYY-MM-DD-HHmm')}.xlsm` - ) - ); - }); + `${convertFullPathToFilename(fileName)}-${moment(new Date()).format('YYYY-MM-DD-HHmm')}.xlsm`, + ), + ) + }) } - }); - }); - }); + }) + }) + }) } } diff --git a/src/commands/convert/ckl2hdf.ts b/src/commands/convert/ckl2hdf.ts index 49daffc7a..93a13b81b 100644 --- a/src/commands/convert/ckl2hdf.ts +++ b/src/commands/convert/ckl2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {ChecklistResults as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {ChecklistResults as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class CKL2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class CKL2HDF extends Command { 'Translate a Checklist XML file into a Heimdall Data Format JSON file'; static readonly examples = [ - 'saf convert ckl2hdf -i ckl_results.xml -o output-hdf-name.json' + 'saf convert ckl2hdf -i ckl_results.xml -o output-hdf-name.json', ]; static readonly flags = { @@ -19,34 +19,34 @@ export default class CKL2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input Checklist XML File' + description: 'Input Checklist XML File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(CKL2HDF); + const {flags} = await this.parse(CKL2HDF) - const data = fs.readFileSync(flags.input, 'utf8'); - checkInput({data, filename: flags.input}, 'checklist', 'DISA Checklist'); + const data = fs.readFileSync(flags.input, 'utf8') + checkInput({data, filename: flags.input}, 'checklist', 'DISA Checklist') try { - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } catch (error) { - console.error(`Error converting to hdf:\n${error}`); + console.error(`Error converting to hdf:\n${error}`) } } } diff --git a/src/commands/convert/conveyor2hdf.ts b/src/commands/convert/conveyor2hdf.ts index f00f440f6..9ad1d942d 100644 --- a/src/commands/convert/conveyor2hdf.ts +++ b/src/commands/convert/conveyor2hdf.ts @@ -1,8 +1,8 @@ -import {Command, Flags} from 
'@oclif/core'; -import fs from 'fs'; -import {ConveyorResults as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; -import path from 'path'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {ConveyorResults as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' +import path from 'path' export default class Conveyor2HDF extends Command { static readonly usage = 'convert conveyor2hdf -i -o [-h]'; @@ -11,7 +11,7 @@ export default class Conveyor2HDF extends Command { 'Translate a Conveyor JSON file into a Heimdall Data Format JSON files'; static readonly examples = [ - 'saf convert conveyor2hdf -i conveyor_results.json -o output-hdf-name.json' + 'saf convert conveyor2hdf -i conveyor_results.json -o output-hdf-name.json', ]; static readonly flags = { @@ -19,30 +19,30 @@ export default class Conveyor2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input Conveyor JSON File' + description: 'Input Conveyor JSON File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON Folder' - }) + description: 'Output HDF JSON Folder', + }), }; async run() { - const {flags} = await this.parse(Conveyor2HDF); + const {flags} = await this.parse(Conveyor2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); - checkInput({data, filename: flags.input}, 'Conveyor', 'Conveyor JSON'); + const data = fs.readFileSync(flags.input, 'utf8') + checkInput({data, filename: flags.input}, 'Conveyor', 'Conveyor JSON') - const converter = new Mapper(data); - const results = converter.toHdf(); - fs.mkdirSync(flags.output); + const converter = new Mapper(data) + const results = converter.toHdf() + fs.mkdirSync(flags.output) for (const [filename, result] of Object.entries(results)) { fs.writeFileSync( path.join(flags.output, checkSuffix(filename)), - JSON.stringify(result, null, 2) - ); + JSON.stringify(result, null, 2), + ) } } } diff --git a/src/commands/convert/cyclonedx_sbom2hdf.ts b/src/commands/convert/cyclonedx_sbom2hdf.ts index d3480220f..ce9296cbc 100644 --- a/src/commands/convert/cyclonedx_sbom2hdf.ts +++ b/src/commands/convert/cyclonedx_sbom2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {CycloneDXSBOMResults as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {CycloneDXSBOMResults as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class CycloneDXSBOM2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class CycloneDXSBOM2HDF extends Command { 'Translate a CycloneDX SBOM report into an HDF results set'; static readonly examples = [ - 'saf convert cyclonedx_sbom2hdf -i cyclonedx_sbom.json -o output-hdf-name.json' + 'saf convert cyclonedx_sbom2hdf -i cyclonedx_sbom.json -o output-hdf-name.json', ]; static readonly flags = { @@ -19,35 +19,35 @@ export default class CycloneDXSBOM2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input CycloneDX SBOM file' + description: 'Input CycloneDX SBOM file', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON file' + description: 'Output HDF JSON file', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include 
raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(CycloneDXSBOM2HDF); + const {flags} = await this.parse(CycloneDXSBOM2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); + const data = fs.readFileSync(flags.input, 'utf8') checkInput( {data, filename: flags.input}, 'cyclonedx_sbom', - 'CycloneDX SBOM output file' - ); + 'CycloneDX SBOM output file', + ) - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/dbprotect2hdf.ts b/src/commands/convert/dbprotect2hdf.ts index 99c73cb68..083951817 100644 --- a/src/commands/convert/dbprotect2hdf.ts +++ b/src/commands/convert/dbprotect2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {DBProtectMapper as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {DBProtectMapper as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class DBProtect2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class DBProtect2HDF extends Command { 'Translate a DBProtect report in "Check Results Details" XML format into a Heimdall Data Format JSON file'; static readonly examples = [ - 'saf convert dbprotect2hdf -i check_results_details_report.xml -o output-hdf-name.json' + 'saf convert dbprotect2hdf -i check_results_details_report.xml -o output-hdf-name.json', ]; static readonly flags = { @@ -19,35 +19,35 @@ export default class DBProtect2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: "'Check Results Details' XML File" + description: "'Check Results Details' XML File", }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(DBProtect2HDF); + const {flags} = await this.parse(DBProtect2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); + const data = fs.readFileSync(flags.input, 'utf8') checkInput( {data, filename: flags.input}, 'dbProtect', - 'DBProtect report in "Check Results Details" XML format' - ); + 'DBProtect report in "Check Results Details" XML format', + ) - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/fortify2hdf.ts b/src/commands/convert/fortify2hdf.ts index 36c8d8dcd..12538ecb2 100644 --- a/src/commands/convert/fortify2hdf.ts +++ b/src/commands/convert/fortify2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {FortifyMapper as Mapper} from '@mitre/hdf-converters'; -import {checkSuffix, checkInput} from '../../utils/global'; +import {Command, Flags} from 
'@oclif/core' +import fs from 'fs' +import {FortifyMapper as Mapper} from '@mitre/hdf-converters' +import {checkSuffix, checkInput} from '../../utils/global' export default class Fortify2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class Fortify2HDF extends Command { 'Translate a Fortify results FVDL file into a Heimdall Data Format JSON file; the FVDL file is an XML that can be extracted from the Fortify FPR project file using standard file compression tools'; static readonly examples = [ - 'saf convert fortify2hdf -i audit.fvdl -o output-hdf-name.json' + 'saf convert fortify2hdf -i audit.fvdl -o output-hdf-name.json', ]; static readonly flags = { @@ -19,35 +19,35 @@ export default class Fortify2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input FVDL File' + description: 'Input FVDL File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(Fortify2HDF); + const {flags} = await this.parse(Fortify2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); + const data = fs.readFileSync(flags.input, 'utf8') checkInput( {data, filename: flags.input}, 'fortify', - 'Fortify results FVDL file' - ); + 'Fortify results FVDL file', + ) - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/hdf2asff.ts b/src/commands/convert/hdf2asff.ts index 11f238a2f..6f0d6ff23 100644 --- a/src/commands/convert/hdf2asff.ts +++ b/src/commands/convert/hdf2asff.ts @@ -1,16 +1,16 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import https from 'https'; -import {FromHdfToAsffMapper as Mapper} from '@mitre/hdf-converters'; -import path from 'path'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import https from 'https' +import {FromHdfToAsffMapper as Mapper} from '@mitre/hdf-converters' +import path from 'path' import { AwsSecurityFinding, SecurityHub, - SecurityHubClientConfig -} from '@aws-sdk/client-securityhub'; -import {NodeHttpHandler} from '@smithy/node-http-handler'; -import {checkSuffix, convertFullPathToFilename} from '../../utils/global'; -import _ from 'lodash'; + SecurityHubClientConfig, +} from '@aws-sdk/client-securityhub' +import {NodeHttpHandler} from '@smithy/node-http-handler' +import {checkSuffix, convertFullPathToFilename} from '../../utils/global' +import _ from 'lodash' export default class HDF2ASFF extends Command { static readonly usage = @@ -22,7 +22,7 @@ export default class HDF2ASFF extends Command { static readonly examples = [ 'saf convert hdf2asff -i rhel7-scan_02032022A.json -a 123456789 -r us-east-1 -t rhel7_example_host -o rhel7.asff', 'saf convert hdf2asff -i rds_mysql_i123456789scan_03042022A.json -a 987654321 -r us-west-1 -t Instance_i123456789 -u', - 'saf convert hdf2asff -i snyk_acme_project5_hdf_04052022A.json -a 2143658798 -r us-east-1 -t acme_project5 -o snyk_acme_project5 -u' + 'saf convert hdf2asff -i snyk_acme_project5_hdf_04052022A.json -a 2143658798 -r us-east-1 -t 
acme_project5 -o snyk_acme_project5 -u', ]; static readonly flags = { @@ -30,54 +30,54 @@ export default class HDF2ASFF extends Command { accountId: Flags.string({ char: 'a', required: true, - description: 'AWS Account ID' + description: 'AWS Account ID', }), region: Flags.string({ char: 'r', required: true, - description: 'SecurityHub Region' + description: 'SecurityHub Region', }), specifyRegionAttribute: Flags.boolean({ char: 'R', required: false, description: - 'Manually specify the top-level `Region` attribute - SecurityHub populates this attribute automatically and prohibits one from updating it using `BatchImportFindings` or `BatchUpdateFindings`' + 'Manually specify the top-level `Region` attribute - SecurityHub populates this attribute automatically and prohibits one from updating it using `BatchImportFindings` or `BatchUpdateFindings`', }), input: Flags.string({ char: 'i', required: true, - description: 'Input HDF JSON File' + description: 'Input HDF JSON File', }), target: Flags.string({ char: 't', required: true, - description: 'Unique name for target to track findings across time' + description: 'Unique name for target to track findings across time', }), upload: Flags.boolean({ char: 'u', required: false, - description: 'Upload findings to AWS Security Hub' + description: 'Upload findings to AWS Security Hub', }), output: Flags.string({ char: 'o', required: false, - description: 'Output ASFF JSON Folder' + description: 'Output ASFF JSON Folder', }), insecure: Flags.boolean({ char: 'I', required: false, default: false, - description: 'Disable SSL verification, this is insecure.' + description: 'Disable SSL verification, this is insecure.', }), certificate: Flags.string({ char: 'C', required: false, - description: 'Trusted signing certificate file' - }) + description: 'Trusted signing certificate file', + }), }; async run() { - const {flags} = await this.parse(HDF2ASFF); + const {flags} = await this.parse(HDF2ASFF) const converted = new Mapper( JSON.parse(fs.readFileSync(flags.input, 'utf8')), @@ -86,42 +86,42 @@ export default class HDF2ASFF extends Command { region: flags.region, regionAttribute: flags.specifyRegionAttribute, target: flags.target, - input: flags.input - } - ).toAsff(); + input: flags.input, + }, + ).toAsff() if (flags.output) { - const convertedSlices = _.chunk(converted, 100); // AWS doesn't allow uploading more than 100 findings at a time so we need to split them into chunks - const outputFolder = flags.output?.replace('.json', '') || 'asff-output'; - fs.mkdirSync(outputFolder); + const convertedSlices = _.chunk(converted, 100) // AWS doesn't allow uploading more than 100 findings at a time so we need to split them into chunks + const outputFolder = flags.output?.replace('.json', '') || 'asff-output' + fs.mkdirSync(outputFolder) if (convertedSlices.length === 1) { const outfilePath = path.join( outputFolder, - convertFullPathToFilename(checkSuffix(flags.output)) - ); + convertFullPathToFilename(checkSuffix(flags.output)), + ) fs.writeFileSync( outfilePath, - JSON.stringify(convertedSlices[0], null, 2) - ); + JSON.stringify(convertedSlices[0], null, 2), + ) } else { convertedSlices.forEach((slice, index) => { const outfilePath = path.join( outputFolder, - `${convertFullPathToFilename(checkSuffix(flags.output || '')).replace('.json', '')}.p${index}.json` - ); - fs.writeFileSync(outfilePath, JSON.stringify(slice, null, 2)); - }); + `${convertFullPathToFilename(checkSuffix(flags.output || '')).replace('.json', '')}.p${index}.json`, + ) + 
fs.writeFileSync(outfilePath, JSON.stringify(slice, null, 2)) + }) } } if (flags.upload) { - const profileInfoFinding = converted.pop(); - const convertedSlices = _.chunk(converted, 100) as AwsSecurityFinding[][]; + const profileInfoFinding = converted.pop() + const convertedSlices = _.chunk(converted, 100) as AwsSecurityFinding[][] if (flags.insecure) { console.warn( - 'WARNING: Using --insecure will make all connections to AWS open to MITM attacks, if possible pass a certificate file with --certificate' - ); + 'WARNING: Using --insecure will make all connections to AWS open to MITM attacks, if possible pass a certificate file with --certificate', + ) } const clientOptions: SecurityHubClientConfig = { @@ -131,29 +131,29 @@ export default class HDF2ASFF extends Command { // Disable HTTPS verification if requested rejectUnauthorized: !flags.insecure, // Pass an SSL certificate to trust - ca: flags.certificate - ? fs.readFileSync(flags.certificate, 'utf8') - : undefined - }) - }) - }; - const client = new SecurityHub(clientOptions); + ca: flags.certificate ? + fs.readFileSync(flags.certificate, 'utf8') : + undefined, + }), + }), + } + const client = new SecurityHub(clientOptions) try { await Promise.all( - convertedSlices.map(async (chunk) => { + convertedSlices.map(async chunk => { try { const result = await client.batchImportFindings({ - Findings: chunk - }); + Findings: chunk, + }) console.log( - `Uploaded ${chunk.length} controls. Success: ${result.SuccessCount}, Fail: ${result.FailedCount}` - ); + `Uploaded ${chunk.length} controls. Success: ${result.SuccessCount}, Fail: ${result.FailedCount}`, + ) if (result.FailedFindings?.length) { console.error( - `Failed to upload ${result.FailedCount} Findings` - ); - console.log(result.FailedFindings); + `Failed to upload ${result.FailedCount} Findings`, + ) + console.log(result.FailedFindings) } } catch (error) { if ( @@ -161,44 +161,44 @@ export default class HDF2ASFF extends Command { _.get(error, 'code') === 'NetworkingError' ) { console.error( - `Failed to upload controls: ${error}; Using --certificate to provide your own SSL intermediary certificate (in .crt format) or use the flag --insecure to ignore SSL might resolve this issue` - ); + `Failed to upload controls: ${error}; Using --certificate to provide your own SSL intermediary certificate (in .crt format) or use the flag --insecure to ignore SSL might resolve this issue`, + ) } else { - console.error(`Failed to upload controls: ${error}`); + console.error(`Failed to upload controls: ${error}`) } } - }) - ); + }), + ) } catch (error) { if (error instanceof Error) { - console.error(error.message); + console.error(error.message) } else { - console.error('Unexpected error', error); + console.error('Unexpected error', error) } } try { if (profileInfoFinding) { - profileInfoFinding.UpdatedAt = new Date().toISOString(); + profileInfoFinding.UpdatedAt = new Date().toISOString() const result = await client.batchImportFindings({ - Findings: [profileInfoFinding as unknown] as AwsSecurityFinding[] - }); - console.info(`Statistics: ${profileInfoFinding.Description}`); + Findings: [profileInfoFinding as unknown] as AwsSecurityFinding[], + }) + console.info(`Statistics: ${profileInfoFinding.Description}`) console.info( - `Uploaded Results Set Info Finding(s) - Success: ${result.SuccessCount}, Fail: ${result.FailedCount}` - ); + `Uploaded Results Set Info Finding(s) - Success: ${result.SuccessCount}, Fail: ${result.FailedCount}`, + ) if (result.FailedFindings?.length) { console.error( - `Failed to 
upload ${result.FailedCount} Results Set Info Finding` - ); - console.log(result.FailedFindings); + `Failed to upload ${result.FailedCount} Results Set Info Finding`, + ) + console.log(result.FailedFindings) } } } catch (error) { if (error instanceof Error) { - console.error(error.message); + console.error(error.message) } else { - console.error('Unexpected error', error); + console.error('Unexpected error', error) } } } diff --git a/src/commands/convert/hdf2caat.ts b/src/commands/convert/hdf2caat.ts index d2450939d..e3aac4b41 100644 --- a/src/commands/convert/hdf2caat.ts +++ b/src/commands/convert/hdf2caat.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import path from 'path'; -import {FromHDFToCAATMapper as Mapper} from '@mitre/hdf-converters'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import path from 'path' +import {FromHDFToCAATMapper as Mapper} from '@mitre/hdf-converters' export default class HDF2CAAT extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class HDF2CAAT extends Command { 'Translate an HDF file into a Compliance Assessment and Audit Tracking (CAAT) XLSX file'; static readonly examples = [ - 'saf convert hdf2caat -i hdf_input.json -o caat-results.xlsx' + 'saf convert hdf2caat -i hdf_input.json -o caat-results.xlsx', ]; static readonly flags = { @@ -20,27 +20,27 @@ export default class HDF2CAAT extends Command { char: 'i', required: true, multiple: true, - description: 'Input HDF JSON file' + description: 'Input HDF JSON file', }), output: Flags.string({ char: 'o', required: true, - description: 'Output CAAT XLSX file' - }) + description: 'Output CAAT XLSX file', + }), }; async run() { - const {flags} = await this.parse(HDF2CAAT); + const {flags} = await this.parse(HDF2CAAT) - const inputData = flags.input.map((filename) => ({ + const inputData = flags.input.map(filename => ({ data: fs.readFileSync(filename, 'utf8'), - filename: path.basename(filename) - })); + filename: path.basename(filename), + })) - const converter = new Mapper(inputData); + const converter = new Mapper(inputData) fs.writeFileSync( flags.output, - converter.toCAAT(false, {bookType: 'xlsx', type: 'buffer'}) - ); + converter.toCAAT(false, {bookType: 'xlsx', type: 'buffer'}), + ) } } diff --git a/src/commands/convert/hdf2ckl.ts b/src/commands/convert/hdf2ckl.ts index 36328c7bd..1b9d5cc58 100644 --- a/src/commands/convert/hdf2ckl.ts +++ b/src/commands/convert/hdf2ckl.ts @@ -1,14 +1,14 @@ -import {Command, Flags} from '@oclif/core'; -import _ from 'lodash'; -import fs from 'fs'; +import {Command, Flags} from '@oclif/core' +import _ from 'lodash' +import fs from 'fs' import { Assettype, ChecklistMetadata, ChecklistResults as Mapper, Role, Techarea, - validateChecklistMetadata -} from '@mitre/hdf-converters'; + validateChecklistMetadata, +} from '@mitre/hdf-converters' export default class HDF2CKL extends Command { static readonly usage = @@ -22,12 +22,12 @@ export default class HDF2CKL extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input HDF file' + description: 'Input HDF file', }), output: Flags.string({ char: 'o', required: true, - description: 'Output CKL file' + description: 'Output CKL file', }), metadata: Flags.string({ char: 'm', @@ -59,136 +59,136 @@ export default class HDF2CKL extends Command { 'webordatabase', 'webdbsite', 'webdbinstance', - 'vulidmapping' - ] - } + 'vulidmapping', + ], + }, ], - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), 
profilename: Flags.string({ required: false, description: 'Profile name', - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), profiletitle: Flags.string({ required: false, description: 'Profile title', - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), version: Flags.integer({ required: false, description: 'Profile version number', min: 0, - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), releasenumber: Flags.integer({ required: false, description: 'Profile release number', min: 0, - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), releasedate: Flags.string({ required: false, description: 'Profile release date', - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), marking: Flags.string({ required: false, description: 'A security classification or designation of the asset, indicating its sensitivity level', - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), hostname: Flags.string({ char: 'H', required: false, description: 'The name assigned to the asset within the network', - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), ip: Flags.string({ char: 'I', required: false, description: 'IP address', - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), mac: Flags.string({ char: 'M', required: false, description: 'MAC address', - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), fqdn: Flags.string({ char: 'F', required: false, description: 'Fully Qualified Domain Name', - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), targetcomment: Flags.string({ required: false, description: 'Additional comments or notes about the asset', - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), role: Flags.string({ required: false, description: 'The primary function or role of the asset within the network or organization', options: Object.values(Role), - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), assettype: Flags.string({ required: false, description: 'The category or classification of the asset', options: Object.values(Assettype), - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), techarea: Flags.string({ required: false, description: 'The technical area or domain to which the asset belongs', options: Object.values(Techarea), - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), stigguid: Flags.string({ required: false, description: 'A unique identifier associated with the STIG for the asset', - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), targetkey: Flags.string({ required: false, description: 'A unique key or identifier for the asset within the checklist or inventory system', - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), webordatabase: Flags.boolean({ required: false, description: 'Indicates whether the STIG is primarily for either a web or database server', - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), webdbsite: Flags.string({ required: false, description: 'The specific site or application hosted on the web or database server', dependsOn: ['webordatabase'], - helpGroup: 'Checklist Metadata' + helpGroup: 'Checklist Metadata', }), webdbinstance: Flags.string({ required: false, description: 'The specific instance of the web application or database running on the server', dependsOn: ['webordatabase'], - helpGroup: 'Checklist Metadata' 
+ helpGroup: 'Checklist Metadata', }), vulidmapping: Flags.string({ required: false, description: 'Which type of control identifier to map to the checklist ID', options: ['gid', 'id'], - helpGroup: 'Checklist Metadata' - }) + helpGroup: 'Checklist Metadata', + }), }; static readonly examples = [ 'saf convert hdf2ckl -i rhel7-results.json -o rhel7.ckl --fqdn reverseproxy.example.org --hostname reverseproxy --ip 10.0.0.3 --mac 12:34:56:78:90:AB', - 'saf convert hdf2ckl -i rhel8-results.json -o rhel8.ckl -m rhel8-metadata.json' + 'saf convert hdf2ckl -i rhel8-results.json -o rhel8.ckl -m rhel8-metadata.json', ]; static readonly oldMetadataFormatMapping = { @@ -203,11 +203,11 @@ export default class HDF2CKL extends Command { techarea: 'tech_area', targetkey: 'target_key', webdbsite: 'web_db_site', - webdbinstance: 'web_db_site' + webdbinstance: 'web_db_site', }; async run() { - const {flags} = await this.parse(HDF2CKL); + const {flags} = await this.parse(HDF2CKL) /* Order of precedence for checklist metadata: command flags (hostname, ip, etc.) or metadata file (-m flag) @@ -228,83 +228,83 @@ export default class HDF2CKL extends Command { hostname: '', targetcomment: '', webdbinstance: '', - webdbsite: '' - }; - const inputHDF = JSON.parse(fs.readFileSync(flags.input, 'utf8')); - let flagMetadata; - flagMetadata = flags.metadata - ? JSON.parse(fs.readFileSync(flags.metadata, 'utf8')) - : { - profiles: [ - { - name: flags.profilename, - title: flags.profiletitle, - version: flags.version, - releasenumber: flags.releasenumber, - releasedate: flags.releasedate - } - ], - marking: flags.marking, - hostname: flags.hostname, - hostip: flags.ip, - hostmac: flags.mac, - hostfqdn: flags.fqdn, - targetcomment: flags.targetcomment, - role: flags.role, - assettype: flags.assettype, - techarea: flags.techarea, - targetkey: flags.targetkey, - webordatabase: flags.webordatabase, - webdbsite: flags.webdbsite, - webdbinstance: flags.webdbinstance, - vulidmapping: flags.vulidmapping - }; + webdbsite: '', + } + const inputHDF = JSON.parse(fs.readFileSync(flags.input, 'utf8')) + let flagMetadata + flagMetadata = flags.metadata ? 
+ JSON.parse(fs.readFileSync(flags.metadata, 'utf8')) : + { + profiles: [ + { + name: flags.profilename, + title: flags.profiletitle, + version: flags.version, + releasenumber: flags.releasenumber, + releasedate: flags.releasedate, + }, + ], + marking: flags.marking, + hostname: flags.hostname, + hostip: flags.ip, + hostmac: flags.mac, + hostfqdn: flags.fqdn, + targetcomment: flags.targetcomment, + role: flags.role, + assettype: flags.assettype, + techarea: flags.techarea, + targetkey: flags.targetkey, + webordatabase: flags.webordatabase, + webdbsite: flags.webdbsite, + webdbinstance: flags.webdbinstance, + vulidmapping: flags.vulidmapping, + } // to preserve backwards compatibility with old metadata format if (flags.metadata && _.has(flagMetadata, 'benchmark')) { - let profile; + let profile if (_.has(flagMetadata, 'benchmark.version')) { - const version: string = _.get(flagMetadata, 'benchmark.version'); + const version: string = _.get(flagMetadata, 'benchmark.version') // get sections of numbers in version string const parsedVersion = version .split(/\D+/) .filter(Boolean) - .map((s) => Number.parseInt(s, 10)); - profile = {version: parsedVersion[0], releasenumber: parsedVersion[1]}; + .map(s => Number.parseInt(s, 10)) + profile = {version: parsedVersion[0], releasenumber: parsedVersion[1]} } else { - profile = {}; + profile = {} } - const newFlagMetadata = {profiles: [profile]}; + const newFlagMetadata = {profiles: [profile]} for (const [newKey, oldKey] of Object.entries( - HDF2CKL.oldMetadataFormatMapping + HDF2CKL.oldMetadataFormatMapping, )) { - const oldValue = _.get(flagMetadata, oldKey); + const oldValue = _.get(flagMetadata, oldKey) if (oldValue) { - _.set(newFlagMetadata, newKey, oldValue); + _.set(newFlagMetadata, newKey, oldValue) } } - flagMetadata = newFlagMetadata; + flagMetadata = newFlagMetadata } const hdfMetadata = _.get( inputHDF, 'passthrough.metadata', - _.get(inputHDF, 'passthrough.checklist.asset', {}) - ); - const metadata = _.merge(defaultMetadata, hdfMetadata, flagMetadata); - _.set(inputHDF, 'passthrough.metadata', metadata); + _.get(inputHDF, 'passthrough.checklist.asset', {}), + ) + const metadata = _.merge(defaultMetadata, hdfMetadata, flagMetadata) + _.set(inputHDF, 'passthrough.metadata', metadata) - const validationResults = validateChecklistMetadata(metadata); + const validationResults = validateChecklistMetadata(metadata) if (validationResults.ok) { - fs.writeFileSync(flags.output, new Mapper(inputHDF).toCkl()); + fs.writeFileSync(flags.output, new Mapper(inputHDF).toCkl()) } else { console.error( - `Error creating checklist:\n${validationResults.error.message}` - ); + `Error creating checklist:\n${validationResults.error.message}`, + ) } } } diff --git a/src/commands/convert/hdf2condensed.ts b/src/commands/convert/hdf2condensed.ts index 8614b7467..eabbb650f 100644 --- a/src/commands/convert/hdf2condensed.ts +++ b/src/commands/convert/hdf2condensed.ts @@ -1,15 +1,15 @@ -import {Command, Flags} from '@oclif/core'; -import {ContextualizedProfile, convertFileContextual} from 'inspecjs'; -import fs from 'fs'; +import {Command, Flags} from '@oclif/core' +import {ContextualizedProfile, convertFileContextual} from 'inspecjs' +import fs from 'fs' import { calculateCompliance, extractControlSummariesBySeverity, extractStatusCounts, renameStatusName, - severityTargetsObject -} from '../../utils/threshold'; -import _ from 'lodash'; -import {checkSuffix} from '../../utils/global'; + severityTargetsObject, +} from '../../utils/threshold' +import _ from 'lodash' 
+import {checkSuffix} from '../../utils/global' export default class HDF2Condensed extends Command { static readonly usage = @@ -23,64 +23,64 @@ export default class HDF2Condensed extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input HDF file' + description: 'Input HDF file', }), output: Flags.string({ char: 'o', required: true, - description: 'Output condensed JSON file' - }) + description: 'Output condensed JSON file', + }), }; static readonly examples = [ - 'saf convert hdf2condensed -i rhel7-results.json -o rhel7-condensed.json' + 'saf convert hdf2condensed -i rhel7-results.json -o rhel7-condensed.json', ]; async run() { - const {flags} = await this.parse(HDF2Condensed); - const thresholds: Record> = {}; + const {flags} = await this.parse(HDF2Condensed) + const thresholds: Record> = {} const parsedExecJSON = convertFileContextual( - fs.readFileSync(flags.input, 'utf8') - ); - const parsedProfile = parsedExecJSON.contains[0] as ContextualizedProfile; - const overallStatusCounts = extractStatusCounts(parsedProfile); - const overallCompliance = calculateCompliance(overallStatusCounts); + fs.readFileSync(flags.input, 'utf8'), + ) + const parsedProfile = parsedExecJSON.contains[0] as ContextualizedProfile + const overallStatusCounts = extractStatusCounts(parsedProfile) + const overallCompliance = calculateCompliance(overallStatusCounts) - _.set(thresholds, 'compliance', overallCompliance); + _.set(thresholds, 'compliance', overallCompliance) // Severity counts for (const [severity, severityTargets] of Object.entries( - severityTargetsObject + severityTargetsObject, )) { - const severityStatusCounts = extractStatusCounts(parsedProfile, severity); + const severityStatusCounts = extractStatusCounts(parsedProfile, severity) for (const severityTarget of severityTargets) { const [statusName, _severity, thresholdType] = - severityTarget.split('.'); + severityTarget.split('.') _.set( thresholds, severityTarget.replace(`.${thresholdType}`, ''), - _.get(severityStatusCounts, renameStatusName(statusName)) - ); + _.get(severityStatusCounts, renameStatusName(statusName)), + ) } } // Total Counts for (const [type, counts] of Object.entries(thresholds)) { - let total = 0; + let total = 0 for (const [, count] of Object.entries(counts)) { - total += count; + total += count } - _.set(thresholds, `${type}.total`, total); + _.set(thresholds, `${type}.total`, total) } const result = { buckets: extractControlSummariesBySeverity(parsedProfile), - status: thresholds - }; + status: thresholds, + } fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(result, null, 2) - ); + JSON.stringify(result, null, 2), + ) } } diff --git a/src/commands/convert/hdf2csv.ts b/src/commands/convert/hdf2csv.ts index 11e6f3cc2..75b7a1efb 100644 --- a/src/commands/convert/hdf2csv.ts +++ b/src/commands/convert/hdf2csv.ts @@ -1,11 +1,11 @@ -import {Command, Flags} from '@oclif/core'; -import {ContextualizedEvaluation, contextualizeEvaluation} from 'inspecjs'; -import _ from 'lodash'; -import fs from 'fs'; -import ObjectsToCsv from 'objects-to-csv'; -import {ControlSetRows} from '../../types/csv'; -import {convertRow, csvExportFields} from '../../utils/csv'; -import {convertFullPathToFilename} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import {ContextualizedEvaluation, contextualizeEvaluation} from 'inspecjs' +import _ from 'lodash' +import fs from 'fs' +import ObjectsToCsv from 'objects-to-csv' +import {ControlSetRows} from '../../types/csv' +import {convertRow, 
csvExportFields} from '../../utils/csv' +import {convertFullPathToFilename} from '../../utils/global' export default class HDF2CSV extends Command { static readonly usage = @@ -19,79 +19,79 @@ export default class HDF2CSV extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input HDF file' + description: 'Input HDF file', }), output: Flags.string({ char: 'o', required: true, - description: 'Output CSV file' + description: 'Output CSV file', }), fields: Flags.string({ char: 'f', required: false, default: csvExportFields.join(','), - description: 'Fields to include in output CSV, separated by commas' + description: 'Fields to include in output CSV, separated by commas', }), noTruncate: Flags.boolean({ char: 't', required: false, default: false, description: - 'Do not truncate fields longer than 32,767 characters (the cell limit in Excel)' - }) + 'Do not truncate fields longer than 32,767 characters (the cell limit in Excel)', + }), }; static readonly examples = [ - 'saf convert hdf2csv -i rhel7-results.json -o rhel7.csv --fields "Results Set,Status,ID,Title,Severity"' + 'saf convert hdf2csv -i rhel7-results.json -o rhel7.csv --fields "Results Set,Status,ID,Title,Severity"', ]; convertRows( evaluation: ContextualizedEvaluation, filename: string, - fieldsToAdd: string[] + fieldsToAdd: string[], ): ControlSetRows { const controls = - evaluation.contains.flatMap((profile) => profile.contains) || []; - return controls.map((ctrl) => convertRow(filename, ctrl, fieldsToAdd)); + evaluation.contains.flatMap(profile => profile.contains) || [] + return controls.map(ctrl => convertRow(filename, ctrl, fieldsToAdd)) } async run() { - const {flags} = await this.parse(HDF2CSV); + const {flags} = await this.parse(HDF2CSV) const contextualizedEvaluation = contextualizeEvaluation( - JSON.parse(fs.readFileSync(flags.input, 'utf8')) - ); + JSON.parse(fs.readFileSync(flags.input, 'utf8')), + ) // Convert all controls from a file to ControlSetRows let rows: ControlSetRows = this.convertRows( contextualizedEvaluation, convertFullPathToFilename(flags.input), - flags.fields.split(',') - ); + flags.fields.split(','), + ) rows = rows.map((row, index) => { - const cleanedRow: Record = {}; + const cleanedRow: Record = {} for (const key in row) { if (row[key].length > 32767) { if ('ID' in row) { console.error( - `Field ${key} of control ${row.ID} is longer than 32,767 characters and has been truncated for compatibility with Excel. To disable this behavior use the option --noTruncate` - ); + `Field ${key} of control ${row.ID} is longer than 32,767 characters and has been truncated for compatibility with Excel. To disable this behavior use the option --noTruncate`, + ) } else { console.error( - `Field ${key} of control at index ${index} is longer than 32,767 characters and has been truncated for compatibility with Excel. To disable this behavior use the option --noTruncate` - ); + `Field ${key} of control at index ${index} is longer than 32,767 characters and has been truncated for compatibility with Excel. 
To disable this behavior use the option --noTruncate`, + ) } cleanedRow[key] = _.truncate(row[key], { length: 32757, - omission: 'TRUNCATED' - }); + omission: 'TRUNCATED', + }) } else { - cleanedRow[key] = row[key]; + cleanedRow[key] = row[key] } } - return cleanedRow; - }); - await new ObjectsToCsv(rows).toDisk(flags.output); + return cleanedRow + }) + await new ObjectsToCsv(rows).toDisk(flags.output) } } diff --git a/src/commands/convert/hdf2splunk.ts b/src/commands/convert/hdf2splunk.ts index 58ff4eff2..d3195113f 100644 --- a/src/commands/convert/hdf2splunk.ts +++ b/src/commands/convert/hdf2splunk.ts @@ -1,8 +1,8 @@ -import {Command, Flags} from '@oclif/core'; -import {FromHDFToSplunkMapper} from '@mitre/hdf-converters'; -import {convertFullPathToFilename} from '../../utils/global'; -import fs from 'fs'; -import {createWinstonLogger, getHDFSummary} from '../../utils/logging'; +import {Command, Flags} from '@oclif/core' +import {FromHDFToSplunkMapper} from '@mitre/hdf-converters' +import {convertFullPathToFilename} from '../../utils/global' +import fs from 'fs' +import {createWinstonLogger, getHDFSummary} from '../../utils/logging' export default class HDF2Splunk extends Command { static readonly usage = @@ -16,83 +16,83 @@ export default class HDF2Splunk extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input HDF file' + description: 'Input HDF file', }), host: Flags.string({ char: 'H', required: true, - description: 'Splunk Hostname or IP' + description: 'Splunk Hostname or IP', }), port: Flags.integer({ char: 'P', required: false, description: 'Splunk management port (also known as the Universal Forwarder port)', - default: 8089 + default: 8089, }), scheme: Flags.string({ char: 's', required: false, description: 'HTTP Scheme used for communication with splunk', default: 'https', - options: ['http', 'https'] + options: ['http', 'https'], }), username: Flags.string({ char: 'u', required: false, description: 'Your Splunk username', - exclusive: ['token'] + exclusive: ['token'], }), password: Flags.string({ char: 'p', required: false, description: 'Your Splunk password', - exclusive: ['token'] + exclusive: ['token'], }), token: Flags.string({ char: 't', required: false, description: 'Your Splunk API Token', - exclusive: ['username', 'password'] + exclusive: ['username', 'password'], }), index: Flags.string({ char: 'I', required: true, - description: 'Splunk index to import HDF data into' + description: 'Splunk index to import HDF data into', }), logLevel: Flags.string({ char: 'L', required: false, default: 'info', - options: ['info', 'warn', 'debug', 'verbose'] - }) + options: ['info', 'warn', 'debug', 'verbose'], + }), }; static readonly examples = [ 'saf convert hdf2splunk -i rhel7-results.json -H 127.0.0.1 -u admin -p Valid_password! 
-I hdf', - 'saf convert hdf2splunk -i rhel7-results.json -H 127.0.0.1 -t your.splunk.token -I hdf' + 'saf convert hdf2splunk -i rhel7-results.json -H 127.0.0.1 -t your.splunk.token -I hdf', ]; async run() { - const {flags} = await this.parse(HDF2Splunk); - const logger = createWinstonLogger('hdf2splunk', flags.logLevel); + const {flags} = await this.parse(HDF2Splunk) + const logger = createWinstonLogger('hdf2splunk', flags.logLevel) if (!(flags.username && flags.password) && !flags.token) { logger.error( - 'Please provide either a Username and Password or a Splunk token' - ); + 'Please provide either a Username and Password or a Splunk token', + ) throw new Error( - 'Please provide either a Username and Password or a Splunk token' - ); + 'Please provide either a Username and Password or a Splunk token', + ) } logger.warn( - 'Please ensure the necessary configuration changes for your Splunk server have been configured to prevent data loss. See https://github.com/mitre/saf/wiki/Splunk-Configuration' - ); - const inputFile = JSON.parse(fs.readFileSync(flags.input, 'utf8')); + 'Please ensure the necessary configuration changes for your Splunk server have been configured to prevent data loss. See https://github.com/mitre/saf/wiki/Splunk-Configuration', + ) + const inputFile = JSON.parse(fs.readFileSync(flags.input, 'utf8')) logger.info( - `Input File "${convertFullPathToFilename(flags.input)}": ${getHDFSummary(inputFile)}` - ); + `Input File "${convertFullPathToFilename(flags.input)}": ${getHDFSummary(inputFile)}`, + ) await new FromHDFToSplunkMapper(inputFile, logger).toSplunk( { host: flags.host, @@ -101,9 +101,9 @@ export default class HDF2Splunk extends Command { username: flags.username, password: flags.password, sessionKey: flags.token, - index: flags.index + index: flags.index, }, - convertFullPathToFilename(flags.input) - ); + convertFullPathToFilename(flags.input), + ) } } diff --git a/src/commands/convert/hdf2xccdf.ts b/src/commands/convert/hdf2xccdf.ts index fa62b0706..86372b936 100644 --- a/src/commands/convert/hdf2xccdf.ts +++ b/src/commands/convert/hdf2xccdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {FromHDFToXCCDFMapper as Mapper} from '@mitre/hdf-converters'; -import {default as files} from '../../resources/files.json'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {FromHDFToXCCDFMapper as Mapper} from '@mitre/hdf-converters' +import {default as files} from '../../resources/files.json' export default class HDF2XCCDF extends Command { static readonly usage = @@ -10,7 +10,7 @@ export default class HDF2XCCDF extends Command { static readonly description = 'Translate an HDF file into an XCCDF XML file'; static readonly examples = [ - 'saf convert hdf2xccdf -i hdf_input.json -o xccdf-results.xml' + 'saf convert hdf2xccdf -i hdf_input.json -o xccdf-results.xml', ]; static readonly flags = { @@ -18,22 +18,22 @@ export default class HDF2XCCDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input HDF JSON file' + description: 'Input HDF JSON file', }), output: Flags.string({ char: 'o', required: true, - description: 'Output XCCDF XML file' - }) + description: 'Output XCCDF XML file', + }), }; async run() { - const {flags} = await this.parse(HDF2XCCDF); + const {flags} = await this.parse(HDF2XCCDF) const converter = new Mapper( fs.readFileSync(flags.input, 'utf8'), - files['xccdfTemplate.xml'].data - ); - fs.writeFileSync(flags.output, converter.toXCCDF()); + 
files['xccdfTemplate.xml'].data, + ) + fs.writeFileSync(flags.output, converter.toXCCDF()) } } diff --git a/src/commands/convert/index.ts b/src/commands/convert/index.ts index 8cf041dcf..a36552576 100644 --- a/src/commands/convert/index.ts +++ b/src/commands/convert/index.ts @@ -21,25 +21,25 @@ import { TrufflehogResults, TwistlockResults, XCCDFResultsMapper, - ZapMapper -} from '@mitre/hdf-converters'; -import fs from 'fs'; -import _ from 'lodash'; -import {checkSuffix, convertFullPathToFilename} from '../../utils/global'; -import path from 'path'; -import ASFF2HDF from './asff2hdf'; -import {Command, Flags} from '@oclif/core'; -import Zap2HDF from './zap2hdf'; + ZapMapper, +} from '@mitre/hdf-converters' +import fs from 'fs' +import _ from 'lodash' +import {checkSuffix, convertFullPathToFilename} from '../../utils/global' +import path from 'path' +import ASFF2HDF from './asff2hdf' +import {Command, Flags} from '@oclif/core' +import Zap2HDF from './zap2hdf' function getInputFilename(): string { const inputFileIndex = process.argv.findIndex( - (param) => param.toLowerCase() === '-i' || param.toLowerCase() === '--input' - ); + param => param.toLowerCase() === '-i' || param.toLowerCase() === '--input', + ) if (inputFileIndex === -1) { - return process.env.INPUT_FILE ?? ''; + return process.env.INPUT_FILE ?? '' } - return process.argv[inputFileIndex + 1]; + return process.argv[inputFileIndex + 1] } export default class Convert extends Command { @@ -52,31 +52,31 @@ export default class Convert extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input results set file' + description: 'Input results set file', }), output: Flags.string({ char: 'o', required: true, - description: 'Output results sets' + description: 'Output results sets', }), - ...Convert.getFlagsForInputFile(getInputFilename()) + ...Convert.getFlagsForInputFile(getInputFilename()), }; static getFlagsForInputFile(filePath: string) { if (filePath) { Convert.detectedType = fingerprint({ data: fs.readFileSync(filePath, 'utf8'), - filename: convertFullPathToFilename(filePath) - }); + filename: convertFullPathToFilename(filePath), + }) switch ( Convert.detectedType // skipcq: JS-0047 ) { case 'asff': { - return ASFF2HDF.flags; + return ASFF2HDF.flags } case 'zap': { - return Zap2HDF.flags; + return Zap2HDF.flags } case 'anchoregrype': @@ -99,285 +99,287 @@ export default class Convert extends Command { case 'trufflehog': case 'twistlock': case 'xccdf': { - return {}; + return {} } } } - return {}; + return {} } static detectedType: string; async run() { // skipcq: JS-0044 - const {flags} = await this.parse(Convert); - let converter; + const {flags} = await this.parse(Convert) + let converter switch (Convert.detectedType) { case 'anchoregrype': { converter = new AnchoreGrypeMapper( - fs.readFileSync(flags.input, 'utf8') - ); + fs.readFileSync(flags.input, 'utf8'), + ) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'asff': { - const securityhub = _.get(flags, 'securityhub') as string[]; - const files = securityhub?.map((file) => fs.readFileSync(file, 'utf8')); + const securityhub = _.get(flags, 'securityhub') as string[] + const files = securityhub?.map(file => fs.readFileSync(file, 'utf8')) converter = new ASFFResults( fs.readFileSync(flags.input, 'utf8'), - files - ); + files, + ) - const results = converter.toHdf(); + const results = converter.toHdf() - fs.mkdirSync(flags.output); + 
fs.mkdirSync(flags.output) _.forOwn(results, (result, filename) => { fs.writeFileSync( path.join(flags.output, checkSuffix(filename)), - JSON.stringify(result, null, 2) - ); - }); - break; + JSON.stringify(result, null, 2), + ) + }) + break } case 'burp': { - converter = new BurpSuiteMapper(fs.readFileSync(flags.input, 'utf8')); + converter = new BurpSuiteMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'conveyor': { - converter = new ConveyorResults(fs.readFileSync(flags.input, 'utf8')); - const results = converter.toHdf(); - fs.mkdirSync(flags.output); + converter = new ConveyorResults(fs.readFileSync(flags.input, 'utf8')) + const results = converter.toHdf() + fs.mkdirSync(flags.output) for (const [filename, result] of Object.entries(results)) { fs.writeFileSync( path.join(flags.output, checkSuffix(filename as string)), - JSON.stringify(result, null, 2) - ); + JSON.stringify(result, null, 2), + ) } - break; + break } case 'checklist': { - converter = new ChecklistResults(fs.readFileSync(flags.input, 'utf8')); + converter = new ChecklistResults(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'dbProtect': { - converter = new DBProtectMapper(fs.readFileSync(flags.input, 'utf8')); + converter = new DBProtectMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'cyclonedx_sbom': { converter = new CycloneDXSBOMResults( - fs.readFileSync(flags.input, 'utf8') - ); + fs.readFileSync(flags.input, 'utf8'), + ) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'fortify': { - converter = new FortifyMapper(fs.readFileSync(flags.input, 'utf8')); + converter = new FortifyMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'jfrog': { - converter = new JfrogXrayMapper(fs.readFileSync(flags.input, 'utf8')); + converter = new JfrogXrayMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'msft_secure_score': { converter = new MsftSecureScoreMapper( - fs.readFileSync(flags.input, 'utf8') - ); + fs.readFileSync(flags.input, 'utf8'), + ) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'nessus': { - converter = new NessusResults(fs.readFileSync(flags.input, 'utf8')); - const result = converter.toHdf(); - const pluralResults = Array.isArray(result) ? result : []; - const singularResult = pluralResults.length === 0; + converter = new NessusResults(fs.readFileSync(flags.input, 'utf8')) + const result = converter.toHdf() + const pluralResults = Array.isArray(result) ? 
result : [] + const singularResult = pluralResults.length === 0 for (const element of pluralResults) { fs.writeFileSync( `${flags.output.replaceAll(/\.json/gi, '')}-${_.get(element, 'platform.target_id')}.json`, - JSON.stringify(element, null, 2) - ); + JSON.stringify(element, null, 2), + ) } + if (singularResult) { fs.writeFileSync( `${checkSuffix(flags.output)}`, - JSON.stringify(result, null, 2) - ); + JSON.stringify(result, null, 2), + ) } - break; + break } case 'neuvector': { - converter = new NeuVectorMapper(fs.readFileSync(flags.input, 'utf8')); + converter = new NeuVectorMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'netsparker': { - converter = new NetsparkerMapper(fs.readFileSync(flags.input, 'utf8')); + converter = new NetsparkerMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'nikto': { - converter = new NiktoMapper(fs.readFileSync(flags.input, 'utf8')); + converter = new NiktoMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'prisma': { converter = new PrismaMapper( - fs.readFileSync(flags.input, {encoding: 'utf8'}) - ); - const results = converter.toHdf(); + fs.readFileSync(flags.input, {encoding: 'utf8'}), + ) + const results = converter.toHdf() - fs.mkdirSync(flags.output); - _.forOwn(results, (result) => { + fs.mkdirSync(flags.output) + _.forOwn(results, result => { fs.writeFileSync( path.join( flags.output, - `${_.get(result, 'platform.target_id')}.json` + `${_.get(result, 'platform.target_id')}.json`, ), - JSON.stringify(result, null, 2) - ); - }); - break; + JSON.stringify(result, null, 2), + ) + }) + break } case 'sarif': { - converter = new SarifMapper(fs.readFileSync(flags.input, 'utf8')); + converter = new SarifMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'scoutsuite': { - converter = new ScoutsuiteMapper(fs.readFileSync(flags.input, 'utf8')); + converter = new ScoutsuiteMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'snyk': { - converter = new SnykResults(fs.readFileSync(flags.input, 'utf8')); - const result = converter.toHdf(); - const pluralResults = Array.isArray(result) ? result : []; - const singularResult = pluralResults.length === 0; + converter = new SnykResults(fs.readFileSync(flags.input, 'utf8')) + const result = converter.toHdf() + const pluralResults = Array.isArray(result) ? 
result : [] + const singularResult = pluralResults.length === 0 for (const element of pluralResults) { fs.writeFileSync( `${flags.output.replaceAll(/\.json/gi, '')}-${_.get(element, 'platform.target_id')}.json`, - JSON.stringify(element, null, 2) - ); + JSON.stringify(element, null, 2), + ) } + if (singularResult) { fs.writeFileSync( `${checkSuffix(flags.output)}`, - JSON.stringify(result, null, 2) - ); + JSON.stringify(result, null, 2), + ) } - break; + break } case 'trufflehog': { - converter = new TrufflehogResults(fs.readFileSync(flags.input, 'utf8')); + converter = new TrufflehogResults(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'twistlock': { - converter = new TwistlockResults(fs.readFileSync(flags.input, 'utf8')); + converter = new TwistlockResults(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'xccdf': { converter = new XCCDFResultsMapper( - fs.readFileSync(flags.input, 'utf8') - ); + fs.readFileSync(flags.input, 'utf8'), + ) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } case 'zap': { converter = new ZapMapper( fs.readFileSync(flags.input, 'utf8'), - _.get(flags, 'name') as string - ); + _.get(flags, 'name') as string, + ) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); - break; + JSON.stringify(converter.toHdf(), null, 2), + ) + break } default: { throw new Error(`Unknown filetype provided: ${getInputFilename()} The generic convert command should only be used for taking supported file-based security results and converting into Heimdall Data Format - For more information, run "saf convert --help"`); + For more information, run "saf convert --help"`) } } } diff --git a/src/commands/convert/ionchannel2hdf.ts b/src/commands/convert/ionchannel2hdf.ts index 9130a7359..7e168743e 100644 --- a/src/commands/convert/ionchannel2hdf.ts +++ b/src/commands/convert/ionchannel2hdf.ts @@ -1,13 +1,13 @@ -import {IonChannelAPIMapper, IonChannelMapper} from '@mitre/hdf-converters'; -import {Command, Flags} from '@oclif/core'; +import {IonChannelAPIMapper, IonChannelMapper} from '@mitre/hdf-converters' +import {Command, Flags} from '@oclif/core' import { checkInput, checkSuffix, - convertFullPathToFilename -} from '../../utils/global'; -import {createWinstonLogger} from '../../utils/logging'; -import fs from 'fs'; -import path from 'path'; + convertFullPathToFilename, +} from '../../utils/global' +import {createWinstonLogger} from '../../utils/logging' +import fs from 'fs' +import path from 'path' export default class IonChannel2HDF extends Command { static readonly usage = @@ -22,136 +22,136 @@ export default class IonChannel2HDF extends Command { char: 'i', description: 'Input IonChannel JSON file', multiple: true, - exclusive: ['apiKey'] + exclusive: ['apiKey'], }), apiKey: Flags.string({ char: 'a', description: 'API Key from Ion Channel user settings', - dependsOn: ['teamName'] + dependsOn: ['teamName'], }), teamName: Flags.string({ char: 't', description: 'Your team name that contains the project(s) you would like to pull data from', - dependsOn: ['apiKey'] + dependsOn: ['apiKey'], }), output: Flags.string({ char: 
'o', required: true, - description: 'Output JSON folder' + description: 'Output JSON folder', }), raw: Flags.boolean({ description: 'Output Ion Channel raw data', - dependsOn: ['apiKey'] + dependsOn: ['apiKey'], }), project: Flags.string({ char: 'p', description: 'The name of the project(s) you would like to pull', multiple: true, - dependsOn: ['apiKey'] + dependsOn: ['apiKey'], }), allProjects: Flags.boolean({ char: 'A', description: 'Pull all projects available within your team', - dependsOn: ['apiKey'] + dependsOn: ['apiKey'], }), logLevel: Flags.string({ char: 'L', default: 'info', - options: ['info', 'warn', 'debug', 'verbose'] - }) + options: ['info', 'warn', 'debug', 'verbose'], + }), }; async run() { - const {flags} = await this.parse(IonChannel2HDF); - const logger = createWinstonLogger('IonChannel2HDF', flags.logLevel); + const {flags} = await this.parse(IonChannel2HDF) + const logger = createWinstonLogger('IonChannel2HDF', flags.logLevel) if (!Array.isArray(flags.input) && !(flags.apiKey && flags.teamName)) { throw new Error( - 'Please either provide a list of input files or set the api key and the team name.' - ); + 'Please either provide a list of input files or set the api key and the team name.', + ) } if (flags.apiKey && flags.teamName && flags.allProjects) { - logger.debug('Creating Ion Channel API Client'); - const apiClient = new IonChannelAPIMapper(flags.apiKey); - logger.debug(`Setting team to ${flags.teamName}`); - await apiClient.setTeam(flags.teamName); - logger.debug(`Set team to ID ${apiClient.teamId}`); + logger.debug('Creating Ion Channel API Client') + const apiClient = new IonChannelAPIMapper(flags.apiKey) + logger.debug(`Setting team to ${flags.teamName}`) + await apiClient.setTeam(flags.teamName) + logger.debug(`Set team to ID ${apiClient.teamId}`) - fs.mkdirSync(flags.output); - const availableProjects = await apiClient.getProjects(); + fs.mkdirSync(flags.output) + const availableProjects = await apiClient.getProjects() for (const project of availableProjects) { - logger.info(`Pulling findings from ${project.name}`); - apiClient.projectId = project.id; - apiClient.analysisId = project.analysis_summary.analysis_id; - let filename = ''; - let json = {}; + logger.info(`Pulling findings from ${project.name}`) + apiClient.projectId = project.id + apiClient.analysisId = project.analysis_summary.analysis_id + let filename = '' + let json = {} if (flags.raw) { - filename = project.name + '_raw.json'; - json = await apiClient.getAnalysis().then(({analysis}) => analysis); + filename = project.name + '_raw.json' + json = await apiClient.getAnalysis().then(({analysis}) => analysis) } else { - filename = project.name + '.json'; - json = await apiClient.toHdf(); + filename = project.name + '.json' + json = await apiClient.toHdf() } fs.writeFileSync( path.join(flags.output, filename), - JSON.stringify(json, null, 2) - ); + JSON.stringify(json, null, 2), + ) } } else if (flags.apiKey && flags.teamName && Array.isArray(flags.project)) { - logger.debug('Creating Ion Channel API Client'); - const apiClient = new IonChannelAPIMapper(flags.apiKey); - logger.debug(`Setting team to ${flags.teamName}`); - await apiClient.setTeam(flags.teamName); - logger.debug(`Set team to ID ${apiClient.teamId}`); + logger.debug('Creating Ion Channel API Client') + const apiClient = new IonChannelAPIMapper(flags.apiKey) + logger.debug(`Setting team to ${flags.teamName}`) + await apiClient.setTeam(flags.teamName) + logger.debug(`Set team to ID ${apiClient.teamId}`) - fs.mkdirSync(flags.output); + 
fs.mkdirSync(flags.output) for (const projectName of flags.project) { - logger.info(`Pulling findings from ${projectName}`); - await apiClient.setProject(projectName); - logger.debug(`Set project ID ${apiClient.projectId}`); - let filename = ''; - let json = {}; + logger.info(`Pulling findings from ${projectName}`) + await apiClient.setProject(projectName) + logger.debug(`Set project ID ${apiClient.projectId}`) + let filename = '' + let json = {} if (flags.raw) { - filename = projectName + '_raw.json'; - json = await apiClient.getAnalysis().then(({analysis}) => analysis); + filename = projectName + '_raw.json' + json = await apiClient.getAnalysis().then(({analysis}) => analysis) } else { - filename = projectName + '.json'; - json = await apiClient.toHdf(); + filename = projectName + '.json' + json = await apiClient.toHdf() } fs.writeFileSync( path.join(flags.output, filename), - JSON.stringify(json, null, 2) - ); + JSON.stringify(json, null, 2), + ) } } else if (Array.isArray(flags.input)) { - logger.debug('Processing input files'); - fs.mkdirSync(flags.output); + logger.debug('Processing input files') + fs.mkdirSync(flags.output) for (const filename of flags.input) { // Check for correct input type - const data = fs.readFileSync(filename, 'utf8'); + const data = fs.readFileSync(filename, 'utf8') checkInput( {data: data, filename: filename}, 'ionchannel', - 'IonChannel JSON' - ); + 'IonChannel JSON', + ) - logger.debug(`Processing...${filename}`); + logger.debug(`Processing...${filename}`) fs.writeFileSync( path.join( flags.output, - checkSuffix(convertFullPathToFilename(filename)) + checkSuffix(convertFullPathToFilename(filename)), ), - JSON.stringify(new IonChannelMapper(data).toHdf()) - ); + JSON.stringify(new IonChannelMapper(data).toHdf()), + ) } } else { throw new TypeError( - 'Please provide a list of input files, a list of projects, or use the --allProjects flag.' 
- ); + 'Please provide a list of input files, a list of projects, or use the --allProjects flag.', + ) } } } diff --git a/src/commands/convert/jfrog_xray2hdf.ts b/src/commands/convert/jfrog_xray2hdf.ts index ac8e7f427..e50f298f9 100644 --- a/src/commands/convert/jfrog_xray2hdf.ts +++ b/src/commands/convert/jfrog_xray2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {JfrogXrayMapper as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {JfrogXrayMapper as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class JfrogXray2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class JfrogXray2HDF extends Command { 'Translate a JFrog Xray results JSON file into a Heimdall Data Format JSON file'; static readonly examples = [ - 'saf convert jfrog_xray2hdf -i xray_results.json -o output-hdf-name.json' + 'saf convert jfrog_xray2hdf -i xray_results.json -o output-hdf-name.json', ]; static readonly flags = { @@ -19,35 +19,35 @@ export default class JfrogXray2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input JFrog JSON File' + description: 'Input JFrog JSON File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(JfrogXray2HDF); + const {flags} = await this.parse(JfrogXray2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); + const data = fs.readFileSync(flags.input, 'utf8') checkInput( {data, filename: flags.input}, 'jfrog', - 'JFrog Xray results JSON' - ); + 'JFrog Xray results JSON', + ) - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/msft_secure2hdf.ts b/src/commands/convert/msft_secure2hdf.ts index 6c2780dd4..16035b153 100644 --- a/src/commands/convert/msft_secure2hdf.ts +++ b/src/commands/convert/msft_secure2hdf.ts @@ -1,47 +1,47 @@ -import {ExecJSON} from 'inspecjs'; -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import https from 'https'; -import {MsftSecureScoreResults as Mapper} from '@mitre/hdf-converters'; -import {ClientSecretCredential} from '@azure/identity'; +import {ExecJSON} from 'inspecjs' +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import https from 'https' +import {MsftSecureScoreResults as Mapper} from '@mitre/hdf-converters' +import {ClientSecretCredential} from '@azure/identity' import { Client, ClientOptions, PageIterator, - PageIteratorCallback -} from '@microsoft/microsoft-graph-client'; + PageIteratorCallback, +} from '@microsoft/microsoft-graph-client' import { SecureScore, - SecureScoreControlProfile -} from '@microsoft/microsoft-graph-types'; -import {TokenCredentialAuthenticationProvider} from '@microsoft/microsoft-graph-client/authProviders/azureTokenCredentials'; -import _ from 'lodash'; + SecureScoreControlProfile, +} from 
'@microsoft/microsoft-graph-types' +import {TokenCredentialAuthenticationProvider} from '@microsoft/microsoft-graph-client/authProviders/azureTokenCredentials' +import _ from 'lodash' function processInputs( scoreDoc: SecureScore, profiles: {value: SecureScoreControlProfile[]}, output: string, - withRaw: boolean + withRaw: boolean, ) { const converter = new Mapper( JSON.stringify({ secureScore: scoreDoc, - profiles + profiles, }), - withRaw - ); + withRaw, + ) for (const hdfReport of converter.toHdf()) { const auxData = ( (hdfReport as ExecJSON.Execution & {passthrough: Record}) .passthrough?.auxiliary_data as Record[] - ).find((auxDat) => auxDat?.name === 'Microsoft Secure Score') - ?.data as Record; - const reportId = auxData?.reportId as string; + ).find(auxDat => auxDat?.name === 'Microsoft Secure Score') + ?.data as Record + const reportId = auxData?.reportId as string fs.writeFileSync( `${output.replaceAll(/\.json/gi, '')}-${reportId}.json`, - JSON.stringify(hdfReport) - ); + JSON.stringify(hdfReport), + ) } } @@ -49,7 +49,7 @@ export default class MsftSecure2HDF extends Command { static readonly usage = [ 'convert msft_secure2hdf -r -p -o [-h]', 'convert msft_secure2hdf -t -a -s -o [-h]', - 'convert msft_secure2hdf -i -o [-h]' + 'convert msft_secure2hdf -i -o [-h]', ]; static readonly description = @@ -58,7 +58,7 @@ export default class MsftSecure2HDF extends Command { static readonly examples = [ 'saf convert msft_secure2hdf -p secureScore.json -r secureScoreControlProfiles -o output-hdf-name.json', 'saf convert msft_secure2hdf -t "12345678-1234-1234-1234-1234567890abcd" -a "12345678-1234-1234-1234-1234567890abcd" -s "aaaaa~bbbbbbbbbbbbbbbbbbbbbbbbb-cccccccc" -o output-hdf-name.json [-I | -C ] [-t ...]', - 'saf convert msft_secure2hdf -i <(jq \'{"secureScore": .[0], "profiles": .[1]}\' secureScore.json secureScoreControlProfiles.json) -o output-hdf-name.json' + 'saf convert msft_secure2hdf -i <(jq \'{"secureScore": .[0], "profiles": .[1]}\' secureScore.json secureScoreControlProfiles.json) -o output-hdf-name.json', ]; static readonly flags = { @@ -68,7 +68,7 @@ export default class MsftSecure2HDF extends Command { required: false, description: '{secureScore: }, profiles: ', - exclusive: ['inputProfiles'] + exclusive: ['inputProfiles'], }), inputProfiles: Flags.string({ char: 'p', @@ -76,7 +76,7 @@ export default class MsftSecure2HDF extends Command { description: 'Input Microsoft Graph API "GET /security/secureScoreControlProfiles" output JSON File', dependsOn: ['inputScoreDoc', 'inputProfiles'], - exclusive: ['tenantId', 'combinedInputs'] + exclusive: ['tenantId', 'combinedInputs'], }), inputScoreDoc: Flags.string({ char: 'r', @@ -84,127 +84,127 @@ export default class MsftSecure2HDF extends Command { description: 'Input Microsoft Graph API "GET /security/secureScores" output JSON File', dependsOn: ['inputScoreDoc', 'inputProfiles'], - exclusive: ['tenantId', 'combinedInputs'] + exclusive: ['tenantId', 'combinedInputs'], }), tenantId: Flags.string({ char: 't', required: false, description: 'Azure tenant ID', dependsOn: ['tenantId', 'appId', 'appSecret'], - exclusive: ['inputProfiles', 'combinedInputs'] + exclusive: ['inputProfiles', 'combinedInputs'], }), appId: Flags.string({ char: 'a', required: false, description: 'Azure application ID', dependsOn: ['tenantId', 'appId', 'appSecret'], - exclusive: ['inputProfiles', 'combinedInputs'] + exclusive: ['inputProfiles', 'combinedInputs'], }), appSecret: Flags.string({ char: 's', required: false, description: 'Azure application secret', 
dependsOn: ['tenantId', 'appId', 'appSecret'], - exclusive: ['inputProfiles', 'combinedInputs'] + exclusive: ['inputProfiles', 'combinedInputs'], }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON file' + description: 'Output HDF JSON file', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' + description: 'Include raw input file in HDF JSON file', }), certificate: Flags.string({ char: 'C', required: false, description: 'Trusted signing certificate file', - exclusive: ['input', 'insecure'] + exclusive: ['input', 'insecure'], }), insecure: Flags.boolean({ char: 'I', required: false, default: false, description: 'Disable SSL verification, this is insecure.', - exclusive: ['input', 'certificate'] - }) + exclusive: ['input', 'certificate'], + }), }; async run() { - const {flags} = await this.parse(MsftSecure2HDF); - let scoreDoc: SecureScore; - let profilesDoc: {value: SecureScoreControlProfile[]}; + const {flags} = await this.parse(MsftSecure2HDF) + let scoreDoc: SecureScore + let profilesDoc: {value: SecureScoreControlProfile[]} if ( flags.inputProfiles !== undefined && flags.inputScoreDoc !== undefined ) { // load from pre-downloaded files - scoreDoc = JSON.parse(fs.readFileSync(flags.inputScoreDoc, 'utf8')); - profilesDoc = JSON.parse(fs.readFileSync(flags.inputProfiles, 'utf8')); - processInputs(scoreDoc, profilesDoc, flags.output, flags['with-raw']); + scoreDoc = JSON.parse(fs.readFileSync(flags.inputScoreDoc, 'utf8')) + profilesDoc = JSON.parse(fs.readFileSync(flags.inputProfiles, 'utf8')) + processInputs(scoreDoc, profilesDoc, flags.output, flags['with-raw']) } else if (flags.combinedInputs !== undefined) { const combined = JSON.parse( - fs.readFileSync(flags.combinedInputs, 'utf8') - ); - const scoreDoc = combined.secureScore; - const profilesDoc = combined.profiles; - processInputs(scoreDoc, profilesDoc, flags.output, flags['with-raw']); + fs.readFileSync(flags.combinedInputs, 'utf8'), + ) + const scoreDoc = combined.secureScore + const profilesDoc = combined.profiles + processInputs(scoreDoc, profilesDoc, flags.output, flags['with-raw']) } else if ( flags.tenantId !== undefined && flags.appId !== undefined && flags.appSecret !== undefined ) { // attempt to use the Graph API to pull files - const tenantId = flags.tenantId; - const appId = flags.appId; - const appSecret = flags.appSecret; - const creds = new ClientSecretCredential(tenantId, appId, appSecret); + const tenantId = flags.tenantId + const appId = flags.appId + const appSecret = flags.appSecret + const creds = new ClientSecretCredential(tenantId, appId, appSecret) const graphClientOpts: ClientOptions = { authProvider: new TokenCredentialAuthenticationProvider(creds, { - scopes: ['https://graph.microsoft.com/.default'] + scopes: ['https://graph.microsoft.com/.default'], }), fetchOptions: { agent: new https.Agent({ // Disable HTTPS verification if requested rejectUnauthorized: !flags.insecure, // Pass an SSL certificate to trust - ca: flags.certificate - ? fs.readFileSync(flags.certificate, 'utf8') - : undefined - }) - } - }; - const graphClient: Client = Client.initWithMiddleware(graphClientOpts); + ca: flags.certificate ? 
+ fs.readFileSync(flags.certificate, 'utf8') : + undefined, + }), + }, + } + const graphClient: Client = Client.initWithMiddleware(graphClientOpts) - scoreDoc = await graphClient.api('/security/secureScores').get(); + scoreDoc = await graphClient.api('/security/secureScores').get() profilesDoc = await graphClient .api('/security/secureScoreControlProfiles') - .get(); + .get() - const allProfiles: SecureScoreControlProfile[] = []; + const allProfiles: SecureScoreControlProfile[] = [] const callback: PageIteratorCallback = (v: SecureScoreControlProfile) => { - allProfiles.push(v); - return true; - }; + allProfiles.push(v) + return true + } const pagingIterator = new PageIterator( graphClient, profilesDoc, - callback - ); + callback, + ) - await pagingIterator.iterate(); - profilesDoc.value = allProfiles; + await pagingIterator.iterate() + profilesDoc.value = allProfiles - processInputs(scoreDoc, profilesDoc, flags.output, flags['with-raw']); + processInputs(scoreDoc, profilesDoc, flags.output, flags['with-raw']) } else { throw new Error( - 'Invalid arguments provided. Include (-a, -s, -t) or (-r, -p) or (-h)' - ); + 'Invalid arguments provided. Include (-a, -s, -t) or (-r, -p) or (-h)', + ) } } } diff --git a/src/commands/convert/nessus2hdf.ts b/src/commands/convert/nessus2hdf.ts index 9225d590f..a04fb22fa 100644 --- a/src/commands/convert/nessus2hdf.ts +++ b/src/commands/convert/nessus2hdf.ts @@ -1,8 +1,8 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {NessusResults as Mapper} from '@mitre/hdf-converters'; -import _ from 'lodash'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {NessusResults as Mapper} from '@mitre/hdf-converters' +import _ from 'lodash' +import {checkInput, checkSuffix} from '../../utils/global' export default class Nessus2HDF extends Command { static readonly usage = @@ -12,7 +12,7 @@ export default class Nessus2HDF extends Command { "Translate a Nessus XML results file into a Heimdall Data Format JSON file\nThe current iteration maps all plugin families except 'Policy Compliance'\nA separate HDF JSON is generated for each host reported in the Nessus Report."; static readonly examples = [ - 'saf convert nessus2hdf -i nessus_results.xml -o output-hdf-name.json' + 'saf convert nessus2hdf -i nessus_results.xml -o output-hdf-name.json', ]; static readonly flags = { @@ -20,45 +20,45 @@ export default class Nessus2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input Nessus XML File' + description: 'Input Nessus XML File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(Nessus2HDF); + const {flags} = await this.parse(Nessus2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); + const data = fs.readFileSync(flags.input, 'utf8') checkInput( {data, filename: flags.input}, 'nessus', - 'Nessus XML results file' - ); + 'Nessus XML results file', + ) - const converter = new Mapper(data, flags['with-raw']); - const result = converter.toHdf(); + const converter = new Mapper(data, flags['with-raw']) + const result = converter.toHdf() if (Array.isArray(result)) { for (const element 
of result) { fs.writeFileSync( `${flags.output.replaceAll(/\.json/gi, '')}-${_.get(element, 'platform.target_id')}.json`, - JSON.stringify(element, null, 2) - ); + JSON.stringify(element, null, 2), + ) } } else { fs.writeFileSync( `${checkSuffix(flags.output)}`, - JSON.stringify(result, null, 2) - ); + JSON.stringify(result, null, 2), + ) } } } diff --git a/src/commands/convert/netsparker2hdf.ts b/src/commands/convert/netsparker2hdf.ts index 1623f838b..6bcfd9566 100644 --- a/src/commands/convert/netsparker2hdf.ts +++ b/src/commands/convert/netsparker2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {NetsparkerMapper as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {NetsparkerMapper as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class Netsparker2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class Netsparker2HDF extends Command { 'Translate a Netsparker XML results file into a Heimdall Data Format JSON file\nThe current iteration only works with Netsparker Enterprise Vulnerabilities Scan.'; static readonly examples = [ - 'saf convert netsparker2hdf -i netsparker_results.xml -o output-hdf-name.json' + 'saf convert netsparker2hdf -i netsparker_results.xml -o output-hdf-name.json', ]; static readonly flags = { @@ -19,35 +19,35 @@ export default class Netsparker2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input Netsparker XML File' + description: 'Input Netsparker XML File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(Netsparker2HDF); + const {flags} = await this.parse(Netsparker2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); + const data = fs.readFileSync(flags.input, 'utf8') checkInput( {data, filename: flags.input}, 'netsparker', - 'Netsparker XML results file' - ); + 'Netsparker XML results file', + ) - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/nikto2hdf.ts b/src/commands/convert/nikto2hdf.ts index 89a7a0d04..70db3532e 100644 --- a/src/commands/convert/nikto2hdf.ts +++ b/src/commands/convert/nikto2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {NiktoMapper as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {NiktoMapper as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class Nikto2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class Nikto2HDF extends Command { 'Translate a Nikto results JSON file into a Heimdall Data Format JSON file\nNote: Current this mapper only supports single target Nikto Scans'; 
static readonly examples = [ - 'saf convert nikto2hdf -i nikto-results.json -o output-hdf-name.json' + 'saf convert nikto2hdf -i nikto-results.json -o output-hdf-name.json', ]; static readonly flags = { @@ -19,31 +19,31 @@ export default class Nikto2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input Niktop Results JSON File' + description: 'Input Niktop Results JSON File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(Nikto2HDF); + const {flags} = await this.parse(Nikto2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); - checkInput({data, filename: flags.input}, 'nikto', 'Nikto results JSON'); + const data = fs.readFileSync(flags.input, 'utf8') + checkInput({data, filename: flags.input}, 'nikto', 'Nikto results JSON') - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/prisma2hdf.ts b/src/commands/convert/prisma2hdf.ts index 985cb4e04..e8be4f745 100644 --- a/src/commands/convert/prisma2hdf.ts +++ b/src/commands/convert/prisma2hdf.ts @@ -1,8 +1,8 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {PrismaMapper as Mapper} from '@mitre/hdf-converters'; -import path from 'path'; -import _ from 'lodash'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {PrismaMapper as Mapper} from '@mitre/hdf-converters' +import path from 'path' +import _ from 'lodash' export default class Prisma2HDF extends Command { static readonly usage = @@ -12,7 +12,7 @@ export default class Prisma2HDF extends Command { 'Translate a Prisma Cloud Scan Report CSV file into Heimdall Data Format JSON files'; static readonly examples = [ - 'saf convert prisma2hdf -i prismacloud-report.csv -o output-hdf-name.json' + 'saf convert prisma2hdf -i prismacloud-report.csv -o output-hdf-name.json', ]; static readonly flags = { @@ -20,32 +20,32 @@ export default class Prisma2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Prisma Cloud Scan Report CSV' + description: 'Prisma Cloud Scan Report CSV', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' - }) + description: 'Output HDF JSON File', + }), }; async run() { - const {flags} = await this.parse(Prisma2HDF); + const {flags} = await this.parse(Prisma2HDF) const converter = new Mapper( - fs.readFileSync(flags.input, {encoding: 'utf8'}) - ); - const results = converter.toHdf(); + fs.readFileSync(flags.input, {encoding: 'utf8'}), + ) + const results = converter.toHdf() if (!fs.existsSync(flags.output)) { - fs.mkdirSync(flags.output); + fs.mkdirSync(flags.output) } - _.forOwn(results, (result) => { + _.forOwn(results, result => { fs.writeFileSync( path.join(flags.output, `${_.get(result, 'platform.target_id')}.json`), - JSON.stringify(result, null, 2) - ); - }); + JSON.stringify(result, null, 2), + ) + }) } } diff --git a/src/commands/convert/prowler2hdf.ts b/src/commands/convert/prowler2hdf.ts index 96e6c52c4..0a59760d8 
100644 --- a/src/commands/convert/prowler2hdf.ts +++ b/src/commands/convert/prowler2hdf.ts @@ -1,9 +1,9 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {ASFFResults as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; -import _ from 'lodash'; -import path from 'path'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {ASFFResults as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' +import _ from 'lodash' +import path from 'path' export default class Prowler2HDF extends Command { static readonly usage = @@ -13,7 +13,7 @@ export default class Prowler2HDF extends Command { 'Translate a Prowler-derived AWS Security Finding Format results from JSONL into a Heimdall Data Format JSON file'; static readonly examples = [ - 'saf convert prowler2hdf -i prowler-asff.json -o output-folder' + 'saf convert prowler2hdf -i prowler-asff.json -o output-folder', ]; static readonly flags = { @@ -21,36 +21,36 @@ export default class Prowler2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input Prowler ASFF JSON File' + description: 'Input Prowler ASFF JSON File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON Folder' - }) + description: 'Output HDF JSON Folder', + }), }; async run() { - const {flags} = await this.parse(Prowler2HDF); - const data = fs.readFileSync(flags.input, 'utf8'); + const {flags} = await this.parse(Prowler2HDF) + const data = fs.readFileSync(flags.input, 'utf8') checkInput( {data: data, filename: flags.input}, 'asff', - 'Prowler-derived AWS Security Finding Format results' - ); - const converter = new Mapper(data); - const results = converter.toHdf(); + 'Prowler-derived AWS Security Finding Format results', + ) + const converter = new Mapper(data) + const results = converter.toHdf() // Create output folder if not exists if (!fs.existsSync(flags.output)) { - fs.mkdirSync(flags.output); + fs.mkdirSync(flags.output) } _.forOwn(results, (result, filename) => { fs.writeFileSync( path.join(flags.output, checkSuffix(filename)), - JSON.stringify(result, null, 2) - ); - }); + JSON.stringify(result, null, 2), + ) + }) } } diff --git a/src/commands/convert/sarif2hdf.ts b/src/commands/convert/sarif2hdf.ts index 722ea48da..d35460fdf 100644 --- a/src/commands/convert/sarif2hdf.ts +++ b/src/commands/convert/sarif2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {SarifMapper as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {SarifMapper as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class Sarif2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class Sarif2HDF extends Command { 'Translate a SARIF JSON file into a Heimdall Data Format JSON file\nSARIF level to HDF impact Mapping:\nSARIF level error -> HDF impact 0.7\nSARIF level warning -> HDF impact 0.5\nSARIF level note -> HDF impact 0.3\nSARIF level none -> HDF impact 0.1\nSARIF level not provided -> HDF impact 0.1 as default'; static readonly examples = [ - 'saf convert sarif2hdf -i sarif-results.json -o output-hdf-name.json' + 'saf convert sarif2hdf -i sarif-results.json -o output-hdf-name.json', ]; static readonly flags = { @@ -19,31 +19,31 @@ export default 
class Sarif2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input SARIF JSON File' + description: 'Input SARIF JSON File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(Sarif2HDF); + const {flags} = await this.parse(Sarif2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); - checkInput({data, filename: flags.input}, 'sarif', 'SARIF JSON'); + const data = fs.readFileSync(flags.input, 'utf8') + checkInput({data, filename: flags.input}, 'sarif', 'SARIF JSON') - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/scoutsuite2hdf.ts b/src/commands/convert/scoutsuite2hdf.ts index 91b564d78..cecdd4d73 100644 --- a/src/commands/convert/scoutsuite2hdf.ts +++ b/src/commands/convert/scoutsuite2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {ScoutsuiteMapper as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {ScoutsuiteMapper as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class Scoutsuite2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class Scoutsuite2HDF extends Command { 'Translate a ScoutSuite results from a Javascript object into a Heimdall Data Format JSON file\nNote: Currently this mapper only supports AWS.'; static readonly examples = [ - 'saf convert scoutsuite2hdf -i scoutsuite-results.js -o output-hdf-name.json' + 'saf convert scoutsuite2hdf -i scoutsuite-results.js -o output-hdf-name.json', ]; static readonly flags = { @@ -19,35 +19,35 @@ export default class Scoutsuite2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input ScoutSuite Results JS File' + description: 'Input ScoutSuite Results JS File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(Scoutsuite2HDF); + const {flags} = await this.parse(Scoutsuite2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); + const data = fs.readFileSync(flags.input, 'utf8') checkInput( {data, filename: flags.input}, 'scoutsuite', - 'ScoutSuite results from a Javascript object' - ); + 'ScoutSuite results from a Javascript object', + ) - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/snyk2hdf.ts 
b/src/commands/convert/snyk2hdf.ts index aa20a2347..7e99c2237 100644 --- a/src/commands/convert/snyk2hdf.ts +++ b/src/commands/convert/snyk2hdf.ts @@ -1,8 +1,8 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {SnykResults as Mapper} from '@mitre/hdf-converters'; -import _ from 'lodash'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {SnykResults as Mapper} from '@mitre/hdf-converters' +import _ from 'lodash' +import {checkInput, checkSuffix} from '../../utils/global' export default class Snyk2HDF extends Command { static readonly usage = @@ -12,7 +12,7 @@ export default class Snyk2HDF extends Command { 'Translate a Snyk results JSON file into a Heimdall Data Format JSON file\nA separate HDF JSON is generated for each project reported in the Snyk Report.'; static readonly examples = [ - 'saf convert snyk2hdf -i snyk_results.json -o output-file-prefix' + 'saf convert snyk2hdf -i snyk_results.json -o output-file-prefix', ]; static readonly flags = { @@ -20,40 +20,40 @@ export default class Snyk2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input Snyk Results JSON File' + description: 'Input Snyk Results JSON File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' - }) + description: 'Output HDF JSON File', + }), }; async run() { - const {flags} = await this.parse(Snyk2HDF); + const {flags} = await this.parse(Snyk2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); + const data = fs.readFileSync(flags.input, 'utf8') checkInput( {data: data, filename: flags.input}, 'snyk', - 'Snyk results JSON' - ); + 'Snyk results JSON', + ) - const converter = new Mapper(data); - const result = converter.toHdf(); + const converter = new Mapper(data) + const result = converter.toHdf() if (Array.isArray(result)) { for (const element of result) { fs.writeFileSync( `${flags.output.replaceAll(/\.json/gi, '')}-${_.get(element, 'platform.target_id')}.json`, - JSON.stringify(element, null, 2) - ); + JSON.stringify(element, null, 2), + ) } } else { fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(result, null, 2) - ); + JSON.stringify(result, null, 2), + ) } } } diff --git a/src/commands/convert/sonarqube2hdf.ts b/src/commands/convert/sonarqube2hdf.ts index 0dc50d94b..5790cb148 100644 --- a/src/commands/convert/sonarqube2hdf.ts +++ b/src/commands/convert/sonarqube2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {SonarQubeResults as Mapper} from '@mitre/hdf-converters'; -import {checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {SonarQubeResults as Mapper} from '@mitre/hdf-converters' +import {checkSuffix} from '../../utils/global' export default class Sonarqube2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class Sonarqube2HDF extends Command { 'Pull SonarQube vulnerabilities for the specified project name and optional branch or pull/merge request ID name from an API and convert into a Heimdall Data Format JSON file'; static readonly examples = [ - 'saf convert sonarqube2hdf -n sonar_project_key -u http://sonar:9000 --auth abcdefg -p 123 -o scan_results.json' + 'saf convert sonarqube2hdf -n sonar_project_key -u http://sonar:9000 --auth abcdefg -p 123 -o scan_results.json', ]; static readonly flags = { @@ -19,49 +19,49 @@ export 
default class Sonarqube2HDF extends Command { auth: Flags.string({ char: 'a', required: true, - description: 'SonarQube API Key' + description: 'SonarQube API Key', }), projectKey: Flags.string({ char: 'n', required: true, - description: 'SonarQube Project Key' + description: 'SonarQube Project Key', }), url: Flags.string({ char: 'u', required: true, - description: "SonarQube Base URL (excluding '/api')" + description: "SonarQube Base URL (excluding '/api')", }), branch: Flags.string({ char: 'b', required: false, exclusive: ['pullRequestID'], - description: 'Requires Sonarqube Developer Edition or above' + description: 'Requires Sonarqube Developer Edition or above', }), pullRequestID: Flags.string({ char: 'p', required: false, exclusive: ['branch'], - description: 'Requires Sonarqube Developer Edition or above' + description: 'Requires Sonarqube Developer Edition or above', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' - }) + description: 'Output HDF JSON File', + }), }; async run() { - const {flags} = await this.parse(Sonarqube2HDF); + const {flags} = await this.parse(Sonarqube2HDF) const converter = new Mapper( flags.url, flags.projectKey, flags.auth, flags.branch, - flags.pullRequestID - ); + flags.pullRequestID, + ) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(await converter.toHdf(), null, 2) - ); + JSON.stringify(await converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/splunk2hdf.ts b/src/commands/convert/splunk2hdf.ts index fa38e349d..f5f54c5bd 100644 --- a/src/commands/convert/splunk2hdf.ts +++ b/src/commands/convert/splunk2hdf.ts @@ -1,10 +1,10 @@ -import {Command, Flags} from '@oclif/core'; -import {SplunkMapper} from '@mitre/hdf-converters/lib/src/splunk-mapper'; -import {table} from 'table'; -import {createWinstonLogger} from '../../utils/logging'; -import _ from 'lodash'; -import fs from 'fs'; -import path from 'path'; +import {Command, Flags} from '@oclif/core' +import {SplunkMapper} from '@mitre/hdf-converters/lib/src/splunk-mapper' +import {table} from 'table' +import {createWinstonLogger} from '../../utils/logging' +import _ from 'lodash' +import fs from 'fs' +import path from 'path' export default class Splunk2HDF extends Command { static readonly usage = @@ -18,89 +18,89 @@ export default class Splunk2HDF extends Command { host: Flags.string({ char: 'H', required: true, - description: 'Splunk Hostname or IP' + description: 'Splunk Hostname or IP', }), port: Flags.integer({ char: 'P', required: false, description: 'Splunk management port (also known as the Universal Forwarder port)', - default: 8089 + default: 8089, }), scheme: Flags.string({ char: 's', required: false, description: 'HTTP Scheme used for communication with splunk', default: 'https', - options: ['http', 'https'] + options: ['http', 'https'], }), username: Flags.string({ char: 'u', required: false, description: 'Your Splunk username', - exclusive: ['token'] + exclusive: ['token'], }), password: Flags.string({ char: 'p', required: false, description: 'Your Splunk password', - exclusive: ['token'] + exclusive: ['token'], }), token: Flags.string({ char: 't', required: false, description: 'Your Splunk API Token', - exclusive: ['username', 'password'] + exclusive: ['username', 'password'], }), index: Flags.string({ char: 'I', required: true, - description: 'Splunk index to query HDF data from' + description: 'Splunk index to query HDF data from', }), logLevel: Flags.string({ char: 'L', required: false, default: 'info', - options: 
['info', 'warn', 'debug', 'verbose'] + options: ['info', 'warn', 'debug', 'verbose'], }), input: Flags.string({ char: 'i', multiple: true, required: false, - description: 'GUID(s) or Filename(s) of files from Splunk to convert' + description: 'GUID(s) or Filename(s) of files from Splunk to convert', }), output: Flags.string({ char: 'o', required: false, - description: 'Output HDF JSON Folder' - }) + description: 'Output HDF JSON Folder', + }), }; static readonly examples = [ - 'saf convert splunk2hdf -H 127.0.0.1 -u admin -p Valid_password! -I hdf -i some-file-in-your-splunk-instance.json -i yBNxQsE1mi4f3mkjtpap5YxNTttpeG -o output-folder' + 'saf convert splunk2hdf -H 127.0.0.1 -u admin -p Valid_password! -I hdf -i some-file-in-your-splunk-instance.json -i yBNxQsE1mi4f3mkjtpap5YxNTttpeG -o output-folder', ]; async searchExecutions( mapper: SplunkMapper, filename: string, - index?: string + index?: string, ) { return mapper.queryData( - `search index="${index || '*'}" meta.filename="${filename || '*'}" meta.subtype="header" | head 100` - ); + `search index="${index || '*'}" meta.filename="${filename || '*'}" meta.subtype="header" | head 100`, + ) } async run() { - const {flags} = await this.parse(Splunk2HDF); - const logger = createWinstonLogger('splunk2hdf', flags.logLevel); + const {flags} = await this.parse(Splunk2HDF) + const logger = createWinstonLogger('splunk2hdf', flags.logLevel) if (!(flags.username && flags.password) && !flags.token) { logger.error( - 'Please provide either a Username and Password or a Splunk token' - ); + 'Please provide either a Username and Password or a Splunk token', + ) throw new Error( - 'Please provide either a Username and Password or a Splunk token' - ); + 'Please provide either a Username and Password or a Splunk token', + ) } const mapper = new SplunkMapper( @@ -111,70 +111,70 @@ export default class Splunk2HDF extends Command { username: flags.username, password: flags.password, sessionKey: flags.token, - index: flags.index + index: flags.index, }, - logger - ); + logger, + ) if (flags.input && flags.output) { - const outputFolder = flags.output?.replace('.json', '') || 'asff-output'; - fs.mkdirSync(outputFolder); + const outputFolder = flags.output?.replace('.json', '') || 'asff-output' + fs.mkdirSync(outputFolder) flags.input.forEach(async (input: string) => { // If we have a GUID if (/^(\w){30}$/.test(input)) { - const hdf = await mapper.toHdf(input); + const hdf = await mapper.toHdf(input) // Rename example.json -> example-p9dwG2kdSoHsYdyF2dMytUmljgOHD5.json and put into the outputFolder fs.writeFileSync( path.join( outputFolder, _.get(hdf, 'meta.filename', '').replace(/\.json$/, '') + _.get(hdf, 'meta.guid') + - '.json' + '.json', ), - JSON.stringify(hdf, null, 2) - ); + JSON.stringify(hdf, null, 2), + ) } else { // If we have a filename - const executions = await this.searchExecutions(mapper, input); - executions.forEach(async (execution) => { - const hdf = await mapper.toHdf(_.get(execution, 'meta.guid')); + const executions = await this.searchExecutions(mapper, input) + executions.forEach(async execution => { + const hdf = await mapper.toHdf(_.get(execution, 'meta.guid')) fs.writeFileSync( path.join( outputFolder, _.get(hdf, 'meta.filename', '').replace(/\.json$/, '') + _.get(hdf, 'meta.guid') + - '.json' + '.json', ), - JSON.stringify(hdf, null, 2) - ); - }); + JSON.stringify(hdf, null, 2), + ) + }) } - }); + }) } else if (flags.input && !flags.output) { - logger.error('Please provide an output HDF folder'); - throw new Error('Please provide 
an output HDF folder'); + logger.error('Please provide an output HDF folder') + throw new Error('Please provide an output HDF folder') } else { const availableExecutionsTable: string[][] = [ - ['File Name', 'GUID', 'Imported At'] - ]; + ['File Name', 'GUID', 'Imported At'], + ] - const executionsAvailable = await this.searchExecutions(mapper, '*'); + const executionsAvailable = await this.searchExecutions(mapper, '*') - executionsAvailable.forEach((execution) => { + executionsAvailable.forEach(execution => { availableExecutionsTable.push([ _.get(execution, 'meta.filename') || '', _.get(execution, 'meta.guid') || '', - _.get(execution, 'meta.parse_time') || '' - ]); - }); + _.get(execution, 'meta.parse_time') || '', + ]) + }) if (availableExecutionsTable.length === 1) { - logger.warn('No executions found in the provided Splunk instance'); + logger.warn('No executions found in the provided Splunk instance') } else { console.log( - 'No filename or GUID provided (-i), available executions are:' - ); - console.log(table(availableExecutionsTable)); + 'No filename or GUID provided (-i), available executions are:', + ) + console.log(table(availableExecutionsTable)) } } } diff --git a/src/commands/convert/trivy2hdf.ts b/src/commands/convert/trivy2hdf.ts index 997ec83f0..afb60b464 100644 --- a/src/commands/convert/trivy2hdf.ts +++ b/src/commands/convert/trivy2hdf.ts @@ -1,9 +1,9 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {ASFFResults as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; -import _ from 'lodash'; -import path from 'path'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {ASFFResults as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' +import _ from 'lodash' +import path from 'path' export default class Trivy2HDF extends Command { static readonly usage = @@ -13,7 +13,7 @@ export default class Trivy2HDF extends Command { 'Translate a Trivy-derived AWS Security Finding Format results from JSONL into a Heimdall Data Format JSON file'; static readonly examples = [ - 'saf convert trivy2hdf -i trivy-asff.json -o output-folder' + 'saf convert trivy2hdf -i trivy-asff.json -o output-folder', ]; static readonly flags = { @@ -21,19 +21,19 @@ export default class Trivy2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input Trivy ASFF JSON File' + description: 'Input Trivy ASFF JSON File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON Folder' - }) + description: 'Output HDF JSON Folder', + }), }; async run() { - const {flags} = await this.parse(Trivy2HDF); + const {flags} = await this.parse(Trivy2HDF) // comes as an _asff.json file which is basically the array of findings but without the surrounding object; however, could also be properly formed asff since it depends on the template used - const input = fs.readFileSync(flags.input, 'utf8').trim(); + const input = fs.readFileSync(flags.input, 'utf8').trim() // if (Array.isArray(JSON.parse(input))) { // input = `{"Findings": ${fs.readFileSync(flags.input, 'utf8').trim()}}` // } @@ -41,21 +41,21 @@ export default class Trivy2HDF extends Command { checkInput( {data: input, filename: flags.input}, 'asff', - 'Trivy-derived AWS Security Finding Format results' - ); + 'Trivy-derived AWS Security Finding Format results', + ) - const converter = new Mapper(input); - const results = converter.toHdf(); + const converter = new 
Mapper(input) + const results = converter.toHdf() if (!fs.existsSync(flags.output)) { - fs.mkdirSync(flags.output); + fs.mkdirSync(flags.output) } _.forOwn(results, (result, filename) => { fs.writeFileSync( path.join(flags.output, checkSuffix(filename)), - JSON.stringify(result, null, 2) - ); - }); + JSON.stringify(result, null, 2), + ) + }) } } diff --git a/src/commands/convert/trufflehog2hdf.ts b/src/commands/convert/trufflehog2hdf.ts index a279cd3e7..6d031e6cc 100644 --- a/src/commands/convert/trufflehog2hdf.ts +++ b/src/commands/convert/trufflehog2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {TrufflehogResults as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {TrufflehogResults as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class Trufflehog2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class Trufflehog2HDF extends Command { 'Translate a Trufflehog output file into an HDF results set'; static readonly examples = [ - 'saf convert trufflehog2hdf -i trufflehog.json -o output-hdf-name.json' + 'saf convert trufflehog2hdf -i trufflehog.json -o output-hdf-name.json', ]; static readonly flags = { @@ -19,35 +19,35 @@ export default class Trufflehog2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input Trufflehog file' + description: 'Input Trufflehog file', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF file' + description: 'Output HDF file', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(Trufflehog2HDF); + const {flags} = await this.parse(Trufflehog2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); + const data = fs.readFileSync(flags.input, 'utf8') checkInput( {data, filename: flags.input}, 'trufflehog', - 'Trufflehog output file' - ); + 'Trufflehog output file', + ) - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/twistlock2hdf.ts b/src/commands/convert/twistlock2hdf.ts index 5a2233264..36d9c5322 100644 --- a/src/commands/convert/twistlock2hdf.ts +++ b/src/commands/convert/twistlock2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {TwistlockResults as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {TwistlockResults as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class Twistlock2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class Twistlock2HDF extends Command { 'Translate a Twistlock CLI output file into an HDF results set'; static readonly examples = [ - 'saf convert twistlock2hdf -i twistlock.json -o output-hdf-name.json' + 'saf convert twistlock2hdf -i twistlock.json -o output-hdf-name.json', ]; static 
readonly flags = { @@ -19,35 +19,35 @@ export default class Twistlock2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input Twistlock file' + description: 'Input Twistlock file', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(Twistlock2HDF); + const {flags} = await this.parse(Twistlock2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); + const data = fs.readFileSync(flags.input, 'utf8') checkInput( {data, filename: flags.input}, 'twistlock', - 'Twistlock CLI output file' - ); + 'Twistlock CLI output file', + ) - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/veracode2hdf.ts b/src/commands/convert/veracode2hdf.ts index 6abb970e8..d02f86b9f 100644 --- a/src/commands/convert/veracode2hdf.ts +++ b/src/commands/convert/veracode2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {VeracodeMapper as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {VeracodeMapper as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class Veracode2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class Veracode2HDF extends Command { 'Translate a Veracode XML file into a Heimdall Data Format JSON file'; static readonly examples = [ - 'saf convert veracode2hdf -i veracode_results.xml -o output-hdf-name.json' + 'saf convert veracode2hdf -i veracode_results.xml -o output-hdf-name.json', ]; static readonly flags = { @@ -19,26 +19,26 @@ export default class Veracode2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input Veracode XML File' + description: 'Input Veracode XML File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' - }) + description: 'Output HDF JSON File', + }), }; async run() { - const {flags} = await this.parse(Veracode2HDF); + const {flags} = await this.parse(Veracode2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); - checkInput({data, filename: flags.input}, 'veracode', 'Veracode XML'); + const data = fs.readFileSync(flags.input, 'utf8') + checkInput({data, filename: flags.input}, 'veracode', 'Veracode XML') - const converter = new Mapper(data); + const converter = new Mapper(data) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/xccdf_results2hdf.ts b/src/commands/convert/xccdf_results2hdf.ts index a8e416ae8..ffcaa3e76 100644 --- a/src/commands/convert/xccdf_results2hdf.ts +++ b/src/commands/convert/xccdf_results2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {XCCDFResultsMapper as Mapper} from 
'@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {XCCDFResultsMapper as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class XCCDFResults2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class XCCDFResults2HDF extends Command { 'Translate a SCAP client XCCDF-Results XML report to a Heimdall Data Format JSON file'; static readonly examples = [ - 'saf convert xccdf_results2hdf -i results-xccdf.xml -o output-hdf-name.json' + 'saf convert xccdf_results2hdf -i results-xccdf.xml -o output-hdf-name.json', ]; static readonly flags = { @@ -19,35 +19,35 @@ export default class XCCDFResults2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input XCCDF Results XML File' + description: 'Input XCCDF Results XML File', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(XCCDFResults2HDF); + const {flags} = await this.parse(XCCDFResults2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); + const data = fs.readFileSync(flags.input, 'utf8') checkInput( {data, filename: flags.input}, 'xccdf', - 'SCAP client XCCDF-Results XML report' - ); + 'SCAP client XCCDF-Results XML report', + ) - const converter = new Mapper(data, flags['with-raw']); + const converter = new Mapper(data, flags['with-raw']) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } } diff --git a/src/commands/convert/zap2hdf.ts b/src/commands/convert/zap2hdf.ts index be57472f4..5b76fd0cd 100644 --- a/src/commands/convert/zap2hdf.ts +++ b/src/commands/convert/zap2hdf.ts @@ -1,7 +1,7 @@ -import {Command, Flags} from '@oclif/core'; -import fs from 'fs'; -import {ZapMapper as Mapper} from '@mitre/hdf-converters'; -import {checkInput, checkSuffix} from '../../utils/global'; +import {Command, Flags} from '@oclif/core' +import fs from 'fs' +import {ZapMapper as Mapper} from '@mitre/hdf-converters' +import {checkInput, checkSuffix} from '../../utils/global' export default class Zap2HDF extends Command { static readonly usage = @@ -11,7 +11,7 @@ export default class Zap2HDF extends Command { 'Translate a OWASP ZAP results JSON to a Heimdall Data Format JSON file'; static readonly examples = [ - 'saf convert zap2hdf -i zap_results.json -n mitre.org -o scan_results.json' + 'saf convert zap2hdf -i zap_results.json -n mitre.org -o scan_results.json', ]; static readonly flags = { @@ -19,40 +19,40 @@ export default class Zap2HDF extends Command { input: Flags.string({ char: 'i', required: true, - description: 'Input OWASP Zap Results JSON File' + description: 'Input OWASP Zap Results JSON File', }), name: Flags.string({ char: 'n', required: true, - description: 'Target Site Name' + description: 'Target Site Name', }), output: Flags.string({ char: 'o', required: true, - description: 'Output HDF JSON File' + description: 'Output HDF JSON File', }), 'with-raw': Flags.boolean({ char: 'w', required: false, - description: 'Include raw input file in HDF JSON file' - }) + description: 
'Include raw input file in HDF JSON file', + }), }; async run() { - const {flags} = await this.parse(Zap2HDF); + const {flags} = await this.parse(Zap2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8'); - checkInput({data, filename: flags.input}, 'zap', 'OWASP ZAP results JSON'); + const data = fs.readFileSync(flags.input, 'utf8') + checkInput({data, filename: flags.input}, 'zap', 'OWASP ZAP results JSON') const converter = new Mapper( fs.readFileSync(flags.input, 'utf8'), flags.name, - flags['with-raw'] - ); + flags['with-raw'], + ) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf(), null, 2) - ); + JSON.stringify(converter.toHdf(), null, 2), + ) } }