Dependency track mapper (#3004)
* Add converter for Dependency-Track FPF files

* Linting fixes

* Update test files for latest mapper update

* Update test files

* SonarQube says that these ought to be readonly

Signed-off-by: Amndeep Singh Mann <amann@mitre.org>

* Add checkInput step, format the output, transition to extending BaseCommand, fix help text

Signed-off-by: Amndeep Singh Mann <amann@mitre.org>

* Update README

Signed-off-by: Amndeep Singh Mann <amann@mitre.org>

* Remove unused import

Signed-off-by: Amndeep Singh Mann <amann@mitre.org>

* Fix indentation in README

Signed-off-by: Amndeep Singh Mann <amann@mitre.org>

---------

Signed-off-by: Amndeep Singh Mann <amann@mitre.org>
Co-authored-by: Jace Barayuga <jbarayuga@referentia.com>
Amndeep7 and jbarayuga authored Oct 25, 2024
1 parent 42ab38e commit 300360b
Showing 15 changed files with 14,629 additions and 1 deletion.
24 changes: 24 additions & 0 deletions README.md
@@ -49,6 +49,7 @@ The SAF CLI is the successor to [Heimdall Tools](https://github.com/mitre/heimda
* [CKL to POA&amp;M](#ckl-to-poam)
* [CycloneDX SBOM to HDF](#cyclonedx-sbom-to-hdf)
* [DBProtect to HDF](#dbprotect-to-hdf)
* [Dependency-Track to HDF](#dependency-track-to-hdf)
* [Fortify to HDF](#fortify-to-hdf)
* [gosec to HDF](#gosec-to-hdf)
* [Ion Channel 2 HDF](#ion-channel-2-hdf)
@@ -714,6 +715,29 @@ convert dbprotect2hdf Translate a DBProtect report in "Check Results
$ saf convert dbprotect2hdf -i check_results_details_report.xml -o output-hdf-name.json
```

[top](#convert-other-formats-to-hdf)
#### Dependency-Track to HDF
```
convert dependency_track2hdf    Translate a Dependency-Track results JSON
                                file into a Heimdall Data Format JSON file

USAGE
  $ saf convert dependency_track2hdf -i <dt-fpf-json> -o <hdf-scan-results-json> [-h] [-w]

FLAGS
  -h, --help            Show CLI help.
  -i, --input=<value>   (required) Input Dependency-Track FPF file
  -o, --output=<value>  (required) Output HDF file
  -w, --with-raw

GLOBAL FLAGS
  -L, --logLevel=<option>  [default: info] Specify level for logging (if implemented by the CLI command)
                           <options: info|warn|debug|verbose>
      --interactive        Collect input tags interactively (not available on all CLI commands)

EXAMPLES
  saf convert dependency_track2hdf -i dt-fpf.json -o output-hdf-name.json
```
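The `-w, --with-raw` flag carries no description in the help output (matching the flag definition in the command source below), but judging by its name and the `hdf-default-withraw.json` test fixture later in this diff, it embeds the raw input data in the converted output:

```
saf convert dependency_track2hdf -i dt-fpf.json -o output-hdf-name.json -w
```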

[top](#convert-other-formats-to-hdf)
#### Fortify to HDF
```
32 changes: 32 additions & 0 deletions src/commands/convert/dependency_track2hdf.ts
@@ -0,0 +1,32 @@
import {Flags} from '@oclif/core'
import fs from 'fs'
import {DependencyTrackMapper as Mapper} from '@mitre/hdf-converters'
import {checkInput, checkSuffix} from '../../utils/global'
import {BaseCommand} from '../../utils/oclif/baseCommand'

export default class DependencyTrack2HDF extends BaseCommand<typeof DependencyTrack2HDF> {
  static readonly usage = '<%= command.id %> -i <dt-fpf-json> -o <hdf-scan-results-json> [-h] [-w]'

  static readonly description = 'Translate a Dependency-Track results JSON file into a Heimdall Data Format JSON file'

  static readonly examples = ['<%= config.bin %> <%= command.id %> -i dt-fpf.json -o output-hdf-name.json']

  static readonly flags = {
    input: Flags.string({char: 'i', required: true, description: 'Input Dependency-Track FPF file'}),
    output: Flags.string({char: 'o', required: true, description: 'Output HDF file'}),
    'with-raw': Flags.boolean({char: 'w', required: false}),
  }

  async run() {
    const {flags} = await this.parse(DependencyTrack2HDF)
    const data = fs.readFileSync(flags.input, 'utf8')

    // Verify the file actually looks like Dependency-Track output before converting
    checkInput(
      {data, filename: flags.input},
      'dependencyTrack',
      'Dependency-Track results JSON',
    )

    const converter = new Mapper(data, flags['with-raw'])
    fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
  }
}
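Since the command above is a thin wrapper around the mapper, the same conversion can presumably be driven programmatically from `@mitre/hdf-converters` using only the API surface visible in this diff (file paths here are placeholders):

```
import fs from 'fs'
import {DependencyTrackMapper} from '@mitre/hdf-converters'

// Read a Dependency-Track FPF export from disk (placeholder path)
const data = fs.readFileSync('dt-fpf.json', 'utf8')

// The second argument mirrors the CLI's -w/--with-raw flag
const converter = new DependencyTrackMapper(data, false)

// toHdf() produces the Heimdall Data Format object; pretty-print it like the CLI does
fs.writeFileSync('output-hdf.json', JSON.stringify(converter.toHdf(), null, 2))
```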
12 changes: 11 additions & 1 deletion src/commands/convert/index.ts
@@ -6,6 +6,7 @@ import {
  ConveyorResults,
  CycloneDXSBOMResults,
  DBProtectMapper,
  DependencyTrackMapper,
  fingerprint,
  FortifyMapper,
  JfrogXrayMapper,
@@ -84,7 +85,7 @@ export default class Convert extends BaseCommand<typeof Convert> {
      }

      // catch all other cases:
      // 'anchoregrype', 'burp', 'conveyor' 'checklist', 'dbProtect', 'fortify',
      // 'anchoregrype', 'burp', 'conveyor' 'checklist', 'dbProtect', 'dependencyTrack', 'fortify',
      // 'jfrog', 'msft_secure_score', 'nessus', 'netsparker', 'neuvector' 'nikto',
      // 'prisma', 'sarif', 'cyclonedx_sbom', 'scoutsuite', 'snyk', 'trufflehog',
      // 'twistlock', 'xccdf'
@@ -176,6 +177,15 @@ export default class Convert extends BaseCommand<typeof Convert> {
        break
      }

      case 'dependencyTrack': {
        converter = new DependencyTrackMapper(fs.readFileSync(flags.input, 'utf8'))
        fs.writeFileSync(
          checkSuffix(flags.output),
          JSON.stringify(converter.toHdf(), null, 2),
        )
        break
      }

      case 'cyclonedx_sbom': {
        converter = new CycloneDXSBOMResults(
          fs.readFileSync(flags.input, 'utf8'),
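This case sits in the generic `saf convert` dispatcher. Given the `fingerprint` import at the top of this file, the dispatcher appears to detect the input type automatically, so a Dependency-Track file can presumably also be converted without naming the mapper explicitly:

```
saf convert -i dt-fpf.json -o output-hdf-name.json
```

Note that this catch-all path constructs `DependencyTrackMapper` with only the input data, so the `--with-raw` behavior is available only through the dedicated `dependency_track2hdf` command shown earlier.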
186 changes: 186 additions & 0 deletions test/commands/convert/dependency_track2hdf.test.ts
@@ -0,0 +1,186 @@
import {expect, test} from '@oclif/test'
import tmp from 'tmp'
import path from 'path'
import fs from 'fs'
import {omitHDFChangingFields} from '../utils'

describe('Test Dependency-Track', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command([
      'convert dependency_track2hdf',
      '-i',
      path.resolve(
        './test/sample_data/dependency_track/sample_input_report/fpf-default.json',
      ),
      '-o',
      `${tmpobj.name}/dependencytracktest.json`,
    ])
    .it('hdf-converter output test', () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/dependencytracktest.json`, 'utf8'),
      )
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve('./test/sample_data/dependency_track/hdf-default.json'),
          'utf8',
        ),
      )
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample),
      )
    })
})

describe('Test Dependency-Track withraw flag', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command([
      'convert dependency_track2hdf',
      '-i',
      path.resolve(
        './test/sample_data/dependency_track/sample_input_report/fpf-default.json',
      ),
      '-o',
      `${tmpobj.name}/dependencytracktest.json`,
      '-w',
    ])
    .it('hdf-converter withraw output test', () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/dependencytracktest.json`, 'utf8'),
      )
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve('./test/sample_data/dependency_track/hdf-default-withraw.json'),
          'utf8',
        ),
      )
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample),
      )
    })
})

describe('Test Dependency-Track optional attributes (e.g. vulnerability.cwes, analysis.state, etc.)', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command([
      'convert dependency_track2hdf',
      '-i',
      path.resolve(
        './test/sample_data/dependency_track/sample_input_report/fpf-optional-attributes.json',
      ),
      '-o',
      `${tmpobj.name}/dependencytracktest.json`,
    ])
    .it('hdf-converter output test', () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/dependencytracktest.json`, 'utf8'),
      )
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve('./test/sample_data/dependency_track/hdf-optional-attributes.json'),
          'utf8',
        ),
      )
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample),
      )
    })
})

describe('Test Dependency-Track no vulnerabilities', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command([
      'convert dependency_track2hdf',
      '-i',
      path.resolve(
        './test/sample_data/dependency_track/sample_input_report/fpf-no-vulnerabilities.json',
      ),
      '-o',
      `${tmpobj.name}/dependencytracktest.json`,
    ])
    .it('hdf-converter output test', () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/dependencytracktest.json`, 'utf8'),
      )
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve('./test/sample_data/dependency_track/hdf-no-vulnerabilities.json'),
          'utf8',
        ),
      )
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample),
      )
    })
})

describe('Test Dependency-Track with attributions', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command([
      'convert dependency_track2hdf',
      '-i',
      path.resolve(
        './test/sample_data/dependency_track/sample_input_report/fpf-with-attributions.json',
      ),
      '-o',
      `${tmpobj.name}/dependencytracktest.json`,
    ])
    .it('hdf-converter output test', () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/dependencytracktest.json`, 'utf8'),
      )
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve('./test/sample_data/dependency_track/hdf-with-attributions.json'),
          'utf8',
        ),
      )
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample),
      )
    })
})

describe('Test Dependency-Track info vulnerability', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command([
      'convert dependency_track2hdf',
      '-i',
      path.resolve(
        './test/sample_data/dependency_track/sample_input_report/fpf-info-vulnerability.json',
      ),
      '-o',
      `${tmpobj.name}/dependencytracktest.json`,
    ])
    .it('hdf-converter output test', () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/dependencytracktest.json`, 'utf8'),
      )
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve('./test/sample_data/dependency_track/hdf-info-vulnerability.json'),
          'utf8',
        ),
      )
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample),
      )
    })
})
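`omitHDFChangingFields` is imported from the shared test utils and its implementation isn't part of this diff, but given how every test above applies it to both sides of the comparison, it presumably strips run-specific fields (generation timestamps, tool versions, and the like) so converted output can be compared against checked-in fixtures. A hypothetical sketch of that idea, not the actual helper:

```
import _ from 'lodash'

// Hypothetical illustration only -- the real helper lives in test/commands/utils
// and the exact field list here is an assumption.
export function omitHDFChangingFields(hdf: Record<string, unknown>) {
  // Drop fields that legitimately differ from run to run before deep-equality checks
  return _.omit(hdf, ['version', 'statistics.duration', 'profiles[0].sha256'])
}
```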