Trufflehog SAF CLI Integration (#2778)
* Initial commit

* Revert files

* Address PR review comments

* Command change

* Unlint index.ts

Signed-off-by: Charles Hu <computerscience@verizon.net>

* E2E changes

* E2E change

* Styling fix

Signed-off-by: Charles Hu <computerscience@verizon.net>

---------

Signed-off-by: Charles Hu <computerscience@verizon.net>
Co-authored-by: Charles Hu <computerscience@verizon.net>
andytang99 and charleshu-8 authored Aug 1, 2024
1 parent 3956856 commit e9217a3
Showing 16 changed files with 5,741 additions and 1 deletion.
18 changes: 18 additions & 0 deletions README.md
@@ -62,6 +62,7 @@ The SAF CLI is the successor to [Heimdall Tools](https://github.com/mitre/heimda
* [SonarQube to HDF](#sonarqube-to-hdf)
* [Splunk to HDF](#splunk-to-hdf)
* [Trivy to HDF](#trivy-to-hdf)
* [Trufflehog to HDF](#trufflehog-to-hdf)
* [Twistlock to HDF](#twistlock-to-hdf)
* [Veracode to HDF](#veracode-to-hdf)
* [XCCDF Results to HDF](#xccdf-results-to-hdf)
@@ -881,6 +882,23 @@ convert trivy2hdf Translate a Trivy-derived AWS Security Finding
$ saf convert trivy2hdf -i trivy-asff.json -o output-folder
```

[top](#convert-other-formats-to-hdf)
#### Trufflehog to HDF
```
convert trufflehog2hdf       Translate a Trufflehog output file into an HDF results set

USAGE
  $ saf convert trufflehog2hdf -i <trufflehog-json> -o <hdf-scan-results-json> [-h] [-w]

FLAGS
  -h, --help                            Show CLI help.
  -i, --input=<trufflehog-json>         (required) Input Trufflehog file
  -o, --output=<hdf-scan-results-json>  (required) Output HDF JSON File
  -w, --with-raw                        Include raw input file in HDF JSON file

EXAMPLES
  $ saf convert trufflehog2hdf -i trufflehog.json -o output-hdf-name.json
```

[top](#convert-other-formats-to-hdf)
#### Twistlock to HDF
```
9 changes: 8 additions & 1 deletion src/commands/convert/index.ts
@@ -1,4 +1,4 @@
-import {ASFFResults, ChecklistResults, BurpSuiteMapper, ConveyorResults, DBProtectMapper, fingerprint, FortifyMapper, JfrogXrayMapper, NessusResults, NetsparkerMapper, NiktoMapper, PrismaMapper, SarifMapper, ScoutsuiteMapper, SnykResults, TwistlockResults, XCCDFResultsMapper, ZapMapper} from '@mitre/hdf-converters'
+import {ASFFResults, ChecklistResults, BurpSuiteMapper, ConveyorResults, DBProtectMapper, fingerprint, FortifyMapper, JfrogXrayMapper, NessusResults, NetsparkerMapper, NiktoMapper, PrismaMapper, SarifMapper, ScoutsuiteMapper, SnykResults, TrufflehogResults, TwistlockResults, XCCDFResultsMapper, ZapMapper} from '@mitre/hdf-converters'
import fs from 'fs'
import _ from 'lodash'
import {checkSuffix, convertFullPathToFilename} from '../../utils/global'
@@ -52,6 +52,7 @@ export default class Convert extends Command {
case 'sarif':
case 'scoutsuite':
case 'snyk':
case 'trufflehog':
case 'twistlock':
case 'xccdf': {
return {}
@@ -205,6 +206,12 @@ export default class Convert extends Command {
break
}

case 'trufflehog': {
converter = new TrufflehogResults(fs.readFileSync(flags.input, 'utf8'))
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
break
}

case 'twistlock': {
converter = new TwistlockResults(fs.readFileSync(flags.input, 'utf8'))
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
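For reference, the conversion the new `trufflehog` case performs can also be done programmatically with `@mitre/hdf-converters`. A minimal sketch, assuming the package is installed (file names are illustrative):

```
import fs from 'fs'
import {TrufflehogResults} from '@mitre/hdf-converters'

// Read raw Trufflehog JSON and convert it to an HDF results set,
// mirroring the 'trufflehog' case added to the switch above.
const raw = fs.readFileSync('trufflehog.json', 'utf8')
const converter = new TrufflehogResults(raw)
fs.writeFileSync('trufflehog-hdf.json', JSON.stringify(converter.toHdf(), null, 2))
```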
30 changes: 30 additions & 0 deletions src/commands/convert/trufflehog2hdf.ts
@@ -0,0 +1,30 @@
import {Command, Flags} from '@oclif/core'
import fs from 'fs'
import {TrufflehogResults as Mapper} from '@mitre/hdf-converters'
import {checkInput, checkSuffix} from '../../utils/global'

export default class Trufflehog2HDF extends Command {
static usage = 'convert trufflehog2hdf -i <trufflehog-json> -o <hdf-scan-results-json> [-h] [-w]'

static description = 'Translate a Trufflehog output file into an HDF results set'

static examples = ['saf convert trufflehog2hdf -i trufflehog.json -o output-hdf-name.json']

static flags = {
help: Flags.help({char: 'h'}),
input: Flags.string({char: 'i', required: true, description: 'Input Trufflehog file'}),
output: Flags.string({char: 'o', required: true, description: 'Output HDF file'}),
'with-raw': Flags.boolean({char: 'w', required: false, description: 'Include raw input file in HDF JSON file'}),
}

async run() {
const {flags} = await this.parse(Trufflehog2HDF)

// Check for correct input type
const data = fs.readFileSync(flags.input, 'utf8')
checkInput({data, filename: flags.input}, 'trufflehog', 'Trufflehog output file')

const converter = new Mapper(data, flags['with-raw'])
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
}
}
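The mapper's second constructor argument controls whether the raw Trufflehog input is embedded in the HDF output, which is how `-w, --with-raw` takes effect above. A minimal sketch of the same call outside the CLI (file names illustrative):

```
import fs from 'fs'
import {TrufflehogResults} from '@mitre/hdf-converters'

// Passing true as the second argument embeds the raw input in the
// HDF JSON, equivalent to running the command with -w/--with-raw.
const data = fs.readFileSync('trufflehog.json', 'utf8')
const hdf = new TrufflehogResults(data, true).toHdf()
fs.writeFileSync('trufflehog-hdf-withraw.json', JSON.stringify(hdf, null, 2))
```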
79 changes: 79 additions & 0 deletions test/commands/convert/trufflehog2hdf.test.ts
@@ -0,0 +1,79 @@
import {expect, test} from '@oclif/test'
import tmp from 'tmp'
import path from 'path'
import fs from 'fs'
import {omitHDFChangingFields} from '../utils'

describe('Test Trufflehog', () => {
const tmpobj = tmp.dirSync({unsafeCleanup: true})

test
.stdout()
.command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog.json'), '-o', `${tmpobj.name}/trufflehog.json`])
.it('hdf-converter output test - standard', () => {
const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-hdf.json'), 'utf8'))
expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
})
test
.stdout()
.command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog_docker_example.json'), '-o', `${tmpobj.name}/trufflehog.json`])
.it('hdf-converter output test - docker', () => {
const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-docker-hdf.json'), 'utf8'))
expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
})
test
.stdout()
.command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog_report_example.json'), '-o', `${tmpobj.name}/trufflehog.json`])
.it('hdf-converter output test - report', () => {
const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-report-example-hdf.json'), 'utf8'))
expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
})
test
.stdout()
.command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog_saf_example.json'), '-o', `${tmpobj.name}/trufflehog.json`])
.it('hdf-converter output test - saf', () => {
const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-saf-hdf.json'), 'utf8'))
expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
})
})

describe('Test Trufflehog using withraw flag', () => {
const tmpobj = tmp.dirSync({unsafeCleanup: true})

test
.stdout()
.command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog.json'), '-o', `${tmpobj.name}/trufflehog.json`, '-w'])
.it('hdf-converter withraw output test - standard', () => {
const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-hdf-withraw.json'), 'utf8'))
expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
})
test
.stdout()
.command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog_docker_example.json'), '-o', `${tmpobj.name}/trufflehog.json`, '-w'])
.it('hdf-converter withraw output test - docker', () => {
const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-docker-hdf-withraw.json'), 'utf8'))
expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
})
test
.stdout()
.command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog_report_example.json'), '-o', `${tmpobj.name}/trufflehog.json`, '-w'])
.it('hdf-converter withraw output test - report', () => {
const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-report-example-hdf-withraw.json'), 'utf8'))
expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
})
test
.stdout()
.command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog_saf_example.json'), '-o', `${tmpobj.name}/trufflehog.json`, '-w'])
.it('hdf-converter withraw output test - saf', () => {
const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-saf-hdf-withraw.json'), 'utf8'))
expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
})
})
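Each assertion compares converter output against a stored sample after passing both through `omitHDFChangingFields` from the shared test utils. A hypothetical sketch of the idea (the real helper lives in the test utils and its field list may differ):

```
import _ from 'lodash'

// Hypothetical illustration: strip fields that legitimately vary between
// runs (timestamps, tool version, etc.) so only stable converter output
// is compared. The actual omitHDFChangingFields may omit different paths.
function omitVolatileFields(hdf: Record<string, unknown>): Record<string, unknown> {
  return _.omit(hdf, ['version', 'statistics.duration', 'passthrough.metadata'])
}
```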
92 changes: 92 additions & 0 deletions test/sample_data/trufflehog/sample_input_report/trufflehog.json
@@ -0,0 +1,92 @@
[
{
"SourceMetadata": {
"Data": {
"Git": {
"commit": "0416560b1330d8ac42045813251d85c688717eaf",
"file": "new_key",
"email": "counter \u003chello@trufflesec.com\u003e",
"repository": "https://github.com/trufflesecurity/test_keys",
"timestamp": "2023-10-19 02:56:37 +0000",
"line": 2
}
}
},
"SourceID": 1,
"SourceType": 16,
"SourceName": "trufflehog - git",
"DetectorType": 2,
"DetectorName": "AWS",
"DecoderName": "PLAIN",
"Verified": true,
"Raw": "AKIAQYLPMN5HHHFPZAM2",
"RawV2": "AKIAQYLPMN5HHHFPZAM21tUm636uS1yOEcfP5pvfqJ/ml36mF7AkyHsEU0IU",
"Redacted": "AKIAQYLPMN5HHHFPZAM2",
"ExtraData": {
"account": "052310077262",
"arn": "arn:aws:iam::052310077262:user/canarytokens.com@@c20nnjzlioibnaxvt392i9ope",
"is_canary": "true",
"message": "This is an AWS canary token generated at canarytokens.org, and was not set off; learn more here: https://trufflesecurity.com/canaries",
"resource_type": "Access key"
},
"StructuredData": null
},
{
"SourceMetadata": {
"Data": {
"Git": {
"commit": "fbc14303ffbf8fb1c2c1914e8dda7d0121633aca",
"file": "keys",
"email": "counter \u003ccounter@counters-MacBook-Air.local\u003e",
"repository": "https://github.com/trufflesecurity/test_keys",
"timestamp": "2022-06-16 17:17:40 +0000",
"line": 4
}
}
},
"SourceID": 1,
"SourceType": 16,
"SourceName": "trufflehog - git",
"DetectorType": 2,
"DetectorName": "AWS",
"DecoderName": "PLAIN",
"Verified": true,
"Raw": "AKIAYVP4CIPPERUVIFXG",
"RawV2": "AKIAYVP4CIPPERUVIFXGZt2U1h267eViPnuSA+JO5ABhiu4T7XUMSZ+Y2Oth",
"Redacted": "AKIAYVP4CIPPERUVIFXG",
"ExtraData": {
"account": "595918472158",
"arn": "arn:aws:iam::595918472158:user/canarytokens.com@@mirux23ppyky6hx3l6vclmhnj",
"is_canary": "true",
"message": "This is an AWS canary token generated at canarytokens.org, and was not set off; learn more here: https://trufflesecurity.com/canaries",
"resource_type": "Access key"
},
"StructuredData": null
},
{
"SourceMetadata": {
"Data": {
"Git": {
"commit": "77b2a3e56973785a52ba4ae4b8dac61d4bac016f",
"file": "keys",
"email": "counter \u003ccounter@counters-MacBook-Air.local\u003e",
"repository": "https://github.com/trufflesecurity/test_keys",
"timestamp": "2022-06-16 17:27:56 +0000",
"line": 3
}
}
},
"SourceID": 1,
"SourceType": 16,
"SourceName": "trufflehog - git",
"DetectorType": 17,
"DetectorName": "URI",
"DecoderName": "PLAIN",
"Verified": true,
"Raw": "https://admin:admin@the-internet.herokuapp.com",
"RawV2": "https://admin:admin@the-internet.herokuapp.com/basic_auth",
"Redacted": "https://admin:********@the-internet.herokuapp.com",
"ExtraData": null,
"StructuredData": null
}
]
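The sample above shows the fields the mapper consumes: each finding carries its detector, verification status, and redacted secret. As a quick, hypothetical illustration of working with that shape (not part of this PR), one could pre-filter to verified findings before converting:

```
import fs from 'fs'

// Hypothetical pre-processing sketch using the fields shown above:
// keep only findings Trufflehog verified against the live service.
interface Finding {
  DetectorName: string
  Verified: boolean
  Redacted: string
}

const findings: Finding[] = JSON.parse(fs.readFileSync('trufflehog.json', 'utf8'))
for (const f of findings.filter(x => x.Verified)) {
  console.log(`${f.DetectorName}: ${f.Redacted}`)
}
```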
