Trufflehog SAF CLI Integration (#2778)
* initial commit
* reverting files
* finished comments from pr
* command change
* Unlint index.ts
* E2E changes
* E2E change
* Styling fix

Signed-off-by: Charles Hu <computerscience@verizon.net>
Co-authored-by: Charles Hu <computerscience@verizon.net>
1 parent 3956856 · commit e9217a3
Showing 16 changed files with 5,741 additions and 1 deletion.
@@ -0,0 +1,30 @@
import {Command, Flags} from '@oclif/core'
import fs from 'fs'
import {TrufflehogResults as Mapper} from '@mitre/hdf-converters'
import {checkInput, checkSuffix} from '../../utils/global'

export default class Trufflehog2HDF extends Command {
  static usage = 'convert trufflehog2hdf -i <trufflehog-json> -o <hdf-scan-results-json> [-h] [-w]'

  static description = 'Translate a Trufflehog output file into an HDF results set'

  static examples = ['saf convert trufflehog2hdf -i trufflehog.json -o output-hdf-name.json']

  static flags = {
    help: Flags.help({char: 'h'}),
    input: Flags.string({char: 'i', required: true, description: 'Input Trufflehog file'}),
    output: Flags.string({char: 'o', required: true, description: 'Output HDF file'}),
    'with-raw': Flags.boolean({char: 'w', required: false, description: 'Include raw input file in HDF JSON file'}),
  }

  async run() {
    const {flags} = await this.parse(Trufflehog2HDF)

    // Check for correct input type
    const data = fs.readFileSync(flags.input, 'utf8')
    checkInput({data, filename: flags.input}, 'trufflehog', 'Trufflehog output file')

    const converter = new Mapper(data, flags['with-raw'])
    fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
  }
}
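The same conversion the command wraps can also be driven directly from @mitre/hdf-converters. A minimal sketch, using only the TrufflehogResults constructor and toHdf() call seen above; the file paths here are placeholders, not paths from this PR:

import fs from 'fs'
import {TrufflehogResults} from '@mitre/hdf-converters'

// Read a Trufflehog JSON report and write the converted HDF results,
// mirroring what the command's run() method does.
const data = fs.readFileSync('trufflehog.json', 'utf8')  // placeholder input path
const converter = new TrufflehogResults(data, false)     // second argument corresponds to the -w/--with-raw flag
fs.writeFileSync('trufflehog-hdf.json', JSON.stringify(converter.toHdf(), null, 2))  // placeholder output path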
@@ -0,0 +1,79 @@
import {expect, test} from '@oclif/test'
import tmp from 'tmp'
import path from 'path'
import fs from 'fs'
import {omitHDFChangingFields} from '../utils'

describe('Test Trufflehog', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog.json'), '-o', `${tmpobj.name}/trufflehog.json`])
    .it('hdf-converter output test - standard', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-hdf.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog_docker_example.json'), '-o', `${tmpobj.name}/trufflehog.json`])
    .it('hdf-converter output test - docker', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-docker-hdf.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog_report_example.json'), '-o', `${tmpobj.name}/trufflehog.json`])
    .it('hdf-converter output test - report', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-report-example-hdf.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog_saf_example.json'), '-o', `${tmpobj.name}/trufflehog.json`])
    .it('hdf-converter output test - saf', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-saf-hdf.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
})

describe('Test Trufflehog using withraw flag', () => {
  const tmpobj = tmp.dirSync({unsafeCleanup: true})

  test
    .stdout()
    .command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog.json'), '-o', `${tmpobj.name}/trufflehog.json`, '-w'])
    .it('hdf-converter withraw output test - standard', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-hdf-withraw.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog_docker_example.json'), '-o', `${tmpobj.name}/trufflehog.json`, '-w'])
    .it('hdf-converter withraw output test - docker', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-docker-hdf-withraw.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog_report_example.json'), '-o', `${tmpobj.name}/trufflehog.json`, '-w'])
    .it('hdf-converter withraw output test - report', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-report-example-hdf-withraw.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
  test
    .stdout()
    .command(['convert trufflehog2hdf', '-i', path.resolve('./test/sample_data/trufflehog/sample_input_report/trufflehog_saf_example.json'), '-o', `${tmpobj.name}/trufflehog.json`, '-w'])
    .it('hdf-converter withraw output test - saf', () => {
      const converted = JSON.parse(fs.readFileSync(`${tmpobj.name}/trufflehog.json`, 'utf8'))
      const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/trufflehog/trufflehog-saf-hdf-withraw.json'), 'utf8'))
      expect(omitHDFChangingFields(converted)).to.eql(omitHDFChangingFields(sample))
    })
})
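Each test compares freshly converted output against a checked-in HDF sample after stripping fields that legitimately differ between runs. The real helper is omitHDFChangingFields from ../utils; a minimal sketch of the idea, with the stripped field names purely assumed for illustration and not taken from this PR:

// Hypothetical stand-in for omitHDFChangingFields; the actual ../utils helper
// may remove a different set of fields.
export function omitChangingFields(hdf: Record<string, unknown>): Record<string, unknown> {
  const copy = JSON.parse(JSON.stringify(hdf)) as Record<string, unknown>  // deep clone so the input stays untouched
  delete copy.version     // assumed: converter/CLI version stamped into the output
  delete copy.platform    // assumed: platform metadata varies by environment
  delete copy.statistics  // assumed: run duration differs on every execution
  return copy
}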
test/sample_data/trufflehog/sample_input_report/trufflehog.json (92 additions, 0 deletions)
@@ -0,0 +1,92 @@
[
  {
    "SourceMetadata": {
      "Data": {
        "Git": {
          "commit": "0416560b1330d8ac42045813251d85c688717eaf",
          "file": "new_key",
          "email": "counter \u003chello@trufflesec.com\u003e",
          "repository": "https://github.com/trufflesecurity/test_keys",
          "timestamp": "2023-10-19 02:56:37 +0000",
          "line": 2
        }
      }
    },
    "SourceID": 1,
    "SourceType": 16,
    "SourceName": "trufflehog - git",
    "DetectorType": 2,
    "DetectorName": "AWS",
    "DecoderName": "PLAIN",
    "Verified": true,
    "Raw": "AKIAQYLPMN5HHHFPZAM2",
    "RawV2": "AKIAQYLPMN5HHHFPZAM21tUm636uS1yOEcfP5pvfqJ/ml36mF7AkyHsEU0IU",
    "Redacted": "AKIAQYLPMN5HHHFPZAM2",
    "ExtraData": {
      "account": "052310077262",
      "arn": "arn:aws:iam::052310077262:user/canarytokens.com@@c20nnjzlioibnaxvt392i9ope",
      "is_canary": "true",
      "message": "This is an AWS canary token generated at canarytokens.org, and was not set off; learn more here: https://trufflesecurity.com/canaries",
      "resource_type": "Access key"
    },
    "StructuredData": null
  },
  {
    "SourceMetadata": {
      "Data": {
        "Git": {
          "commit": "fbc14303ffbf8fb1c2c1914e8dda7d0121633aca",
          "file": "keys",
          "email": "counter \u003ccounter@counters-MacBook-Air.local\u003e",
          "repository": "https://github.com/trufflesecurity/test_keys",
          "timestamp": "2022-06-16 17:17:40 +0000",
          "line": 4
        }
      }
    },
    "SourceID": 1,
    "SourceType": 16,
    "SourceName": "trufflehog - git",
    "DetectorType": 2,
    "DetectorName": "AWS",
    "DecoderName": "PLAIN",
    "Verified": true,
    "Raw": "AKIAYVP4CIPPERUVIFXG",
    "RawV2": "AKIAYVP4CIPPERUVIFXGZt2U1h267eViPnuSA+JO5ABhiu4T7XUMSZ+Y2Oth",
    "Redacted": "AKIAYVP4CIPPERUVIFXG",
    "ExtraData": {
      "account": "595918472158",
      "arn": "arn:aws:iam::595918472158:user/canarytokens.com@@mirux23ppyky6hx3l6vclmhnj",
      "is_canary": "true",
      "message": "This is an AWS canary token generated at canarytokens.org, and was not set off; learn more here: https://trufflesecurity.com/canaries",
      "resource_type": "Access key"
    },
    "StructuredData": null
  },
  {
    "SourceMetadata": {
      "Data": {
        "Git": {
          "commit": "77b2a3e56973785a52ba4ae4b8dac61d4bac016f",
          "file": "keys",
          "email": "counter \u003ccounter@counters-MacBook-Air.local\u003e",
          "repository": "https://github.com/trufflesecurity/test_keys",
          "timestamp": "2022-06-16 17:27:56 +0000",
          "line": 3
        }
      }
    },
    "SourceID": 1,
    "SourceType": 16,
    "SourceName": "trufflehog - git",
    "DetectorType": 17,
    "DetectorName": "URI",
    "DecoderName": "PLAIN",
    "Verified": true,
    "Raw": "https://admin:admin@the-internet.herokuapp.com",
    "RawV2": "https://admin:admin@the-internet.herokuapp.com/basic_auth",
    "Redacted": "https://admin:********@the-internet.herokuapp.com",
    "ExtraData": null,
    "StructuredData": null
  }
]