Add support for reading and writing to AWS S3 buckets #746

Open · wants to merge 18 commits into main
21 changes: 20 additions & 1 deletion README.md
@@ -230,6 +230,23 @@ To update the SAF CLI on Windows, uninstall any existing version from your system
## Usage
---

### File Input/Output
The SAF CLI can take a local file, an HTTP(S) URL, or a file in an S3 bucket as input.

For example, to view an HDF file from an S3 bucket:

``saf view heimdall -f s3://HDF/rhel7-scan_02032022A.json``

Or to take a URL as input:

``saf convert hdf2csv -i https://raw.githubusercontent.com/mitre/saf/main/test/sample_data/HDF/input/red_hat_good.json -o red_hat_good.csv``

The SAF CLI supports writing its output to the local filesystem or to an S3 bucket.

For example, to convert an HDF file into ASFF, reading the input from and writing the output to S3 buckets:

``saf convert hdf2asff -i s3://HDF/rhel7-scan_02032022A.json -a 123456789 -r us-east-1 -t rhel7_example_host -o s3://ASFF/rhel7.asff``

### Attest

Attest to 'Not Reviewed' controls: sometimes requirements can’t be tested automatically by security tools and therefore require manual review, in which someone interviews people and/or examines a system to confirm (i.e., attest to) whether the control requirements have been satisfied.
@@ -307,7 +324,7 @@ convert hdf2asff Translate a Heimdall Data Format JSON file into
-t, --target=<target> (required) Unique name for target to track findings across time
-u, --upload Upload findings to AWS Security Hub
EXAMPLES
-$ saf convert hdf2asff -i rhel7-scan_02032022A.json -a 123456789 -r us-east-1 -t rhel7_example_host -o rhel7.asff
+$ saf convert hdf2asff -i s3://HDF/rhel7-scan_02032022A.json -a 123456789 -r us-east-1 -t rhel7_example_host -o s3://ASFF/rhel7.asff
$ saf convert hdf2asff -i rds_mysql_i123456789scan_03042022A.json -a 987654321 -r us-west-1 -t Instance_i123456789 -u
$ saf convert hdf2asff -i snyk_acme_project5_hdf_04052022A.json -a 2143658798 -r us-east-1 -t acme_project5 -o snyk_acme_project5 -u
```
@@ -339,6 +356,7 @@ convert hdf2splunk Translate and upload a Heimdall Data Format JSON file
EXAMPLES
$ saf convert hdf2splunk -i rhel7-results.json -H 127.0.0.1 -u admin -p Valid_password! -I hdf
$ saf convert hdf2splunk -i rhel7-results.json -H 127.0.0.1 -t your.splunk.token -I hdf
+$ saf convert hdf2splunk -i s3://HDF/rhel7-results.json -H 127.0.0.1 -t your.splunk.token -I hdf
```

For HDF Splunk Schema documentation visit 👉 [Heimdall converter schemas](https://github.com/mitre/heimdall2/blob/master/libs/hdf-converters/src/converters-from-hdf/splunk/Schemas.md#schemas)
@@ -379,6 +397,7 @@ convert hdf2xccdf Translate an HDF file into an XCCDF XML

EXAMPLES
$ saf convert hdf2xccdf -i hdf_input.json -o xccdf-results.xml
+$ saf convert hdf2xccdf -i s3://hdf/hdf_input.json -o xccdf-results.xml
```
[top](#convert-hdf-to-other-formats)
#### HDF to Checklist
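The source diffs below route all file access through new helpers in `src/utils/io.ts`, whose contents are not part of this excerpt. Based on the call sites and the README additions above, a minimal sketch of the URI-dispatching reader might look like the following (the parsing of `s3://bucket/key`, the plain `http`/`https` handling, and the use of the v2 `aws-sdk` client are all assumptions, not the PR's actual code):

```typescript
import fs from 'fs'
import http from 'http'
import https from 'https'
import AWS from 'aws-sdk'

// Sketch only: read from a local path, an HTTP(S) URL, or an s3://bucket/key URI.
export async function readFileURI(uri: string, encoding: BufferEncoding): Promise<string> {
  if (uri.startsWith('s3://')) {
    // Split s3://<bucket>/<key...> into bucket and key
    const [bucket, ...keyParts] = uri.slice('s3://'.length).split('/')
    const object = await new AWS.S3().getObject({Bucket: bucket, Key: keyParts.join('/')}).promise()
    return object.Body ? (object.Body as Buffer).toString(encoding) : ''
  }

  if (uri.startsWith('http://') || uri.startsWith('https://')) {
    // Buffer the response body into a string (no redirect handling in this sketch)
    const client = uri.startsWith('https://') ? https : http
    return new Promise((resolve, reject) => {
      client.get(uri, res => {
        let body = ''
        res.setEncoding(encoding)
        res.on('data', chunk => { body += chunk })
        res.on('end', () => resolve(body))
      }).on('error', reject)
    })
  }

  return fs.readFileSync(uri, encoding)
}
```

Under this scheme, `s3://` access would pick up credentials from the standard AWS credential chain (environment variables, shared config file, or an instance role), so the CLI commands themselves need no extra flags.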
18 changes: 18 additions & 0 deletions package-lock.json

Some generated files are not rendered by default.

22 changes: 11 additions & 11 deletions src/commands/convert/asff2hdf.ts
@@ -1,5 +1,4 @@
import {Command, Flags} from '@oclif/core'
-import fs from 'fs'
import {ASFFResults as Mapper} from '@mitre/hdf-converters'
import {checkInput, checkSuffix} from '../../utils/global'
import _ from 'lodash'
@@ -8,6 +7,7 @@ import AWS from 'aws-sdk'
import https from 'https'
import {AwsSecurityFindingFilters} from 'aws-sdk/clients/securityhub'
import {createWinstonLogger} from '../../utils/logging'
+import {createFolderIfNotExists, folderExistsURI, readFileURI, writeFileURI} from '../../utils/io'

// Should be no more than 100
const API_MAX_RESULTS = 100
@@ -40,17 +40,17 @@ export default class ASFF2HDF extends Command {
async run() {
const {flags} = await this.parse(ASFF2HDF)
const logger = createWinstonLogger('asff2hdf', flags.logLevel)
-let securityhub
+let securityhub: string[] | undefined

// Check if output folder already exists
-if (fs.existsSync(flags.output)) {
+if (await folderExistsURI(flags.output)) {
throw new Error(`Output folder ${flags.output} already exists`)
}

const findings: string[] = []
// If we've been passed an input file
if (flags.input) {
-const data = fs.readFileSync(flags.input, 'utf8')
+const data = await readFileURI(flags.input, 'utf8')
// Attempt to convert to one finding per line
try {
const convertedJson = JSON.parse(data)
@@ -81,9 +81,9 @@

// If we've been passed any Security Standards JSONs
if (flags.securityhub) {
-securityhub = flags.securityhub.map((file: string) =>
-  fs.readFileSync(file, 'utf8'),
-)
+securityhub = await Promise.all(flags.securityhub.map((file: string) =>
+  readFileURI(file, 'utf8'),
+))
}
} else if (flags.aws) { // Flag to pull findings from AWS Security Hub
AWS.config.update({
@@ -92,7 +92,7 @@
// Disable HTTPS verification if requested
rejectUnauthorized: !flags.insecure,
// Pass an SSL certificate to trust
-ca: flags.certificate ? fs.readFileSync(flags.certificate, 'utf8') : undefined,
+ca: flags.certificate ? await readFileURI(flags.certificate, 'utf8') : undefined,
}),
},
})
@@ -176,9 +176,9 @@

const results = converter.toHdf()

-fs.mkdirSync(flags.output)
-_.forOwn(results, (result, filename) => {
-  fs.writeFileSync(
+createFolderIfNotExists(flags.output)
+_.forOwn(results, async (result, filename) => {
+  await writeFileURI(
path.join(flags.output, checkSuffix(filename)),
JSON.stringify(result),
)
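The check near the top of `asff2hdf` replaces `fs.existsSync` with `folderExistsURI`, so the converter can refuse to overwrite an existing output folder on S3 as well as locally. A plausible implementation (again an assumption; the PR's `src/utils/io.ts` is not shown here) treats an S3 "folder" as a key prefix and asks for at most one object beneath it:

```typescript
import fs from 'fs'
import AWS from 'aws-sdk'

// Sketch only: an S3 "folder" is just a key prefix, so it exists
// if at least one object is stored under that prefix.
export async function folderExistsURI(uri: string): Promise<boolean> {
  if (!uri.startsWith('s3://')) {
    return fs.existsSync(uri)
  }

  const [bucket, ...prefixParts] = uri.slice('s3://'.length).split('/')
  const listing = await new AWS.S3().listObjectsV2({
    Bucket: bucket,
    Prefix: prefixParts.join('/'),
    MaxKeys: 1, // one match is enough to prove existence
  }).promise()
  return (listing.KeyCount ?? 0) > 0
}
```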
4 changes: 2 additions & 2 deletions src/commands/convert/aws_config2hdf.ts
@@ -1,8 +1,8 @@
import {Command, Flags} from '@oclif/core'
-import fs from 'fs'
import {AwsConfigMapper as Mapper} from '@mitre/hdf-converters'
import {ExecJSON} from 'inspecjs'
import {checkSuffix} from '../../utils/global'
+import {writeFileURI} from '../../utils/io'

export default class AWSConfig2HDF extends Command {
static usage = 'convert aws_config2hdf -r <region> -o <hdf-scan-results-json> [-h] [-a <access-key-id>] [-s <secret-access-key>] [-t <session-token>] [-i]'
@@ -56,6 +56,6 @@ export default class AWSConfig2HDF extends Command {
region: flags.region,
}, !flags.insecure) : new Mapper({region: flags.region}, !flags.insecure)

-fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(this.ensureRefs(await converter.toHdf())))
+await writeFileURI(checkSuffix(flags.output), JSON.stringify(this.ensureRefs(await converter.toHdf())))
}
}
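`writeFileURI`, which replaces `fs.writeFileSync` here and in the other converters, would be the mirror image of the reader. A sketch under the same assumptions:

```typescript
import fs from 'fs'
import AWS from 'aws-sdk'

// Sketch only: write to an s3://bucket/key target or to the local filesystem.
export async function writeFileURI(uri: string, data: string): Promise<void> {
  if (uri.startsWith('s3://')) {
    const [bucket, ...keyParts] = uri.slice('s3://'.length).split('/')
    await new AWS.S3().putObject({
      Bucket: bucket,
      Key: keyParts.join('/'),
      Body: data,
    }).promise()
    return
  }

  fs.writeFileSync(uri, data)
}
```

This split also suggests why `createFolderIfNotExists` in the `asff2hdf` diff only has work to do locally: S3 keys are flat, so a hypothetical implementation would call `fs.mkdirSync` for local paths and do nothing for `s3://` targets.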
6 changes: 3 additions & 3 deletions src/commands/convert/burpsuite2hdf.ts
@@ -1,7 +1,7 @@
import {Command, Flags} from '@oclif/core'
-import fs from 'fs'
import {BurpSuiteMapper as Mapper} from '@mitre/hdf-converters'
import {checkInput, checkSuffix} from '../../utils/global'
+import {readFileURI, writeFileURI} from '../../utils/io'

export default class Burpsuite2HDF extends Command {
static usage = 'convert burpsuite2hdf -i <burpsuite-xml> -o <hdf-scan-results-json> [-h] [-w]'
@@ -21,10 +21,10 @@ export default class Burpsuite2HDF extends Command {
const {flags} = await this.parse(Burpsuite2HDF)

// Check for correct input type
-const data = fs.readFileSync(flags.input, 'utf8')
+const data = await readFileURI(flags.input, 'utf8')
checkInput({data, filename: flags.input}, 'burp', 'BurpSuite Pro XML')

const converter = new Mapper(data, flags['with-raw'])
-fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf()))
+await writeFileURI(checkSuffix(flags.output), JSON.stringify(converter.toHdf()))
}
}