Remove AWS context from non-AWS checks #291

Merged 7 commits on Aug 27, 2024
Changes from all commits
2 changes: 1 addition & 1 deletion .github/workflows/sbom-vulns.yml
@@ -32,7 +32,7 @@ jobs:

# Scan the CDX SBOM with Grype
- name: Grype Scan SBOM
-uses: anchore/scan-action@v3.6.4
+uses: anchore/scan-action@v4.1.2
id: scan
with:
output-format: sarif
12 changes: 6 additions & 6 deletions Dockerfile
@@ -18,10 +18,10 @@
#specific language governing permissions and limitations
#under the License.

-# latest hash as of 21 JUNE 2024 - Alpine 3.20.1
-# https://hub.docker.com/layers/library/alpine/3.20.1/images/sha256-dabf91b69c191a1a0a1628fd6bdd029c0c4018041c7f052870bb13c5a222ae76?context=explore
+# latest hash as of 27 AUG 2024 - Alpine 3.20.2
+# https://hub.docker.com/layers/library/alpine/3.20.2/images/sha256-eddacbc7e24bf8799a4ed3cdcfa50d4b88a323695ad80f317b6629883b2c2a78?context=explore
# use as builder image to pull in required deps
-FROM alpine@sha256:b89d9c93e9ed3597455c90a0b88a8bbb5cb7188438f70953fede212a0c4394e0 AS builder
+FROM alpine@sha256:0a4eaa0eecf5f8c050e5bba433f58c052be7587ee8af3e8b3910ef9ab5fbe9f5 AS builder

ENV PYTHONUNBUFFERED=1

@@ -40,9 +40,9 @@ RUN \
rm -rf /tmp/* && \
rm -f /var/cache/apk/*

-# latest hash as of 21 JUNE 2024 - Alpine 3.20.1
-# https://hub.docker.com/layers/library/alpine/3.20.1/images/sha256-dabf91b69c191a1a0a1628fd6bdd029c0c4018041c7f052870bb13c5a222ae76?context=explore
-FROM alpine@sha256:b89d9c93e9ed3597455c90a0b88a8bbb5cb7188438f70953fede212a0c4394e0 as electriceye
+# latest hash as of 27 AUG 2024 - Alpine 3.20.2
+# https://hub.docker.com/layers/library/alpine/3.20.2/images/sha256-eddacbc7e24bf8799a4ed3cdcfa50d4b88a323695ad80f317b6629883b2c2a78?context=explore
+FROM alpine@sha256:0a4eaa0eecf5f8c050e5bba433f58c052be7587ee8af3e8b3910ef9ab5fbe9f5 as electriceye

COPY --from=builder /usr /usr

88 changes: 27 additions & 61 deletions eeauditor/eeauditor.py
@@ -330,16 +330,10 @@ def run_gcp_checks(self, pluginName=None, delay=0):
"""
Runs GCP Auditors across all TOML-specified Projects
"""

-# These details are needed for the ASFF...
-import boto3
-
-sts = boto3.client("sts")
-
-region = boto3.Session().region_name
-account = sts.get_caller_identity()["Account"]
-# Dervice the Partition ID from the AWS Region - needed for ASFF & service availability checks
-partition = CloudConfig.check_aws_partition(region)
+# hardcode the region and account for GCP
+region = "us-placeholder-1"
+account = "000000000000"
+partition = "not-aws"

for project in self.gcpProjectIds:
for serviceName, checkList in self.registry.checks.items():
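The same four placeholder lines recur in each of the six non-AWS runners changed below. A possible consolidation (purely a sketch, with hypothetical names that do not appear in this PR) would hoist the literals into one module-level helper:

    # Hypothetical helper, not part of this PR; the literals mirror
    # the values hardcoded in each runner below.
    NON_AWS_REGION = "us-placeholder-1"
    NON_AWS_ACCOUNT = "000000000000"
    NON_AWS_PARTITION = "not-aws"

    def non_aws_asff_context():
        """Return the placeholder (region, account, partition) for non-AWS findings."""
        return NON_AWS_REGION, NON_AWS_ACCOUNT, NON_AWS_PARTITION

Each runner could then call region, account, partition = non_aws_asff_context() instead of repeating the block.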
@@ -379,15 +373,10 @@ def run_oci_checks(self, pluginName=None, delay=0):
"""
Run OCI Auditors for all Compartments specified in the TOML for a Tenancy
"""

-import boto3
-
-sts = boto3.client("sts")
-
-region = boto3.Session().region_name
-account = sts.get_caller_identity()["Account"]
-# Dervice the Partition ID from the AWS Region - needed for ASFF & service availability checks
-partition = CloudConfig.check_aws_partition(region)
+# hardcode the region and account for OCI
+region = "us-placeholder-1"
+account = "000000000000"
+partition = "not-aws"

for serviceName, checkList in self.registry.checks.items():
# Pass the Cache at the "serviceName" level aka Plugin
@@ -430,16 +419,10 @@ def run_azure_checks(self, pluginName=None, delay=0):
"""
Runs Azure Auditors using Client Secret credentials from an Application Registration
"""

-# These details are needed for the ASFF...
-import boto3
-
-sts = boto3.client("sts")
-
-region = boto3.Session().region_name
-account = sts.get_caller_identity()["Account"]
-# Dervice the Partition ID from the AWS Region - needed for ASFF & service availability checks
-partition = CloudConfig.check_aws_partition(region)
+# hardcode the region and account for Azure
+region = "us-placeholder-1"
+account = "000000000000"
+partition = "not-aws"

for azSubId in self.azureSubscriptions:
for serviceName, checkList in self.registry.checks.items():
@@ -480,16 +463,10 @@ def run_m365_checks(self, pluginName=None, delay=0):
"""
Runs M365 Auditors using Client Secret credentials from an Enterprise Application
"""

-# These details are needed for the ASFF...
-import boto3
-
-sts = boto3.client("sts")
-
-region = boto3.Session().region_name
-account = sts.get_caller_identity()["Account"]
-# Dervice the Partition ID from the AWS Region - needed for ASFF & service availability checks
-partition = CloudConfig.check_aws_partition(region)
+# hardcode the region and account for non-AWS checks
+region = "us-placeholder-1"
+account = "000000000000"
+partition = "not-aws"

for serviceName, checkList in self.registry.checks.items():
# Pass the Cache at the "serviceName" level aka Plugin
@@ -532,16 +509,10 @@ def run_salesforce_checks(self, pluginName=None, delay=0):
Runs Salesforce Auditors using Password-based OAuth flow with Username, Password along with a
Connected Application Client ID and Client Secret and a User Security Token
"""

-# These details are needed for the ASFF...
-import boto3
-
-sts = boto3.client("sts")
-
-region = boto3.Session().region_name
-account = sts.get_caller_identity()["Account"]
-# Dervice the Partition ID from the AWS Region - needed for ASFF & service availability checks
-partition = CloudConfig.check_aws_partition(region)
+# hardcode the region and account for SFDC
+region = "us-placeholder-1"
+account = "000000000000"
+partition = "not-aws"

for serviceName, checkList in self.registry.checks.items():
# Pass the Cache at the "serviceName" level aka Plugin
@@ -585,15 +556,10 @@ def run_non_aws_checks(self, pluginName=None, delay=0):
"""
Generic function to run Auditors, unless specialized logic is required, Assessment Target default to running here
"""

-import boto3
-
-sts = boto3.client("sts")
-
-region = boto3.Session().region_name
-account = sts.get_caller_identity()["Account"]
-# Dervice the Partition ID from the AWS Region - needed for ASFF & service availability checks
-partition = CloudConfig.check_aws_partition(region)
+# hardcode the region and account for Non-AWS Checks
+region = "us-placeholder-1"
+account = "000000000000"
+partition = "not-aws"

for serviceName, checkList in self.registry.checks.items():
# Pass the Cache at the "serviceName" level aka Plugin
@@ -638,7 +604,7 @@ def print_checks_md(self):
if doc:
description = str(check.__doc__).replace("\n", "").replace(" ", "")
else:
description = "This shit is fucked!"
description = "Docstring is missing, please open an Issue!"

auditorFile = getfile(check).rpartition("/")[2]
auditorName = auditorFile.split(".py")[0]
@@ -659,10 +625,10 @@ def print_controls_json(self):
if doc:
description = str(check.__doc__).replace("\n", "").replace(" ", "")
else:
description = "This shit is fucked!"
description = "Docstring is missing, please open an Issue!"

controlPrinter.append(description)

-print(json.dumps(controlPrinter,indent=2))
+print(json.dumps(controlPrinter,indent=4))

# EOF
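Both printers now fall back to the friendlier message when a check has no docstring. A minimal standalone sketch of the pattern (the sample check and describe helper are hypothetical, and the whitespace normalization is simplified relative to the replace() chain in the source):

    import json

    def example_check():
        """Example check docstring
        spanning multiple lines."""

    def describe(check):
        # Collapse the docstring to one line, or fall back to the PR's new message
        if check.__doc__:
            return " ".join(str(check.__doc__).split())
        return "Docstring is missing, please open an Issue!"

    print(json.dumps([describe(example_check)], indent=4))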
27 changes: 20 additions & 7 deletions eeauditor/processor/outputs/ocsf_v1_1_0_output.py
@@ -239,10 +239,23 @@ def ocsf_compliance_finding_mapping(self, findings: list) -> list:
complianceStatusLabel=finding["Compliance"]["Status"]
)

if finding["ProductFields"]["Provider"] == "AWS":
partition = finding["Resources"][0]["Partition"]
else:
partition = finding["Resources"][0]["Partition"]
region = finding["ProductFields"]["AssetRegion"]
accountId = finding["ProductFields"]["ProviderAccountId"]

if partition != "AWS" or partition == "not-aws":
partition = None

if partition == "AWS" and region == "us-placeholder-1":
region = None

if partition == "AWS" and accountId == "000000000000":
accountId = None

# Non-AWS checks have hardcoded "dummy" data for Account, Region, and Partition - set these to none depending on the dummy data
#region = "us-placeholder-1"
#account = "000000000000"
#partition = "not-aws"

ocsf = {
# Base Event data
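Two details of the new guards are worth noting: in if partition != "AWS" or partition == "not-aws", the second clause is redundant (any value equal to "not-aws" already fails the first test), and because that branch nulls every non-"AWS" partition first, the two partition == "AWS" guards can only fire for findings whose partition survived as literally "AWS". A standalone sketch of what the accompanying comment appears to intend (hypothetical helper, not code from this PR):

    PLACEHOLDER_REGION = "us-placeholder-1"
    PLACEHOLDER_ACCOUNT = "000000000000"
    PLACEHOLDER_PARTITION = "not-aws"

    def scrub_placeholders(partition, region, account_id):
        """Replace the hardcoded non-AWS dummy values with None so they
        are not emitted as real cloud context in the OCSF output."""
        if partition == PLACEHOLDER_PARTITION:
            partition = None
        if region == PLACEHOLDER_REGION:
            region = None
        if account_id == PLACEHOLDER_ACCOUNT:
            account_id = None
        return partition, region, account_id

    print(scrub_placeholders("not-aws", "us-placeholder-1", "000000000000"))  # (None, None, None)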
@@ -277,9 +290,9 @@ def ocsf_compliance_finding_mapping(self, findings: list) -> list:
},
"cloud": {
"provider": finding["ProductFields"]["Provider"],
"region": finding["ProductFields"]["AssetRegion"],
"region": region,
"account": {
"uid": finding["ProductFields"]["ProviderAccountId"],
"uid": accountId,
"type": asffToOcsf[3],
"type_uid": asffToOcsf[2]
}
@@ -291,7 +304,7 @@ def ocsf_compliance_finding_mapping(self, findings: list) -> list:
"name": "cloud.account.uid",
"type": "Resource UID",
"type_id": 10,
"value": finding["ProductFields"]["ProviderAccountId"]
"value": accountId
},
# Resource UID
{
@@ -326,7 +339,7 @@ def ocsf_compliance_finding_mapping(self, findings: list) -> list:
"resource": {
"data": finding["ProductFields"]["AssetDetails"],
"cloud_partition": partition,
"region": finding["ProductFields"]["AssetRegion"],
"region": region,
"type": finding["ProductFields"]["AssetService"],
"uid": finding["Resources"][0]["Id"]
},
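For a non-AWS finding, the scrubbed values then serialize as JSON nulls in the emitted event. A minimal illustration of the resulting cloud object (values hypothetical):

    import json

    cloud = {
        "provider": "GCP",         # from finding["ProductFields"]["Provider"]
        "region": None,            # "us-placeholder-1" scrubbed to None
        "account": {"uid": None},  # "000000000000" scrubbed to None
    }
    print(json.dumps(cloud, indent=4))  # Python None is emitted as JSON null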