diff --git a/.github/workflows/sbom-vulns.yml b/.github/workflows/sbom-vulns.yml index c3251fdf..21f8af91 100644 --- a/.github/workflows/sbom-vulns.yml +++ b/.github/workflows/sbom-vulns.yml @@ -32,7 +32,7 @@ jobs: # Scan the CDX SBOM with Grype - name: Grype Scan SBOM - uses: anchore/scan-action@v3.6.4 + uses: anchore/scan-action@v4.1.2 id: scan with: output-format: sarif diff --git a/Dockerfile b/Dockerfile index 14f9d757..6fdb6b8c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,10 +18,10 @@ #specific language governing permissions and limitations #under the License. -# latest hash as of 21 JUNE 2024 - Alpine 3.20.1 -# https://hub.docker.com/layers/library/alpine/3.20.1/images/sha256-dabf91b69c191a1a0a1628fd6bdd029c0c4018041c7f052870bb13c5a222ae76?context=explore +# latest hash as of 27 AUG 2024 - Alpine 3.20.2 +# https://hub.docker.com/layers/library/alpine/3.20.2/images/sha256-eddacbc7e24bf8799a4ed3cdcfa50d4b88a323695ad80f317b6629883b2c2a78?context=explore # use as builder image to pull in required deps -FROM alpine@sha256:b89d9c93e9ed3597455c90a0b88a8bbb5cb7188438f70953fede212a0c4394e0 AS builder +FROM alpine@sha256:0a4eaa0eecf5f8c050e5bba433f58c052be7587ee8af3e8b3910ef9ab5fbe9f5 AS builder ENV PYTHONUNBUFFERED=1 @@ -40,9 +40,9 @@ RUN \ rm -rf /tmp/* && \ rm -f /var/cache/apk/* -# latest hash as of 21 JUNE 2024 - Alpine 3.20.1 -# https://hub.docker.com/layers/library/alpine/3.20.1/images/sha256-dabf91b69c191a1a0a1628fd6bdd029c0c4018041c7f052870bb13c5a222ae76?context=explore -FROM alpine@sha256:b89d9c93e9ed3597455c90a0b88a8bbb5cb7188438f70953fede212a0c4394e0 as electriceye +# latest hash as of 27 AUG 2024 - Alpine 3.20.2 +# https://hub.docker.com/layers/library/alpine/3.20.2/images/sha256-eddacbc7e24bf8799a4ed3cdcfa50d4b88a323695ad80f317b6629883b2c2a78?context=explore +FROM alpine@sha256:0a4eaa0eecf5f8c050e5bba433f58c052be7587ee8af3e8b3910ef9ab5fbe9f5 as electriceye COPY --from=builder /usr /usr diff --git a/eeauditor/eeauditor.py b/eeauditor/eeauditor.py index 
5f04715d..12bc0761 100644 --- a/eeauditor/eeauditor.py +++ b/eeauditor/eeauditor.py @@ -330,16 +330,10 @@ def run_gcp_checks(self, pluginName=None, delay=0): """ Runs GCP Auditors across all TOML-specified Projects """ - - # These details are needed for the ASFF... - import boto3 - - sts = boto3.client("sts") - - region = boto3.Session().region_name - account = sts.get_caller_identity()["Account"] - # Dervice the Partition ID from the AWS Region - needed for ASFF & service availability checks - partition = CloudConfig.check_aws_partition(region) + # hardcode the region and account for GCP + region = "us-placeholder-1" + account = "000000000000" + partition = "not-aws" for project in self.gcpProjectIds: for serviceName, checkList in self.registry.checks.items(): @@ -379,15 +373,10 @@ def run_oci_checks(self, pluginName=None, delay=0): """ Run OCI Auditors for all Compartments specified in the TOML for a Tenancy """ - - import boto3 - - sts = boto3.client("sts") - - region = boto3.Session().region_name - account = sts.get_caller_identity()["Account"] - # Dervice the Partition ID from the AWS Region - needed for ASFF & service availability checks - partition = CloudConfig.check_aws_partition(region) + # hardcode the region and account for OCI + region = "us-placeholder-1" + account = "000000000000" + partition = "not-aws" for serviceName, checkList in self.registry.checks.items(): # Pass the Cache at the "serviceName" level aka Plugin @@ -430,16 +419,10 @@ def run_azure_checks(self, pluginName=None, delay=0): """ Runs Azure Auditors using Client Secret credentials from an Application Registration """ - - # These details are needed for the ASFF... 
- import boto3 - - sts = boto3.client("sts") - - region = boto3.Session().region_name - account = sts.get_caller_identity()["Account"] - # Dervice the Partition ID from the AWS Region - needed for ASFF & service availability checks - partition = CloudConfig.check_aws_partition(region) + # hardcode the region and account for Azure + region = "us-placeholder-1" + account = "000000000000" + partition = "not-aws" for azSubId in self.azureSubscriptions: for serviceName, checkList in self.registry.checks.items(): @@ -480,16 +463,10 @@ def run_m365_checks(self, pluginName=None, delay=0): """ Runs M365 Auditors using Client Secret credentials from an Enterprise Application """ - - # These details are needed for the ASFF... - import boto3 - - sts = boto3.client("sts") - - region = boto3.Session().region_name - account = sts.get_caller_identity()["Account"] - # Dervice the Partition ID from the AWS Region - needed for ASFF & service availability checks - partition = CloudConfig.check_aws_partition(region) + # hardcode the region and account for non-AWS checks + region = "us-placeholder-1" + account = "000000000000" + partition = "not-aws" for serviceName, checkList in self.registry.checks.items(): # Pass the Cache at the "serviceName" level aka Plugin @@ -532,16 +509,10 @@ def run_salesforce_checks(self, pluginName=None, delay=0): Runs Salesforce Auditors using Password-based OAuth flow with Username, Password along with a Connected Application Client ID and Client Secret and a User Security Token """ - - # These details are needed for the ASFF... 
- import boto3 - - sts = boto3.client("sts") - - region = boto3.Session().region_name - account = sts.get_caller_identity()["Account"] - # Dervice the Partition ID from the AWS Region - needed for ASFF & service availability checks - partition = CloudConfig.check_aws_partition(region) + # hardcode the region and account for SFDC + region = "us-placeholder-1" + account = "000000000000" + partition = "not-aws" for serviceName, checkList in self.registry.checks.items(): # Pass the Cache at the "serviceName" level aka Plugin @@ -585,15 +556,10 @@ def run_non_aws_checks(self, pluginName=None, delay=0): """ Generic function to run Auditors, unless specialized logic is required, Assessment Target default to running here """ - - import boto3 - - sts = boto3.client("sts") - - region = boto3.Session().region_name - account = sts.get_caller_identity()["Account"] - # Dervice the Partition ID from the AWS Region - needed for ASFF & service availability checks - partition = CloudConfig.check_aws_partition(region) + # hardcode the region and account for Non-AWS Checks + region = "us-placeholder-1" + account = "000000000000" + partition = "not-aws" for serviceName, checkList in self.registry.checks.items(): # Pass the Cache at the "serviceName" level aka Plugin @@ -638,7 +604,7 @@ def print_checks_md(self): if doc: description = str(check.__doc__).replace("\n", "").replace(" ", "") else: - description = "This shit is fucked!" + description = "Docstring is missing, please open an Issue!" auditorFile = getfile(check).rpartition("/")[2] auditorName = auditorFile.split(".py")[0] @@ -659,10 +625,10 @@ def print_controls_json(self): if doc: description = str(check.__doc__).replace("\n", "").replace(" ", "") else: - description = "This shit is fucked!" + description = "Docstring is missing, please open an Issue!" 
 controlPrinter.append(description) - print(json.dumps(controlPrinter,indent=2)) + print(json.dumps(controlPrinter,indent=4)) # EOF \ No newline at end of file diff --git a/eeauditor/processor/outputs/ocsf_v1_1_0_output.py b/eeauditor/processor/outputs/ocsf_v1_1_0_output.py index 48e6c296..356b2785 100644 --- a/eeauditor/processor/outputs/ocsf_v1_1_0_output.py +++ b/eeauditor/processor/outputs/ocsf_v1_1_0_output.py @@ -239,10 +239,23 @@ def ocsf_compliance_finding_mapping(self, findings: list) -> list: complianceStatusLabel=finding["Compliance"]["Status"] ) - if finding["ProductFields"]["Provider"] == "AWS": - partition = finding["Resources"][0]["Partition"] - else: + partition = finding["Resources"][0]["Partition"] + region = finding["ProductFields"]["AssetRegion"] + accountId = finding["ProductFields"]["ProviderAccountId"] + + if partition == "not-aws": partition = None + + if region == "us-placeholder-1": + region = None + + if accountId == "000000000000": + accountId = None + + # Non-AWS checks have hardcoded "dummy" data for Account, Region, and Partition - set these to none depending on the dummy data + #region = "us-placeholder-1" + #account = "000000000000" + #partition = "not-aws" ocsf = { # Base Event data @@ -277,9 +290,9 @@ def ocsf_compliance_finding_mapping(self, findings: list) -> list: }, "cloud": { "provider": finding["ProductFields"]["Provider"], - "region": finding["ProductFields"]["AssetRegion"], + "region": region, "account": { - "uid": finding["ProductFields"]["ProviderAccountId"], + "uid": accountId, "type": asffToOcsf[3], "type_uid": asffToOcsf[2] } @@ -291,7 +304,7 @@ def ocsf_compliance_finding_mapping(self, findings: list) -> list: "name": "cloud.account.uid", "type": "Resource UID", "type_id": 10, - "value": finding["ProductFields"]["ProviderAccountId"] + "value": accountId }, # Resource UID { @@ -326,7 +339,7 @@ def ocsf_compliance_finding_mapping(self, findings: list) 
-> list: "resource": { "data": finding["ProductFields"]["AssetDetails"], "cloud_partition": partition, - "region": finding["ProductFields"]["AssetRegion"], + "region": region, "type": finding["ProductFields"]["AssetService"], "uid": finding["Resources"][0]["Id"] },