diff --git a/README.md b/README.md
index ee144796..bcd679c8 100644
--- a/README.md
+++ b/README.md
@@ -295,6 +295,7 @@ The following resources are available in the InSpec GCP Profile
 | [google_data_fusion_instance](docs/resources/google_data_fusion_instance.md) | [google_data_fusion_instances](docs/resources/google_data_fusion_instances.md) |
 | [google_dataflow_project_location_job](docs/resources/google_dataflow_project_location_job.md) | [google_dataflow_project_location_jobs](docs/resources/google_dataflow_project_location_jobs.md) |
 | [google_dataproc_autoscaling_policy](docs/resources/google_dataproc_autoscaling_policy.md) | [google_dataproc_autoscaling_policies](docs/resources/google_dataproc_autoscaling_policies.md) |
+| [google_dataproc_batch](docs/resources/google_dataproc_batch.md) | [google_dataproc_batches](docs/resources/google_dataproc_batches.md) |
 | [google_dataproc_cluster](docs/resources/google_dataproc_cluster.md) | [google_dataproc_clusters](docs/resources/google_dataproc_clusters.md) |
 | [google_dataproc_job](docs/resources/google_dataproc_job.md) | [google_dataproc_jobs](docs/resources/google_dataproc_jobs.md) |
 | [google_dataproc_metastore_federation](docs/resources/google_dataproc_metastore_federation.md) | [google_dataproc_metastore_federations](docs/resources/google_dataproc_metastore_federations.md) |
diff --git a/docs/resources/google_dataproc_batch.md b/docs/resources/google_dataproc_batch.md
index c9442643..757f3681 100644
--- a/docs/resources/google_dataproc_batch.md
+++ b/docs/resources/google_dataproc_batch.md
@@ -23,7 +23,7 @@ A `google_dataproc_batch` is used to test a Google Batch resource
 ## Examples
 
 ```
-describe google_dataproc_batch(name: ' value_name') do
+describe google_dataproc_batch(name: 'projects/*/locations/*/batches/value_name') do
   it { should exist }
   its('name') { should cmp 'value_name' }
   its('uuid') { should cmp 'value_uuid' }
@@ -33,7 +33,6 @@ describe google_dataproc_batch(name: ' value_name') do
   its('state_time') { should cmp 'value_statetime' }
   its('creator') { should cmp 'value_creator' }
   its('operation') { should cmp 'value_operation' }
-
 end
 
 describe google_dataproc_batch(name: "does_not_exit") do
@@ -98,7 +97,7 @@ Properties that can be accessed from the `google_dataproc_batch` resource:
 
   * `query_variables`: Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
 
-    * `additional_properties`: 
+    * `additional_properties`:
 
   * `jar_file_uris`: Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
 
@@ -106,7 +105,7 @@ Properties that can be accessed from the `google_dataproc_batch` resource:
 
 * `endpoints`: Output only. Map of remote access endpoints (such as web interfaces and APIs) to their URIs.
 
-  * `additional_properties`: 
+  * `additional_properties`:
 
 * `output_uri`: Output only. A URI pointing to the location of the stdout and stderr of the workload.
 
@@ -156,7 +155,7 @@ Properties that can be accessed from the `google_dataproc_batch` resource:
 
 * `labels`: Optional. The labels to associate with this batch. Label keys must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a batch.
 
-  * `additional_properties`: 
+  * `additional_properties`:
 
 * `runtime_config`: Runtime configuration for a workload.
 
@@ -166,7 +165,7 @@ Properties that can be accessed from the `google_dataproc_batch` resource:
 
   * `properties`: Optional. A mapping of property names to values, which are used to configure workload execution.
 
-    * `additional_properties`: 
+    * `additional_properties`:
 
   * `repository_config`: Configuration for dependency repositories
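Reviewer note on the `google_dataproc_batch` docs above: the example now uses the fully qualified name format the API expects. A minimal sketch of how a control would pin this to one concrete batch follows; the project, location, and batch ID are hypothetical placeholders, and the expected state assumes the workload has already finished.

```ruby
# A sketch, not the shipped example: 'my-project', 'us-central1', and
# 'my-batch' are placeholders for a real serverless batch.
describe google_dataproc_batch(name: 'projects/my-project/locations/us-central1/batches/my-batch') do
  it { should exist }
  # Dataproc serverless batches finish in a terminal state such as SUCCEEDED.
  its('state') { should cmp 'SUCCEEDED' }
  its('creator') { should_not be_nil }
end
```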
diff --git a/docs/resources/google_dataproc_batches.md b/docs/resources/google_dataproc_batches.md
index 4a93cecd..e0de208c 100644
--- a/docs/resources/google_dataproc_batches.md
+++ b/docs/resources/google_dataproc_batches.md
@@ -23,8 +23,16 @@ A `google_dataproc_batches` is used to test a Google Batch resource
 ## Examples
 
 ```
-  describe google_dataproc_batches(parent: ' value_parent') do
+  describe google_dataproc_batches(parent: 'projects/*/locations/*') do
     it { should exist }
+    its('names') { should include 'value_name' }
+    its('uuids') { should include 'value_uuid' }
+    its('create_times') { should include 'value_createtime' }
+    its('states') { should include 'value_state' }
+    its('state_messages') { should include 'value_statemessage' }
+    its('state_times') { should include 'value_statetime' }
+    its('creators') { should include 'value_creator' }
+    its('operations') { should include 'value_operation' }
   end
 ```
diff --git a/libraries/google_dataproc_batch.rb b/libraries/google_dataproc_batch.rb
index 5b12937c..83f712ee 100644
--- a/libraries/google_dataproc_batch.rb
+++ b/libraries/google_dataproc_batch.rb
@@ -91,7 +91,7 @@ def exists?
 end
 
 def to_s
-  "Batch #{@params[:]}"
+  "Batch #{@params[:name]}"
 end
 
 private
@@ -101,6 +101,6 @@ def product_url(_ = nil)
 end
 
 def resource_base_url
-  '{{+name}}'
+  '{{name}}'
 end
 end
diff --git a/libraries/google_dataproc_batches.rb b/libraries/google_dataproc_batches.rb
index ee08801f..92731c94 100644
--- a/libraries/google_dataproc_batches.rb
+++ b/libraries/google_dataproc_batches.rb
@@ -106,6 +106,6 @@ def product_url(_ = nil)
 end
 
 def resource_base_url
-  '{{+parent}}/batches'
+  '{{parent}}/batches'
 end
 end
diff --git a/test/integration/build/gcp-mm.tf b/test/integration/build/gcp-mm.tf
index 0866db18..978e3965 100644
--- a/test/integration/build/gcp-mm.tf
+++ b/test/integration/build/gcp-mm.tf
@@ -269,6 +269,9 @@ variable "data_fusion_instance" {
 variable "cloud_run_jobs" {
   type = any
 }
+variable "dataproc_serverless_batches" {
+  type = any
+}
 resource "google_compute_ssl_policy" "custom-ssl-policy" {
   name            = var.ssl_policy["name"]
   min_tls_version = var.ssl_policy["min_tls_version"]
@@ -2245,3 +2248,27 @@ resource "google_cloud_run_v2_job" "default" {
     }
   }
 }
+resource "google_dataproc_batch" "inspec_batch_spark" {
+
+  batch_id = var.dataproc_serverless_batches.name
+  location = var.dataproc_serverless_batches.location
+  labels   = { "app" : "inspec" }
+  project  = var.gcp_project_id
+  runtime_config {
+    properties = { "spark.dynamicAllocation.enabled" : "false", "spark.executor.instances" : "2" }
+  }
+
+  environment_config {
+    execution_config {
+      subnetwork_uri = "default"
+      ttl            = "3600s"
+      network_tags   = ["tag1"]
+    }
+  }
+
+  spark_batch {
+    main_class    = var.dataproc_serverless_batches.main_class
+    args          = [var.dataproc_serverless_batches.args]
+    jar_file_uris = [var.dataproc_serverless_batches.path]
+  }
+}
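Reviewer note on the Terraform fixture above: once `gcp-mm.tf` has created the batch, the plural resource can verify it from the listing. InSpec GCP plural resources are backed by a FilterTable, so results can be narrowed before asserting; the sketch below assumes the generated table registers singular criteria such as `state` alongside the plural columns shown in the docs, and the parent value is a placeholder.

```ruby
# A sketch under the assumptions above; the parent is a placeholder.
batches = google_dataproc_batches(parent: 'projects/my-project/locations/us-central1')

# Nothing in the listing should have ended in a failure state.
describe batches.where(state: 'FAILED') do
  it { should_not exist }
end
```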
diff --git a/test/integration/configuration/mm-attributes.yml b/test/integration/configuration/mm-attributes.yml
index ab0626a0..837515e5 100644
--- a/test/integration/configuration/mm-attributes.yml
+++ b/test/integration/configuration/mm-attributes.yml
@@ -751,3 +751,10 @@ cloud_run_jobs:
     location: "us-central1"
     deletion_protection: "false"
     image: "us-central1-docker.pkg.dev/ppradhan/nas/balasubs_tutorial1_20230915_182543:latest"
+
+dataproc_serverless_batches:
+  name: "inspec-test-batch-0052"
+  location: "us-central1"
+  main_class: "org.apache.spark.examples.SparkPi"
+  args: "10"
+  path: "file:///usr/lib/spark/examples/jars/spark-examples.jar"
diff --git a/test/integration/verify/controls/google_dataproc_batch.rb b/test/integration/verify/controls/google_dataproc_batch.rb
index 08d30edb..884a70d8 100644
--- a/test/integration/verify/controls/google_dataproc_batch.rb
+++ b/test/integration/verify/controls/google_dataproc_batch.rb
@@ -16,16 +16,15 @@
 
 gcp_project_id = input(:gcp_project_id, value: 'gcp_project_id', description: 'The GCP project identifier.')
 
-  batch = input('batch', value: {
-    "name": "value_name",
-    "parent": "value_parent",
-    "uuid": "value_uuid",
-    "create_time": "value_createtime",
-    "state": "value_state",
-    "state_message": "value_statemessage",
-    "state_time": "value_statetime",
-    "creator": "value_creator",
-    "operation": "value_operation"
+batch = input('batch', value: {
+  "name": "projects/ppradhan/locations/us-central1/batches/inspec-test-batch-0052",
+  "parent": "projects/ppradhan/locations/us-central1",
+  "uuid": "5a1b8402-2aa5-4578-98ee-2ff12ff2a14e",
+  "create_time": "2024-10-15T06:42:29.671473Z",
+  "state": "SUCCEEDED",
+  "state_time": "2024-10-15T06:44:55.114445Z",
+  "creator": "bala-local@ppradhan.iam.gserviceaccount.com",
+  "operation": "projects/ppradhan/regions/us-central1/operations/19a2ac29-3564-49b8-8116-c36dd98d9cd5"
 }, description: 'batch description')
 control 'google_dataproc_batch-1.0' do
   impact 1.0
diff --git a/test/integration/verify/controls/google_dataproc_batches.rb b/test/integration/verify/controls/google_dataproc_batches.rb
index cf42ec21..8210780b 100644
--- a/test/integration/verify/controls/google_dataproc_batches.rb
+++ b/test/integration/verify/controls/google_dataproc_batches.rb
@@ -16,16 +16,15 @@
 
 gcp_project_id = input(:gcp_project_id, value: 'gcp_project_id', description: 'The GCP project identifier.')
 
-  batch = input('batch', value: {
-    "name": "value_name",
-    "parent": "value_parent",
-    "uuid": "value_uuid",
-    "create_time": "value_createtime",
-    "state": "value_state",
-    "state_message": "value_statemessage",
-    "state_time": "value_statetime",
-    "creator": "value_creator",
-    "operation": "value_operation"
+batch = input('batch', value: {
+  "name": "projects/ppradhan/locations/us-central1/batches/inspec-test-batch-0052",
+  "parent": "projects/ppradhan/locations/us-central1",
+  "uuid": "5a1b8402-2aa5-4578-98ee-2ff12ff2a14e",
+  "create_time": "2024-10-15T06:42:29.671473Z",
+  "state": "SUCCEEDED",
+  "state_time": "2024-10-15T06:44:55.114445Z",
+  "creator": "bala-local@ppradhan.iam.gserviceaccount.com",
+  "operation": "projects/ppradhan/regions/us-central1/operations/19a2ac29-3564-49b8-8116-c36dd98d9cd5"
 }, description: 'batch description')
 control 'google_dataproc_batches-1.0' do
   impact 1.0
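Reviewer note on the two controls above: both hunks stop at the control header, and the input hash is built with Ruby's `"key":` shorthand, which produces symbol keys. A sketch of how the control body presumably consumes the refreshed values (lookups must therefore use `batch[:name]`, not `batch['name']`):

```ruby
control 'google_dataproc_batch-1.0' do
  impact 1.0
  title 'Verify the Dataproc serverless batch created by the integration fixture'

  # { "name": ... } in Ruby builds { name: ... }, so index with symbols.
  describe google_dataproc_batch(name: batch[:name]) do
    it { should exist }
    its('state') { should cmp batch[:state] }
    its('uuid') { should cmp batch[:uuid] }
    its('creator') { should cmp batch[:creator] }
  end
end
```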