diff --git a/buildstockbatch/gcp/gcp.py b/buildstockbatch/gcp/gcp.py
index ca1fbc59..d8e979d3 100644
--- a/buildstockbatch/gcp/gcp.py
+++ b/buildstockbatch/gcp/gcp.py
@@ -654,8 +654,9 @@ def start_batch_job(self, batch_info):
             boot_disk_mib=job_env_cfg.get("boot_disk_mib", None),
         )
 
-        # Give three minutes per simulation, plus ten minutes for job overhead
-        task_duration_secs = 60 * (10 + batch_info.n_sims_per_job * 3)
+        # Use specified time per simulation, plus ten minutes for job overhead.
+        minutes_per_sim = job_env_cfg.get("minutes_per_sim", 3)
+        task_duration_secs = 60 * (10 + batch_info.n_sims_per_job * minutes_per_sim)
         task = batch_v1.TaskSpec(
             runnables=[bsb_runnable],
             compute_resource=resources,
diff --git a/buildstockbatch/schemas/v0.3.yaml b/buildstockbatch/schemas/v0.3.yaml
index 77f2cc11..8aa35244 100644
--- a/buildstockbatch/schemas/v0.3.yaml
+++ b/buildstockbatch/schemas/v0.3.yaml
@@ -48,6 +48,7 @@ gcp-job-environment-spec:
   boot_disk_mib: int(required=False)
   machine_type: str(required=False)
   use_spot: bool(required=False)
+  minutes_per_sim: num(min=0.05, max=480, required=False)
 
 gcp-postprocessing_environment-spec:
   # Limits documented at
diff --git a/docs/project_defn.rst b/docs/project_defn.rst
index d0800425..e1f7b339 100644
--- a/docs/project_defn.rst
+++ b/docs/project_defn.rst
@@ -333,6 +333,8 @@ using `GCP Batch `_ and `Cloud Run `_ for data
     simulations, which can reduce costs by up to 91%. Default: false
+  * ``minutes_per_sim``: Optional. Maximum time per simulation. Default works well for ResStock,
+    but this should be increased for ComStock. Default: 3 minutes
 * ``postprocessing_environment``: Optional. Specifies the Cloud Run computing environment for
   postprocessing.