Skip to content

Commit

Permalink
Merge branch 'develop' into wx_927_call_caching_prefixes_and_blacklist
Browse files Browse the repository at this point in the history
  • Loading branch information
mcovarr authored Sep 27, 2024
2 parents 81d31a2 + 9853b52 commit 1af653a
Show file tree
Hide file tree
Showing 34 changed files with 524 additions and 123 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -43,10 +43,11 @@ class GroupMetricsActor(engineDbInterface: EngineSqlDatabase,
case LogQuotaExhaustedGroups =>
getQuotaExhaustedGroups() onComplete {
case Success(quotaExhaustedGroups) =>
log.info(
s"Hog groups currently experiencing quota exhaustion: ${quotaExhaustedGroups.length}. Group IDs: [${quotaExhaustedGroups.toList
.mkString(", ")}]."
)
if (quotaExhaustedGroups.nonEmpty)
log.info(
s"Hog groups currently experiencing quota exhaustion: ${quotaExhaustedGroups.length}. Group IDs: [${quotaExhaustedGroups.toList
.mkString(", ")}]."
)
case Failure(exception) =>
log.info(
s"Something went wrong when fetching quota exhausted groups for logging. Will retry in ${loggingInterval
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Centaur test fixture: pulls a private Docker Hub image so the test can verify
# private-registry docker hash resolution on the GCPBATCHUSADockerhub backend.
task dockerhub {
command {
echo "hello"
}
runtime {
# Private image; the companion .test file pins the resolved sha256 digest
# (dockerImageUsed), so this tag must not change without updating the test.
docker: "broadinstitute/cloud-cromwell:dev"
backend: "GCPBATCHUSADockerhub"
}
}

workflow docker_hash_dockerhub_private {
call dockerhub
}
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name: docker_hash_dockerhub_private
testFormat: workflowsuccess
# see https://github.com/broadinstitute/cromwell/pull/7515
backends: [Papi, GCPBATCH_FAIL]
backendsMode: any
backends: [Papi, GCPBATCH]

files {
workflow: docker_hash/docker_hash_dockerhub_private.wdl
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name: docker_hash_dockerhub_private_config_usa_wf_options
testFormat: workflowsuccess
# see https://github.com/broadinstitute/cromwell/pull/7515
backends: [Papiv2USADockerhub, GCPBATCH_FAIL, GCPBATCH_NEEDS_ALT]
backends: [Papiv2USADockerhub, GCPBATCH_ALT]

files {
workflow: docker_hash/docker_hash_dockerhub_private_usa_dockerhub.wdl
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
version 1.0

# Centaur test fixture: exercises three DRS input-handling paths for two
# Jade Data Repo files -- full localization, localization-optional (cloud path
# passthrough), and engine-side reading -- and exposes the results as outputs
# for the companion .test file's metadata expectations.
workflow drs_usa_jdr {
input {
File file1
File file2
}

# Fully localizes both files and reports container paths, md5 hashes, sizes.
call localize_jdr_drs_with_usa {
input:
file1 = file1,
file2 = file2
}

# localization_optional task: paths observed here may remain cloud URIs.
call skip_localize_jdr_drs_with_usa {
input:
file1 = file1,
file2 = file2
}

# Files are consumed only via read_json in the task outputs.
call read_drs_with_usa {
input:
file1 = file1,
file2 = file2
}

output {
String path1 = localize_jdr_drs_with_usa.path1
String path2 = localize_jdr_drs_with_usa.path2
String hash1 = localize_jdr_drs_with_usa.hash1
String hash2 = localize_jdr_drs_with_usa.hash2
Float size1 = localize_jdr_drs_with_usa.size1
Float size2 = localize_jdr_drs_with_usa.size2
String cloud1 = skip_localize_jdr_drs_with_usa.path1
String cloud2 = skip_localize_jdr_drs_with_usa.path2
Map[String, String] map1 = read_drs_with_usa.map1
Map[String, String] map2 = read_drs_with_usa.map2
}
}

# Localizes both DRS inputs into the container, then records each file's
# in-container path and md5 hash; sizes are computed in the output section.
# The .test file pins the exact paths/hashes/sizes, so command bytes matter
# (they also feed the call-caching hash).
task localize_jdr_drs_with_usa {
input {
File file1
File file2
}

command <<<
echo ~{file1} > path1
echo ~{file2} > path2
md5sum ~{file1} | cut -c1-32 > hash1
md5sum ~{file2} | cut -c1-32 > hash2
>>>

output {
String path1 = read_string("path1")
String path2 = read_string("path2")
String hash1 = read_string("hash1")
String hash2 = read_string("hash2")
Float size1 = size(file1)
Float size2 = size(file2)
}

runtime {
docker: "ubuntu:latest"
backend: "GCPBATCH-usa"
}
}

# Same path-echo as localize_jdr_drs_with_usa, but with localization_optional
# set: a file whose gsUri permits it is passed through as a cloud URI instead
# of being downloaded (the .test file expects one localized path and one
# gs:// URI).
task skip_localize_jdr_drs_with_usa {
input {
File file1
File file2
}

parameter_meta {
file1: { localization_optional: true }
file2: { localization_optional: true }
}

command <<<
echo ~{file1} > path1
echo ~{file2} > path2
>>>

output {
String path1 = read_string("path1")
String path2 = read_string("path2")
}

runtime {
docker: "ubuntu:latest"
backend: "GCPBATCH-usa"
}
}

# The command never touches the inputs; both files are consumed only by
# read_json in the output section, i.e. read engine-side rather than inside
# the task container.
task read_drs_with_usa {
input {
File file1
File file2
}

command <<<
echo file is read by the engine
>>>

output {
Map[String, String] map1 = read_json(file1)
Map[String, String] map2 = read_json(file2)
}

runtime {
docker: "ubuntu:latest"
backend: "GCPBATCH-usa"
}
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
name: drs_usa_jdr
testFormat: WorkflowSuccess
backends: ["papi-v2-usa", "GCPBATCH_NEEDS_ALT"]
backends: ["papi-v2-usa", "GCPBATCH_ALT"]
tags: [ drs ]
skipDescribeEndpointValidation: true

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
name: drs_usa_jdr_preresolve
testFormat: WorkflowSuccess
backends: ["papi-v2-usa", GCPBATCH_NEEDS_ALT]
backends: ["papi-v2-usa", GCPBATCH_ALT]
tags: [ drs ]
skipDescribeEndpointValidation: true

Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
# NB: To request this test by name, make it lowercase, eg sbt "centaur/it:testOnly * -- -n fast_fail_noaddress"
name: fast_fail_noAddress
# GCPBATCH error message is different
# Unable to complete Batch request due to a problem with the request (io.grpc.StatusRuntimeException: INVALID_ARGUMENT: no_external_ip_address field is invalid. both network and subnetwork have to be specified when no_external_ip_address is true).
backends: [Papi, Papiv2, GCPBATCH_NEEDS_ALT]
backends: [Papi, Papiv2, GCPBATCH_ALT]
backendsMode: any
testFormat: workflowfailure

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# GCP Batch variant of docker_hash_dockerhub_private_config_usa_wf_options:
# verifies private Docker Hub hash resolution with workflow-options-supplied
# credentials on the GCPBATCHUSADockerhub backend.
name: gcpbatch_docker_hash_dockerhub_private_config_usa_wf_options
testFormat: workflowsuccess
# see https://github.com/broadinstitute/cromwell/pull/7515
backends: [GCPBATCHUSADockerhub]

files {
  workflow: docker_hash/gcpbatch_docker_hash_dockerhub_private_usa_dockerhub.wdl
  # Updated the options to read_from_cache: false for
  # https://github.com/broadinstitute/cromwell/issues/3998
  # The first options-dir value is an error sentinel; the HOCON optional
  # substitution below overrides it when the env var is set.
  options-dir: "Error: BA-6546 The environment variable CROMWELL_BUILD_RESOURCES_DIRECTORY must be set/export pointing to a valid path such as '${YOUR_CROMWELL_DIR}/target/ci/resources'"
  options-dir: ${?CROMWELL_BUILD_RESOURCES_DIRECTORY}
  options: ${files.options-dir}/private_docker_papi_v2_usa.options
}

metadata {
  # Digest the private broadinstitute/cloud-cromwell:dev tag must resolve to.
  "calls.docker_hash_dockerhub_private.dockerhub.runtimeAttributes.docker": "broadinstitute/cloud-cromwell:dev",
  "calls.docker_hash_dockerhub_private.dockerhub.dockerImageUsed": "broadinstitute/cloud-cromwell@sha256:0d51f90e1dd6a449d4587004c945e43f2a7bbf615151308cff40c15998cc3ad4"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# GCP Batch variant of drs_usa_jdr: DRS localization of Jade Data Repo files
# on the GCPBATCH-usa backend; expectations pin paths, hashes, and sizes.
name: gcpbatch_drs_usa_jdr
testFormat: WorkflowSuccess
backends: ["GCPBATCH-usa"]
tags: [ drs ]
skipDescribeEndpointValidation: true

files {
  workflow: drs_tests/gcpbatch_drs_usa_jdr.wdl
  # The first options-dir value is an error sentinel; the HOCON optional
  # substitution below overrides it when the env var is set.
  options-dir: "Error: BA-6546 The environment variable CROMWELL_BUILD_RESOURCES_DIRECTORY must be set/export pointing to a valid path such as '${YOUR_CROMWELL_DIR}/target/ci/resources'"
  options-dir: ${?CROMWELL_BUILD_RESOURCES_DIRECTORY}
  options: ${files.options-dir}/papi_v2_usa.options.json
  inputs: drs_tests/drs_usa_jdr.inputs
}

metadata {
  workflowName: drs_usa_jdr
  status: Succeeded

  "outputs.drs_usa_jdr.path1" =
    "/mnt/disks/cromwell_root/drs_localization_paths/CromwellSimpleWithFilerefs/hello_jade.json"
  "outputs.drs_usa_jdr.path2" =
    "/mnt/disks/cromwell_root/drs_localization_paths/CromwellSimpleWithFilerefs2/hello_jade_2.json"
  "outputs.drs_usa_jdr.hash1" = "faf12e94c25bef7df62e4a5eb62573f5"
  "outputs.drs_usa_jdr.hash2" = "19e1b021628130fda04c79ee9a056b67"
  "outputs.drs_usa_jdr.size1" = 18.0
  "outputs.drs_usa_jdr.size2" = 38.0
  # This JDR file has a gsUri that doesn't end in /fileName so it must be downloaded with the DRS localizer
  "outputs.drs_usa_jdr.cloud1" =
    "/mnt/disks/cromwell_root/drs_localization_paths/CromwellSimpleWithFilerefs/hello_jade.json"
  # This JDR file has a gsUri that can skip localization
  "outputs.drs_usa_jdr.cloud2" =
    "gs://broad-jade-dev-data-bucket/e1941fb9-6537-4e1a-b70d-34352a3a7817/ad783b60-aeba-4055-8f7b-194880f37259/hello_jade_2.json"
  "outputs.drs_usa_jdr.map1.hello" = "jade"
  "outputs.drs_usa_jdr.map2.hello" = "jade"
  "outputs.drs_usa_jdr.map2.attempt" = "2"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# Preresolve variant of gcpbatch_drs_usa_jdr: same workflow, but options enable
# DRS-to-GCS preresolution, which changes the expected container path for the
# file whose gsUri can be preresolved (path2 below).
name: gcpbatch_drs_usa_jdr_preresolve
testFormat: WorkflowSuccess
backends: ["GCPBATCH-usa"]
tags: [ drs ]
skipDescribeEndpointValidation: true

files {
  workflow: drs_tests/gcpbatch_drs_usa_jdr.wdl
  # The first options-dir value is an error sentinel; the HOCON optional
  # substitution below overrides it when the env var is set.
  options-dir: "Error: BA-6546 The environment variable CROMWELL_BUILD_RESOURCES_DIRECTORY must be set/export pointing to a valid path such as '${YOUR_CROMWELL_DIR}/target/ci/resources'"
  options-dir: ${?CROMWELL_BUILD_RESOURCES_DIRECTORY}
  options: ${files.options-dir}/gcpbatch_papi_v2_usa_preresolve.options.json
  inputs: drs_tests/drs_usa_jdr.inputs
}

metadata {
  workflowName: drs_usa_jdr
  status: Succeeded

  "outputs.drs_usa_jdr.path1" =
    "/mnt/disks/cromwell_root/drs_localization_paths/CromwellSimpleWithFilerefs/hello_jade.json"
  # This JDR file has a gsUri that can be preresolved to a regular GCS file for improved localization performance.
  # However this means that the file's container path is determined by the GCS localization logic and not the
  # `localizationPath`-aware DRS localization logic. The GCS localization logic always uses a containerized version
  # of the GCS path, which is what this expectation represents.
  "outputs.drs_usa_jdr.path2" =
    "/mnt/disks/cromwell_root/broad-jade-dev-data-bucket/e1941fb9-6537-4e1a-b70d-34352a3a7817/ad783b60-aeba-4055-8f7b-194880f37259/hello_jade_2.json"
  "outputs.drs_usa_jdr.hash1" = "faf12e94c25bef7df62e4a5eb62573f5"
  "outputs.drs_usa_jdr.hash2" = "19e1b021628130fda04c79ee9a056b67"
  "outputs.drs_usa_jdr.size1" = 18.0
  "outputs.drs_usa_jdr.size2" = 38.0
  # This JDR file has a gsUri that doesn't end in /fileName so it must be downloaded with the DRS localizer
  "outputs.drs_usa_jdr.cloud1" =
    "/mnt/disks/cromwell_root/drs_localization_paths/CromwellSimpleWithFilerefs/hello_jade.json"
  # This JDR file has a gsUri that can skip localization
  "outputs.drs_usa_jdr.cloud2" =
    "gs://broad-jade-dev-data-bucket/e1941fb9-6537-4e1a-b70d-34352a3a7817/ad783b60-aeba-4055-8f7b-194880f37259/hello_jade_2.json"
  "outputs.drs_usa_jdr.map1.hello" = "jade"
  "outputs.drs_usa_jdr.map2.hello" = "jade"
  "outputs.drs_usa_jdr.map2.attempt" = "2"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# NB: To request this test by name, make it lowercase, eg sbt "centaur/it:testOnly * -- -n gcpbatch_fast_fail_noaddress"
# GCP Batch variant of fast_fail_noAddress: the failure message differs from
# the PAPI backends, hence a separate test with a Batch-specific expectation.
name: gcpbatch_fast_fail_noaddress
backends: [ GCPBATCH ]
backendsMode: any
testFormat: workflowfailure

files {
  workflow: fast_fail_noAddress/fast_fail_noAddress.wdl
}

metadata {
  workflowName: fast_fail_noAddress
  # NOTE(review): the ~~ prefix looks like Centaur's "contains" matcher rather
  # than an exact-equality check -- confirm against the Centaur test format docs.
  "failures.0.causedBy.0.message": ~~"Unable to complete Batch request due to a problem with the request (io.grpc.StatusRuntimeException: INVALID_ARGUMENT: no_external_ip_address field is invalid. both network and subnetwork have to be specified when no_external_ip_address is true)."
  status: Failed
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Test to confirm the output of the monitoring script will be written to a monitoring.log file
# (GCP Batch variant; expects the script URI under the gcpBatch metadata key).

name: gcpbatch_monitoring_log
testFormat: workflowsuccess
backends: [GCPBATCH]

files {
  workflow: monitoring_log/monitoring_log.wdl
  options: monitoring_log/monitoring_log.options
}

metadata {
  # <<UUID>> is substituted with the workflow id by the test harness.
  "calls.monitoring_log.get_stats.gcpBatch.monitoringScript": "gs://cloud-cromwell-dev/some/rounding_script_int.sh"
  "calls.monitoring_log.get_stats.monitoringLog": "gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/monitoring_log/<<UUID>>/call-get_stats/monitoring.log"
  "outputs.monitoring_log.get_stats.stats.0": "CPU: 1"
  "outputs.monitoring_log.get_stats.stats.1": "Total Memory: 2G"
  "outputs.monitoring_log.get_stats.stats.2": "Total Disk space: 9G"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# GCP Batch variant of the Google Cloud service account (GCSA) workflow-options
# test: verifies the job runs as the expected service account with the expected
# OAuth scopes on the GCPBATCH-gcsa backend.
name: gcpbatch_papi_v2beta_gcsa
testFormat: WorkflowSuccess
backends: [GCPBATCH-gcsa]

files {
  workflow: papi_v2_gcsa/gcpbatch_papi_v2_gcsa.wdl
  # The first options-dir value is an error sentinel; the HOCON optional
  # substitution below overrides it when the env var is set.
  options-dir: "Error: BA-6546 The environment variable CROMWELL_BUILD_RESOURCES_DIRECTORY must be set/export pointing to a valid path such as '${YOUR_CROMWELL_DIR}/target/ci/resources'"
  options-dir: ${?CROMWELL_BUILD_RESOURCES_DIRECTORY}
  options: ${files.options-dir}/papi_v2_gcsa.options.json
}

metadata {
  workflowName: papi_v2_gcsa
  status: Succeeded
  "outputs.papi_v2_gcsa.email": "[email protected]"
  "outputs.papi_v2_gcsa.scopes": "https://www.googleapis.com/auth/bigquery https://www.googleapis.com/auth/cloud-platform https://www.googleapis.com/auth/cloudkms https://www.googleapis.com/auth/compute https://www.googleapis.com/auth/devstorage.full_control https://www.googleapis.com/auth/monitoring.write https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile"
}
Original file line number Diff line number Diff line change
@@ -1,7 +1,11 @@
name: hello_google_legacy_machine_selection
testFormat: workflowsuccess
# Explicitly turned this off in my machine constraint wiring-in PR. Do we still care about this?
backends: [ Papiv2, GCPBATCH_FAIL ]

# Legacy PAPI v1 (aka JES) machine types not supported on GCP Batch.
# Task wf_hello.hello:NA:1 failed: Job failed when Batch tries to schedule it:
# Batch Error: code - CODE_MACHINE_TYPE_NOT_FOUND, description -
# machine type predefined-1-2048 for job job-xyz, project 8675309, region us-central1, zones (if any) us-central1-b is not available.
backends: [ Papiv2, GCPBATCH_LEGACY_MACHINE_TYPES_NOT_SUPPORTED ]

files {
workflow: wdl_draft3/hello/hello.wdl
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,7 @@

name: monitoring_log
testFormat: workflowsuccess
# see doc 2.14
backends: [Papiv2, GCPBATCH_FAIL]
backends: [Papiv2, GCPBATCH_ALT]

files {
workflow: monitoring_log/monitoring_log.wdl
Expand Down
Loading

0 comments on commit 1af653a

Please sign in to comment.