generalize on-demand job queue name
solomon-negusse committed Sep 24, 2024
1 parent 451eda1 commit 10d357e
Showing 12 changed files with 23 additions and 21 deletions.
6 changes: 3 additions & 3 deletions app/models/pydantic/jobs.py
@@ -4,7 +4,7 @@
 
 from ...settings.globals import (
     AURORA_JOB_QUEUE,
-    COGIFY_JOB_QUEUE,
+    ON_DEMAND_COMPUTE_JOB_QUEUE,
     DATA_LAKE_JOB_QUEUE,
     DEFAULT_JOB_DURATION,
     GDAL_PYTHON_JOB_DEFINITION,
@@ -139,9 +139,9 @@ class PixETLJob(Job):
 
 
 class GDALCOGJob(Job):
-    """Use for creating COG files using GDAL Python docker in COG queue."""
+    """Use for creating COG files using GDAL Python docker in on-demand compute queue."""
 
-    job_queue = COGIFY_JOB_QUEUE
+    job_queue = ON_DEMAND_COMPUTE_JOB_QUEUE
     job_definition = GDAL_PYTHON_JOB_DEFINITION
     vcpus = 8
     memory = 64000
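
For orientation, here is a minimal sketch (not part of this commit) of how the renamed queue would be used when submitting the COG job to AWS Batch. It assumes the settings names shown in this diff and boto3's standard submit_job call; the job name and command are purely illustrative.

    import boto3

    from app.settings.globals import (
        GDAL_PYTHON_JOB_DEFINITION,
        ON_DEMAND_COMPUTE_JOB_QUEUE,
    )

    # Submit a COG-creation job to the generalized on-demand compute queue.
    batch = boto3.client("batch")
    response = batch.submit_job(
        jobName="create-cog-example",              # illustrative name
        jobQueue=ON_DEMAND_COMPUTE_JOB_QUEUE,      # was COGIFY_JOB_QUEUE before this commit
        jobDefinition=GDAL_PYTHON_JOB_DEFINITION,
        containerOverrides={"command": ["echo", "hello"]},  # placeholder command
    )
    print(response["jobId"])
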
2 changes: 1 addition & 1 deletion app/settings/globals.py
@@ -116,7 +116,7 @@
 MAX_MEM = config("MAX_MEM", cast=int, default=760000)
 PIXETL_JOB_DEFINITION = config("PIXETL_JOB_DEFINITION", cast=str)
 PIXETL_JOB_QUEUE = config("PIXETL_JOB_QUEUE", cast=str)
-COGIFY_JOB_QUEUE = config("COGIFY_JOB_QUEUE", cast=str)
+ON_DEMAND_COMPUTE_JOB_QUEUE = config("ON_DEMAND_COMPUTE_JOB_QUEUE", cast=str)
 PIXETL_CORES = config("PIXETL_CORES", cast=int, default=48)
 PIXETL_MAX_MEM = config("PIXETL_MAX_MEM", cast=int, default=380000)
 PIXETL_DEFAULT_RESAMPLING = config(
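
Note that the setting is declared without a default, so environments that still export only the old COGIFY_JOB_QUEUE variable will fail when the settings module is imported. A minimal sketch of that contract, assuming a python-decouple-style config helper (the exact helper behind config() in globals.py may differ):

    from decouple import config  # assumption: stands in for the helper used in globals.py

    # No default is supplied, so this raises (UndefinedValueError in python-decouple)
    # unless ON_DEMAND_COMPUTE_JOB_QUEUE is set in the environment or an .env file.
    ON_DEMAND_COMPUTE_JOB_QUEUE = config("ON_DEMAND_COMPUTE_JOB_QUEUE", cast=str)

That is why the compose files and the Terraform container definition below are updated in the same commit.
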
2 changes: 1 addition & 1 deletion docker-compose.dev.yml
@@ -39,7 +39,7 @@ services:
       - TILE_CACHE_CLUSTER=tile_cache_cluster
       - TILE_CACHE_SERVICE=tile_cache_service
       - PIXETL_JOB_QUEUE=pixetl_jq
-      - COGIFY_JOB_QUEUE=cogify_jq
+      - ON_DEMAND_COMPUTE_JOB_QUEUE=cogify_jq
       - API_URL=http://app_dev:80
       - RASTER_ANALYSIS_LAMBDA_NAME=raster-analysis-tiled_raster_analysis-default
       - RW_API_URL=https://staging-api.resourcewatch.org
2 changes: 1 addition & 1 deletion docker-compose.prod.yml
@@ -34,7 +34,7 @@ services:
       - DATA_LAKE_JOB_QUEUE=data_lake_jq
       - TILE_CACHE_JOB_QUEUE=tile_cache_jq
       - PIXETL_JOB_QUEUE=pixetl_jq
-      - COGIFY_JOB_QUEUE=cogify_jq
+      - ON_DEMAND_COMPUTE_JOB_QUEUE=cogify_jq
       - RASTER_ANALYSIS_LAMBDA_NAME=raster_analysis
       - API_URL="http://app_dev:80"
       - RW_API_URL=https://api.resourcewatch.org
2 changes: 1 addition & 1 deletion docker-compose.test.yml
@@ -49,7 +49,7 @@ services:
       - TILE_CACHE_CLUSTER=tile_cache_cluster
       - TILE_CACHE_SERVICE=tile_cache_service
       - PIXETL_JOB_QUEUE=pixetl_jq
-      - COGIFY_JOB_QUEUE=cogify_jq
+      - ON_DEMAND_COMPUTE_JOB_QUEUE=cogify_jq
       - PIXETL_CORES=1
       - MAX_CORES=1
       - NUM_PROCESSES=1
2 changes: 1 addition & 1 deletion terraform/data.tf
@@ -68,7 +68,7 @@ data "template_file" "container_definition" {
     tile_cache_job_queue = module.batch_job_queues.tile_cache_job_queue_arn
     pixetl_job_definition = module.batch_job_queues.pixetl_job_definition_arn
     pixetl_job_queue = module.batch_job_queues.pixetl_job_queue_arn
-    cogify_job_queue = module.batch_job_queues.cogify_job_queue_arn
+    on_demand_compute_job_queue = module.batch_job_queues.on_demand_compute_job_queue_arn
     raster_analysis_lambda_name = "raster-analysis-tiled_raster_analysis-default"
     raster_analysis_sfn_arn = data.terraform_remote_state.raster_analysis_lambda.outputs.raster_analysis_state_machine_arn
     service_url = local.service_url
6 changes: 3 additions & 3 deletions terraform/main.tf
@@ -179,7 +179,7 @@ module "batch_data_lake_writer" {
   compute_environment_name = "data_lake_writer"
 }
 
-module "batch_cog_creator" {
+module "batch_cogify" {
   source = "git::https://github.com/wri/gfw-terraform-modules.git//terraform/modules/compute_environment?ref=v0.4.2.3"
   ecs_role_policy_arns = [
     aws_iam_policy.query_batch_jobs.arn,
@@ -202,7 +202,7 @@ module "batch_cog_creator" {
   use_ephemeral_storage = true
   launch_type = "EC2"
   instance_types = var.data_lake_writer_instance_types
-  compute_environment_name = "batch_cog_creator"
+  compute_environment_name = "batch_cogify"
 }
 
 module "batch_job_queues" {
@@ -211,7 +211,7 @@ module "batch_job_queues" {
   data_lake_compute_environment_arn = module.batch_data_lake_writer.arn
   pixetl_compute_environment_arn = module.batch_data_lake_writer.arn
   tile_cache_compute_environment_arn = module.batch_data_lake_writer.arn
-  cog_compute_environment_arn = module.batch_cog_creator.arn
+  cogify_compute_environment_arn = module.batch_cogify.arn
   environment = var.environment
   name_suffix = local.name_suffix
   project = local.project
6 changes: 3 additions & 3 deletions terraform/modules/batch/main.tf
@@ -52,12 +52,12 @@ resource "aws_batch_job_queue" "pixetl" {
   depends_on = [var.pixetl_compute_environment_arn]
 }
 
-resource "aws_batch_job_queue" "cog" {
+resource "aws_batch_job_queue" "on_demand" {
   name = substr("${var.project}-cog-job-queue${var.name_suffix}", 0, 64)
   state = "ENABLED"
   priority = 1
-  compute_environments = [var.cog_compute_environment_arn]
-  depends_on = [var.pixetl_compute_environment_arn]
+  compute_environments = [var.cogify_compute_environment_arn]
+  depends_on = [var.cogify_compute_environment_arn]
 }
 
 resource "aws_batch_job_definition" "tile_cache" {
4 changes: 2 additions & 2 deletions terraform/modules/batch/outputs.tf
@@ -38,8 +38,8 @@ output "pixetl_job_queue_arn" {
   value = aws_batch_job_queue.pixetl.arn
 }
 
-output "cogify_job_queue_arn" {
-  value = aws_batch_job_queue.cog.arn
+output "on_demand_compute_job_queue_arn" {
+  value = aws_batch_job_queue.on_demand.arn
 }
 
 output "tile_cache_job_definition_arn" {
2 changes: 1 addition & 1 deletion terraform/modules/batch/variables.tf
@@ -2,7 +2,7 @@ variable "project" { type = string }
 variable "name_suffix" { type = string }
 variable "aurora_compute_environment_arn" { type = string }
 variable "data_lake_compute_environment_arn" { type = string }
-variable "cog_compute_environment_arn" { type = string }
+variable "cogify_compute_environment_arn" { type = string }
 variable "tile_cache_compute_environment_arn" { type = string }
 variable "pixetl_compute_environment_arn" { type = string }
 variable "gdal_repository_url" { type = string }
4 changes: 2 additions & 2 deletions terraform/templates/container_definition.json.tmpl
@@ -74,8 +74,8 @@
       "value": "${pixetl_job_queue}"
     },
     {
-      "name": "COGIFY_JOB_QUEUE",
-      "value": "${cogify_job_queue}"
+      "name": "ON_DEMAND_COMPUTE_JOB_QUEUE",
+      "value": "${on_demand_compute_job_queue}"
     },
     {
       "name": "API_URL",
6 changes: 4 additions & 2 deletions tests/conftest.py
@@ -33,7 +33,7 @@
     GDAL_PYTHON_JOB_DEFINITION,
     PIXETL_JOB_DEFINITION,
     PIXETL_JOB_QUEUE,
-    COGIFY_JOB_QUEUE,
+    ON_DEMAND_COMPUTE_JOB_QUEUE,
     POSTGRESQL_CLIENT_JOB_DEFINITION,
     TILE_CACHE_BUCKET,
     TILE_CACHE_JOB_DEFINITION,
@@ -177,7 +177,9 @@ def patch_run(self, *k, **kwargs):
     aws_mock.add_job_queue(DATA_LAKE_JOB_QUEUE, s3_writer_env["computeEnvironmentArn"])
     aws_mock.add_job_queue(TILE_CACHE_JOB_QUEUE, s3_writer_env["computeEnvironmentArn"])
     aws_mock.add_job_queue(PIXETL_JOB_QUEUE, pixetl_env["computeEnvironmentArn"])
-    aws_mock.add_job_queue(COGIFY_JOB_QUEUE, cogify_env["computeEnvironmentArn"])
+    aws_mock.add_job_queue(
+        ON_DEMAND_COMPUTE_JOB_QUEUE, cogify_env["computeEnvironmentArn"]
+    )
 
     aws_mock.add_job_definition(GDAL_PYTHON_JOB_DEFINITION, "batch_gdal-python_test")
     aws_mock.add_job_definition(
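
A hedged sketch of a test that would exercise the queue registered above; it assumes the mocked Batch backend set up in this fixture is active and uses boto3's standard describe_job_queues call. The test name, client creation, and region are illustrative.

    import boto3

    from app.settings.globals import ON_DEMAND_COMPUTE_JOB_QUEUE


    def test_on_demand_compute_queue_is_registered():
        # Query the (mocked) Batch API and check that the renamed queue exists.
        client = boto3.client("batch", region_name="us-east-1")  # region assumed
        queue_names = {
            q["jobQueueName"] for q in client.describe_job_queues()["jobQueues"]
        }
        assert ON_DEMAND_COMPUTE_JOB_QUEUE in queue_names
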
