Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions infrastructure/account/.terraform.lock.hcl

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 4 additions & 0 deletions infrastructure/account/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@ terraform {
source = "hashicorp/aws"
version = "~> 6"
}
random = {
source = "hashicorp/random"
version = "~> 3"
}
}
backend "s3" {
region = "eu-west-2"
Expand Down
29 changes: 26 additions & 3 deletions infrastructure/account/redis_cache.tf
Original file line number Diff line number Diff line change
@@ -1,12 +1,35 @@
resource "aws_elasticache_cluster" "redis_cluster" {
cluster_id = "immunisation-redis-cluster"
# Randomly generated AUTH token for the Redis replication group.
# NOTE(review): override_special presumably restricts to characters
# ElastiCache accepts for AUTH tokens — confirm against AWS docs.
resource "random_password" "redis_auth_token" {
length = 32
special = true
override_special = "!&#$^<>-"
}

# Secrets Manager secret holding the Redis AUTH token so that consumers
# (Lambdas / ECS tasks in the instance stack) can fetch it at runtime.
resource "aws_secretsmanager_secret" "redis_auth_token" {
name = "imms/redis/auth-token"
description = "Auth token for the immunisation Redis cache"
}

# Store the generated token as the current secret value.
resource "aws_secretsmanager_secret_version" "redis_auth_token" {
secret_id = aws_secretsmanager_secret.redis_auth_token.id
secret_string = random_password.redis_auth_token.result
}

# Single-node Redis replication group with encryption in transit/at rest
# and AUTH-token authentication.
resource "aws_elasticache_replication_group" "redis_cluster" {
  replication_group_id = "immunisation-redis-cluster"
  description          = "Redis cache for immunisation configuration data"
  engine               = "redis"
  engine_version       = "7.0"
  node_type            = "cache.t2.micro"
  # num_cache_clusters (not num_cache_nodes, which belongs to the
  # aws_elasticache_cluster resource) controls node count here.
  num_cache_clusters   = 1
  parameter_group_name = "default.redis7"
  port                 = 6379
  security_group_ids   = [aws_security_group.lambda_redis_sg.id]
  subnet_group_name    = aws_elasticache_subnet_group.redis_subnet_group.name

  # transit encryption is required for auth_token to be accepted.
  at_rest_encryption_enabled = true
  transit_encryption_enabled = true
  auth_token                 = random_password.redis_auth_token.result
  # "SET" applies a changed token in place without recreating the group.
  auth_token_update_strategy = "SET"
}

# Subnet Group for Redis
Expand Down
68 changes: 34 additions & 34 deletions infrastructure/instance/ecs_batch_processor_config.tf
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,11 @@ resource "aws_iam_policy" "ecs_task_exec_policy" {
"firehose:PutRecordBatch"
],
"Resource" : "arn:aws:firehose:*:*:deliverystream/${module.splunk.firehose_stream_name}"
},
{
Effect = "Allow",
Action = "secretsmanager:GetSecretValue",
Resource = data.aws_secretsmanager_secret.redis_auth_token.arn
}
]
})
Expand Down Expand Up @@ -147,40 +152,35 @@ resource "aws_ecs_task_definition" "ecs_task" {
name = "${local.short_prefix}-process-records-container"
image = var.recordprocessor_image_uri
essential = true
environment = [
{
name = "SOURCE_BUCKET_NAME"
value = aws_s3_bucket.batch_data_source_bucket.bucket
},
{
name = "ACK_BUCKET_NAME"
value = aws_s3_bucket.batch_data_destination_bucket.bucket
},
{
name = "KINESIS_STREAM_ARN"
value = local.kinesis_arn
},
{
name = "KINESIS_STREAM_NAME"
value = "${local.short_prefix}-processingdata-stream"
},
{
name = "SPLUNK_FIREHOSE_NAME"
value = module.splunk.firehose_stream_name
},
{
name = "AUDIT_TABLE_NAME"
value = aws_dynamodb_table.audit-table.name
},
{
name = "REDIS_HOST"
value = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address
},
{
name = "REDIS_PORT"
value = tostring(data.aws_elasticache_cluster.existing_redis.cache_nodes[0].port)
}
]
environment = concat(
[
{
name = "SOURCE_BUCKET_NAME"
value = aws_s3_bucket.batch_data_source_bucket.bucket
},
{
name = "ACK_BUCKET_NAME"
value = aws_s3_bucket.batch_data_destination_bucket.bucket
},
{
name = "KINESIS_STREAM_ARN"
value = local.kinesis_arn
},
{
name = "KINESIS_STREAM_NAME"
value = "${local.short_prefix}-processingdata-stream"
},
{
name = "SPLUNK_FIREHOSE_NAME"
value = module.splunk.firehose_stream_name
},
{
name = "AUDIT_TABLE_NAME"
value = aws_dynamodb_table.audit-table.name
}
],
local.redis_environment
)
logConfiguration = {
logDriver = "awslogs"
options = {
Expand Down
23 changes: 12 additions & 11 deletions infrastructure/instance/endpoints.tf
Original file line number Diff line number Diff line change
Expand Up @@ -23,17 +23,18 @@ locals {
"get_imms", "create_imms", "update_imms", "search_imms", "delete_imms", "not_found"
]
imms_table_name = aws_dynamodb_table.events-dynamodb-table.name
imms_lambda_env_vars = {
"DYNAMODB_TABLE_NAME" = local.imms_table_name,
"IMMUNIZATION_ENV" = local.resource_scope,
"IMMUNIZATION_BASE_PATH" = strcontains(var.sub_environment, "pr-") ? "immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4"
# except for prod and ref, any other env uses PDS int environment
"PDS_ENV" = var.pds_environment
"SPLUNK_FIREHOSE_NAME" = module.splunk.firehose_stream_name
"SQS_QUEUE_URL" = "https://sqs.${var.aws_region}.amazonaws.com/${var.immunisation_account_id}/${local.short_prefix}-ack-metadata-queue.fifo"
"REDIS_HOST" = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address
"REDIS_PORT" = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].port
}
imms_lambda_env_vars = merge(
{
"DYNAMODB_TABLE_NAME" = local.imms_table_name,
"IMMUNIZATION_ENV" = local.resource_scope,
"IMMUNIZATION_BASE_PATH" = strcontains(var.sub_environment, "pr-") ? "immunisation-fhir-api/FHIR/R4-${var.sub_environment}" : "immunisation-fhir-api/FHIR/R4"
# except for prod and ref, any other env uses PDS int environment
"PDS_ENV" = var.pds_environment
"SPLUNK_FIREHOSE_NAME" = module.splunk.firehose_stream_name
"SQS_QUEUE_URL" = "https://sqs.${var.aws_region}.amazonaws.com/${var.immunisation_account_id}/${local.short_prefix}-ack-metadata-queue.fifo"
},
local.redis_env_vars
)
}
data "aws_iam_policy_document" "imms_policy_document" {
source_policy_documents = [
Expand Down
32 changes: 19 additions & 13 deletions infrastructure/instance/file_name_processor.tf
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,11 @@ resource "aws_iam_policy" "filenameprocessor_lambda_exec_policy" {
],
"Resource" : "arn:aws:firehose:*:*:deliverystream/${module.splunk.firehose_stream_name}"
},
{
Effect = "Allow",
Action = "secretsmanager:GetSecretValue",
Resource = data.aws_secretsmanager_secret.redis_auth_token.arn
},
{
"Effect" : "Allow",
"Action" : [
Expand Down Expand Up @@ -240,19 +245,20 @@ resource "aws_lambda_function" "file_processor_lambda" {
}

environment {
variables = {
ACCOUNT_ID = var.immunisation_account_id
DPS_ACCOUNT_ID = var.dspp_core_account_id
SOURCE_BUCKET_NAME = aws_s3_bucket.batch_data_source_bucket.bucket
ACK_BUCKET_NAME = aws_s3_bucket.batch_data_destination_bucket.bucket
DPS_BUCKET_NAME = var.dspp_submission_s3_bucket_name
QUEUE_URL = aws_sqs_queue.batch_file_created.url
REDIS_HOST = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address
REDIS_PORT = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].port
SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name
AUDIT_TABLE_NAME = aws_dynamodb_table.audit-table.name
AUDIT_TABLE_TTL_DAYS = 60
}
variables = merge(
{
ACCOUNT_ID = var.immunisation_account_id
DPS_ACCOUNT_ID = var.dspp_core_account_id
SOURCE_BUCKET_NAME = aws_s3_bucket.batch_data_source_bucket.bucket
ACK_BUCKET_NAME = aws_s3_bucket.batch_data_destination_bucket.bucket
DPS_BUCKET_NAME = var.dspp_submission_s3_bucket_name
QUEUE_URL = aws_sqs_queue.batch_file_created.url
SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name
AUDIT_TABLE_NAME = aws_dynamodb_table.audit-table.name
AUDIT_TABLE_TTL_DAYS = 60
},
local.redis_env_vars
)
}
kms_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn
reserved_concurrent_executions = local.is_temp ? -1 : 20
Expand Down
22 changes: 14 additions & 8 deletions infrastructure/instance/forwarder_lambda.tf
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,11 @@ resource "aws_iam_policy" "forwarding_lambda_exec_policy" {
]
Resource = aws_sqs_queue.fifo_queue.arn
},
{
Effect = "Allow"
Action = "secretsmanager:GetSecretValue"
Resource = data.aws_secretsmanager_secret.redis_auth_token.arn
},
{
Effect = "Allow",
Action = [
Expand Down Expand Up @@ -146,14 +151,15 @@ resource "aws_lambda_function" "forwarding_lambda" {
}

environment {
variables = {
SOURCE_BUCKET_NAME = aws_s3_bucket.batch_data_source_bucket.bucket
ACK_BUCKET_NAME = aws_s3_bucket.batch_data_destination_bucket.bucket
DYNAMODB_TABLE_NAME = aws_dynamodb_table.events-dynamodb-table.name
SQS_QUEUE_URL = aws_sqs_queue.fifo_queue.url
REDIS_HOST = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address
REDIS_PORT = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].port
}
variables = merge(
{
SOURCE_BUCKET_NAME = aws_s3_bucket.batch_data_source_bucket.bucket
ACK_BUCKET_NAME = aws_s3_bucket.batch_data_destination_bucket.bucket
DYNAMODB_TABLE_NAME = aws_dynamodb_table.events-dynamodb-table.name
SQS_QUEUE_URL = aws_sqs_queue.fifo_queue.url
},
local.redis_env_vars
)
}
kms_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn
depends_on = [
Expand Down
24 changes: 22 additions & 2 deletions infrastructure/instance/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -81,8 +81,28 @@ data "aws_kms_key" "existing_dynamo_encryption_key" {
key_id = "alias/imms-event-dynamodb-encryption"
}

data "aws_elasticache_cluster" "existing_redis" {
cluster_id = "immunisation-redis-cluster"
# Look up the Redis replication group managed in the account-level stack.
data "aws_elasticache_replication_group" "existing_redis" {
replication_group_id = "immunisation-redis-cluster"
}

# Secret (also managed in the account-level stack) holding the Redis AUTH token.
data "aws_secretsmanager_secret" "redis_auth_token" {
name = "imms/redis/auth-token"
}

locals {
# Map form of the Redis connection settings, merged into Lambda
# environment variables. Clients fetch the AUTH token themselves from
# Secrets Manager using REDIS_AUTH_TOKEN_SECRET_NAME.
redis_env_vars = {
REDIS_HOST = data.aws_elasticache_replication_group.existing_redis.primary_endpoint_address
REDIS_PORT = tostring(data.aws_elasticache_replication_group.existing_redis.port)
REDIS_SSL = "true"
REDIS_AUTH_TOKEN_SECRET_NAME = data.aws_secretsmanager_secret.redis_auth_token.name
}

# Same settings reshaped into the [{name, value}] list form that ECS
# container definitions require.
redis_environment = [
for name, value in local.redis_env_vars : {
name = name
value = value
}
]
}

data "aws_security_group" "existing_securitygroup" {
Expand Down
3 changes: 2 additions & 1 deletion infrastructure/instance/policies/secret_manager.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
"Action": "secretsmanager:GetSecretValue",
"Resource": [
"arn:aws:secretsmanager:eu-west-2:${account_id}:secret:imms/outbound/*/*",
"arn:aws:secretsmanager:eu-west-2:${account_id}:secret:imms/pds/*/*"
"arn:aws:secretsmanager:eu-west-2:${account_id}:secret:imms/pds/*/*",
"arn:aws:secretsmanager:eu-west-2:${account_id}:secret:imms/redis/auth-token-*"
]
}
]
Expand Down
18 changes: 12 additions & 6 deletions infrastructure/instance/redis_sync_lambda.tf
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,11 @@ resource "aws_iam_policy" "redis_sync_lambda_exec_policy" {
],
Resource : "arn:aws:firehose:*:*:deliverystream/${module.splunk.firehose_stream_name}"
},
{
Effect = "Allow",
Action = "secretsmanager:GetSecretValue",
Resource = data.aws_secretsmanager_secret.redis_auth_token.arn
},
{
Effect = "Allow"
Action = "lambda:InvokeFunction"
Expand Down Expand Up @@ -155,12 +160,13 @@ resource "aws_lambda_function" "redis_sync_lambda" {
}

environment {
variables = {
CONFIG_BUCKET_NAME = local.config_bucket_name
REDIS_HOST = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address
REDIS_PORT = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].port
SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name
}
variables = merge(
{
CONFIG_BUCKET_NAME = local.config_bucket_name
SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name
},
local.redis_env_vars
)
}
kms_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn

Expand Down
27 changes: 24 additions & 3 deletions lambdas/shared/src/common/redis_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,38 @@

import redis

from common.clients import logger
from common.clients import get_secrets_manager_client, logger

# Redis connection settings, supplied via the Lambda/ECS environment.
REDIS_HOST = os.getenv("REDIS_HOST", "")
# Cast to int so the type is consistent whether or not the env var is set
# (os.getenv returns a str when the variable is present).
REDIS_PORT = int(os.getenv("REDIS_PORT", "6379"))
REDIS_SSL = os.getenv("REDIS_SSL", "false").lower() == "true"
REDIS_AUTH_TOKEN_SECRET_NAME = os.getenv("REDIS_AUTH_TOKEN_SECRET_NAME", "")

# Module-level caches so a warm Lambda container reuses the connection
# and the fetched auth token across invocations.
redis_client = None
redis_auth_token = None


def get_redis_auth_token():
    """Return the Redis AUTH token, fetching it from Secrets Manager once.

    Returns None when no secret name is configured (e.g. an environment
    without Redis authentication). The token is cached at module level so
    repeated calls do not re-hit Secrets Manager.
    """
    global redis_auth_token

    if not REDIS_AUTH_TOKEN_SECRET_NAME:
        return None

    if redis_auth_token is None:
        secret = get_secrets_manager_client().get_secret_value(
            SecretId=REDIS_AUTH_TOKEN_SECRET_NAME
        )
        redis_auth_token = secret["SecretString"]

    return redis_auth_token


def get_redis_client():
    """Return a lazily created, module-cached Redis client.

    The connection is built from the module-level environment settings:
    TLS is enabled when REDIS_SSL is true, and the AUTH token (if a
    secret name is configured) is fetched via get_redis_auth_token().
    """
    global redis_client
    if redis_client is None:
        logger.info(f"Connecting to Redis at {REDIS_HOST}:{REDIS_PORT} with ssl={REDIS_SSL}")
        redis_client = redis.StrictRedis(
            host=REDIS_HOST,
            # int() is a no-op for an int and guards against the env var
            # having been read as a string.
            port=int(REDIS_PORT),
            # None when no auth token secret is configured.
            password=get_redis_auth_token(),
            ssl=REDIS_SSL,
            decode_responses=True,
        )
    return redis_client
Loading
Loading