diff --git a/docs/infrastructure/create-environment.md b/docs/infrastructure/create-environment.md index d12dd4ae..e62c336a 100644 --- a/docs/infrastructure/create-environment.md +++ b/docs/infrastructure/create-environment.md @@ -133,3 +133,16 @@ Add the infrastructure secrets to the _inf_ key vault `kv-lungcs-[environment]-i - assign yourself "Key Vault Secrets User" to application key vault to run the terraform code from the CLI inside the AVD when first trying to deploy the application. - assign yourself "Data Blob Reader" to State file storage account to run the terraform code from the CLI inside the AVD when first trying to deploy the application. + +## Connect to Postgres Database + +- Add your user as a member to the respective Entra ID group: + - `postgres_lungcs_[environment]_uks_admin` +- Log into the correct AVD for your environment type (either nonlive or live) +- Run the following commands on the CLI to log into the database: + - `export PGPASSWORD="$(az account get-access-token --resource https://ossrdbms-aad.database.windows.net --query accessToken --output tsv)"` + - `psql "host=postgres-lungcs-[environment]-uks.postgres.database.azure.com \ + port=5432 \ + dbname=[database] \ + user=postgres_lungcs_[environment]_uks_admin \ + sslmode=require"` diff --git a/infrastructure/bootstrap/hub.bicep b/infrastructure/bootstrap/hub.bicep index b17eaa81..6c18d65d 100644 --- a/infrastructure/bootstrap/hub.bicep +++ b/infrastructure/bootstrap/hub.bicep @@ -18,6 +18,7 @@ targetScope = 'subscription' +// param devopsInfrastructureId string param devopsSubnetAddressPrefix string param privateEndpointSubnetAddressPrefix string param hubType string // live / nonlive diff --git a/infrastructure/modules/container-apps/alerts.tf b/infrastructure/modules/container-apps/alerts.tf new file mode 100644 index 00000000..855311f5 --- /dev/null +++ b/infrastructure/modules/container-apps/alerts.tf @@ -0,0 +1,35 @@ +resource "azurerm_monitor_scheduled_query_rules_alert_v2"
"five_hundred_error_alert" { + count = var.enable_alerting ? 1 : 0 + + auto_mitigation_enabled = false + description = "An alert triggered by 500 errors logged in code" + enabled = var.enable_alerting + evaluation_frequency = "PT5M" + location = var.region + name = "${var.app_short_name}-500-error-alert" + resource_group_name = azurerm_resource_group.main.name + scopes = [var.action_group_id] + severity = 2 + skip_query_validation = false + window_duration = "PT5M" + workspace_alerts_storage_enabled = false + + action { + action_groups = [var.action_group_id] + } + + criteria { + operator = "GreaterThan" + query = <<-QUERY + ContainerAppConsoleLogs_CL + | where Log contains "[ERROR]" + QUERY + threshold = 0 + time_aggregation_method = "Count" + + failing_periods { + minimum_failing_periods_to_trigger_alert = 1 + number_of_evaluation_periods = 1 + } + } +} diff --git a/infrastructure/modules/container-apps/jobs.tf b/infrastructure/modules/container-apps/jobs.tf index 8770825d..70fe58bb 100644 --- a/infrastructure/modules/container-apps/jobs.tf +++ b/infrastructure/modules/container-apps/jobs.tf @@ -1,3 +1,16 @@ +locals { + scheduled_jobs = { + collect_metrics = { + cron_expression = "*/5 * * * *" + environment_variables = { + ENVIRONMENT = var.environment + } + job_short_name = "clm" + job_container_args = "collect_metrics" + } + } +} + module "db_setup" { source = "../dtos-devops-templates/infrastructure/modules/container-app-job" @@ -25,3 +38,54 @@ module "db_setup" { ] } + +module "scheduled_jobs" { + source = "../dtos-devops-templates/infrastructure/modules/container-app-job" + + for_each = local.scheduled_jobs + + name = "${var.app_short_name}-${each.value.job_short_name}-${var.environment}" + container_app_environment_id = var.container_app_environment_id + resource_group_name = azurerm_resource_group.main.name + + fetch_secrets_from_app_key_vault = var.fetch_secrets_from_app_key_vault + app_key_vault_id = var.app_key_vault_id + + container_command = 
["/bin/sh", "-c"] + container_args = [ + "python manage.py ${each.value.job_container_args}" + ] + + docker_image = var.docker_image + replica_retry_limit = 0 + user_assigned_identity_ids = flatten([ + [module.azure_blob_storage_identity.id], + var.deploy_database_as_container ? [] : [module.db_connect_identity[0].id] + ]) + + environment_variables = merge( + local.common_env, + { + "STORAGE_ACCOUNT_NAME" = module.storage.storage_account_name, + "BLOB_MI_CLIENT_ID" = module.azure_blob_storage_identity.client_id, + }, + each.value.environment_variables, + var.deploy_database_as_container ? local.container_db_env : local.azure_db_env + ) + secret_variables = merge( + { APPLICATIONINSIGHTS_CONNECTION_STRING = var.app_insights_connection_string }, + var.deploy_database_as_container ? { DATABASE_PASSWORD = resource.random_password.admin_password[0].result } : {} + ) + + # alerts + action_group_id = var.action_group_id + enable_alerting = var.enable_alerting + log_analytics_workspace_id = var.log_analytics_workspace_audit_id + + # Ensure RBAC role assignments are created before the job definition finalizes + depends_on = [ + module.blob_storage_role_assignment, + ] + + cron_expression = each.value.cron_expression +} diff --git a/infrastructure/modules/container-apps/main.tf b/infrastructure/modules/container-apps/main.tf index f5d947f8..d775e27f 100644 --- a/infrastructure/modules/container-apps/main.tf +++ b/infrastructure/modules/container-apps/main.tf @@ -31,17 +31,20 @@ module "webapp" { environment_variables = merge( local.common_env, { - ALLOWED_HOSTS = "${local.hostname},${var.app_short_name}-web-${var.environment}.${var.default_domain},localhost", + ALLOWED_HOSTS = "${local.hostname},${var.app_short_name}-web-${var.environment}.${var.default_domain},localhost,*", CSRF_TRUSTED_ORIGINS = "https://${local.hostname}" }, var.deploy_database_as_container ? local.container_db_env : local.azure_db_env ) - secret_variables = var.deploy_database_as_container ? 
{ DATABASE_PASSWORD = resource.random_password.admin_password[0].result } : {} - is_web_app = true - port = 8000 - probe_path = "/healthcheck" - min_replicas = var.min_replicas - memory = var.container_memory + secret_variables = merge( + { APPLICATIONINSIGHTS_CONNECTION_STRING = var.app_insights_connection_string }, + var.deploy_database_as_container ? { DATABASE_PASSWORD = resource.random_password.admin_password[0].result } : {} + ) + is_web_app = true + port = 8000 + probe_path = "/healthcheck" + min_replicas = var.min_replicas + memory = var.container_memory } module "azurerm_application_insights_standard_web_test" { diff --git a/infrastructure/modules/container-apps/variables.tf b/infrastructure/modules/container-apps/variables.tf index 5c752459..3d7363e3 100644 --- a/infrastructure/modules/container-apps/variables.tf +++ b/infrastructure/modules/container-apps/variables.tf @@ -144,7 +144,6 @@ variable "app_insights_id" { type = string } - variable "region" { description = "The region to deploy in" type = string @@ -196,6 +195,11 @@ variable "infra_key_vault_rg" { type = string } +variable "app_insights_connection_string" { + description = "The Application Insights connection string." 
+ type = string +} + locals { resource_group_name = "rg-${var.app_short_name}-${var.environment}-container-app-uks" diff --git a/infrastructure/modules/infra/output.tf b/infrastructure/modules/infra/output.tf index 922cf5c1..9aca2dfc 100644 --- a/infrastructure/modules/infra/output.tf +++ b/infrastructure/modules/infra/output.tf @@ -33,3 +33,7 @@ output "postgres_subnet_id" { output "main_subnet_id" { value = module.main_subnet.id } + +output "app_insights_connection_string" { + value = module.app_insights_audit.connection_string +} diff --git a/infrastructure/terraform/spoke/main.tf b/infrastructure/terraform/spoke/main.tf index d3626dd8..8bccee1f 100644 --- a/infrastructure/terraform/spoke/main.tf +++ b/infrastructure/terraform/spoke/main.tf @@ -40,6 +40,7 @@ module "container-apps" { enable_alerting = var.enable_alerting app_key_vault_id = var.deploy_infra ? module.infra[0].app_key_vault_id : data.azurerm_key_vault.app_key_vault[0].id app_short_name = var.app_short_name + app_insights_connection_string = var.deploy_infra ? module.infra[0].app_insights_connection_string : data.azurerm_application_insights.app_insights[0].connection_string app_insights_id = var.deploy_infra ? module.infra[0].app_insights_id : data.azurerm_application_insights.app_insights[0].id container_app_environment_id = var.deploy_infra ? module.infra[0].container_app_environment_id : data.azurerm_container_app_environment.this[0].id default_domain = var.deploy_infra ? 
module.infra[0].default_domain : data.azurerm_container_app_environment.this[0].default_domain diff --git a/lung_cancer_screening/questions/management/__init__.py b/lung_cancer_screening/questions/management/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lung_cancer_screening/questions/management/commands/__init__.py b/lung_cancer_screening/questions/management/commands/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lung_cancer_screening/questions/management/commands/collect_metrics.py b/lung_cancer_screening/questions/management/commands/collect_metrics.py new file mode 100644 index 00000000..f32e5766 --- /dev/null +++ b/lung_cancer_screening/questions/management/commands/collect_metrics.py @@ -0,0 +1,19 @@ +import logging + +from django.core.management.base import BaseCommand, CommandError + +from lung_cancer_screening.questions.services.metricsCollector import ModelMetricsCollector + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + help = "Collects current model metrics and exports them via OpenTelemetry." 
+ + def handle(self, *args, **options): + logger.info("Command: collect_metrics.") + try: + ModelMetricsCollector().collect() + except Exception as e: + logger.error(e, exc_info=True) + raise CommandError(e) diff --git a/lung_cancer_screening/questions/models/base.py b/lung_cancer_screening/questions/models/base.py index 478dce63..5a8de010 100644 --- a/lung_cancer_screening/questions/models/base.py +++ b/lung_cancer_screening/questions/models/base.py @@ -1,5 +1,8 @@ from django.db import models +from lung_cancer_screening.questions.services.metrics import Metrics +import logging +logger = logging.getLogger(__name__) class BaseQuerySet(models.QuerySet): def get_or_build(self, **kwargs): @@ -28,6 +31,29 @@ class Meta: objects = BaseQuerySet.as_manager() + @property + def model_name(self) -> str: + return self._meta.label_lower + def save(self, *args, **kwargs): + is_create = self.pk is None + + old_status = None + if not is_create and hasattr(self, "status"): + old_status = ( + self.__class__.objects.filter(pk=self.pk) + .values_list("status", flat=True) + .first() + ) + + self.full_clean() # Validate before saving super().save(*args, **kwargs) + + metrics = Metrics() + + if is_create: + metrics.record_request_created(self.model_name) + + if hasattr(self, "status") and self.status == "submitted" and old_status != "submitted": + metrics.record_request_submitted(self.model_name) diff --git a/lung_cancer_screening/questions/services/__init__.py b/lung_cancer_screening/questions/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lung_cancer_screening/questions/services/metrics.py b/lung_cancer_screening/questions/services/metrics.py new file mode 100644 index 00000000..0cef4f2f --- /dev/null +++ b/lung_cancer_screening/questions/services/metrics.py @@ -0,0 +1,123 @@ +import logging +import os +from threading import Lock +from typing import Iterable + +from azure.monitor.opentelemetry.exporter import AzureMonitorMetricExporter +from 
opentelemetry import metrics +from opentelemetry.metrics import Observation, CallbackOptions +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader + +logger = logging.getLogger(__name__) + + +class Metrics: + _instance = None + _lock = Lock() + _initialised = False + + def __new__(cls, *args, **kwargs): + logger.info("Creating a new instance of Metrics class.") + if cls._instance is None: + with cls._lock: + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__(self): + if self.__class__._initialised: + return + + logger.info("Going into Metrics class.") + + connection_string = os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING") + environment = os.getenv("ENVIRONMENT", "unknown") + + if not connection_string: + logger.warning( + "APPLICATIONINSIGHTS_CONNECTION_STRING not set; metrics will be no-op." + ) + self.meter = metrics.get_meter("lungcs.models") + else: + exporter = AzureMonitorMetricExporter( + connection_string=connection_string + ) + provider = MeterProvider( + metric_readers=[PeriodicExportingMetricReader(exporter)] + ) + metrics.set_meter_provider(provider) + self.meter = metrics.get_meter("lungcs.models") + + self.environment = environment + + # store latest gauge values here + self._gauge_values = {} + self._gauge_lock = Lock() + self._registered_observable_gauges = set() + + self.requests_created = self.meter.create_counter( + name="requests.created", + unit="1", + description="Number of request records created", + ) + self.requests_submitted = self.meter.create_counter( + name="requests.submitted", + unit="1", + description="Number of request records submitted", + ) + + self.__class__._initialised = True + + def record_request_created(self, model_name: str): + logger.info("Metrics: record_request_created(model_name=%s)", model_name) + self.requests_created.add( + 1, + { + "environment": self.environment, + "model": model_name, 
+ }, + ) + + def record_request_submitted(self, model_name: str): + logger.info("Metrics: record_request_submitted(model_name=%s)", model_name) + self.requests_submitted.add( + 1, + { + "environment": self.environment, + "model": model_name, + }, + ) + + def _make_gauge_callback(self, metric_name: str): + def callback(options: CallbackOptions) -> Iterable[Observation]: + with self._gauge_lock: + value = self._gauge_values.get(metric_name, 0) + + yield Observation( + value, + {"environment": self.environment}, + ) + + return callback + + def set_gauge_value(self, metric_name, units, description, value): + logger.debug( + "Metrics: set_gauge_value(metric_name=%s, units=%s, description=%s, value=%s)", + metric_name, + units, + description, + value, + ) + + with self._gauge_lock: + self._gauge_values[metric_name] = value + + if metric_name not in self._registered_observable_gauges: + self.meter.create_observable_gauge( + name=metric_name, + callbacks=[self._make_gauge_callback(metric_name)], + unit=units, + description=description, + ) + self._registered_observable_gauges.add(metric_name) diff --git a/lung_cancer_screening/questions/services/metricsCollector.py b/lung_cancer_screening/questions/services/metricsCollector.py new file mode 100644 index 00000000..62b306c5 --- /dev/null +++ b/lung_cancer_screening/questions/services/metricsCollector.py @@ -0,0 +1,69 @@ +import logging + +from django.apps import apps +from django.db import models + +from lung_cancer_screening.questions.models.base import BaseModel +from lung_cancer_screening.questions.services.metrics import Metrics + +logger = logging.getLogger(__name__) + + +class ModelMetricsCollector: + """ + Collects current-state metrics for all models inheriting from BaseModel. + + Emits: + - model_records_ + - model_submitted_records_ (only for models with a status field) + """ + + def __init__(self): + logger.info( + "ModelMetricsCollector: Starting collection of model metrics." 
) + self.metrics = Metrics() + + def collect(self): + logger.info( + "ModelMetricsCollector: collect." ) + for model in apps.get_models(): + if not issubclass(model, BaseModel) or model._meta.abstract: + continue + + self._collect_for_model(model) + + def _collect_for_model(self, model: type[models.Model]): + model_name = model._meta.label_lower.replace(".", "_") + + total_count = model.objects.count() + + logger.info( + "ModelMetricsCollector: _collect_for_model." + " model=%s, total_count=%d", + model._meta.label_lower, + total_count + ) + + self.metrics.set_gauge_value( + metric_name=f"model_records_{model_name}", + units="records", + description=f"Current number of records for {model._meta.label_lower}", + value=total_count, + ) + + status_field = self._get_status_field(model) + if status_field: + submitted_count = model.objects.filter(status="submitted").count() + self.metrics.set_gauge_value( + metric_name=f"model_submitted_records_{model_name}", + units="records", + description=f"Current number of submitted records for {model._meta.label_lower}", + value=submitted_count, + ) + + @staticmethod + def _get_status_field(model: type[models.Model]): + return next( + (field for field in model._meta.fields if field.name == "status"), + None, + ) diff --git a/lung_cancer_screening/urls.py b/lung_cancer_screening/urls.py index df3cebfd..d9ee6257 100644 --- a/lung_cancer_screening/urls.py +++ b/lung_cancer_screening/urls.py @@ -28,14 +28,12 @@ def sha_view(request): return HttpResponse(settings.COMMIT_SHA) - @require_GET @basic_auth_exempt @login_not_required def health_check(request): return HttpResponse("OK") - urlpatterns = [ path('', include( ("lung_cancer_screening.questions.urls", "questions"), diff --git a/poetry.lock b/poetry.lock index 3c50f88a..edc24f02 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. 
+# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. [[package]] name = "asgiref" @@ -72,6 +72,26 @@ msal = ">=1.35.1" msal-extensions = ">=1.2.0" typing-extensions = ">=4.0.0" +[[package]] +name = "azure-monitor-opentelemetry-exporter" +version = "1.0.0b51" +description = "Microsoft Azure Monitor Opentelemetry Exporter Client Library for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "azure_monitor_opentelemetry_exporter-1.0.0b51-py2.py3-none-any.whl", hash = "sha256:6572cac11f96e3b18ae1187cb35cf3b40d0004655dae8048896c41c765bea530"}, + {file = "azure_monitor_opentelemetry_exporter-1.0.0b51.tar.gz", hash = "sha256:a6171c34326bcd6216938bb40d715c15f1f22984ac1986fc97231336d8ac4c3c"}, +] + +[package.dependencies] +azure-core = ">=1.28.0,<2.0.0" +azure-identity = ">=1.17,<2.0" +msrest = ">=0.6.10" +opentelemetry-api = "1.40" +opentelemetry-sdk = "1.40" +psutil = ">=5.9,<8" + [[package]] name = "beautifulsoup4" version = "4.14.3" @@ -760,6 +780,30 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +[[package]] +name = "importlib-metadata" +version = "8.7.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151"}, + {file = "importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=3.4)"] +perf = ["ipython"] +test = ["flufl.flake8", 
"jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["mypy (<1.19) ; platform_python_implementation == \"PyPy\"", "pytest-mypy (>=1.0.1)"] + [[package]] name = "inflection" version = "0.5.1" @@ -772,6 +816,18 @@ files = [ {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, ] +[[package]] +name = "isodate" +version = "0.7.2" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, + {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, +] + [[package]] name = "jinja2" version = "3.1.6" @@ -963,6 +1019,28 @@ msal = ">=1.29,<2" [package.extras] portalocker = ["portalocker (>=1.4,<4)"] +[[package]] +name = "msrest" +version = "0.7.1" +description = "AutoRest swagger generator Python client runtime." 
+optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32"}, + {file = "msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9"}, +] + +[package.dependencies] +azure-core = ">=1.24.0" +certifi = ">=2017.4.17" +isodate = ">=0.6.0" +requests = ">=2.16,<3.0" +requests-oauthlib = ">=0.5.0" + +[package.extras] +async = ["aiodns ; python_version >= \"3.5\"", "aiohttp (>=3.0) ; python_version >= \"3.5\""] + [[package]] name = "nhsuk-frontend-jinja" version = "0.4.1" @@ -978,6 +1056,72 @@ files = [ [package.dependencies] jinja2 = ">=3.1.6,<4.0.0" +[[package]] +name = "oauthlib" +version = "3.3.1" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1"}, + {file = "oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "opentelemetry-api" +version = "1.40.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "opentelemetry_api-1.40.0-py3-none-any.whl", hash = "sha256:82dd69331ae74b06f6a874704be0cfaa49a1650e1537d4a813b86ecef7d0ecf9"}, + {file = "opentelemetry_api-1.40.0.tar.gz", hash = "sha256:159be641c0b04d11e9ecd576906462773eb97ae1b657730f0ecf64d32071569f"}, +] + +[package.dependencies] +importlib-metadata = ">=6.0,<8.8.0" +typing-extensions = ">=4.5.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.40.0" +description = "OpenTelemetry 
Python SDK" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "opentelemetry_sdk-1.40.0-py3-none-any.whl", hash = "sha256:787d2154a71f4b3d81f20524a8ce061b7db667d24e46753f32a7bc48f1c1f3f1"}, + {file = "opentelemetry_sdk-1.40.0.tar.gz", hash = "sha256:18e9f5ec20d859d268c7cb3c5198c8d105d073714db3de50b593b8c1345a48f2"}, +] + +[package.dependencies] +opentelemetry-api = "1.40.0" +opentelemetry-semantic-conventions = "0.61b0" +typing-extensions = ">=4.5.0" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.61b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "opentelemetry_semantic_conventions-0.61b0-py3-none-any.whl", hash = "sha256:fa530a96be229795f8cef353739b618148b0fe2b4b3f005e60e262926c4d38e2"}, + {file = "opentelemetry_semantic_conventions-0.61b0.tar.gz", hash = "sha256:072f65473c5d7c6dc0355b27d6c9d1a679d63b6d4b4b16a9773062cb7e31192a"}, +] + +[package.dependencies] +opentelemetry-api = "1.40.0" +typing-extensions = ">=4.5.0" + [[package]] name = "packaging" version = "26.0" @@ -1045,6 +1189,41 @@ files = [ greenlet = ">=3.1.1,<4.0.0" pyee = ">=13,<14" +[[package]] +name = "psutil" +version = "7.2.2" +description = "Cross-platform lib for process and system monitoring." 
+optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "psutil-7.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2edccc433cbfa046b980b0df0171cd25bcaeb3a68fe9022db0979e7aa74a826b"}, + {file = "psutil-7.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78c8603dcd9a04c7364f1a3e670cea95d51ee865e4efb3556a3a63adef958ea"}, + {file = "psutil-7.2.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a571f2330c966c62aeda00dd24620425d4b0cc86881c89861fbc04549e5dc63"}, + {file = "psutil-7.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:917e891983ca3c1887b4ef36447b1e0873e70c933afc831c6b6da078ba474312"}, + {file = "psutil-7.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:ab486563df44c17f5173621c7b198955bd6b613fb87c71c161f827d3fb149a9b"}, + {file = "psutil-7.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:ae0aefdd8796a7737eccea863f80f81e468a1e4cf14d926bd9b6f5f2d5f90ca9"}, + {file = "psutil-7.2.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:eed63d3b4d62449571547b60578c5b2c4bcccc5387148db46e0c2313dad0ee00"}, + {file = "psutil-7.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7b6d09433a10592ce39b13d7be5a54fbac1d1228ed29abc880fb23df7cb694c9"}, + {file = "psutil-7.2.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fa4ecf83bcdf6e6c8f4449aff98eefb5d0604bf88cb883d7da3d8d2d909546a"}, + {file = "psutil-7.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e452c464a02e7dc7822a05d25db4cde564444a67e58539a00f929c51eddda0cf"}, + {file = "psutil-7.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c7663d4e37f13e884d13994247449e9f8f574bc4655d509c3b95e9ec9e2b9dc1"}, + {file = "psutil-7.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:11fe5a4f613759764e79c65cf11ebdf26e33d6dd34336f8a337aa2996d71c841"}, + {file = 
"psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486"}, + {file = "psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979"}, + {file = "psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9"}, + {file = "psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e"}, + {file = "psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8"}, + {file = "psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc"}, + {file = "psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988"}, + {file = "psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee"}, + {file = "psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372"}, +] + +[package.extras] +dev = ["abi3audit", "black", "check-manifest", "colorama ; os_name == \"nt\"", "coverage", "packaging", "psleak", "pylint", "pyperf", "pypinfo", "pyreadline3 ; os_name == \"nt\"", "pytest", "pytest-cov", "pytest-instafail", "pytest-xdist", "pywin32 ; os_name == \"nt\" and implementation_name != \"pypy\"", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "validate-pyproject[all]", "virtualenv", "vulture", "wheel", "wheel ; os_name == \"nt\" and implementation_name != \"pypy\"", "wmi ; os_name == \"nt\" and implementation_name != \"pypy\""] +test = ["psleak", "pytest", "pytest-instafail", 
"pytest-xdist", "pywin32 ; os_name == \"nt\" and implementation_name != \"pypy\"", "setuptools", "wheel ; os_name == \"nt\" and implementation_name != \"pypy\"", "wmi ; os_name == \"nt\" and implementation_name != \"pypy\""] + [[package]] name = "psycopg2-binary" version = "2.9.12" @@ -1119,6 +1298,7 @@ files = [ {file = "psycopg2_binary-2.9.12-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:ace94261f43850e9e79f6c56636c5e0147978ab79eda5e5e5ebf13ae146fc8fe"}, {file = "psycopg2_binary-2.9.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a7e39a65b7d2a20e4ba2e0aaad1960b61cc2888d6ab047769f8347bd3c9ad915"}, {file = "psycopg2_binary-2.9.12-cp39-cp39-win_amd64.whl", hash = "sha256:f625abb7020e4af3432d95342daa1aa0db3fa369eed19807aa596367ba791b10"}, + {file = "psycopg2_binary-2.9.12.tar.gz", hash = "sha256:5ac9444edc768c02a6b6a591f070b8aae28ff3a99be57560ac996001580f294c"}, ] [[package]] @@ -1211,6 +1391,25 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)"] test = ["PySocks (>=1.5.6,!=1.5.7)", "pytest (>=3)", "pytest-cov", "pytest-httpbin (==2.1.0)", "pytest-mock", "pytest-xdist"] use-chardet-on-py3 = ["chardet (>=3.0.2,<8)"] +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." 
+optional = false +python-versions = ">=3.4" +groups = ["main"] +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + [[package]] name = "ruff" version = "0.15.9" @@ -1337,7 +1536,27 @@ files = [ [package.extras] brotli = ["brotli"] +[[package]] +name = "zipp" +version = "3.23.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, + {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + [metadata] lock-version = "2.1" python-versions = ">=3.13, <4.0" -content-hash = "c20a09db706201ecf0def6048ff41d949dd30fb73e229e61d15876a6f36edcca" +content-hash = "77f4bd2fa9ffd86aaa23310ed47c320ec3e3528a90dc17c715b95175d8c2a1be" diff --git a/pyproject.toml b/pyproject.toml index b539d133..5fb80b1b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,6 +8,8 @@ readme = "README.md" requires-python = ">=3.13, <4.0" dependencies = [ "azure-identity (>=1.23.0,<2.0.0)", + 
"azure-monitor-opentelemetry-exporter", + "opentelemetry-sdk", "django (>=6.0.3,<7.0.0)", "gunicorn (>=23.0.0,<24.0.0)", "nhsuk-frontend-jinja (>=0.4.1,<0.5.0)", diff --git a/scripts/config/vale/styles/config/vocabularies/words/accept.txt b/scripts/config/vale/styles/config/vocabularies/words/accept.txt index d4c2e7b4..3bb4f9d7 100644 --- a/scripts/config/vale/styles/config/vocabularies/words/accept.txt +++ b/scripts/config/vale/styles/config/vocabularies/words/accept.txt @@ -30,3 +30,4 @@ yaml jq choco CLI +nonlive