diff --git a/.env.template b/.env.template deleted file mode 100644 index b141e2e3..00000000 --- a/.env.template +++ /dev/null @@ -1,10 +0,0 @@ -FLASK_PORT= -FLASK_HOST= - -BASE_URL= -HOST= - -STUB_PDS= -STUB_PROVIDER= -STUB_SDS= - diff --git a/.github/actions/run-test-suite/action.yaml b/.github/actions/run-test-suite/action.yaml index 1113c720..2eee7d00 100644 --- a/.github/actions/run-test-suite/action.yaml +++ b/.github/actions/run-test-suite/action.yaml @@ -9,33 +9,25 @@ inputs: test-type: description: "Type of test to run" required: true - apigee-access-token: - description: "Apigee access token" - required: false - base-url: - description: "The URL of the environment to test" - required: false env: - description: "Environment: local or remote" + description: "Environment to run tests against: ci, alpha-int, pr- - see env.mk" required: false - default: "remote" + default: "ci" runs: using: composite steps: - - name: "Run ${{ inputs.test-type }} tests" + - name: Set up environment shell: bash env: - APIGEE_ACCESS_TOKEN: ${{ inputs.apigee-access-token }} - BASE_URL: ${{ inputs.base-url }} ENV: ${{ inputs.env }} + run: make env-test-"${ENV}" + + - name: "Run ${{ inputs.test-type }} tests" + shell: bash + env: TEST_TYPE: ${{ inputs.test-type }} run: | - if [[ -n "${APIGEE_ACCESS_TOKEN}" ]]; then - echo "::add-mask::${APIGEE_ACCESS_TOKEN}" - fi - - # Clean test artefacts so each suite uploads only its own results rm -rf gateway-api/test-artefacts/* || true mkdir -p gateway-api/test-artefacts make test-"${TEST_TYPE}" diff --git a/.github/actions/start-app/action.yaml b/.github/actions/start-app/action.yaml index 27b0ba56..27f8e177 100644 --- a/.github/actions/start-app/action.yaml +++ b/.github/actions/start-app/action.yaml @@ -4,7 +4,7 @@ inputs: deploy-command: description: "Command to start app" required: false - default: "make deploy" + default: "make deploy-ci" health-path: description: "Health check path" required: false diff --git 
a/.github/instructions/copilot-instructions.md b/.github/instructions/copilot-instructions.md index 6999f07d..a8641e9e 100644 --- a/.github/instructions/copilot-instructions.md +++ b/.github/instructions/copilot-instructions.md @@ -8,7 +8,7 @@ This repository is for handling HTTP requests from "Consumer systems" and forwar We use other NHSE services to assist in the validation and processing of the requests including PDS FHIR API for obtaining GP practice codes for the patient, SDS FHIR API for obtaining the "Provider system" details of that GP practice and Healthcare Worker FHIR API for obtaining details of the requesting practitioner using the "Consumer System" that will then be added to the forwarded request. -`make deploy` will build and start a container running Gateway API at `localhost:5000`. +`make deploy-dev` will build and start a container running Gateway API at `localhost:5000`. After deploying the container locally, `make test` will run all tests and capture their coverage. Note: env variables control the use of stubs for the PDS FHIR API, SDS FHIR API, Healthcare Worker FHIR API and Provider system services. 
diff --git a/.github/workflows/alpha-integration-env.yml b/.github/workflows/alpha-integration-env.yml index 55afdedb..e845b0fe 100644 --- a/.github/workflows/alpha-integration-env.yml +++ b/.github/workflows/alpha-integration-env.yml @@ -14,7 +14,6 @@ env: TF_STATE_KEY: "dev/preview/alpha-integration.tfstate" BRANCH_NAME: "alpha-integration" ALB_RULE_PRIORITY: "900" - BASE_URL: "https://internal-dev.api.service.nhs.uk/clinical-data-gateway-api-poc-alpha-integration" python_version: "3.14" PROXYGEN_API_NAME: ${{ vars.PROXYGEN_API_NAME }} @@ -196,63 +195,34 @@ jobs: echo "http_result=unexpected-status" >> "$GITHUB_OUTPUT" exit 0 - - name: Retrieve Apigee Token - id: apigee-token - shell: bash - run: | - set -euo pipefail - - APIGEE_TOKEN="$(proxygen pytest-nhsd-apim get-token | jq -r '.pytest_nhsd_apim_token' 2>/dev/null)" - if [ -z "$APIGEE_TOKEN" ] || [ "$APIGEE_TOKEN" = "null" ]; then - echo "::error::Failed to retrieve Apigee token" - exit 1 - fi - - echo "::add-mask::$APIGEE_TOKEN" - printf 'apigee-access-token=%s\n' "$APIGEE_TOKEN" >> "$GITHUB_OUTPUT" - echo "Token retrieved successfully (length: ${#APIGEE_TOKEN})" - - name: Run unit tests uses: ./.github/actions/run-test-suite with: test-type: unit - env: local - name: Run contract tests uses: ./.github/actions/run-test-suite - env: - PROXY_BASE_PATH: "clinical-data-gateway-api-poc-alpha-integration" with: test-type: contract - apigee-access-token: ${{ steps.apigee-token.outputs.apigee-access-token }} - base-url: ${{ env.BASE_URL }} + env: alpha-int - name: Run schema validation tests uses: ./.github/actions/run-test-suite - env: - PROXY_BASE_PATH: "clinical-data-gateway-api-poc-alpha-integration" with: test-type: schema - apigee-access-token: ${{ steps.apigee-token.outputs.apigee-access-token }} - base-url: ${{ env.BASE_URL }} + env: alpha-int - name: Run integration tests uses: ./.github/actions/run-test-suite - env: - PROXY_BASE_PATH: "clinical-data-gateway-api-poc-alpha-integration" with: test-type: 
integration - apigee-access-token: ${{ steps.apigee-token.outputs.apigee-access-token }} - base-url: ${{ env.BASE_URL }} + env: alpha-int - name: Run acceptance tests uses: ./.github/actions/run-test-suite - env: - PROXY_BASE_PATH: "clinical-data-gateway-api-poc-alpha-integration" with: test-type: acceptance - apigee-access-token: ${{ steps.apigee-token.outputs.apigee-access-token }} - base-url: ${{ env.BASE_URL }} + env: alpha-int - name: Remove mTLS temp files run: rm -f /tmp/client1-key.pem /tmp/client1-cert.pem diff --git a/.github/workflows/preview-env.yml b/.github/workflows/preview-env.yml index 5f0176df..e3868417 100644 --- a/.github/workflows/preview-env.yml +++ b/.github/workflows/preview-env.yml @@ -10,7 +10,6 @@ env: ECR_REPOSITORY_NAME: "whoami" TF_STATE_BUCKET: "cds-cdg-dev-tfstate-900119715266" PREVIEW_STATE_PREFIX: "dev/preview/" - BASE_URL: "https://internal-dev.api.service.nhs.uk/clinical-data-gateway-api-poc-pr-${{ github.event.pull_request.number }}" python_version: "3.14" PROXYGEN_API_NAME: ${{ vars.PROXYGEN_API_NAME }} PR_NUMBER: ${{ github.event.pull_request.number }} @@ -314,28 +313,11 @@ jobs: # ---------- QUALITY CHECKS (Test Suites) ---------- - - name: Retrieve Apigee Token - id: apigee-token - shell: bash - run: | - set -euo pipefail - - APIGEE_TOKEN="$(proxygen pytest-nhsd-apim get-token | jq -r '.pytest_nhsd_apim_token' 2>/dev/null)" - if [ -z "$APIGEE_TOKEN" ] || [ "$APIGEE_TOKEN" = "null" ]; then - echo "::error::Failed to retrieve Apigee token" - exit 1 - fi - - echo "::add-mask::$APIGEE_TOKEN" - printf 'apigee-access-token=%s\n' "$APIGEE_TOKEN" >> "$GITHUB_OUTPUT" - echo "Token retrieved successfully (length: ${#APIGEE_TOKEN})" - - name: "Run unit tests" if: github.event.action != 'closed' uses: ./.github/actions/run-test-suite with: test-type: unit - env: local - name: "Run load tests" if: github.event.action != 'closed' && steps.smoke-test.outputs.http_result != 'unexpected-status' @@ -349,42 +331,30 @@ jobs: - name: "Run 
contract tests" if: github.event.action != 'closed' uses: ./.github/actions/run-test-suite - env: - PROXY_BASE_PATH: "clinical-data-gateway-api-poc-pr-${{ github.event.pull_request.number }}" with: test-type: contract - apigee-access-token: ${{ steps.apigee-token.outputs.apigee-access-token }} - base-url: ${{ env.BASE_URL }} + env: pr-${{ github.event.pull_request.number }} - name: "Run schema validation tests" if: github.event.action != 'closed' uses: ./.github/actions/run-test-suite - env: - PROXY_BASE_PATH: "clinical-data-gateway-api-poc-pr-${{ github.event.pull_request.number }}" with: test-type: schema - apigee-access-token: ${{ steps.apigee-token.outputs.apigee-access-token }} - base-url: ${{ env.BASE_URL }} + env: pr-${{ github.event.pull_request.number }} - name: "Run integration tests" if: github.event.action != 'closed' uses: ./.github/actions/run-test-suite - env: - PROXY_BASE_PATH: "clinical-data-gateway-api-poc-pr-${{ github.event.pull_request.number }}" with: test-type: integration - apigee-access-token: ${{ steps.apigee-token.outputs.apigee-access-token }} - base-url: ${{ env.BASE_URL }} + env: pr-${{ github.event.pull_request.number }} - name: "Run acceptance tests" if: github.event.action != 'closed' uses: ./.github/actions/run-test-suite - env: - PROXY_BASE_PATH: "clinical-data-gateway-api-poc-pr-${{ github.event.pull_request.number }}" with: test-type: acceptance - apigee-access-token: ${{ steps.apigee-token.outputs.apigee-access-token }} - base-url: ${{ env.BASE_URL }} + env: pr-${{ github.event.pull_request.number }} # Cleanup after tests - name: Remove mTLS temp files diff --git a/.github/workflows/stage-2-test.yaml b/.github/workflows/stage-2-test.yaml index 2d3b8318..5a720036 100644 --- a/.github/workflows/stage-2-test.yaml +++ b/.github/workflows/stage-2-test.yaml @@ -1,12 +1,5 @@ name: "Test stage" -env: - BASE_URL: "http://localhost:5000" - HOST: "localhost" - STUB_SDS: "true" - STUB_PDS: "true" - STUB_PROVIDER: "true" - on: 
workflow_call: inputs: @@ -45,6 +38,8 @@ jobs: uses: ./.github/actions/setup-python-project with: python-version: ${{ inputs.python_version }} + - name: Set environment variables + run: make env-test-ci - name: "Run unit test suite" run: make test-unit - name: "Upload unit test results" @@ -75,6 +70,8 @@ jobs: uses: ./.github/actions/start-app with: python-version: ${{ inputs.python_version }} + - name: Set environment variables + run: make env-test-ci - name: "Run contract tests" run: make test-contract - name: "Upload contract test results" @@ -105,6 +102,8 @@ jobs: uses: ./.github/actions/start-app with: python-version: ${{ inputs.python_version }} + - name: Set environment variables + run: make env-test-ci - name: "Run schema validation tests" run: make test-schema - name: "Upload schema test results" @@ -135,6 +134,8 @@ jobs: uses: ./.github/actions/start-app with: python-version: ${{ inputs.python_version }} + - name: Set environment variables + run: make env-test-ci - name: "Run integration test" run: make test-integration - name: "Upload integration test results" @@ -166,6 +167,8 @@ jobs: with: python-version: ${{ inputs.python_version }} max-seconds: 90 + - name: Set environment variables + run: make env-test-ci - name: "Run acceptance test" run: make test-acceptance - name: "Upload acceptance test results" diff --git a/.gitignore b/.gitignore index 3ab02b93..8e9f79dc 100644 --- a/.gitignore +++ b/.gitignore @@ -26,3 +26,7 @@ gateway-api/test-artefacts/ **/.env.* **/.DS_Store **/.terraform.lock.hcl + +# Any file within .secrets +.secrets/** +!.secrets/README.md diff --git a/.secrets/README.md b/.secrets/README.md new file mode 100644 index 00000000..9b8b5967 --- /dev/null +++ b/.secrets/README.md @@ -0,0 +1,19 @@ +# Secrets + +This directory is used to store secrets. 
+ +The secrets are accessed through `make env-` which sets the secrets required for PDS FHIR API and SDS FHIR API to the `.env` file, which is then fed into the locally deployed application through `make deploy`. + +## PDS + +PDS FHIR API requires [signed JWT for application-restricted access](https://digital.nhs.uk/developer/guides-and-documentation/security-and-authorisation/application-restricted-restful-apis-signed-jwt-authentication). As such, the following three secrets enable the Gateway API to authenticate: + +* `.secrets/pds/api_token` - the API key of the application through which the Gateway API will consume NHSE APIs. +* `.secrets/pds/api_secret` - the private key of the public/private key pair created for the application identified by `api_token` +* `.secrets/pds/api_kid` - the key identifier for the private/public key pair used for the `api_secret`. + +## SDS + +SDS FHIR API requires [API key authentication](https://digital.nhs.uk/developer/guides-and-documentation/security-and-authorisation/application-restricted-restful-apis-api-key-authentication) for application-restricted access. As such, the only secret required is + +* `.secrets/sds/api_token` - the API key of the application through which the Gateway API will consume NHSE APIs. 
diff --git a/.secrets/pds/.gitkeep b/.secrets/pds/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/.secrets/sds/.gitkeep b/.secrets/sds/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/.vscode/cspell-dictionary.txt b/.vscode/cspell-dictionary.txt index 004b5f0a..a15065cc 100644 --- a/.vscode/cspell-dictionary.txt +++ b/.vscode/cspell-dictionary.txt @@ -36,6 +36,7 @@ eamodio errstr Farsley fhir +getfixturevalue getstructuredrecord gocloc GPCAPIM @@ -73,6 +74,7 @@ NOSONAR NPFIT ONESHELL opencollection +orangebox pipefail PIPX pkce diff --git a/Makefile b/Makefile index 33e12b80..0683b732 100644 --- a/Makefile +++ b/Makefile @@ -53,27 +53,13 @@ build: build-gateway-api # Build the project artefact @Pipeline publish: # Publish the project artefact @Pipeline # TODO [GPCAPIM-283]: Implement the artefact publishing step -deploy: clean build # Deploy the project artefact to the target environment @Pipeline +deploy: clean build # Build project artefact and deploy locally @Pipeline @$(docker) network inspect gateway-local >/dev/null 2>&1 || $(docker) network create gateway-local - # Build up list of environment variables to pass to the container - @ENVIRONMENT_STRING="" ; \ - if [[ -n "$${STUB_PROVIDER}" ]]; then \ - ENVIRONMENT_STRING="$${ENVIRONMENT_STRING} -e STUB_PROVIDER=$${STUB_PROVIDER}" ; \ - fi ; \ - if [[ -n "$${STUB_PDS}" ]]; then \ - ENVIRONMENT_STRING="$${ENVIRONMENT_STRING} -e STUB_PDS=$${STUB_PDS}" ; \ - fi ; \ - if [[ -n "$${STUB_SDS}" ]]; then \ - ENVIRONMENT_STRING="$${ENVIRONMENT_STRING} -e STUB_SDS=$${STUB_SDS}" ; \ - fi ; \ - if [[ -n "$${CDG_DEBUG}" ]]; then \ - ENVIRONMENT_STRING="$${ENVIRONMENT_STRING} -e CDG_DEBUG=$${CDG_DEBUG}" ; \ - fi ; \ if [[ -n "$${IN_BUILD_CONTAINER}" ]]; then \ echo "Starting using local docker network ..." 
; \ - $(docker) run --platform linux/amd64 --name gateway-api -p 5000:8080 --network gateway-local $${ENVIRONMENT_STRING} -d ${IMAGE_NAME} ; \ + $(docker) run --platform linux/amd64 --name gateway-api -p 5000:8080 --network gateway-local --env-file .env -d ${IMAGE_NAME} ; \ else \ - $(docker) run --platform linux/amd64 --name gateway-api -p 5000:8080 $${ENVIRONMENT_STRING} -d ${IMAGE_NAME} ; \ + $(docker) run --platform linux/amd64 --name gateway-api -p 5000:8080 --env-file .env -d ${IMAGE_NAME} ; \ fi @max_attempts=5 ; \ attempt=1 ; \ @@ -88,6 +74,9 @@ deploy: clean build # Deploy the project artefact to the target environment @Pip $(docker) logs gateway-api ; \ exit 1 +deploy-%: # Build project artefact and deploy locally as specified environment - mandatory: name=[name of the environment, e.g. 'dev'] @Pipeline + make env-$* deploy + clean:: stop # Clean-up project resources (main) @Operations @echo "Removing Gateway API container..." @$(docker) rm gateway-api || echo "No Gateway API container currently exists." diff --git a/README.md b/README.md index fd89463d..68b562f6 100644 --- a/README.md +++ b/README.md @@ -128,7 +128,8 @@ The project uses `make` targets to build, deploy, and manage the application. Ru | --- | --- | | `make dependencies` | Install all project dependencies via Poetry | | `make build` | Type-check, package, and build the Docker image | -| `make deploy` | Build and start the Gateway API container at `localhost:5000` | +| `make env` | Create a `.env` to be consumed when starting the app, e.g. 
`make deploy` | +| `make deploy` | Build and start the Gateway API container using the environment variables defined in `.env` | | `make clean` | Stop and remove the Gateway API container | | `make config` | Configure the development environment | @@ -145,18 +146,40 @@ The full API schema is defined in [gateway-api/openapi.yaml](gateway-api/openapi ### Environment Variables +Make commands help build the `.env` and `.env.test` files used to populate the required environment variables. + +#### .env + +`make deploy` will feed the `.env` variables into the app's environment. + +Environment variables control whether stubs are used in place of the real PDS, SDS, and Provider services during local development. + | Variable | Description | | --- | --- | -| `BASE_URL` | Protocol, hostname and port for the running API (e.g. `http://localhost:5000`, or `http://gateway-api:8080` from within the devcontainer) | -| `HOST` | hostname portion of `BASE_URL` | -| `FLASK_HOST` | Host the Flask app binds to | -| `FLASK_PORT` | Port the Flask app listens on | -| `STUB_PDS` | `true`, use the stubs/stubs/pds/stub.py to return stubbed responses for PDS FHIR API; otherwise, not. | -| `STUB_SDS` | `true`, use the stubs/stubs/sds/stub.py to return stubbed responses for SDS FHIR API; otherwise, not. | -| `STUB_PROVIDER` | `true`, use the stubs/stubs/provider/stub.py to return stubbed responses for the provider system; otherwise, not. | -| `CDG_DEBUG` | `true`, Return additional debug information when the call to the GP provider returns an error. Note if set true causes the unit tests to fail, because expected return values are changed. | +| `PDS_URL` | The URL for the PDS FHIR API; set as `stub` to use the development stub. | +| `PDS_API_TOKEN`| Leave unset in development environment. | +| `PDS_API_SECRET`| Leave unset in development environment. | +| `PDS_API_KID`| Leave unset in development environment. | +| `SDS_URL` | The URL for the SDS FHIR API; set as `stub` to use the development stub. 
| +`SDS_API_TOKEN`| Leave unset in development environment. | +| `PROVIDER_URL` | The URL for the GP Provider; set as `stub` to use the development stub. | +| `CDG_DEBUG` | `true`, return additional debug information when the call to the GP provider returns an error. | + +See `make env-*` in `scripts/env/app/env.mk` for the commands that will write these variables to a file. -Environment variables also control whether stubs are used in place of the real PDS, SDS, and Provider services during local development. +_Note: `FLASK_HOST` and `FLASK_PORT` are hardcoded into the Dockerfile. These are for the container, and do not need adjusting._ + +#### .env.test + +| Variable | Description | +| --- | --- | +| `BASE_URL` | Protocol, hostname and port for the running API (e.g. `http://localhost:5000`, or `http://gateway-api:8080` from within the devcontainer) | +| `APIGEE_ACCESS_TOKEN` | An access token to the Apigee API used by `pytest_nhsd_apim`, fed from the environment variables at run time. | +| `PROXYGEN_API_NAME` | The name of the API defined in Proxygen. Used by `pytest_nhsd_apim` to run tests against, fed in the CLI arguments in `make test-*` | +| `PROXY_BASE_PATH` | The suffix of the proxy instance being deployed. Used by `pytest_nhsd_apim` to run tests against, fed in the CLI arguments in `make test-*` | +| `BASE_URL` | Set if targeting a locally deployed application; otherwise, leave unset. | +| `TARGET_ENV` | Either `local` or `remote`, to inform the HTTP client used by the tests how to behave - e.g. add auth headers, etc. | +| `REMOTE_TEST_USERNAME` | The test user as which the tests will be authenticated when run against a remote target. 
| ## Testing @@ -201,10 +224,10 @@ To be able to use the `load-tests` or `local-tests`, you will need to have Proxy - Please follow the [guide here](https://nhsd-confluence.digital.nhs.uk/spaces/DCA/pages/1236046532/Proxygen) - When the devcontainer has built, the Proxygen configuration is created on the host at `~/gateway/ptl/.proxygen` and bind-mounted into the container as `~/.proxygen`. -- For *settings*: +- For _settings_: - Set the `api` value - `endpoint_url` and `spec_output_format` should be already set. -- For *credentials*: +- For _credentials_: - Set the `base_url`, `client_secret`, `password` and `username` values. - Remove unused fields. diff --git a/bruno/gateway-api/collections/Steel_Thread/environments/orangebox.yml b/bruno/gateway-api/collections/Steel_Thread/environments/orangebox.yml new file mode 100644 index 00000000..807c3a47 --- /dev/null +++ b/bruno/gateway-api/collections/Steel_Thread/environments/orangebox.yml @@ -0,0 +1,8 @@ +name: orangebox +variables: + - name: nhs_number + value: "9690937278" + - name: base_url + value: http://localhost:5000 + - name: ods_code + value: S55555 diff --git a/gateway-api/src/fhir/stu3/elements/parameters.py b/gateway-api/src/fhir/stu3/elements/parameters.py index bff7cc4e..a45fe85a 100644 --- a/gateway-api/src/fhir/stu3/elements/parameters.py +++ b/gateway-api/src/fhir/stu3/elements/parameters.py @@ -1,6 +1,6 @@ from abc import ABC from dataclasses import dataclass -from typing import Annotated +from typing import Annotated, Literal from pydantic import Field @@ -15,7 +15,7 @@ class Parameters(Resource, resource_type="Parameters"): class Parameter(ABC): """A FHIR STU3 Parameter resource.""" - name: Annotated[str, Field(frozen=True)] + name: Annotated[Literal["patientNHSNumber"], Field(frozen=True)] valueIdentifier: Annotated[PatientIdentifier, Field(frozen=True)] parameter: Annotated[list[Parameter], Field(frozen=True, min_length=1)] diff --git a/gateway-api/src/fhir/stu3/elements/test_elements.py 
b/gateway-api/src/fhir/stu3/elements/test_elements.py index aae758ba..651b796f 100644 --- a/gateway-api/src/fhir/stu3/elements/test_elements.py +++ b/gateway-api/src/fhir/stu3/elements/test_elements.py @@ -91,6 +91,7 @@ def test_model_validate_with_wrong_resource_type_raises_error(self) -> None: "resourceType": "Patient", "parameter": [ { + "name": "patientNHSNumber", "valueIdentifier": { "system": "https://fhir.nhs.uk/Id/nhs-number", "value": "9000000009", @@ -114,6 +115,7 @@ def test_model_validate_with_invalid_identifier_system_raises_error(self) -> Non "resourceType": "Parameters", "parameter": [ { + "name": "patientNHSNumber", "valueIdentifier": { "system": "https://example.org/invalid", "value": "9000000009", diff --git a/gateway-api/src/gateway_api/app.py b/gateway-api/src/gateway_api/app.py index 4edd0f83..0ff3f695 100644 --- a/gateway-api/src/gateway_api/app.py +++ b/gateway-api/src/gateway_api/app.py @@ -1,5 +1,7 @@ import os import traceback +from collections.abc import Callable +from logging import basicConfig, getLogger from flask import Flask, Request, request from flask.wrappers import Response @@ -12,33 +14,56 @@ ) app = Flask(__name__) -app.logger.setLevel("INFO") +_logger = getLogger(__name__) -def get_app_host() -> str: - host = os.getenv("FLASK_HOST") - if host is None: - raise RuntimeError("FLASK_HOST environment variable is not set.") - print(f"Starting Flask app on host: {host}") - return host +def start_app(app: Flask) -> None: + setup_logging() + log_env_vars() + configure_app(app) + log_starting_app(app) + app.run(host=app.config["FLASK_HOST"], port=app.config["FLASK_PORT"]) -def get_app_port() -> int: - port = os.getenv("FLASK_PORT") - if port is None: - raise RuntimeError("FLASK_PORT environment variable is not set.") - print(f"Starting Flask app on port: {port}") - return int(port) +def setup_logging() -> None: + basicConfig( + level=os.getenv("LOG_LEVEL", "INFO"), + ) + + +def configure_app(app: Flask) -> None: + config = { + 
"FLASK_HOST": get_env_var("FLASK_HOST", str), + "FLASK_PORT": get_env_var("FLASK_PORT", int), + "PDS_URL": get_env_var("PDS_URL", str), + "SDS_URL": get_env_var("SDS_URL", str), + } + app.config.update(config) + + +def get_env_var[T](name: str, parser: Callable[[str], T]) -> T: + value = os.getenv(name) + if value is None: + raise RuntimeError(f"{name} environment variable is not set.") + try: + return parser(value) + except Exception as e: + raise RuntimeError(f"Error loading {name} environment variable: {e}") from e def log_request_received(request: Request) -> None: + sanitized_headers = { + key: value + for key, value in request.headers.items() + if key.lower() != "authorization" + } log_details = { "description": "Received request", "method": request.method, "path": request.path, - "headers": dict(request.headers), + "headers": sanitized_headers, } - app.logger.info(log_details) + _logger.info(log_details) def log_error(error: AbstractCDGError) -> None: @@ -48,7 +73,31 @@ def log_error(error: AbstractCDGError) -> None: "error_message": str(error), "traceback": traceback.format_exc(), } - app.logger.error(log_details) + _logger.error(log_details) + + +def log_env_vars() -> None: + env_vars = { + key: value + for key, value in os.environ.items() + if key in {"FLASK_HOST", "FLASK_PORT", "PDS_URL", "SDS_URL"} + } + log_details = { + "description": "Initializing Flask app", + "env_vars": env_vars, + } + _logger.info(log_details) + + +def log_starting_app(app: Flask) -> None: + log_details = { + "description": "Starting Flask app", + "host": app.config["FLASK_HOST"], + "port": app.config["FLASK_PORT"], + "pds_base_url": app.config["PDS_URL"], + "sds_base_url": app.config["SDS_URL"], + } + _logger.info(log_details) @app.route("/patient/$gpc.getstructuredrecord", methods=["POST"]) @@ -58,7 +107,9 @@ def get_structured_record() -> Response: response.mirror_headers(request) try: get_structured_record_request = GetStructuredRecordRequest(request) - controller = 
Controller() + controller = Controller( + pds_base_url=app.config["PDS_URL"], sds_base_url=app.config["SDS_URL"] + ) provider_response = controller.run(request=get_structured_record_request) response.add_provider_response(provider_response) except AbstractCDGError as e: @@ -86,6 +137,4 @@ def health_check() -> dict[str, str]: if __name__ == "__main__": - host, port = get_app_host(), get_app_port() - print(f"Version: {os.getenv('COMMIT_VERSION')}") - app.run(host=host, port=port) + start_app(app) diff --git a/gateway-api/src/gateway_api/conftest.py b/gateway-api/src/gateway_api/conftest.py index ed7844f0..99548b5c 100644 --- a/gateway-api/src/gateway_api/conftest.py +++ b/gateway-api/src/gateway_api/conftest.py @@ -1,7 +1,10 @@ """Pytest configuration and shared fixtures for gateway API tests.""" import json +import os +from collections.abc import Mapping from dataclasses import dataclass +from types import TracebackType from typing import Any import pytest @@ -14,6 +17,34 @@ from gateway_api.clinical_jwt import JWT +class ScopedEnvVars: + def __init__(self, new_env_vars: Mapping[str, str | None]) -> None: + self.new_env_vars = new_env_vars + self.original_env_vars = {} + for key in new_env_vars: + if key in os.environ: + self.original_env_vars[key] = os.environ[key] + + def __enter__(self) -> "ScopedEnvVars": + for key, value in self.new_env_vars.items(): + if value is None and key in os.environ: + del os.environ[key] + elif value is not None: + os.environ[key] = value + return self + + def __exit__( + self, + _type: type[BaseException] | None, + _value: BaseException | None, + _traceback: TracebackType | None, + ) -> None: + for key in self.new_env_vars: + if key in os.environ: + del os.environ[key] + os.environ.update(self.original_env_vars) + + @dataclass class FakeResponse: """ diff --git a/gateway-api/src/gateway_api/controller.py b/gateway-api/src/gateway_api/controller.py index ac37c16e..539bea71 100644 --- a/gateway-api/src/gateway_api/controller.py +++ 
b/gateway-api/src/gateway_api/controller.py @@ -27,8 +27,8 @@ class Controller: def __init__( self, - pds_base_url: str = PdsClient.SANDBOX_URL, - sds_base_url: str = SdsClient.SANDBOX_URL, + pds_base_url: str, + sds_base_url: str, timeout: int = 10, ) -> None: """ diff --git a/gateway-api/src/gateway_api/pds/client.py b/gateway-api/src/gateway_api/pds/client.py index 303671bf..b228c8bc 100644 --- a/gateway-api/src/gateway_api/pds/client.py +++ b/gateway-api/src/gateway_api/pds/client.py @@ -18,9 +18,9 @@ malformed upstream data (or malformed test fixtures) and should be corrected at source. """ +import logging import os import uuid -from collections.abc import Callable import requests from fhir.r4 import Patient @@ -29,18 +29,19 @@ from gateway_api.common.error import PdsRequestFailedError # TODO [GPCAPIM-359]: Once stub servers/containers made for PDS, SDS and provider -# we should remove the STUB_PDS environment variable and just +# we should remove the PDS_URL environment variable and just # use the stub client -STUB_PDS = os.environ.get("STUB_PDS", "false").lower() == "true" +STUB_PDS = os.environ["PDS_URL"].lower() == "stub" -get: Callable[..., requests.Response] if not STUB_PDS: - get = requests.get + from requests import get else: from stubs.pds.stub import PdsFhirApiStub pds = PdsFhirApiStub() - get = pds.get + get = pds.get # type: ignore + +_logger = logging.getLogger(__name__) class PdsClient: @@ -67,15 +68,10 @@ class PdsClient: print(result) """ - # URLs for different PDS environments. Requires authentication to use live. 
- SANDBOX_URL = "https://sandbox.api.service.nhs.uk/personal-demographics/FHIR/R4" - INT_URL = "https://int.api.service.nhs.uk/personal-demographics/FHIR/R4" - PROD_URL = "https://api.service.nhs.uk/personal-demographics/FHIR/R4" - def __init__( self, auth_token: str, - base_url: str = SANDBOX_URL, + base_url: str, timeout: int = 10, ignore_dates: bool = False, ) -> None: @@ -84,6 +80,12 @@ def __init__( self.timeout = timeout self.ignore_dates = ignore_dates + log_details = { + "description": "Initialized PdsClient", + "base_url": self.base_url, + } + _logger.info(log_details) + def _build_headers( self, request_id: str | None = None, @@ -123,13 +125,23 @@ def search_patient_by_nhs_number( url = f"{self.base_url}/Patient/{nhs_number}" - # This normally calls requests.get, but if STUB_PDS is set it uses the stub. + log_details = { + "description": "PDS request", + "url": url, + } + _logger.info(log_details) + # This normally calls requests.get, but if PDS_URL is set it uses the stub. response = get( url, headers=headers, params={}, timeout=timeout or self.timeout, ) + log_details = { + "description": "PDS response received", + "status_code": str(response.status_code), + } + _logger.info(log_details) try: response.raise_for_status() diff --git a/gateway-api/src/gateway_api/pds/test_client.py b/gateway-api/src/gateway_api/pds/test_client.py index 0263ea89..aee33bf2 100644 --- a/gateway-api/src/gateway_api/pds/test_client.py +++ b/gateway-api/src/gateway_api/pds/test_client.py @@ -24,7 +24,7 @@ def test_search_patient_by_nhs_number_happy_path( ) mocker.patch("gateway_api.pds.client.get", return_value=happy_path_response) - client = PdsClient(auth_token) + client = PdsClient(auth_token, base_url="https://test.com") patient = client.search_patient_by_nhs_number("9999999999") assert isinstance(patient, Patient) @@ -44,7 +44,7 @@ def test_search_patient_by_nhs_number_has_no_gp_returns_gp_ods_code_none( ) mocker.patch("gateway_api.pds.client.get", 
return_value=gp_less_response) - client = PdsClient(auth_token) + client = PdsClient(auth_token, base_url="https://test.com") patient = client.search_patient_by_nhs_number("9999999999") assert isinstance(patient, Patient) @@ -67,7 +67,7 @@ def test_search_patient_by_nhs_number_sends_expected_headers( request_id = str(uuid4()) correlation_id = "corr-123" - client = PdsClient(auth_token) + client = PdsClient(auth_token, base_url="https://test.com") _ = client.search_patient_by_nhs_number( "9000000009", request_id=request_id, @@ -96,7 +96,7 @@ def test_search_patient_by_nhs_number_generates_request_id( "gateway_api.pds.client.get", return_value=happy_path_response ) - client = PdsClient(auth_token) + client = PdsClient(auth_token, base_url="https://test.com") _ = client.search_patient_by_nhs_number("9000000009") @@ -117,7 +117,7 @@ def test_search_patient_by_nhs_number_not_found_raises_error( reason="Not Found", ) mocker.patch("gateway_api.pds.client.get", return_value=not_found_response) - pds = PdsClient(auth_token) + pds = PdsClient(auth_token, base_url="https://test.com") with pytest.raises( PdsRequestFailedError, match="PDS FHIR API request failed: Not Found" @@ -140,10 +140,29 @@ def test_search_patient_by_nhs_number_missing_nhs_number_raises_error( ) mocker.patch("gateway_api.pds.client.get", return_value=response) - client = PdsClient(auth_token) + client = PdsClient(auth_token, base_url="https://test.com") with pytest.raises(PdsRequestFailedError) as error: client.search_patient_by_nhs_number("9999999999") assert "'identifier'" in str(error.value) assert "too_short" in str(error.value) + + +def test_search_patient_respects_url( + auth_token: str, + mocker: MockerFixture, + happy_path_pds_response_body: dict[str, Any], +) -> None: + happy_path_response = FakeResponse( + status_code=200, headers={}, _json=happy_path_pds_response_body + ) + mocked_get = mocker.patch( + "gateway_api.pds.client.get", return_value=happy_path_response + ) + + client = 
PdsClient(auth_token, base_url="https://a.different.url/base") + _ = client.search_patient_by_nhs_number("9000000009") + + actual_url = mocked_get.call_args.args[0] + assert actual_url == "https://a.different.url/base/Patient/9000000009" diff --git a/gateway-api/src/gateway_api/provider/client.py b/gateway-api/src/gateway_api/provider/client.py index 731bcafc..338e6093 100644 --- a/gateway-api/src/gateway_api/provider/client.py +++ b/gateway-api/src/gateway_api/provider/client.py @@ -22,6 +22,7 @@ The response from the provider FHIR API. """ +import logging import os from urllib.parse import urljoin @@ -33,9 +34,9 @@ from gateway_api.get_structured_record import ACCESS_RECORD_STRUCTURED_INTERACTION_ID # TODO [GPCAPIM-359]: Once stub servers/containers made for PDS, SDS and provider -# we should remove the STUB_PROVIDER environment variable and just +# we should remove the PROVIDER_URL environment variable and just # use the stub client -STUB_PROVIDER = os.environ.get("STUB_PROVIDER", "false").lower() == "true" +STUB_PROVIDER = os.environ["PROVIDER_URL"].lower() == "stub" if not STUB_PROVIDER: from requests import post else: @@ -44,6 +45,8 @@ provider_stub = GpProviderStub() post = provider_stub.post # type: ignore +_logger = logging.getLogger(__name__) + # Default endpoint path for access record structured interaction (standard GP Connect) ARS_ENDPOINT_PATH = "Patient/$gpc.getstructuredrecord" TIMEOUT: int | None = None # None used for quicker dev, adjust as needed @@ -83,6 +86,15 @@ def __init__( self.token = token self.endpoint_path = endpoint_path + log_details = { + "description": "Initialized GpProviderClient", + "provider_endpoint": self.provider_endpoint, + "provider_asid": self.provider_asid, + "consumer_asid": self.consumer_asid, + "endpoint_path": self.endpoint_path, + } + _logger.info(log_details) + def _build_headers(self, trace_id: str) -> dict[str, str]: """ Build the headers required for the GPProvider FHIR API request. 
@@ -117,12 +129,23 @@ def access_structured_record( base_endpoint = self.provider_endpoint.rstrip("/") + "/" url = urljoin(base_endpoint, self.endpoint_path) + log_details = { + "description": "GPProvider FHIR API request", + "url": url, + } + _logger.info(log_details) + response = post( url, headers=headers, data=body, timeout=TIMEOUT, ) + log_details = { + "description": "GPProvider FHIR API response received", + "status_code": str(response.status_code), + } + _logger.info(log_details) try: response.raise_for_status() diff --git a/gateway-api/src/gateway_api/sds/client.py b/gateway-api/src/gateway_api/sds/client.py index 11272a8a..f195876d 100644 --- a/gateway-api/src/gateway_api/sds/client.py +++ b/gateway-api/src/gateway_api/sds/client.py @@ -8,6 +8,7 @@ from __future__ import annotations +import logging import os from enum import StrEnum from typing import Any @@ -25,9 +26,9 @@ from gateway_api.sds.search_results import SdsSearchResults # TODO [GPCAPIM-359]: Once stub servers/containers made for PDS, SDS and provider -# we should remove the STUB_SDS environment variable and just +# we should remove the SDS_URL environment variable and just # use the stub client -STUB_SDS = os.environ.get("STUB_SDS", "false").lower() == "true" +STUB_SDS = os.environ["SDS_URL"].lower() == "stub" if not STUB_SDS: from requests import get else: @@ -36,6 +37,8 @@ sds = SdsFhirApiStub() get = sds.get # type: ignore +_logger = logging.getLogger(__name__) + class SdsResourceType(StrEnum): """SDS FHIR resource types.""" @@ -57,7 +60,7 @@ class SdsClient: **Stubbing**: - For testing, set the environment variable ``$STUB_SDS`` to use the + For testing, set the environment variable ``$SDS_URL`` to use the :class:`SdsFhirApiStub` instead of making real HTTP requests. **Usage example**:: @@ -74,16 +77,12 @@ class SdsClient: print(f"ASID: {result.asid}, Endpoint: {result.endpoint}") """ - # URLs for different SDS environments. Will move to a config file eventually. 
- SANDBOX_URL = "https://sandbox.api.service.nhs.uk/spine-directory/FHIR/R4" - INT_URL = "https://int.api.service.nhs.uk/spine-directory/FHIR/R4" - # Default service interaction ID for GP Connect DEFAULT_SERVICE_INTERACTION_ID = ACCESS_RECORD_STRUCTURED_INTERACTION_ID def __init__( self, - base_url: str = SANDBOX_URL, + base_url: str, timeout: int = 10, service_interaction_id: str | None = None, ) -> None: @@ -93,11 +92,21 @@ def __init__( if service_interaction_id is not None: self.service_interaction_id = service_interaction_id - elif self.base_url == self.SANDBOX_URL: + elif ( + self.base_url + == "https://sandbox.api.service.nhs.uk/spine-directory/FHIR/R4" + ): self.service_interaction_id = SDS_SANDBOX_INTERACTION_ID else: self.service_interaction_id = self.DEFAULT_SERVICE_INTERACTION_ID + log_details = { + "description": "Initialized SdsClient", + "base_url": self.base_url, + "service_interaction_id": self.service_interaction_id, + } + _logger.info(log_details) + def _build_headers(self, correlation_id: str | None = None) -> dict[str, str]: """ Build mandatory and optional headers for an SDS request. @@ -193,12 +202,23 @@ def _query_sds( ], } + log_details = { + "description": "SDS request", + "url": url, + "params": params, + } + _logger.info(log_details) response = get( url, headers=headers, params=params, timeout=timeout or self.timeout, ) + log_details = { + "description": "SDS response received", + "status_code": str(response.status_code), + } + _logger.info(log_details) try: response.raise_for_status() diff --git a/gateway-api/src/gateway_api/sds/test_client.py b/gateway-api/src/gateway_api/sds/test_client.py index fc946cde..51358c30 100644 --- a/gateway-api/src/gateway_api/sds/test_client.py +++ b/gateway-api/src/gateway_api/sds/test_client.py @@ -2,13 +2,14 @@ Unit tests for :mod:`gateway_api.sds_search`. 
""" -from __future__ import annotations - import pytest from fhir.constants import FHIRSystem +from fhir.r4.resources.bundle import Bundle +from pytest_mock import MockerFixture from stubs.sds.stub import SdsFhirApiStub from gateway_api.common.error import SdsRequestFailedError +from gateway_api.conftest import FakeResponse from gateway_api.get_structured_record import ( ACCESS_RECORD_STRUCTURED_INTERACTION_ID, SDS_SANDBOX_INTERACTION_ID, @@ -39,7 +40,7 @@ def test_sds_client_get_org_details_success( :param stub: SDS stub fixture. """ - client = SdsClient(base_url=SdsClient.INT_URL) + client = SdsClient(base_url="https://test.com") result = client.get_org_details(ods_code="PROVIDER") @@ -110,7 +111,7 @@ def test_sds_client_get_org_details_with_endpoint( }, ) - client = SdsClient(base_url=SdsClient.INT_URL) + client = SdsClient(base_url="https://test.com") result = client.get_org_details(ods_code="TESTORG") assert result is not None @@ -127,7 +128,7 @@ def test_sds_client_sends_correct_headers( :param stub: SDS stub fixture. :param mock_requests_get: Capture fixture for request details. """ - client = SdsClient(base_url=SdsClient.INT_URL) + client = SdsClient(base_url="https://test.com") correlation_id = "test-correlation-123" client.get_org_details(ods_code="PROVIDER", correlation_id=correlation_id) @@ -149,7 +150,7 @@ def test_sds_client_timeout_parameter( :param stub: SDS stub fixture. :param mock_requests_get: Capture fixture for request details. """ - client = SdsClient(base_url=SdsClient.INT_URL, timeout=30) + client = SdsClient(base_url="https://test.com", timeout=30) client.get_org_details(ods_code="PROVIDER", timeout=60) @@ -191,7 +192,7 @@ def test_sds_client_custom_service_interaction_id( ) client = SdsClient( - base_url=SdsClient.INT_URL, + base_url="https://test.com", service_interaction_id=custom_interaction, ) @@ -217,7 +218,7 @@ def test_sds_client_builds_correct_device_query_params( :param stub: SDS stub fixture. 
:param mock_requests_get: Capture fixture for request details. """ - client = SdsClient(base_url=SdsClient.INT_URL) + client = SdsClient(base_url="https://test.com") client.get_org_details(ods_code="PROVIDER") @@ -246,7 +247,7 @@ def test_sds_client_uses_sandbox_interaction_id_for_sandbox_url( """ # Seed the stub with data keyed by the sandbox interaction ID stub.upsert_device( - organization_ods="SANDBOXORG", + organization_ods="SANDBOX_ORG", service_interaction_id=SDS_SANDBOX_INTERACTION_ID, device={ "resourceType": "Device", @@ -260,14 +261,16 @@ def test_sds_client_uses_sandbox_interaction_id_for_sandbox_url( "owner": { "identifier": { "system": FHIRSystem.ODS_CODE, - "value": "SANDBOXORG", + "value": "SANDBOX_ORG", } }, }, ) - client = SdsClient(base_url=SdsClient.SANDBOX_URL) - result = client.get_org_details(ods_code="SANDBOXORG", get_endpoint=False) + client = SdsClient( + base_url="https://sandbox.api.service.nhs.uk/spine-directory/FHIR/R4" + ) + result = client.get_org_details(ods_code="SANDBOX_ORG", get_endpoint=False) # Verify the sandbox interaction ID was sent params = stub.get_params @@ -310,7 +313,7 @@ def get_without_apikey( monkeypatch.setattr("gateway_api.sds.client.get", get_without_apikey) - client = SdsClient(base_url=SdsClient.SANDBOX_URL) + client = SdsClient(base_url="https://test.com") with pytest.raises(SdsRequestFailedError, match="SDS FHIR API request failed"): client.get_org_details(ods_code="PROVIDER") @@ -350,7 +353,7 @@ def test_sds_client_endpoint_entry_without_address_returns_none( }, ) - client = SdsClient(base_url=SdsClient.INT_URL) + client = SdsClient(base_url="https://test.com") result = client.get_org_details(ods_code="NOADDR") assert result.asid == "111111111111" @@ -366,9 +369,9 @@ def test_sds_client_empty_device_bundle_returns_none_asid() -> None: :param stub: SDS stub fixture. 
""" - client = SdsClient(base_url=SdsClient.INT_URL) - # "UNKNOWNORG" has no seeded devices, so the bundle entry list will be empty - result = client.get_org_details(ods_code="UNKNOWNORG", get_endpoint=False) + client = SdsClient(base_url="https://test.com") + # "UNKNOWN_ORG" has no seeded devices, so the bundle entry list will be empty + result = client.get_org_details(ods_code="UNKNOWN_ORG", get_endpoint=False) assert result.asid is None @@ -395,8 +398,24 @@ def test_sds_client_no_endpoint_bundle_entries_returns_none_endpoint( ) # Deliberately do not seed any endpoint for NOENDPOINT - client = SdsClient(base_url=SdsClient.INT_URL) + client = SdsClient(base_url="https://test.com") result = client.get_org_details(ods_code="NOENDPOINT") assert result.asid == "222222222222" assert result.endpoint is None + + +def test_sds_client_respects_url( + mocker: MockerFixture, +) -> None: + empty_bundle = Bundle.empty("searchset").model_dump() + mocked_get = mocker.patch( + "gateway_api.sds.client.get", + return_value=FakeResponse(status_code=200, headers={}, _json=empty_bundle), + ) + + client = SdsClient(base_url="https://a.different.url/base") + _ = client.get_org_details(ods_code="A12345", get_endpoint=False) + + actual_url = mocked_get.call_args.args[0] + assert actual_url == "https://a.different.url/base/Device" diff --git a/gateway-api/src/gateway_api/test_app.py b/gateway-api/src/gateway_api/test_app.py index e6f808e0..140a8905 100644 --- a/gateway-api/src/gateway_api/test_app.py +++ b/gateway-api/src/gateway_api/test_app.py @@ -1,7 +1,6 @@ """Unit tests for the Flask app endpoints.""" import json -import os from collections.abc import Generator from copy import copy from typing import Any @@ -12,40 +11,101 @@ from flask.testing import FlaskClient from pytest_mock import MockerFixture -from gateway_api.app import app, get_app_host, get_app_port +from gateway_api.app import ( + app, + configure_app, + get_env_var, + log_env_vars, + start_app, +) +from 
gateway_api.conftest import ScopedEnvVars @pytest.fixture def client() -> Generator[FlaskClient[Flask]]: - app.config["TESTING"] = True + with ScopedEnvVars( + { + "FLASK_HOST": "localhost", + "FLASK_PORT": "5000", + "PDS_URL": "http://test-pds-url", + "SDS_URL": "http://test-sds-url", + } + ): + configure_app(app) + app.config["TESTING"] = True with app.test_client() as client: yield client class TestAppInitialization: - def test_get_app_host_returns_set_host_name(self) -> None: - os.environ["FLASK_HOST"] = "host_is_set" + def test_get_env_var_when_env_var_is_set(self) -> None: + with ScopedEnvVars({"FLASK_HOST": "host_is_set"}): + actual = get_env_var("FLASK_HOST", str) + assert actual == "host_is_set" + + def test_get_env_var_raises_runtime_error_if_env_var_not_set(self) -> None: + with ScopedEnvVars({"FLASK_HOST": None}), pytest.raises(RuntimeError): + _ = get_env_var("FLASK_HOST", str) + + def test_get_env_var_raises_runtime_error_if_loader_fails(self) -> None: + with ScopedEnvVars({"FLASK_PORT": "not_an_int"}), pytest.raises(RuntimeError): + _ = get_env_var("FLASK_PORT", int) + + def test_configure_app(self) -> None: + test_app = Mock() + config = { + "FLASK_HOST": "test_host", + "FLASK_PORT": "1234", + "PDS_URL": "test_pds_url", + "SDS_URL": "test_sds_url", + } - actual = get_app_host() - assert actual == "host_is_set" + with ScopedEnvVars(config): + configure_app(test_app) - def test_get_app_host_raises_runtime_error_if_host_name_not_set(self) -> None: - del os.environ["FLASK_HOST"] + expected = { + "FLASK_HOST": "test_host", + "FLASK_PORT": 1234, + "PDS_URL": "test_pds_url", + "SDS_URL": "test_sds_url", + } + test_app.config.update.assert_called_with(expected) - with pytest.raises(RuntimeError): - _ = get_app_host() + def test_logging_environment_variables_on_app_initialization( + self, mocker: MockerFixture + ) -> None: + log_mock_info = mocker.patch("gateway_api.app._logger.info") - def test_get_app_port_returns_set_port_number(self) -> None: - 
os.environ["FLASK_PORT"] = "8080" + config = { + "FLASK_HOST": "test_host", + "FLASK_PORT": "1234", + "PDS_URL": "test_pds_url", + "SDS_URL": "test_sds_url", + } + with ScopedEnvVars(config): + log_env_vars() + + # Check that the environment variables were logged + log_mock_info.assert_called_with( + { + "description": "Initializing Flask app", + "env_vars": config, + } + ) - actual = get_app_port() - assert actual == 8080 + def test_start_app_logs_startup_details(self) -> None: + test_app = Mock() + test_app.config = {} + + test_env_vars = { + "FLASK_HOST": "test_host", + "FLASK_PORT": "1234", + } - def test_get_app_port_raises_runtime_error_if_port_not_set(self) -> None: - del os.environ["FLASK_PORT"] + with ScopedEnvVars(test_env_vars): + start_app(test_app) - with pytest.raises(RuntimeError): - _ = get_app_port() + test_app.run.assert_called_with(host="test_host", port=1234) class TestGetStructuredRecord: diff --git a/gateway-api/src/gateway_api/test_controller.py b/gateway-api/src/gateway_api/test_controller.py index d6cfb1c1..0cb660ef 100644 --- a/gateway-api/src/gateway_api/test_controller.py +++ b/gateway-api/src/gateway_api/test_controller.py @@ -42,12 +42,19 @@ def _create_patient(nhs_number: str, gp_ods_code: str | None) -> Patient: ) +def create_test_controller( + pds_base_url: str = "https://example.test/pds", + sds_base_url: str = "https://example.test/sds", +) -> Controller: + return Controller(pds_base_url=pds_base_url, sds_base_url=sds_base_url) + + def test_controller_run_happy_path_returns_200_status_code( mock_happy_path_get_structured_record_request: Request, ) -> None: request = GetStructuredRecordRequest(mock_happy_path_get_structured_record_request) - controller = Controller() + controller = create_test_controller() actual_response = controller.run(request) assert actual_response.status_code == 200 @@ -59,7 +66,7 @@ def test_controller_run_happy_path_returns_returns_expected_body( ) -> None: request = 
GetStructuredRecordRequest(mock_happy_path_get_structured_record_request) - controller = Controller() + controller = create_test_controller() actual_response = controller.run(request) assert actual_response.json() == valid_simple_response_payload @@ -74,7 +81,7 @@ def test_get_pds_details_returns_provider_ods_code_for_happy_path( "gateway_api.pds.PdsClient.search_patient_by_nhs_number", return_value=_create_patient(nhs_number, "A12345"), ) - controller = Controller(pds_base_url="https://example.test/pds", timeout=7) + controller = create_test_controller() actual = controller._get_pds_details(auth_token, nhs_number) # noqa: SLF001 testing private method @@ -91,7 +98,7 @@ def test_get_pds_details_raises_no_current_provider_when_ods_code_missing_in_pds return_value=_create_patient(nhs_number, None), ) - controller = Controller() + controller = create_test_controller() with pytest.raises( NoCurrentProviderError, @@ -117,7 +124,7 @@ def test_get_sds_details_returns_consumer_and_provider_details_for_happy_path( side_effect=sds_results, ) - controller = Controller() + controller = create_test_controller() expected = ("ConsumerASID", "ProviderASID", "https://example.provider.org/endpoint") actual = controller._get_sds_details(consumer_ods, provider_ods) # noqa: SLF001 testing private method @@ -135,7 +142,7 @@ def test_get_sds_details_raises_no_organisation_found_when_sds_returns_none( return_value=no_results_for_provider, ) - controller = Controller() + controller = create_test_controller() with pytest.raises( NoOrganisationFoundError, @@ -157,7 +164,7 @@ def test_get_sds_details_raises_no_asid_found_when_sds_returns_empty_asid( return_value=blank_asid_sds_result, ) - controller = Controller() + controller = create_test_controller() with pytest.raises( NoAsidFoundError, @@ -180,7 +187,7 @@ def test_get_sds_details_raises_no_current_endpoint_when_sds_returns_empty_endpo return_value=blank_endpoint_sds_result, ) - controller = Controller() + controller = 
create_test_controller() with pytest.raises( NoCurrentEndpointError, @@ -207,7 +214,7 @@ def test_get_sds_details_raises_no_org_found_when_sds_returns_none_for_consumer( side_effect=[happy_path_provider_sds_result, none_result_for_consumer], ) - controller = Controller() + controller = create_test_controller() with pytest.raises( NoOrganisationFoundError, @@ -233,7 +240,7 @@ def test_get_sds_details_raises_no_asid_found_when_sds_returns_empty_consumer_as side_effect=[happy_path_provider_sds_result, consumer_asid_blank_sds_result], ) - controller = Controller() + controller = create_test_controller() with pytest.raises( NoAsidFoundError, @@ -337,7 +344,7 @@ def test_controller_creates_jwt_token_with_correct_claims( get_structured_record_request = GetStructuredRecordRequest(request) - controller = Controller() + controller = create_test_controller() controller.run(get_structured_record_request) # Verify that GpProviderClient was called and extract the JWT token @@ -351,3 +358,85 @@ def test_controller_creates_jwt_token_with_correct_claims( # Verify the requesting organization matches the consumer ODS assert jwt_token.requesting_organization["identifier"][0]["value"] == consumer_ods + + +def test_controller_respects_pds_url( + mocker: MockerFixture, + happy_path_pds_response_body: dict[str, Any], +) -> None: + """ + Test that the controller uses the PDS URL provided in the constructor. 
+ """ + mocked_get_pds = mocker.patch( + "gateway_api.pds.client.get", + return_value=FakeResponse( + status_code=200, headers={}, _json=happy_path_pds_response_body + ), + ) + custom_pds_url = "https://a.different.url/base" + + controller = create_test_controller(pds_base_url=custom_pds_url) + controller._get_pds_details("auth_token", "9000000009") + + actual_pds_url = mocked_get_pds.call_args.args[0] + assert actual_pds_url == "https://a.different.url/base/Patient/9000000009" + + +def test_controller_respects_sds_url( + mocker: MockerFixture, +) -> None: + """ + Test that the controller uses the SDS URL provided in the constructor. + """ + device_bundle = { + "resourceType": "Bundle", + "type": "searchset", + "total": 1, + "entry": [ + { + "fullUrl": "https://example.test/sds/Device/1", + "resource": { + "resourceType": "Device", + "identifier": [ + { + "system": "https://fhir.nhs.uk/Id/nhsSpineASID", + "value": "test-asid", + }, + { + "system": "https://fhir.nhs.uk/Id/nhsMhsPartyKey", + "value": "test-party-key", + }, + ], + }, + } + ], + } + endpoint_bundle = { + "resourceType": "Bundle", + "type": "searchset", + "total": 1, + "entry": [ + { + "fullUrl": "https://example.test/sds/Endpoint/1", + "resource": { + "resourceType": "Endpoint", + "address": "https://example.provider.org/endpoint", + }, + } + ], + } + mocked_get_sds = mocker.patch( + "gateway_api.sds.client.get", + side_effect=[ + FakeResponse(status_code=200, headers={}, _json=device_bundle), + FakeResponse(status_code=200, headers={}, _json=endpoint_bundle), + FakeResponse(status_code=200, headers={}, _json=device_bundle), + ], + ) + custom_sds_url = "https://a.different.url/base" + + controller = create_test_controller(sds_base_url=custom_sds_url) + controller._get_sds_details("test-ods-1", "test-ods-2") + + actual_sds_url = mocked_get_sds.call_args.args[0] + assert actual_sds_url == "https://a.different.url/base/Device" diff --git 
a/gateway-api/stubs/stubs/data/patients/orange_box_trigger_9690937278.json b/gateway-api/stubs/stubs/data/patients/orange_box_trigger_9690937278.json index 736fe103..9f1eb640 100644 --- a/gateway-api/stubs/stubs/data/patients/orange_box_trigger_9690937278.json +++ b/gateway-api/stubs/stubs/data/patients/orange_box_trigger_9690937278.json @@ -28,6 +28,7 @@ "identifier": { "system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "S55555", + "system": "https://fhir.nhs.uk/Id/ods-organization-code", "period": {"start": "2020-01-01", "end": "9999-12-31"} } } diff --git a/gateway-api/tests/README.md b/gateway-api/tests/README.md index 9a8a9315..9cc116b0 100644 --- a/gateway-api/tests/README.md +++ b/gateway-api/tests/README.md @@ -23,12 +23,6 @@ tests/ ``` ## Running Tests -> -> [!NOTE]
-> When running tests the following environment variables need to be provided: -> -> - `BASE_URL` - defines the protocol, hostname and port that should used to access the running APIs. Should be included as a URL in the format ::, for example "" if the APIs are available on the "localhost" host via HTTP using port 5000. If running locally in a dev container, using gateway-api as the host will allow the tests to communicate with an instance launched from `make deploy`. -> - `HOST` - defines the hostname that should be used to access the running APIs. This should match the host portion of the URL provided in the `BASE_URL` environment variable above. ### Install Dependencies (if not using Dev container) ```bash cd gateway-api poetry sync ``` +### Setup `.env.test` file + +When `make test-*` is called, the script will `source` the variables in `.env.test` before running the tests. + +Use the following commands to create the appropriate `.env.test` file for the target environment. + +* `make env-test-local` to write a `.env.test` file for testing a locally deployed application, from within the dev container. +* `make env-test-ci` to write a `.env.test` file for testing a locally deployed application, from outside the dev container. +* `make env-test-pr-` to write a `.env.test` file for testing an application deployed behind a PR proxy. +* `make env-test-alpha-int` to write a `.env.test` file for testing an application deployed to the "alpha integration" environment. + +_Note: Unit tests require the `.env` file, as these tests do not test a deployed application_ + ### Run All Tests (with Verbose Output) From the root of the repository: @@ -67,14 +74,16 @@ pytest gateway-api/tests/schema/ -v Run `make deploy` before running any tests that hit the real API.
-- **Requires deployed/running app (`make deploy`):** - - Acceptance tests (`tests/acceptance/`) - - Integration tests (`tests/integration/`) - - Schema validation tests (`tests/schema/`) - - Provider contract tests (`tests/contract/test_provider_contract.py`) -- **Does not require deployed/running app:** - - Unit tests - - Consumer contract tests (`tests/contract/test_consumer_contract.py`) - these run against a Pact mock server +This command requires a `.env` file to set the app's behaviour - see `env.mk` on how to make `.env` files. + +* **Requires deployed/running app (`make deploy`):** + * Acceptance tests (`tests/acceptance/`) + * Integration tests (`tests/integration/`) + * Schema validation tests (`tests/schema/`) + * Provider contract tests (`tests/contract/test_provider_contract.py`) +* **Does not require deployed/running app:** + * Unit tests + * Consumer contract tests (`tests/contract/test_consumer_contract.py`) - these run against a Pact mock server ## Test Types @@ -84,9 +93,9 @@ behaviour-driven development (BDD) tests using pytest-bdd and Gherkin syntax. Th **Structure:** -- **Feature files** (`features/*.feature`): Written in Gherkin, these define scenarios in plain language -- **Step definitions** (`steps/*.py`): Python implementations that map Gherkin steps to actual test code -- **Test bindings** (`scenarios/test_*.py`): Link scenarios to pytest test functions using `@scenario` decorator +* **Feature files** (`features/*.feature`): Written in Gherkin, these define scenarios in plain language +* **Step definitions** (`steps/*.py`): Python implementations that map Gherkin steps to actual test code +* **Test bindings** (`scenarios/test_*.py`): Link scenarios to pytest test functions using `@scenario` decorator **How it works:** @@ -114,23 +123,23 @@ Integration tests that validate the APIs’ behaviour through HTTP requests. 
The **How it works:** -- Tests use the `Client` class from `conftest.py` to interact with the API -- The client sends HTTP POST requests to the APIs -- Tests verify response status codes, headers, and response bodies -- Tests validate both successful requests and error handling +* Tests use the `Client` class from `conftest.py` to interact with the API +* The client sends HTTP POST requests to the APIs +* Tests verify response status codes, headers, and response bodies +* Tests validate both successful requests and error handling **Example test cases:** -- Successful "hello world" responses -- Error handling for missing or empty payloads -- Error handling for non-existent resources -- Content-Type header validation +* Successful "hello world" responses +* Error handling for missing or empty payloads +* Error handling for non-existent resources +* Content-Type header validation **Key difference from acceptance tests:** -- Integration tests use direct pytest assertions without Gherkin syntax -- More focused on testing specific API behaviours and edge cases -- Uses the same `Client` fixture as acceptance tests but with standard pytest structure +* Integration tests use direct pytest assertions without Gherkin syntax +* More focused on testing specific API behaviours and edge cases +* Uses the same `Client` fixture as acceptance tests but with standard pytest structure ### Schema Validation Tests (`schema/`) @@ -138,14 +147,14 @@ Property-based API schema validation tests using Schemathesis. 
These tests autom **How it works:** -- Loads the OpenAPI schema from `openapi.yaml` -- Uses the `base_url` fixture to test against the running API -- Automatically generates test cases including: - - Valid inputs - - Edge cases - - Boundary values - - Invalid inputs -- Validates that responses match the schema definitions +* Loads the OpenAPI schema from `openapi.yaml` +* Uses the `base_url` fixture to test against the running API +* Automatically generates test cases including: + * Valid inputs + * Edge cases + * Boundary values + * Invalid inputs +* Validates that responses match the schema definitions ### Contract Testing with Pact (`contract/`) @@ -156,17 +165,17 @@ There are two types of tests in this folder. The stub tests (contained in the st **How it works (Pact-based tests):** 1. **Consumer Tests** (`test_consumer_contract.py`): - - Define what the consumer EXPECTS from the API - - Test against a **mock Pact server** (not the real API) - - The mock server responds based on the defined expectations - - Generates a pact contract file (`GatewayAPIConsumer-GatewayAPIProvider.json`) with all interactions - - **Key point:** These tests don't call the real API + * Define what the consumer EXPECTS from the API + * Test against a **mock Pact server** (not the real API) + * The mock server responds based on the defined expectations + * Generates a pact contract file (`GatewayAPIConsumer-GatewayAPIProvider.json`) with all interactions + * **Key point:** These tests don't call the real API 2. 
**Provider Integration Tests** (`test_provider_contract.py`): - - Verify the **actual deployed API** implementation - - Read the pact contract file generated by consumer tests - - Verify that the real API implementation satisfies the consumer's expectations - - **Key point:** This is where the real API gets tested + * Verify the **actual deployed API** implementation + * Read the pact contract file generated by consumer tests + * Verify that the real API implementation satisfies the consumer's expectations + * **Key point:** This is where the real API gets tested **The Flow:** @@ -178,11 +187,11 @@ Consumer Test → Mock Pact Server → Contract File (JSON) **Why this approach as opposed to a standard integration test?** -- **Explicit contract documentation** - The pact file is a versioned artefact that documents the API contract -- **Contract evolution tracking** - Because of the above - Git diffs will show exactly how API contracts change over time +* **Explicit contract documentation** - The pact file is a versioned artefact that documents the API contract +* **Contract evolution tracking** - Because of the above - Git diffs will show exactly how API contracts change over time -- **Consumer-driven development** - Consumers define their needs; providers verify they meet them -- **Prevents breaking changes** - Provider tests fail if changes break existing consumer expectations +* **Consumer-driven development** - Consumers define their needs; providers verify they meet them +* **Prevents breaking changes** - Provider tests fail if changes break existing consumer expectations ### Contract Testing Workflow @@ -194,18 +203,18 @@ Consumer tests generate the pact contract files in `tests/contract/pacts/` (e.g. 
**Key points:** -- The pact contract file represents the contract between the consumer and provider -- This file is committed to version control so you can track contract changes through git diffs -- The `pact.write_file()` call merges interactions (updates existing or adds new ones) -- Interactions with the same description get replaced; different descriptions get added +* The pact contract file represents the contract between the consumer and provider +* This file is committed to version control so you can track contract changes through git diffs +* The `pact.write_file()` call merges interactions (updates existing or adds new ones) +* Interactions with the same description get replaced; different descriptions get added ## Shared Fixtures Shared fixtures in `tests/conftest.py` are available across all test types: -- **`base_url`**: The base URL of the deployed Lambda function (from `BASE_URL` environment variable highlighted above) -- **`host`**: The hostname of the deployed application (from `HOST` environment variable highlighted above) -- **`client`**: An HTTP client instance for sending requests to the APIs +* **`base_url`**: The base URL of the deployed Lambda function (from `BASE_URL` environment variable highlighted above) +* **`host`**: The hostname of the deployed application (from `HOST` environment variable highlighted above) +* **`client`**: An HTTP client instance for sending requests to the APIs ### Load Testing with Locust ('load/') @@ -253,8 +262,8 @@ JUnit XML format is used for CI/CD integration and test result summaries. 
All re **All Test Types (Unit, Contract, Schema, Integration, Acceptance):** -- Generated with `--junit-xml=test-artefacts/{type}-tests.xml` -- Contains test results, execution times, and failure details +* Generated with `--junit-xml=test-artefacts/{type}-tests.xml` +* Contains test results, execution times, and failure details ### HTML Test Reports @@ -262,12 +271,12 @@ Human-readable HTML reports for detailed test analysis: **Tests using pytest (Unit, Contract, Schema, Integration, Acceptance):** -- Generated with `--html=test-artefacts/{type}-tests.html --self-contained-html` -- Self-contained HTML files with embedded CSS/JavaScript -- Include: - - Test results with pass/fail status - - Execution times - - Test metadata +* Generated with `--html=test-artefacts/{type}-tests.html --self-contained-html` +* Self-contained HTML files with embedded CSS/JavaScript +* Include: + * Test results with pass/fail status + * Execution times + * Test metadata ### CI/CD Report Artefacts @@ -289,10 +298,10 @@ Each test execution script (`scripts/tests/*.sh`) collects coverage data indepen **Unit, Contract, Schema, Integration, and Acceptance Tests** (pytest-based): -- Use `pytest-cov` plugin with `--cov=src/gateway_api` flag -- Generate individual coverage data files: `.coverage` -- Each test type saves its coverage file as `coverage.{type}` (e.g., `coverage.unit`, `coverage.contract`, `coverage.schema`) -- Produce HTML reports for local viewing and terminal output for CI logs +* Use `pytest-cov` plugin with `--cov=src/gateway_api` flag +* Generate individual coverage data files: `.coverage` +* Each test type saves its coverage file as `coverage.{type}` (e.g., `coverage.unit`, `coverage.contract`, `coverage.schema`) +* Produce HTML reports for local viewing and terminal output for CI logs ### CI/CD Coverage Workflow diff --git a/gateway-api/tests/conftest.py b/gateway-api/tests/conftest.py index a44f4961..3a73f619 100644 --- a/gateway-api/tests/conftest.py +++ 
b/gateway-api/tests/conftest.py @@ -2,18 +2,14 @@ import copy import os +from collections.abc import Callable from datetime import timedelta -from typing import Any, Protocol, cast +from typing import Any, cast import pytest import requests -from dotenv import find_dotenv, load_dotenv from fhir.constants import FHIRSystem -# Load environment variables from .env file in the workspace root -load_dotenv(find_dotenv(usecwd=True)) - - DEFAULT_REQUEST_HEADERS = { "Content-Type": "application/fhir+json", "Ods-from": "CONSUMER", @@ -35,93 +31,18 @@ } -class Client(Protocol): - """Protocol defining the interface for HTTP clients.""" - - base_url: str - - def send_to_get_structured_record_endpoint( - self, payload: str, headers: dict[str, str] | None = None - ) -> requests.Response: - """ - Send a request to the get_structured_record endpoint with the given NHS number. - """ - ... - - def send_health_check(self) -> requests.Response: - """ - Send a health check request to the API. - """ - ... - - def send_post_to_path( - self, - path: str, - payload: str, - headers: dict[str, str] | None = None, - ) -> requests.Response: - """Send a POST request to the given API path.""" - ... - - -class LocalClient: - """HTTP client that sends requests directly to the API. - - Includes optional Apigee Authorization headers when an access token is provided. 
- """ - - def __init__( - self, - base_url: str, - auth_headers: dict[str, str], - timeout: timedelta = timedelta(seconds=35), - ): - self.base_url = base_url - self._auth_headers = auth_headers - self._timeout = timeout.total_seconds() - - def send_to_get_structured_record_endpoint( - self, payload: str, headers: dict[str, str] | None = None - ) -> requests.Response: - return self.send_post_to_path( - path="/patient/$gpc.getstructuredrecord", - payload=payload, - headers=headers, - ) - - def send_post_to_path( - self, - path: str, - payload: str, - headers: dict[str, str] | None = None, - ) -> requests.Response: - url = f"{self.base_url}/{path.lstrip('/')}" - default_headers = self._auth_headers | DEFAULT_REQUEST_HEADERS - if headers: - default_headers.update(headers) - - return requests.post( - url=url, - data=payload, - headers=default_headers, - timeout=self._timeout, - ) - - def send_health_check(self) -> requests.Response: - url = f"{self.base_url}/health" - return requests.get(url=url, headers=self._auth_headers, timeout=self._timeout) - - -class RemoteClient: - """HTTP client for remote testing via the APIM proxy.""" +class Client: + """Client for sending HTTP requests""" def __init__( self, base_url: str, + health_endpoint: str, auth_headers: dict[str, str], timeout: timedelta = timedelta(seconds=35), ): self.base_url = base_url + self.health_endpoint = health_endpoint self._auth_headers = auth_headers self._timeout = timeout.total_seconds() @@ -154,7 +75,7 @@ def send_post_to_path( ) def send_health_check(self) -> requests.Response: - url = f"{self.base_url}/_status" + url = f"{self.base_url}/{self.health_endpoint}" return requests.get(url=url, headers=self._auth_headers, timeout=self._timeout) @@ -164,9 +85,8 @@ def simple_request_payload() -> dict[str, Any]: @pytest.fixture -def get_headers(request: pytest.FixtureRequest) -> dict[str, str]: +def headers(env: str, request: pytest.FixtureRequest) -> dict[str, str]: """Return auth headers for remote tests, 
or Apigee token for local.""" - env = request.config.getoption("--env") if env == "local": token = os.environ.get("APIGEE_ACCESS_TOKEN", "") return {"Authorization": f"Bearer {token}"} if token else {} @@ -182,63 +102,55 @@ def get_headers(request: pytest.FixtureRequest) -> dict[str, str]: @pytest.fixture def client( - request: pytest.FixtureRequest, base_url: str, get_headers: dict[str, str] + base_url: str, + health_endpoint: str, + headers: dict[str, str], ) -> Client: - env = request.config.getoption("--env") - - if env == "local": - return LocalClient(base_url=base_url, auth_headers=get_headers) - elif env == "remote": - proxy_url = request.getfixturevalue("nhsd_apim_proxy_url") - return RemoteClient(base_url=proxy_url, auth_headers=get_headers) - else: - raise ValueError(f"Unknown env: {env}") + return Client( + base_url=base_url, health_endpoint=health_endpoint, auth_headers=headers + ) @pytest.fixture(scope="module") -def base_url() -> str: +def env() -> str: + return get_env() + + +def get_env() -> str: + return _fetch_env_variable("TARGET_ENV", str) + + +@pytest.fixture +def base_url(env: str, request: pytest.FixtureRequest) -> str: """Retrieves the base URL of the currently deployed application.""" + if env == "remote": + return str(request.getfixturevalue("nhsd_apim_proxy_url")) return _fetch_env_variable("BASE_URL", str) -@pytest.fixture(scope="module") -def hostname() -> str: - """Retrieves the hostname of the currently deployed application.""" - return _fetch_env_variable("HOST", str) +@pytest.fixture +def health_endpoint(env: str) -> str: + if env == "local": + return "health" + else: + return "_status" -def _fetch_env_variable[T]( - name: str, - t: type[T], # NOQA ARG001 This is actually used for type hinting -) -> T: +def _fetch_env_variable[T](name: str, parser: Callable[[str], T]) -> T: value = os.getenv(name) - if not value: + if value is None: raise ValueError(f"{name} environment variable is not set.") - return cast("T", value) - - 
-REMOTE_TEST_USERNAME_ENV_VAR = "REMOTE_TEST_USERNAME" -DEFAULT_REMOTE_TEST_USERNAME = "656005750101" + return parser(value) def _get_remote_test_username() -> str: """Return the username to use for remote tests, allowing override via env.""" - return os.getenv(REMOTE_TEST_USERNAME_ENV_VAR, DEFAULT_REMOTE_TEST_USERNAME) - - -def pytest_addoption(parser: pytest.Parser) -> None: - parser.addoption( - "--env", - action="store", - default="local", - help="Environment to run tests against", - ) + return _fetch_env_variable("REMOTE_TEST_USERNAME", str) -def pytest_collection_modifyitems( - config: pytest.Config, items: list[pytest.Item] -) -> None: - if config.getoption("--env") == "remote": +def pytest_collection_modifyitems(items: list[pytest.Item]) -> None: + target_is_remote = get_env() == "remote" + if target_is_remote: for item in items: item.add_marker( pytest.mark.nhsd_apim_authorization( diff --git a/gateway-api/tests/contract/test_provider_contract.py b/gateway-api/tests/contract/test_provider_contract.py index ac231f25..1fa29b1b 100644 --- a/gateway-api/tests/contract/test_provider_contract.py +++ b/gateway-api/tests/contract/test_provider_contract.py @@ -12,7 +12,7 @@ from tests.conftest import Client -def test_provider_honors_consumer_contract(get_headers: Any, client: Client) -> None: +def test_provider_honors_consumer_contract(headers: Any, client: Client) -> None: host = urlparse(client.base_url).hostname @@ -22,7 +22,7 @@ def test_provider_honors_consumer_contract(get_headers: Any, client: Client) -> verifier.add_transport(url=client.base_url) # So the Verifier can authenticate with the APIM proxy - verifier.add_custom_headers(get_headers) + verifier.add_custom_headers(headers) verifier.add_source( "tests/contract/pacts/GatewayAPIConsumer-GatewayAPIProvider.json" diff --git a/gateway-api/tests/integration/test_get_structured_record.py b/gateway-api/tests/integration/test_get_structured_record.py index 480ca4f5..c3324622 100644 --- 
a/gateway-api/tests/integration/test_get_structured_record.py +++ b/gateway-api/tests/integration/test_get_structured_record.py @@ -150,6 +150,11 @@ def test_502_status_code_return_when_provider_returns_error( def test_internal_server_error_message_returned_when_provider_returns_error( self, response_when_provider_returns_error: Response ) -> None: + """ + WARNING: This can fail if the CDG_DEBUG env var is set to true in the + deployed app - see GpProviderClient.access_structured_record(). + TODO [GPCAPIM-401]: Ensure integration tests are not affected by CDG_DEBUG + """ expected = { "resourceType": "OperationOutcome", "issue": [ diff --git a/gateway-api/tests/schema/test_openapi_schema.py b/gateway-api/tests/schema/test_openapi_schema.py index 1a239158..1ee0f06a 100644 --- a/gateway-api/tests/schema/test_openapi_schema.py +++ b/gateway-api/tests/schema/test_openapi_schema.py @@ -8,6 +8,7 @@ import schemathesis import yaml +from hypothesis import HealthCheck, settings from schemathesis.generation.case import Case from schemathesis.openapi import from_dict @@ -18,7 +19,15 @@ schema = from_dict(schema_dict) +# Schemathesis warns you that this test is running a function-scoped fixture - a fixture +# that is executed once per test function, not per test case. Given schemathesis +# generates multiple test cases from the same test function, this means that the fixture +# will be executed only once for all generated cases, this can be a problem for certain +# fixtures. However, in this case, as base_url should not change during a test run, +# using the cached value for each test case is acceptable. +# See https://hypothesis.readthedocs.io/en/latest/reference/api.html#hypothesis.HealthCheck.function_scoped_fixture @schema.parametrize() +@settings(suppress_health_check=[HealthCheck.function_scoped_fixture]) def test_api_schema_compliance(case: Case, base_url: str) -> None: """Test API endpoints against the OpenAPI schema. 
diff --git a/infrastructure/README.md b/infrastructure/README.md index dd92267a..2b01b038 100644 --- a/infrastructure/README.md +++ b/infrastructure/README.md @@ -60,7 +60,7 @@ A lightweight Alpine-based Python image that runs the Flask application. Built w - A configurable `PYTHON_VERSION` build argument - A non-root user (`gateway_api_user`) -- Stubs enabled by default (`STUB_PDS`, `STUB_SDS`, `STUB_PROVIDER` all set to `true`) +- Stubs enabled by default (`PDS_URL`, `SDS_URL`, `PROVIDER_URL` all set to `stub`) - Flask listening on `0.0.0.0:8080` ### `build-container` diff --git a/infrastructure/images/gateway-api/Dockerfile b/infrastructure/images/gateway-api/Dockerfile index 00cf681f..f318dada 100644 --- a/infrastructure/images/gateway-api/Dockerfile +++ b/infrastructure/images/gateway-api/Dockerfile @@ -14,9 +14,6 @@ WORKDIR /resources/build/gateway-api ENV PYTHONPATH=/resources/build/gateway-api ENV FLASK_HOST="0.0.0.0" ENV FLASK_PORT="8080" -ENV STUB_PDS="true" -ENV STUB_PROVIDER="true" -ENV STUB_SDS="true" ARG COMMIT_VERSION ENV COMMIT_VERSION=$COMMIT_VERSION diff --git a/scripts/env/app/env.sh b/scripts/env/app/env.sh new file mode 100755 index 00000000..25d74972 --- /dev/null +++ b/scripts/env/app/env.sh @@ -0,0 +1,40 @@ +#!/bin/bash +set -e + +source scripts/env/app/pds.sh +source scripts/env/app/sds.sh +source scripts/env/app/provider.sh +source scripts/env/app/logging.sh + +ENV_FILE=".env" + +PDS_URL=$(get_pds_url "$env") +PDS_API_TOKEN=$(get_pds_api_token "$env") +PDS_API_SECRET=$(get_pds_api_secret "$env") +PDS_API_KID=$(get_pds_api_kid "$env") + +SDS_URL=$(get_sds_url "$env") +SDS_API_TOKEN=$(get_sds_api_token "$env") + +PROVIDER_URL=$(get_provider_url "$env") + +LOG_LEVEL=$(get_log_level "$env") + +cat > "$ENV_FILE" <&2 + return 1 + fi + ;; + *) + echo "" + return 0 + ;; + esac +} + +get_pds_api_secret() { + env="$1" + secret_file=".secrets/pds/api_secret" + case "$env" in + int) + if [[ -f "$secret_file" ]]; then + cat "$secret_file" + return 0 + 
else + printf "Warning: $secret_file not found." >&2 + return 1 + fi + ;; + *) + echo "" + return 0 + ;; + esac +} + +get_pds_api_kid() { + env="$1" + secret_file=".secrets/pds/api_kid" + case "$env" in + int) + if [[ -f "$secret_file" ]]; then + cat "$secret_file" + return 0 + else + printf "Warning: $secret_file not found." >&2 + return 1 + fi + ;; + *) + echo "" + return 0 + ;; + esac +} diff --git a/scripts/env/app/provider.sh b/scripts/env/app/provider.sh new file mode 100755 index 00000000..800fb6a2 --- /dev/null +++ b/scripts/env/app/provider.sh @@ -0,0 +1,21 @@ +#!/bin/bash +set -e + +get_provider_url() { + env="$1" + case "$env" in + int) + # TODO [GPCAPIM-397]: Update this. + echo "stub" + return 0 + ;; + orangebox) + echo "https://orange.testlab.nhs.uk/B82617/STU3/1/gpconnect/structured/fhir/" + return 0 + ;; + *) + echo "stub" + return 0 + ;; + esac +} diff --git a/scripts/env/app/sds.sh b/scripts/env/app/sds.sh new file mode 100755 index 00000000..0d4df765 --- /dev/null +++ b/scripts/env/app/sds.sh @@ -0,0 +1,40 @@ +#!/bin/bash +set -e + +get_sds_url() { + env="$1" + case "$env" in + sandbox) + echo "https://sandbox.api.service.nhs.uk/spine-directory/FHIR/R4" + return 0 + ;; + int) + echo "https://int.api.service.nhs.uk/spine-directory/FHIR/R4" + return 0 + ;; + *) + echo "stub" + return 0 + ;; + esac +} + +get_sds_api_token() { + env="$1" + secret_file=".secrets/sds/api_token" + case "$env" in + int) + if [[ -f "$secret_file" ]]; then + cat "$secret_file" + return 0 + else + printf "Warning: $secret_file not found." 
>&2 + return 1 + fi + ;; + *) + echo "" + return 0 + ;; + esac +} diff --git a/scripts/env/env.mk b/scripts/env/env.mk new file mode 100644 index 00000000..55b8983b --- /dev/null +++ b/scripts/env/env.mk @@ -0,0 +1,51 @@ +.PHONY: env env-% _env + +env env-dev: # Create .env file with environment variables for development environment (stubs) + make _env env="dev" + +env-ci: # Create .env file with environment variables for CI environment (stubs) + make _env env="ci" + +env-orangebox: # Create .env file that will have the app send requests to the provider "orangebox", stubs otherwise. + make _env env="orangebox" + +env-sandbox: # Create .env file that will have the app send requests to the SDS and PDS sandbox environment, stubs otherwise. + make _env env="sandbox" + +env-int: # Create .env file that will have the app send requests to the SDS and PDS integration environments. + make _env env="int" + +env-test-local: # Create .env.test file that will have tests send requests to the local app. + make _env-test env="local" + make env-dev # Ensure unit tests run with stub environment variables + +env-test-ci: # Create .env.test file that will have tests send requests to a CI-local app. + make _env-test env="ci" + make env-ci # Ensure unit tests run with stub environment variables + +env-test-pr-%: # Create .env.test file that will have tests send requests to a proxy deployed for the PR. + make _env-test env="pr-$*" + +env-test-alpha-int: # Create .env.test file that will have tests send requests to the alpha integration environment. 
+ make _env-test env="alpha-int" + +_env: + scripts/env/app/env.sh "$(env)" + +_env-test: + scripts/env/test/env.sh "$(env)" + +${VERBOSE}.SILENT: \ + _env \ + env \ + env-dev \ + env-ci \ + env-orangebox \ + env-sandbox \ + env-int \ + env-int-pds \ + env-int-sds \ + _env-test \ + env-test-local \ + env-test-ci \ + env-test-pr-% diff --git a/scripts/env/test/apigee.sh b/scripts/env/test/apigee.sh new file mode 100644 index 00000000..782b4f41 --- /dev/null +++ b/scripts/env/test/apigee.sh @@ -0,0 +1,16 @@ +#!/bin/bash +set -e + +get_apigee_access_token() { + env="$1" + case "$env" in + pr-*|alpha-int) + echo $(./scripts/get_apigee_token.sh) + return 0 + ;; + *) + echo "" + return 0 + ;; + esac +} diff --git a/scripts/env/test/env.sh b/scripts/env/test/env.sh new file mode 100755 index 00000000..ff3cc857 --- /dev/null +++ b/scripts/env/test/env.sh @@ -0,0 +1,40 @@ +#!/bin/bash +# Use make env-test- to generate a .env.test file for the specified environment. +# See scripts/env/env.mk for the make targets that call this script. +set -e + +source scripts/env/test/target.sh +source scripts/env/test/apigee.sh +source scripts/env/test/user.sh + +env="$1" +BASE_URL=$(get_base_url "$env") +PROXYGEN_API_NAME=$(get_proxygen_api_name "$env") +PROXY_BASE_PATH=$(get_proxy_base_path "$env") +if [[ -z "$PROXYGEN_API_NAME" || -z "$PROXY_BASE_PATH" ]]; then + PROXY_NAME="" +else + PROXY_NAME="${PROXYGEN_API_NAME}--internal-dev--${PROXY_BASE_PATH}" +fi +APIGEE_ACCESS_TOKEN=$(get_apigee_access_token "$env") +TARGET_ENV=$(get_target_env "$env") +REMOTE_TEST_USERNAME=$(get_test_user "$env") + +ENV_FILE=".env.test" + +cat > "$ENV_FILE" <' to generate it." >&2 + exit 1 +fi +source .env.test + +if [[ "$TEST_TYPE" = "unit" ]]; then + if [[ ! -f ".env" ]]; then + echo "Error: .env file not found. Please run 'make env-' to generate it." >&2 + exit 1 + fi + set -a + source .env + set +a +fi + cd gateway-api mkdir -p test-artefacts -echo "Running ${TEST_TYPE} tests..." 
+echo "Running ${TEST_TYPE} tests against ${TARGET_ENV} environment..." +if [[ -n "${PROXY_BASE_PATH:-}" && "$PROXY_BASE_PATH" != "none" ]]; then + echo "Using proxy base path: ${PROXY_BASE_PATH}" +fi # Set coverage path based on test type if [[ "$TEST_TYPE" = "unit" ]]; then @@ -43,34 +62,14 @@ else COV_PATH="src/gateway_api" fi -if [[ "${ENV:-local}" = "remote" ]] && [[ "$TEST_TYPE" != "unit" ]]; then - echo "[run-test] Branch: remote non-unit path" - echo "[run-test] ENV=${ENV:-local}, TEST_TYPE=${TEST_TYPE}" - echo "[run-test] Running via APIM proxy options" - if [[ -z "${PROXY_BASE_PATH:-}" ]]; then - echo "Error: PROXY_BASE_PATH must be set when ENV=remote and TEST_TYPE is not unit" >&2 - exit 1 - fi - # Note: TEST_PATH is intentionally unquoted to allow glob expansion - poetry run pytest ${TEST_PATH} --env="remote" -v \ - --api-name="${PROXYGEN_API_NAME}" \ - --proxy-name="${PROXYGEN_API_NAME}--internal-dev--${PROXY_BASE_PATH}" \ - --cov="${COV_PATH}" \ - --cov-report=html:test-artefacts/coverage-html \ - --cov-report=term \ - --junit-xml="test-artefacts/${TEST_TYPE}-tests.xml" \ - --html="test-artefacts/${TEST_TYPE}-tests.html" --self-contained-html -else - echo "[run-test] Branch: local/default path" - echo "[run-test] ENV=${ENV:-local}, TEST_TYPE=${TEST_TYPE}" - echo "[run-test] Running direct tests without APIM proxy options" - poetry run pytest ${TEST_PATH} -v \ - --cov="${COV_PATH}" \ - --cov-report=html:test-artefacts/coverage-html \ - --cov-report=term \ - --junit-xml="test-artefacts/${TEST_TYPE}-tests.xml" \ - --html="test-artefacts/${TEST_TYPE}-tests.html" --self-contained-html -fi +poetry run pytest ${TEST_PATH} -v \ + --api-name="${PROXYGEN_API_NAME}" \ + --proxy-name="${PROXYGEN_API_NAME}--internal-dev--${PROXY_BASE_PATH}" \ + --cov="${COV_PATH}" \ + --cov-report=html:test-artefacts/coverage-html \ + --cov-report=term \ + --junit-xml="test-artefacts/${TEST_TYPE}-tests.xml" \ + --html="test-artefacts/${TEST_TYPE}-tests.html" --self-contained-html 
# Save coverage data file for merging mv .coverage "test-artefacts/coverage.${TEST_TYPE}" diff --git a/scripts/tests/test.mk b/scripts/tests/test.mk index e03fe39a..e9f4752f 100644 --- a/scripts/tests/test.mk +++ b/scripts/tests/test.mk @@ -69,6 +69,12 @@ test: # Run all the test tasks @Testing test-acceptance\ test-schema +test-load-initial: # Run your load tests from scripts/test/load @Testing + @$(MAKE) _test name="load" + +test-load-ui: # Run your load tests from scripts/test/load @Testing with UI enabled + @UI=true $(MAKE) _test name="load" + _test: set -e script="./scripts/tests/${name}.sh" @@ -97,68 +103,3 @@ ${VERBOSE}.SILENT: \ test-acceptance \ test-schema\ -# --------------------------------------------------------------------------- -# Local vs remote environment helpers -# --------------------------------------------------------------------------- -# Usage: -# make test-local - activate .env.local, run tests against local -# and api-gateway-mock -# make test-remote - activate .env.remote, obtain an APIGEE token, run -# tests against APIM proxy -# -# .env.local - local env config (ENV=local, BASE_URL,HOST) -# .env.remote - remote env config (ENV=remote, BASE_URL,HOST,PR_NUMBER,PROXYGEN_API_NAME) -# .env - active env, overwritten by env-local/env-remote -# -# The .env file is deterministically overwritten each time you switch -# environments so the test runner always sees a consistent configuration. -# =========================================================================== - -.PHONY: env-local env-remote test-local test-remote - -env-local: - @if [ ! -f .env.local ]; then \ - echo "ERROR: .env.local not found. Needs creating." >&2; \ - exit 1; \ - fi - cp -f .env.local .env - @echo "Activated local environment: .env.local -> .env (ENV=local)" - -env-remote: - @if [ ! -f .env.remote ]; then \ - echo "ERROR: .env.remote not found. Needs creating." 
>&2; \ - exit 1; \ - fi - cp -f .env.remote .env - @echo "Activated remote environment: .env.remote -> .env (ENV=remote)" - -# Run tests against local -test-local: env-local - @echo "Obtaining APIGEE access token..." - @set -a && source .env && set +a && \ - APIGEE_ACCESS_TOKEN="$$(./scripts/get_apigee_token.sh)" && \ - export APIGEE_ACCESS_TOKEN && \ - $(MAKE) test - -# Run tests against remote, exporting APIGEE_ACCESS_TOKEN only -test-remote: env-remote - @echo "Obtaining APIGEE access token..." - @set -a && source .env && set +a && \ - APIGEE_ACCESS_TOKEN="$$(./scripts/get_apigee_token.sh)" && \ - BASE_URL="$${BASE_URL}-pr-$${PR_NUMBER}" && \ - export APIGEE_ACCESS_TOKEN BASE_URL && \ - $(MAKE) test - -# Run your load tests from scripts/test/load @Testing -test-load-initial: - @$(MAKE) _test name="load" - -# Run your load tests from scripts/test/load @Testing with UI enabled -test-load-ui: - @bash -c '\ - if [[ -z "$$APIGEE_ACCESS_TOKEN" ]]; then \ - set -a && source .env && set +a && \ - APIGEE_ACCESS_TOKEN=$$(./scripts/get_apigee_token.sh) && \ - export APIGEE_ACCESS_TOKEN; \ - fi && \ - UI=true $(MAKE) _test name="load"'