From 3b983d955c8b0b901d8e669a1a5fa9dff369e54c Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Fri, 16 Aug 2024 16:05:46 +0100 Subject: [PATCH] [PRMP-594] - ODS improvements MI API (#13) New DynamoDB tables have been created to hold weekly updated ODS information and monthly updated ICB information. The enrichment lambda has been modified to query the respective tables for enrichment data, falling back to the previous API method if no data exists. GitHub Actions has also been introduced as a replacement for GoCD. --- .github/workflows/PR-terraform-plan.yml | 28 +++ .../workflows/base-gp-registrations-mi.yml | 171 +++++++++++++++ .../base-terraform-plan-and-apply.yml | 122 +++++++++++ .../workflows/deploy-gp-registrations-mi.yml | 25 +++ .../workflows/deploy-terraform-by-stack.yml | 37 ++++ .github/workflows/pr-gp-registrations-mi.yml | 25 +++ .gitignore | 1 + Dojofile | 1 - README.md | 47 +--- environment.json | 16 -- gocd/base-networking.pipeline.gocd.yaml | 114 ---------- .../container-repositories.pipeline.gocd.yaml | 114 ---------- gocd/ecs-cluster.pipeline.gocd.yaml | 114 ---------- gocd/gp-registrations-mi.pipeline.gocd.yaml | 204 ------------------ lambda/bulk-ods-update/bulk_ods_update.py | 188 ++++++++++++++++ .../event-enrichment/event_enrichment_main.py | 179 +++++++++++---- .../test_event_enrichment_main.py | 8 +- lambda/mi-enrichment-requirements.txt | 1 + scripts/ecr-helper | 84 -------- scripts/promote-image | 40 ---- sonar-project.properties | 2 +- stacks/base-networking/terraform/output.tf | 6 +- .../terraform/private-subnet.tf | 8 +- .../terraform/public-subnet.tf | 10 +- stacks/base-networking/terraform/vpc.tf | 4 +- .../container-repositories/terraform/ecr.tf | 33 ++- .../terraform/output.tf | 2 +- stacks/ecs-cluster/terraform/ecs-cluster.tf | 4 +- stacks/ecs-cluster/terraform/output.tf | 6 +- .../scripts/get_latest_ods_csv.py | 116 ++++++++++ .../terraform/api-gateway.tf | 16 +- .../terraform/dynamodb-gp-ods.tf | 73 +++++++ .../terraform/dynamodb-icb-ods.tf | 74 +++++++ .../terraform/ecs-service.tf | 2 +- .../terraform/error-alarm-alert-lambda.tf | 20 +- .../terraform/event-enrichment-lambda.tf | 39 ++-- .../terraform/iam-event-enrichment.tf | 34 ++- .../terraform/lambda-bulk-ods-update.tf | 74 +++++++ .../terraform/lambda_layer.tf | 6 + .../terraform/load-balancer.tf | 6 +- .../terraform/placeholder_lambda_payload.zip | Bin 0 -> 280 bytes .../gp-registrations-mi/terraform/provider.tf | 2 +- stacks/gp-registrations-mi/terraform/queue.tf | 32 +-- .../terraform/s3-event-uploader-lambda.tf | 18 +- .../terraform/s3-ods-csv-updates.tf | 88 ++++++++ stacks/gp-registrations-mi/terraform/s3.tf | 24 ++- .../terraform/security-group.tf | 2 +- .../splunk-cloud-event-uploader-lambda.tf | 20 +- .../terraform/task-definition.tf | 2 +- stacks/gp-registrations-mi/terraform/topic.tf | 6 +- .../terraform/variables.tf | 26 ++- tasks | 126 ----------- tasks_github_actions.sh | 61 ++++++ utils/__init__.py | 0 utils/enums/__init__.py | 0 utils/enums/trud.py | 12 ++ utils/models/__init__.py | 0 utils/models/ods_models.py | 25 +++ utils/services/__init__.py | 0 utils/services/ssm_service.py | 7 + utils/services/trud_api_service.py | 67 ++++++ utils/trud_files.py | 68 ++++++ 62 files changed, 1616 insertions(+), 1024 deletions(-) create mode 100644 .github/workflows/PR-terraform-plan.yml create mode 100644 .github/workflows/base-gp-registrations-mi.yml create mode 100644 .github/workflows/base-terraform-plan-and-apply.yml create mode 100644 
.github/workflows/deploy-gp-registrations-mi.yml create mode 100644 .github/workflows/deploy-terraform-by-stack.yml create mode 100644 .github/workflows/pr-gp-registrations-mi.yml delete mode 100644 Dojofile delete mode 100644 environment.json delete mode 100644 gocd/base-networking.pipeline.gocd.yaml delete mode 100644 gocd/container-repositories.pipeline.gocd.yaml delete mode 100644 gocd/ecs-cluster.pipeline.gocd.yaml delete mode 100644 gocd/gp-registrations-mi.pipeline.gocd.yaml create mode 100644 lambda/bulk-ods-update/bulk_ods_update.py create mode 100644 lambda/mi-enrichment-requirements.txt delete mode 100755 scripts/ecr-helper delete mode 100755 scripts/promote-image create mode 100644 stacks/gp-registrations-mi/scripts/get_latest_ods_csv.py create mode 100644 stacks/gp-registrations-mi/terraform/dynamodb-gp-ods.tf create mode 100644 stacks/gp-registrations-mi/terraform/dynamodb-icb-ods.tf create mode 100644 stacks/gp-registrations-mi/terraform/lambda-bulk-ods-update.tf create mode 100644 stacks/gp-registrations-mi/terraform/lambda_layer.tf create mode 100644 stacks/gp-registrations-mi/terraform/placeholder_lambda_payload.zip create mode 100644 stacks/gp-registrations-mi/terraform/s3-ods-csv-updates.tf delete mode 100755 tasks create mode 100755 tasks_github_actions.sh create mode 100644 utils/__init__.py create mode 100644 utils/enums/__init__.py create mode 100644 utils/enums/trud.py create mode 100644 utils/models/__init__.py create mode 100644 utils/models/ods_models.py create mode 100644 utils/services/__init__.py create mode 100644 utils/services/ssm_service.py create mode 100644 utils/services/trud_api_service.py create mode 100644 utils/trud_files.py diff --git a/.github/workflows/PR-terraform-plan.yml b/.github/workflows/PR-terraform-plan.yml new file mode 100644 index 0000000..a490507 --- /dev/null +++ b/.github/workflows/PR-terraform-plan.yml @@ -0,0 +1,28 @@ +name: "PR-terraform-plan" +on: + pull_request: + branches: + - master + paths: + - 'stacks/**' + +permissions: + pull-requests: write + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout + +jobs: + terraform_plan: + strategy: + fail-fast: false + max-parallel: 1 + matrix: + environment: [ dev, prod-plan ] + terraform_stack: [container-repositories, base-networking, ecs-cluster] + uses: ./.github/workflows/base-terraform-plan-and-apply.yml + with: + environment: ${{ matrix.environment }} + terraform_stack: ${{ matrix.terraform_stack }} + secrets: inherit + + diff --git a/.github/workflows/base-gp-registrations-mi.yml b/.github/workflows/base-gp-registrations-mi.yml new file mode 100644 index 0000000..773eade --- /dev/null +++ b/.github/workflows/base-gp-registrations-mi.yml @@ -0,0 +1,171 @@ +name: base-gp-registrations-mi +on: + workflow_call: + inputs: + environment: + description: "Which Environment settings to use." 
+ required: true + type: string + default: "dev" + is_deployment: + description: "Is workflow run on deployment" + type: boolean + default: false + +jobs: + terraform_process: + runs-on: ubuntu-latest + environment: ${{ inputs.environment }} + steps: + - name: Checkout + uses: actions/checkout@v4 + + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-region: ${{ vars.AWS_REGION }} + role-to-assume: ${{(inputs.is_deployment && secrets.AWS_ASSUME_ROLE) || secrets.AWS_ASSUME_ROLE_READ_ONLY}} + role-skip-session-tagging: true + + - name: Publish Docker Image to Prod + id: push-image-to-prod + if: inputs.environment == 'prod' && inputs.is_deployment + + run: | + aws ecr get-login-password --region ${{ vars.AWS_REGION }} | docker login --username AWS --password-stdin ${{ secrets.AWS_ECR_DEV_REPOSITORY }} + IMAGE_TAG=$(aws ecr describe-images --registry-id ${{ secrets.AWS_DEV_ACCOUNT_ID }} --repository-name ${{ secrets.ECR_REPOSITORY_NAME }} --query 'sort_by(imageDetails,& imagePushedAt)[-1].imageTags[0]') + source_repo=${{ secrets.AWS_ECR_DEV_REPOSITORY }}/${{ secrets.ECR_REPOSITORY_DEV_NAME }}:${IMAGE_TAG//\"} + destination_repo=${{ secrets.AWS_ECR_PROD_REPOSITORY}}/${{ secrets.ECR_REPOSITORY_NAME }}:${IMAGE_TAG//\"} + docker pull $source_repo + docker tag $source_repo $destination_repo + aws ecr get-login-password --region ${{ vars.AWS_REGION }} | docker login --username AWS --password-stdin ${{ secrets.AWS_ECR_PROD_REPOSITORY }} + docker push $destination_repo + echo "image-tag=$IMAGE_TAG" >> "$GITHUB_OUTPUT" + + - name: Setup Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: latest + + - name: Terraform Format + id: fmt + run: terraform fmt -check + working-directory: ./stacks/gp-registrations-mi/terraform + + - name: Terraform Init + id: init + run: | + terraform init -no-color -backend-config="key=${{ secrets.AWS_STATE_S3_KEY }}" \ + -backend-config="bucket=${{ secrets.AWS_STATE_BUCKET }}" \ + -backend-config="dynamodb_table=${{ secrets.AWS_STATE_LOCK_TABLE }}" + working-directory: ./stacks/gp-registrations-mi/terraform + shell: bash + + - name: Terraform Validate + id: validate + run: terraform validate -no-color + working-directory: ./stacks/gp-registrations-mi/terraform + + - name: Build Lambdas + run: | + ./tasks_github_actions.sh build-lambdas + + - name: Set up Python + if: github.ref == 'refs/heads/master' && inputs.is_deployment + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Make virtual environment + if: github.ref == 'refs/heads/master' && inputs.is_deployment + run: | + python3 -m venv ./venv + ./venv/bin/pip3 install --upgrade pip requests + + - name: Get ODS CSV Files + if: github.ref == 'refs/heads/master' && inputs.is_deployment + run: | + PYTHONPATH=$PYTHONPATH:. ./venv/bin/python3 stacks/gp-registrations-mi/scripts/get_latest_ods_csv.py ${{ secrets.TRUD_API_KEY }} ${{ vars.TRUD_API_URL }} + + - name: Setup Terraform variables + id: vars-prod + if: inputs.environment == 'prod' && inputs.is_deployment + run: |- + cat > pipeline.auto.tfvars < pipeline.auto.tfvars < ${{ vars.AWS_ENVIRONMENT }}.tfplan.txt + echo "summary=$(grep -E 'Plan: [0-9]+ to add, [0-9]+ to change, [0-9]+ to destroy\.|No changes\. Your infrastructure matches the configuration\.' ${{ vars.AWS_ENVIRONMENT }}.tfplan.txt | sed 's/.*No changes\. 
Your infrastructure matches the configuration/Plan: no changes/g' | sed 's/.*Plan: //g' | sed 's/\..*//g')" >> $GITHUB_OUTPUT + working-directory: ./stacks/gp-registrations-mi/terraform + shell: bash + + - name: Add PR comment + uses: actions/github-script@v7 + if: github.event_name == 'pull_request' && (success() || failure()) + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + // 1. Retrieve existing bot comments for the PR + const { data: comments } = await github.rest.issues.listComments({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + }); + const botComment = comments.find(comment => { + return comment.user.type === 'Bot' && comment.body.includes('Report for gp-registrations-mi environment: ${{ inputs.environment }}') + }); + + // 2. Prepare format of the comment + const output = `### Report for gp-registrations-mi environment: ${{ inputs.environment }} + + #### Terraform Format and Style 🖌\`${{ steps.fmt.outcome }}\` + + + #### Terraform Initialization ⚙️\`${{ steps.init.outcome }}\` + + + #### Terraform Validation 🤖\`${{ steps.validate.outcome }}\` + + + #### Terraform Plan 📖\`${{ steps.plan.outcome }}\` + + Plan results: ${{ steps.plan.outputs.summary }}`; + + // 3. If we have a comment, update it, otherwise create a new one + if (botComment) { + github.rest.issues.deleteComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: botComment.id, + }) + } + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: output + }); + + - name: Terraform Apply + if: github.ref == 'refs/heads/master' && inputs.is_deployment + run: terraform apply -auto-approve -input=false ${{ vars.AWS_ENVIRONMENT }}.tfplan + working-directory: ./stacks/gp-registrations-mi/terraform \ No newline at end of file diff --git a/.github/workflows/base-terraform-plan-and-apply.yml b/.github/workflows/base-terraform-plan-and-apply.yml new file mode 100644 index 0000000..73e1824 --- /dev/null +++ b/.github/workflows/base-terraform-plan-and-apply.yml @@ -0,0 +1,122 @@ +name: base-terraform-plan-and-apply +on: + workflow_call: + inputs: + environment: + description: "Which Environment settings to use" + required: true + type: string + default: "dev" + is_deployment: + description: "Is workflow run on deployment" + type: boolean + default: false + terraform_stack: + description: "Which terraform stack directory to run" + type: string + required: true + +jobs: + terraform_process: + runs-on: ubuntu-latest + environment: ${{ inputs.environment }} + defaults: + run: + working-directory: ./stacks/${{ inputs.terraform_stack }}/terraform + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-region: ${{ vars.AWS_REGION }} + role-to-assume: ${{inputs.is_deployment && secrets.AWS_ASSUME_ROLE || secrets.AWS_ASSUME_ROLE_READ_ONLY}} + role-skip-session-tagging: true + + - name: Setup Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: latest + + - name: Terraform Format + id: fmt + run: terraform fmt -check + + - name: Terraform Init + id: init + run: | + terraform init -no-color -backend-config="key=gp-registrations-mi/${{ inputs.terraform_stack }}/terraform.tfstate" \ + -backend-config="bucket=${{ secrets.AWS_STATE_BUCKET }}" \ + -backend-config="dynamodb_table=${{ secrets.AWS_STATE_LOCK_TABLE }}" + shell: bash + + - name: Terraform Validate + 
id: validate + run: terraform validate -no-color + + - name: Setup Terraform variables + id: vars + run: |- + cat > pipeline.auto.tfvars < ${{ vars.AWS_ENVIRONMENT }}.tfplan.txt + echo "summary=$(grep -E 'Plan: [0-9]+ to add, [0-9]+ to change, [0-9]+ to destroy\.|No changes\. Your infrastructure matches the configuration\.' ${{ vars.AWS_ENVIRONMENT }}.tfplan.txt | sed 's/.*No changes\. Your infrastructure matches the configuration/Plan: no changes/g' | sed 's/.*Plan: //g' | sed 's/\..*//g')" >> $GITHUB_OUTPUT + shell: bash + + - name: Add PR comment + uses: actions/github-script@v7 + if: github.event_name == 'pull_request' && (success() || failure()) + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + // 1. Retrieve existing bot comments for the PR + const { data: comments } = await github.rest.issues.listComments({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + }); + const botComment = comments.find(comment => { + return comment.user.type === 'Bot' && comment.body.includes('Report for ${{inputs.terraform_stack}} environment: ${{ inputs.environment }}') + }); + + // 2. Prepare format of the comment + const output = `### Report for ${{inputs.terraform_stack}} environment: ${{ inputs.environment }} + + #### Terraform Format and Style 🖌\`${{ steps.fmt.outcome }}\` + + + #### Terraform Initialization ⚙️\`${{ steps.init.outcome }}\` + + + #### Terraform Validation 🤖\`${{ steps.validate.outcome }}\` + + + #### Terraform Plan 📖\`${{ steps.plan.outcome }}\` + + Plan results: ${{ steps.plan.outputs.summary }}`; + + // 3. If we have a comment, update it, otherwise create a new one + if (botComment) { + github.rest.issues.deleteComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: botComment.id, + }) + } + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: output + }); + + - name: Terraform Apply + if: github.ref == 'refs/heads/master' && inputs.is_deployment + run: terraform apply -auto-approve -input=false ${{ vars.AWS_ENVIRONMENT }}.tfplan diff --git a/.github/workflows/deploy-gp-registrations-mi.yml b/.github/workflows/deploy-gp-registrations-mi.yml new file mode 100644 index 0000000..21dd6b6 --- /dev/null +++ b/.github/workflows/deploy-gp-registrations-mi.yml @@ -0,0 +1,25 @@ +name: deploy-gp-registrations-mi +on: + workflow_dispatch: + inputs: + environment: + default: "dev" + description: "Which environment should this run against" + required: true + type: choice + options: + - dev + - prod + +permissions: + pull-requests: write + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout + +jobs: + terraform_process: + uses: ./.github/workflows/base-gp-registrations-mi.yml + with: + environment: ${{ inputs.environment }} + is_deployment: true + secrets: inherit diff --git a/.github/workflows/deploy-terraform-by-stack.yml b/.github/workflows/deploy-terraform-by-stack.yml new file mode 100644 index 0000000..8e24203 --- /dev/null +++ b/.github/workflows/deploy-terraform-by-stack.yml @@ -0,0 +1,37 @@ +name: deploy-terraform-by-stack +on: + workflow_dispatch: + inputs: + environment: + default: "dev" + description: "Which environment should this run against" + required: true + type: choice + options: + - dev + - prod + terraform_stack: + description: "Which terraform stack directory to run" + type: choice + required: true + options: + - container-repositories + - 
base-networking + - ecs-cluster + +permissions: + pull-requests: write + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout + +jobs: + terraform_process: + uses: ./.github/workflows/base-terraform-plan-and-apply.yml + with: + environment: ${{ inputs.environment }} + is_deployment: true + terraform_stack: ${{ inputs.terraform_stack }} + secrets: inherit + + + diff --git a/.github/workflows/pr-gp-registrations-mi.yml b/.github/workflows/pr-gp-registrations-mi.yml new file mode 100644 index 0000000..76ba8f0 --- /dev/null +++ b/.github/workflows/pr-gp-registrations-mi.yml @@ -0,0 +1,25 @@ +name: 'pr-gp-registrations-mi' +on: + pull_request: + branches: + - master + paths: + - 'stacks/gp-registrations-mi/**' + +permissions: + pull-requests: write + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout + +jobs: + terraform_plan: + strategy: + fail-fast: false + matrix: + environment: [ dev, prod-plan ] + uses: ./.github/workflows/base-gp-registrations-mi.yml + with: + environment: ${{ matrix.environment }} + secrets: inherit + + diff --git a/.gitignore b/.gitignore index 831bdd7..76fa86b 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ *.tfplan .idea +__pycache__/ */*/__pycache__ */*/_trial_temp diff --git a/Dojofile b/Dojofile deleted file mode 100644 index b2a4ec7..0000000 --- a/Dojofile +++ /dev/null @@ -1 +0,0 @@ -DOJO_DOCKER_IMAGE="nhsdev/deductions-infra-dojo:24-47f9f50f" \ No newline at end of file diff --git a/README.md b/README.md index 9ba0a31..ed838c1 100644 --- a/README.md +++ b/README.md @@ -1,54 +1,9 @@ # prm-gp-registrations-mi-infra -## Setup - -These instructions assume you are using: - -- [aws-vault](https://github.com/99designs/aws-vault) to validate your AWS credentials. -- [dojo](https://github.com/kudulab/dojo) to provide an execution environment -- [colima](https://github.com/abiosoft/colima) to run the docker dojo images ## Applying terraform -Rolling out terraform against each environment is managed by the GoCD pipeline. If you'd like to test it locally, run the following commands: - -1. Enter the container: - -`aws-vault exec -- dojo` - - -2. Invoke terraform locally - -``` - ./tasks validate - ./tasks plan -``` - -The stack name denotes the specific stack you would like to validate. -The environment can be `dev` or `prod`. - -To run the formatting, run `./tasks format ` - -## Troubleshooting -Error: `Too many command line arguments. Did you mean to use -chdir?` - -If you are unable to validate/plan, make sure you doing it inside the dojo container by typing -``` - dojo (then running command inside) - or - ./tasks dojo-validate - -``` - -Error: `Error: Error inspecting states in the "s3" backend: -S3 bucket does not exist.` - -Try deleting the .terraform and the plans (dev.tfplan/prod.tfplan) +Rolling out terraform against each environment is managed by the GitHub Actions pipeline. The workflow files can be found in `.github/workflows` -Error: `docker: Cannot connect to the Docker daemon at unix:///Users/jnewman/.colima/docker.sock. 
Is the docker daemon running?.` -You need to install and start colima: -``` -colima start -``` diff --git a/environment.json b/environment.json deleted file mode 100644 index 4cc3638..0000000 --- a/environment.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "dev": { - "name": "dev", - "bucket_name": "prm-gp2gp-terraform-state-dev", - "dynamo_table": "prm-gp2gp-terraform-table", - "role_arn_param": "/registrations/dev/user-input/cross-account-admin-role", - "state_key_prefix": "gp-registrations-mi/" - }, - "prod": { - "name": "prod", - "bucket_name": "prm-gp2gp-terraform-state-prod", - "dynamo_table": "prm-gp2gp-terraform-table", - "role_arn_param": "/registrations/prod/user-input/cross-account-admin-role", - "state_key_prefix": "gp-registrations-mi/" - } -} \ No newline at end of file diff --git a/gocd/base-networking.pipeline.gocd.yaml b/gocd/base-networking.pipeline.gocd.yaml deleted file mode 100644 index 68b741b..0000000 --- a/gocd/base-networking.pipeline.gocd.yaml +++ /dev/null @@ -1,114 +0,0 @@ -format_version: 4 -pipelines: - "gp-registrations-mi-base-networking": - group: gp-registrations-mi-infra - label_template: "${gp_registrations_mi_base_networking[:8]}" - materials: - gp_registrations_mi_base_networking: - plugin_configuration: - id: git-path - options: - url: https://github.com/nhsconnect/prm-gp-registrations-mi-infra.git - path: stacks/base-networking - stages: - - validate: - clean_workspace: true - jobs: - test: - resources: - - docker - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-validate base-networking dev - - plan_dev: - clean_workspace: true - jobs: - plan: - resources: - - docker - artifacts: - - build: - source: stacks/base-networking/terraform/dev.tfplan - destination: stacks/base-networking/terraform - - build: - source: stacks/base-networking/terraform/.terraform.lock.hcl - destination: stacks/base-networking/terraform - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-plan base-networking dev - - apply_dev: - clean_workspace: true - jobs: - apply: - resources: - - docker - tasks: - - fetch: - stage: plan_dev - job: plan - source: stacks/base-networking/terraform/dev.tfplan - destination: stacks/base-networking/terraform - is_file: yes - - fetch: - stage: plan_dev - job: plan - source: stacks/base-networking/terraform/.terraform.lock.hcl - destination: stacks/base-networking/terraform - is_file: yes - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-apply base-networking dev - - plan_prod: - clean_workspace: true - jobs: - plan: - resources: - - docker - artifacts: - - build: - source: stacks/base-networking/terraform/prod.tfplan - destination: stacks/base-networking/terraform - - build: - source: stacks/base-networking/terraform/.terraform.lock.hcl - destination: stacks/base-networking/terraform - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-plan base-networking prod - - apply_prod: - clean_workspace: true - approval: - type: manual - allow_only_on_success: true - jobs: - apply: - resources: - - docker - tasks: - - fetch: - stage: plan_prod - job: plan - source: stacks/base-networking/terraform/prod.tfplan - destination: stacks/base-networking/terraform - is_file: yes - - fetch: - stage: plan_prod - job: plan - source: stacks/base-networking/terraform/.terraform.lock.hcl - destination: stacks/base-networking/terraform - is_file: yes - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-apply base-networking prod diff --git 
a/gocd/container-repositories.pipeline.gocd.yaml b/gocd/container-repositories.pipeline.gocd.yaml deleted file mode 100644 index 2e23ba9..0000000 --- a/gocd/container-repositories.pipeline.gocd.yaml +++ /dev/null @@ -1,114 +0,0 @@ -format_version: 4 -pipelines: - "gp-registrations-mi-container-repositories": - group: gp-registrations-mi-infra - label_template: "${gp_registrations_mi_container_repositories[:8]}" - materials: - gp_registrations_mi_container_repositories: - plugin_configuration: - id: git-path - options: - url: https://github.com/nhsconnect/prm-gp-registrations-mi-infra.git - path: stacks/container-repositories - stages: - - validate: - clean_workspace: true - jobs: - test: - resources: - - docker - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-validate container-repositories dev - - plan_dev: - clean_workspace: true - jobs: - plan: - resources: - - docker - artifacts: - - build: - source: stacks/container-repositories/terraform/dev.tfplan - destination: stacks/container-repositories/terraform - - build: - source: stacks/container-repositories/terraform/.terraform.lock.hcl - destination: stacks/container-repositories/terraform - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-plan container-repositories dev - - apply_dev: - clean_workspace: true - jobs: - apply: - resources: - - docker - tasks: - - fetch: - stage: plan_dev - job: plan - source: stacks/container-repositories/terraform/dev.tfplan - destination: stacks/container-repositories/terraform - is_file: yes - - fetch: - stage: plan_dev - job: plan - source: stacks/container-repositories/terraform/.terraform.lock.hcl - destination: stacks/container-repositories/terraform - is_file: yes - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-apply container-repositories dev - - plan_prod: - clean_workspace: true - jobs: - plan: - resources: - - docker - artifacts: - - build: - source: stacks/container-repositories/terraform/prod.tfplan - destination: stacks/container-repositories/terraform - - build: - source: stacks/container-repositories/terraform/.terraform.lock.hcl - destination: stacks/container-repositories/terraform - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-plan container-repositories prod - - apply_prod: - clean_workspace: true - approval: - type: manual - allow_only_on_success: true - jobs: - apply: - resources: - - docker - tasks: - - fetch: - stage: plan_prod - job: plan - source: stacks/container-repositories/terraform/prod.tfplan - destination: stacks/container-repositories/terraform - is_file: yes - - fetch: - stage: plan_prod - job: plan - source: stacks/container-repositories/terraform/.terraform.lock.hcl - destination: stacks/container-repositories/terraform - is_file: yes - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-apply container-repositories prod diff --git a/gocd/ecs-cluster.pipeline.gocd.yaml b/gocd/ecs-cluster.pipeline.gocd.yaml deleted file mode 100644 index d083ba2..0000000 --- a/gocd/ecs-cluster.pipeline.gocd.yaml +++ /dev/null @@ -1,114 +0,0 @@ -format_version: 4 -pipelines: - "gp-registrations-mi-ecs-cluster": - group: gp-registrations-mi-infra - label_template: "${gp_registrations_mi_ecs_cluster[:8]}" - materials: - gp_registrations_mi_ecs_cluster: - plugin_configuration: - id: git-path - options: - url: https://github.com/nhsconnect/prm-gp-registrations-mi-infra.git - path: stacks/ecs-cluster - stages: - - validate: - clean_workspace: true - jobs: - test: - resources: 
- - docker - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-validate ecs-cluster dev - - plan_dev: - clean_workspace: true - jobs: - plan: - resources: - - docker - artifacts: - - build: - source: stacks/ecs-cluster/terraform/dev.tfplan - destination: stacks/ecs-cluster/terraform - - build: - source: stacks/ecs-cluster/terraform/.terraform.lock.hcl - destination: stacks/ecs-cluster/terraform - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-plan ecs-cluster dev - - apply_dev: - clean_workspace: true - jobs: - apply: - resources: - - docker - tasks: - - fetch: - stage: plan_dev - job: plan - source: stacks/ecs-cluster/terraform/dev.tfplan - destination: stacks/ecs-cluster/terraform - is_file: yes - - fetch: - stage: plan_dev - job: plan - source: stacks/ecs-cluster/terraform/.terraform.lock.hcl - destination: stacks/ecs-cluster/terraform - is_file: yes - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-apply ecs-cluster dev - - plan_prod: - clean_workspace: true - jobs: - plan: - resources: - - docker - artifacts: - - build: - source: stacks/ecs-cluster/terraform/prod.tfplan - destination: stacks/ecs-cluster/terraform - - build: - source: stacks/ecs-cluster/terraform/.terraform.lock.hcl - destination: stacks/ecs-cluster/terraform - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-plan ecs-cluster prod - - apply_prod: - clean_workspace: true - approval: - type: manual - allow_only_on_success: true - jobs: - apply: - resources: - - docker - tasks: - - fetch: - stage: plan_prod - job: plan - source: stacks/ecs-cluster/terraform/prod.tfplan - destination: stacks/ecs-cluster/terraform - is_file: yes - - fetch: - stage: plan_prod - job: plan - source: stacks/ecs-cluster/terraform/.terraform.lock.hcl - destination: stacks/ecs-cluster/terraform - is_file: yes - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-apply ecs-cluster prod \ No newline at end of file diff --git a/gocd/gp-registrations-mi.pipeline.gocd.yaml b/gocd/gp-registrations-mi.pipeline.gocd.yaml deleted file mode 100644 index 2166c8c..0000000 --- a/gocd/gp-registrations-mi.pipeline.gocd.yaml +++ /dev/null @@ -1,204 +0,0 @@ -format_version: 4 -pipelines: - "gp-registrations-mi-task": - group: gp-registrations-mi-infra - label_template: "${gp_registrations_mi_task[:8]}" - materials: - gp_registrations_mi_task: - plugin_configuration: - id: git-path - options: - url: https://github.com/nhsconnect/prm-gp-registrations-mi-infra.git - path: stacks/gp-registrations-mi, lambda/error-alarm-alert,lambda/splunk-cloud-event-uploader, lambda/event-enrichment, lambda/s3-event-uploader - gp_registrations_mi_image: - pipeline: prm-gp-registrations-mi - stage: publish_docker - stages: - - validate: - clean_workspace: true - jobs: - test: - resources: - - docker - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-validate gp-registrations-mi dev - - plan_dev: - clean_workspace: true - jobs: - plan: - resources: - - docker - artifacts: - - build: - source: stacks/gp-registrations-mi/terraform/dev.tfplan - destination: stacks/gp-registrations-mi/terraform - - build: - source: stacks/gp-registrations-mi/terraform/.terraform.lock.hcl - destination: stacks/gp-registrations-mi/terraform - - build: - source: lambda/build/error-alarm-alert.zip - destination: lambda/build/ - - build: - source: lambda/build/splunk-cloud-event-uploader.zip - destination: lambda/build/ - - build: - source: lambda/build/event-enrichment.zip - 
destination: lambda/build/ - - build: - source: lambda/build/s3-event-uploader.zip - destination: lambda/build/ - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - TF_VAR_gp_registrations_mi_image_tag=$GO_DEPENDENCY_LABEL_GP_REGISTRATIONS_MI_IMAGE ./tasks dojo-plan gp-registrations-mi dev - - apply_dev: - clean_workspace: true - jobs: - apply: - resources: - - docker - tasks: - - fetch: - stage: plan_dev - job: plan - source: stacks/gp-registrations-mi/terraform/dev.tfplan - destination: stacks/gp-registrations-mi/terraform - is_file: yes - - fetch: - stage: plan_dev - job: plan - source: stacks/gp-registrations-mi/terraform/.terraform.lock.hcl - destination: stacks/gp-registrations-mi/terraform - is_file: yes - - fetch: - stage: plan_dev - job: plan - source: lambda/build/error-alarm-alert.zip - destination: lambda/build/ - is_file: yes - - fetch: - stage: plan_dev - job: plan - source: lambda/build/splunk-cloud-event-uploader.zip - destination: lambda/build/ - is_file: yes - - fetch: - stage: plan_dev - job: plan - source: lambda/build/event-enrichment.zip - destination: lambda/build/ - is_file: yes - - fetch: - stage: plan_dev - job: plan - source: lambda/build/s3-event-uploader.zip - destination: lambda/build/ - is_file: yes - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-apply gp-registrations-mi dev - - promote_to_prod: - clean_workspace: true - approval: - type: manual - allow_only_on_success: true - jobs: - apply: - resources: - - docker - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - ./scripts/promote-image dev prod mi-api $GO_DEPENDENCY_LABEL_GP_REGISTRATIONS_MI_IMAGE - - plan_prod: - clean_workspace: true - jobs: - plan: - resources: - - docker - artifacts: - - build: - source: stacks/gp-registrations-mi/terraform/prod.tfplan - destination: stacks/gp-registrations-mi/terraform - - build: - source: stacks/gp-registrations-mi/terraform/.terraform.lock.hcl - destination: stacks/gp-registrations-mi/terraform - - build: - source: lambda/build/error-alarm-alert.zip - destination: lambda/build/ - - build: - source: lambda/build/splunk-cloud-event-uploader.zip - destination: lambda/build/ - - build: - source: lambda/build/event-enrichment.zip - destination: lambda/build/ - - build: - source: lambda/build/s3-event-uploader.zip - destination: lambda/build/ - tasks: - - exec: - command: /bin/bash - arguments: - - -c - - TF_VAR_gp_registrations_mi_image_tag=$GO_DEPENDENCY_LABEL_GP_REGISTRATIONS_MI_IMAGE ./tasks dojo-plan gp-registrations-mi prod - - apply_prod: - clean_workspace: true - approval: - type: manual - allow_only_on_success: true - jobs: - apply: - resources: - - docker - tasks: - - fetch: - stage: plan_prod - job: plan - source: stacks/gp-registrations-mi/terraform/prod.tfplan - destination: stacks/gp-registrations-mi/terraform - is_file: yes - - fetch: - stage: plan_prod - job: plan - source: stacks/gp-registrations-mi/terraform/.terraform.lock.hcl - destination: stacks/gp-registrations-mi/terraform - is_file: yes - - fetch: - stage: plan_prod - job: plan - source: lambda/build/error-alarm-alert.zip - destination: lambda/build/ - is_file: yes - - fetch: - stage: plan_prod - job: plan - source: lambda/build/splunk-cloud-event-uploader.zip - destination: lambda/build/ - is_file: yes - - fetch: - stage: plan_prod - job: plan - source: lambda/build/event-enrichment.zip - destination: lambda/build/ - is_file: yes - - fetch: - stage: plan_prod - job: plan - source: lambda/build/s3-event-uploader.zip - destination: lambda/build/ - 
is_file: yes - - exec: - command: /bin/bash - arguments: - - -c - - ./tasks dojo-apply gp-registrations-mi prod diff --git a/lambda/bulk-ods-update/bulk_ods_update.py b/lambda/bulk-ods-update/bulk_ods_update.py new file mode 100644 index 0000000..0fab56b --- /dev/null +++ b/lambda/bulk-ods-update/bulk_ods_update.py @@ -0,0 +1,188 @@ +import os +import tempfile +from datetime import date, timedelta +import calendar +import csv + +import boto3 + +from utils.enums.trud import OdsDownloadType, TrudItem +from utils.models.ods_models import PracticeOds, IcbOds +from utils.services.trud_api_service import TrudApiService + +import logging + +from utils.trud_files import ( + GP_FILE_HEADERS, + ICB_FILE_HEADERS, + ICB_MONTHLY_FILE_PATH, + ICB_QUARTERLY_FILE_PATH, + ICB_MONTHLY_FILE_NAME, + ICB_QUARTERLY_FILE_NAME, + GP_WEEKLY_FILE_NAME, + GP_WEEKLY_ZIP_FILE_PATH, +) + +logger = logging.getLogger() +logger.setLevel(logging.INFO) + +TEMP_DIR = tempfile.mkdtemp(dir="/tmp") + + +def lambda_handler(event, context): + download_type = determine_ods_manifest_download_type() + ssm = boto3.client("ssm") + trud_api_key_param = os.environ.get("TRUD_API_KEY_PARAM_NAME") + # get_parameter returns a nested dict; extract the decrypted value + trud_api_key = ssm.get_parameter(Name=trud_api_key_param, WithDecryption=True)["Parameter"]["Value"] if trud_api_key_param else "" + trud_service = TrudApiService( + api_key=trud_api_key, + api_url=os.environ.get("TRUD_FHIR_API_URL_PARAM_NAME"), + ) + + extract_and_process_ods_gp_data(trud_service) + + if download_type == OdsDownloadType.BOTH: + extract_and_process_ods_icb_data(trud_service) + + return {"statusCode": 200} + + +def determine_ods_manifest_download_type() -> OdsDownloadType: + logger.info("Determining download type") + today = date.today() + + total_days_in_month = calendar.monthrange(today.year, today.month)[1] + last_date_of_month = date(today.year, today.month, total_days_in_month) + + last_sunday_of_month = last_date_of_month + + while last_sunday_of_month.weekday() != 6: + last_sunday_of_month -= timedelta(days=1) + + is_icb_download_date = today == last_sunday_of_month + + if is_icb_download_date: + logger.info("Download type set to: GP and ICB") + return OdsDownloadType.BOTH + + logger.info("Download type set to: GP") + return OdsDownloadType.GP + + +def extract_and_process_ods_gp_data(trud_service: TrudApiService): + logger.info("Extracting and processing ODS GP data") + + gp_ods_releases = trud_service.get_release_list( + TrudItem.NHS_ODS_WEEKLY, is_latest=True + ) + + logger.info(gp_ods_releases) + + download_file_bytes = trud_service.get_download_file( + gp_ods_releases[0].get("archiveFileUrl") + ) + + epraccur_csv_path = os.path.join(TEMP_DIR, GP_WEEKLY_FILE_NAME) + + epraccur_zip_file = trud_service.unzipping_files( + download_file_bytes, GP_WEEKLY_ZIP_FILE_PATH, TEMP_DIR, True + ) + trud_service.unzipping_files(epraccur_zip_file, GP_WEEKLY_FILE_NAME, TEMP_DIR) + + gp_ods_data = trud_csv_to_dict(epraccur_csv_path, GP_FILE_HEADERS) + gp_ods_data_amended_data = get_amended_records(gp_ods_data) + + if gp_ods_data_amended_data: + logger.info( + f"Found {len(gp_ods_data_amended_data)} amended GP data records to update" + ) + compare_and_overwrite(OdsDownloadType.GP, gp_ods_data_amended_data) + return + + logger.info("No amended GP data found") + + +def extract_and_process_ods_icb_data(trud_service: TrudApiService): + logger.info("Extracting and processing ODS ICB data") + + icb_ods_releases = trud_service.get_release_list( + TrudItem.ORG_REF_DATA_MONTHLY, True + ) + + is_quarterly_release = icb_ods_releases[0].get("name").endswith(".0.0") + download_file = 
trud_service.get_download_file( + icb_ods_releases[0].get("archiveFileUrl") + ) + + icb_zip_file_path = ( + ICB_MONTHLY_FILE_PATH if not is_quarterly_release else ICB_QUARTERLY_FILE_PATH + ) + icb_csv_file_name = ( + ICB_MONTHLY_FILE_NAME if not is_quarterly_release else ICB_QUARTERLY_FILE_NAME + ) + + icb_ods_data_amended_data = [] + if icb_zip_file := trud_service.unzipping_files( + download_file, icb_zip_file_path, TEMP_DIR, True + ): + if icb_csv_file := trud_service.unzipping_files( + icb_zip_file, icb_csv_file_name, TEMP_DIR + ): + icb_ods_data = trud_csv_to_dict(icb_csv_file, ICB_FILE_HEADERS) + icb_ods_data_amended_data = get_amended_records(icb_ods_data) + + if icb_ods_data_amended_data: + logger.info( + f"Found {len(icb_ods_data_amended_data)} amended ICB data records to update" + ) + compare_and_overwrite(OdsDownloadType.ICB, icb_ods_data_amended_data) + return + + logger.info("No amended ICB data found") + + +def get_amended_records(data: list[dict]) -> list[dict]: + return [ + amended_data + for amended_data in data + if amended_data.get("AmendedRecordIndicator") == "1" + ] + + +def trud_csv_to_dict(file_path: str, headers: list[str]) -> list[dict]: + with open(file_path, mode="r") as csv_file: + csv_reader = csv.DictReader(csv_file) + csv_reader.fieldnames = headers + data_list = [] + for row in csv_reader: + data_list.append(dict(row)) + return data_list + + +def compare_and_overwrite(download_type: OdsDownloadType, data: list[dict]): + if download_type == OdsDownloadType.GP: + logger.info("Comparing GP Practice data") + for amended_record in data: + try: + practice = PracticeOds(amended_record.get("PracticeOdsCode")) + practice.update( + actions=[ + PracticeOds.practice_name.set( + amended_record.get("PracticeName") + ), + PracticeOds.icb_ods_code.set(amended_record.get("IcbOdsCode")), + ] + ) + except Exception as e: + logger.info( + f"Failed to create/update record by Practice ODS code: {str(e)}" + ) + + if download_type == OdsDownloadType.ICB: + logger.info("Comparing ICB data") + for amended_record in data: + try: + icb = IcbOds(amended_record.get("IcbOdsCode")) + icb.update(actions=[IcbOds.icb_name.set(amended_record.get("IcbName"))]) + except Exception as e: + logger.info(f"Failed to create/update record by ICB ODS code: {str(e)}") diff --git a/lambda/event-enrichment/event_enrichment_main.py b/lambda/event-enrichment/event_enrichment_main.py index 1d94f94..482aeb6 100644 --- a/lambda/event-enrichment/event_enrichment_main.py +++ b/lambda/event-enrichment/event_enrichment_main.py @@ -1,10 +1,13 @@ import json import os +from datetime import date, timedelta, datetime from typing import Optional import boto3 import urllib3 +from utils.models.ods_models import PracticeOds, IcbOds + ODS_PORTAL_URL = "https://directory.spineservices.nhs.uk/ORD/2-0-0/organisations/" ICB_ROLE_ID = "RO98" EMPTY_ORGANISATION = {"Name": None} @@ -65,55 +68,133 @@ def _enrich_events(sqs_messages: dict) -> list: f"Skipping enrichment for degrades event with eventId: {event['eventId']}." 
) continue - # set requesting practice info - requesting_practice_organisation = _fetch_organisation( - event["requestingPracticeOdsCode"] + event.update( + _requesting_practice_info( + ods_code=event["requestingPracticeOdsCode"], + practice_name_key="requestingPracticeName", + icb_name_key="requestingPracticeIcbName", + icb_ods_code_key="requestingPracticeIcbOdsCode", + supplier_key="requestingSupplierName", + ) ) - event["requestingPracticeName"] = requesting_practice_organisation["Name"] - event["requestingPracticeIcbOdsCode"] = _find_icb_ods_code( - requesting_practice_organisation + # set sending practice info + event.update( + _requesting_practice_info( + ods_code=event["sendingPracticeOdsCode"], + practice_name_key="sendingPracticeName", + icb_name_key="sendingPracticeIcbName", + icb_ods_code_key="sendingPracticeIcbOdsCode", + supplier_key="sendingSupplierName", + ) ) - event["requestingPracticeIcbName"] = _fetch_organisation( - event["requestingPracticeIcbOdsCode"] - )["Name"] - # set sending practice info - sending_practice_organisation = _fetch_organisation( - event["sendingPracticeOdsCode"] + # temporary fix for EMIS wrong reportingSystemSupplier data + reporting_system_supplier = event["reportingSystemSupplier"] + if reporting_system_supplier.isnumeric(): + print( + f"TEMP FIX. Reporting system supplier received: {reporting_system_supplier}. Changed to 'EMIS'." + ) + event["reportingSystemSupplier"] = "EMIS" + + return events + + +def _requesting_practice_info( + ods_code: str, practice_name_key: str, icb_name_key: str, icb_ods_code_key: str, supplier_key: str +) -> dict: + enrichment_info = {} + print("requesting data for: " + ods_code) + gp_dynamo_item = arrange_gp_data_from_dynamo(ods_code) or get_gp_data_from_api( + ods_code + ) + if gp_dynamo_item: + enrichment_info.update( + { + practice_name_key: gp_dynamo_item.practice_name, + icb_ods_code_key: gp_dynamo_item.icb_ods_code, + } ) - event["sendingPracticeName"] = sending_practice_organisation["Name"] - event["sendingPracticeIcbOdsCode"] = _find_icb_ods_code( - sending_practice_organisation + enrichment_info[supplier_key] = ( + get_supplier_data(ods_code, gp_dynamo_item) or "UNKNOWN" ) - event["sendingPracticeIcbName"] = _fetch_organisation( - event["sendingPracticeIcbOdsCode"] - )["Name"] - - # set requesting supplier info - requesting_supplier_name = get_supplier_name(event["requestingPracticeOdsCode"]) - event["requestingSupplierName"] = ( - requesting_supplier_name - if requesting_supplier_name is not None - else "UNKNOWN" + enrichment_info[icb_name_key] = ( + get_icb_name(gp_dynamo_item.icb_ods_code) or "UNKNOWN" ) + else: + enrichment_info[supplier_key] = "UNKNOWN" + return enrichment_info - # set sending supplier info - sending_supplier_name = get_supplier_name(event["sendingPracticeOdsCode"]) - event["sendingSupplierName"] = ( - sending_supplier_name - if sending_supplier_name is not None - else "UNKNOWN" - ) - # temporary fix for EMIS wrong reportingSystemSupplier data - reporting_system_supplier = event["reportingSystemSupplier"] - if reporting_system_supplier.isnumeric(): - print(f"TEMP FIX. Reporting system supplier received: {reporting_system_supplier}. 
Changed to 'EMIS'.") - event["reportingSystemSupplier"] = "EMIS" +def arrange_gp_data_from_dynamo(ods_code: str): + try: + gp_dynamo_data = get_gp_data_from_dynamo_request(ods_code) + print("Successfully query dynamo for GP data") + return gp_dynamo_data + except PracticeOds.DoesNotExist: + print("Failed to find GP data in dynamo table") + return None - return events +def get_icb_name(ods_code: str): + if ods_code is None: + return None + else: + return get_icb_name_from_dynamo(ods_code) or get_icb_name_from_api(ods_code) + + +def get_icb_name_from_dynamo(ods_code: str): + try: + icb_dynamo_item = get_icb_data_from_dynamo_request(ods_code) + print("Successfully query dynamo for ICB data") + return icb_dynamo_item.icb_name + except IcbOds.DoesNotExist: + print("Failed to find ICB data in dynamo table") + return None + + +def get_gp_data_from_api(ods_code: str): + requesting_practice_organisation = _fetch_organisation(ods_code) + + practice_name = requesting_practice_organisation["Name"] + if practice_name is None: + return None + icb_ods_code = _find_icb_ods_code(requesting_practice_organisation) + gp_api_item = PracticeOds(ods_code, practice_name, icb_ods_code=icb_ods_code) + gp_api_item.save() + return gp_api_item + + +def get_icb_name_from_api(ods_code: str): + icb_name = _fetch_organisation(ods_code)["Name"] + icb_api_item = IcbOds(ods_code, icb_name) + icb_api_item.save() + return icb_name + + +def get_supplier_data(ods_code: str, gp_dynamo_item: PracticeOds): + date_today = date.today() + date_one_month_ago = date_today - timedelta(days=30) + supplier_last_update_date = ( + gp_dynamo_item.supplier_last_updated.date() + if gp_dynamo_item.supplier_last_updated + else None + ) + is_out_of_date = ( + supplier_last_update_date < date_one_month_ago + if supplier_last_update_date + else True + ) + if not gp_dynamo_item.supplier_name and is_out_of_date: + requesting_supplier_name = get_supplier_name_from_sds_api(ods_code) + gp_dynamo_item.supplier_name = requesting_supplier_name + gp_dynamo_item.update( + actions=[ + PracticeOds.supplier_name.set(requesting_supplier_name), + PracticeOds.supplier_last_updated.set(datetime.now()), + ] + ) + return gp_dynamo_item.supplier_name def _find_icb_ods_code(practice_organisation: dict) -> Optional[str]: @@ -260,9 +341,9 @@ def _find_supplier_ods_codes_from_supplier_details(supplier_details: dict) -> li return supplier_ods_codes -def get_supplier_name(practice_ods_code: str) -> Optional[str]: +def get_supplier_name_from_sds_api(practice_ods_code: str) -> Optional[str]: """uses the SDS FHIR API to get the system supplier from an ODS code""" - + print("Requesting supplier info from SDS") if not practice_ods_code or practice_ods_code.isspace(): return None @@ -278,7 +359,7 @@ def get_supplier_name(practice_ods_code: str) -> Optional[str]: } supplier_name = None - + for supplier_ods_code in supplier_ods_codes: try: supplier_name = supplier_name_mapping[supplier_ods_code] @@ -286,13 +367,19 @@ def get_supplier_name(practice_ods_code: str) -> Optional[str]: break except KeyError: continue - - if supplier_name is None: + + if supplier_name is None: print( f"Unable to map supplier ODS code(s) found from SDS FHI API: {str(supplier_ods_codes)}" + " to a known supplier name. Practice ODS code from event: {practice_ods_code}." 
- ) + ) return supplier_name - - \ No newline at end of file + + +def get_gp_data_from_dynamo_request(ods_code: str): + return PracticeOds.get(ods_code) + + +def get_icb_data_from_dynamo_request(ods_code: str): + return IcbOds.get(ods_code) diff --git a/lambda/event-enrichment/test_event_enrichment_main.py b/lambda/event-enrichment/test_event_enrichment_main.py index b58d8c5..3eef3e5 100644 --- a/lambda/event-enrichment/test_event_enrichment_main.py +++ b/lambda/event-enrichment/test_event_enrichment_main.py @@ -18,7 +18,7 @@ _find_supplier_ods_codes_from_supplier_details, _has_supplier_ods_code, UnableToFetchSupplierDetailsFromSDSFHIRException, - get_supplier_name, + get_supplier_name_from_sds_api, UnableToMapSupplierOdsCodeToSupplierNameException, ) @@ -626,7 +626,7 @@ def test_returns_supplier_name_given_a_practice_ods_code( {"Parameter": {"Value": "some_url.net?"}}, ] - supplier_name = get_supplier_name("test_supplier_ods_code") + supplier_name = get_supplier_name_from_sds_api("test_supplier_ods_code") expected_supplier_name = "EMIS" assert supplier_name == expected_supplier_name @@ -646,7 +646,7 @@ def test_supplier_name_returns_none_when_supplier_ods_code_is_not_found_from_sds {"Parameter": {"Value": "some_url.net?"}}, ] - supplier_name = get_supplier_name("PRACTICE_ODS_123") + supplier_name = get_supplier_name_from_sds_api("PRACTICE_ODS_123") assert supplier_name is None @@ -665,6 +665,6 @@ def test_supplier_name_returns_none_when_supplier_ods_code_is_none( {"Parameter": {"Value": "some_url.net?"}}, ] - supplier_name = get_supplier_name(None) + supplier_name = get_supplier_name_from_sds_api(None) assert supplier_name is None diff --git a/lambda/mi-enrichment-requirements.txt b/lambda/mi-enrichment-requirements.txt new file mode 100644 index 0000000..bb868cc --- /dev/null +++ b/lambda/mi-enrichment-requirements.txt @@ -0,0 +1 @@ +pynamodb==6.0.1 \ No newline at end of file diff --git a/scripts/ecr-helper b/scripts/ecr-helper deleted file mode 100755 index c3df32b..0000000 --- a/scripts/ecr-helper +++ /dev/null @@ -1,84 +0,0 @@ -#!/bin/bash - -set -Eeo pipefail - -aws_region=eu-west-2 - -function assume_role() { - env=$1 - role_param="/registrations/${env}/user-input/cross-account-admin-role" - - role_arn=$( - aws ssm get-parameters \ - --region ${aws_region} \ - --names ${role_param} \ - --query 'Parameters[0].Value' \ - --output text - ) - - timestamp=$(date +%s) - session_name="ci-ecr-helper-${timestamp}" - sts=$( - aws sts assume-role \ - --role-arn $role_arn \ - --role-session-name $session_name \ - --output json - ) - - unset AWS_ACCESS_KEY_ID - unset AWS_SECRET_ACCESS_KEY - unset AWS_SESSION_TOKEN - unset AWS_SECURITY_TOKEN - - export AWS_ACCESS_KEY_ID=$(echo $sts | jq -r .Credentials.AccessKeyId) - export AWS_SECRET_ACCESS_KEY=$(echo $sts | jq -r .Credentials.SecretAccessKey) - export AWS_SESSION_TOKEN=$(echo $sts | jq -r .Credentials.SessionToken) -} - -function get_repo_url() { - env=$1 - image=$2 - - repo_url_param="/registrations/${env}/gp-registrations-mi/ecr/url/${image}" - - aws ssm get-parameters \ - --region ${aws_region} \ - --names ${repo_url_param} \ - --query 'Parameters[0].Value' \ - --output text - -} - -function get_login() { - aws ecr get-login --no-include-email --region ${aws_region} -} - -function usage() { - echo "Usage:" - echo "ecr-helper [get-login environment] | [get-repo-url environment image]" - exit 1 -} - -task=$1 -environment=$2 - -if [ "$#" -le 1 ]; then - echo "Incorrect number of parameters" - usage -fi - -case "${task}" in - get-login) - 
assume_role $environment - get_login - ;; - get-repo-url) - assume_role $environment - image=$3 - get_repo_url $environment $image - ;; - *) - echo "Invalid command: '${task}'" - usage - ;; -esac diff --git a/scripts/promote-image b/scripts/promote-image deleted file mode 100755 index 18a5bfe..0000000 --- a/scripts/promote-image +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -set -Eeo pipefail - -source_environment=$1 -destination_environment=$2 -image_name=$3 -image_tag=$4 - -function usage() { - echo "Usage:" - echo "promote-image SOURCE-ENV DESTINATION-ENV IMAGE-NAME IMAGE-TAG" - exit 1 -} - -if [ "$#" -ne 4 ]; then - echo "Incorrect number of parameters" - usage -fi - -source_login=$(dojo "./scripts/ecr-helper get-login $source_environment") -destination_login=$(dojo "./scripts/ecr-helper get-login $destination_environment") - -source_repo_url=$(dojo "./scripts/ecr-helper get-repo-url $source_environment $image_name") -destination_repo_url=$(dojo "./scripts/ecr-helper get-repo-url $destination_environment $image_name") - -source_repo=$source_repo_url:$image_tag -destination_repo=$destination_repo_url:$image_tag - -echo "Promoting $image_name:$image_tag from $source_environment to $destination_environment" - -eval $source_login - -echo "Pulling docker image from $source_repo" -docker pull $source_repo -docker tag $source_repo $destination_repo -eval $destination_login - -echo "Pushing docker image to $destination_repo" -docker push $destination_repo diff --git a/sonar-project.properties b/sonar-project.properties index c740862..dbf350a 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -9,7 +9,7 @@ sonar.python.version=3.9 # Path is relative to the sonar-project.properties file. Replace "\" by "/" on Windows. #sonar.sources=. -sonar.sources=lambda/ +sonar.sources=lambda/, stacks/ # Encoding of the source code. 
Default is default system encoding #sonar.sourceEncoding=UTF-8 \ No newline at end of file diff --git a/stacks/base-networking/terraform/output.tf b/stacks/base-networking/terraform/output.tf index cc75490..0a817df 100644 --- a/stacks/base-networking/terraform/output.tf +++ b/stacks/base-networking/terraform/output.tf @@ -5,7 +5,7 @@ resource "aws_ssm_parameter" "private_subnet_ids" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-private-subnet-ids" + Name = "${var.environment}-gp-registrations-mi-private-subnet-ids" ApplicationRole = "AwsSsmParameter" } ) @@ -18,7 +18,7 @@ resource "aws_ssm_parameter" "vpc_id" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-vpc-id" + Name = "${var.environment}-gp-registrations-mi-vpc-id" ApplicationRole = "AwsSsmParameter" } ) @@ -31,7 +31,7 @@ resource "aws_ssm_parameter" "vpc_cidr_block" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-vpc-cidr-block" + Name = "${var.environment}-gp-registrations-mi-vpc-cidr-block" ApplicationRole = "AwsSsmParameter" } ) diff --git a/stacks/base-networking/terraform/private-subnet.tf b/stacks/base-networking/terraform/private-subnet.tf index 912c74e..54a8b28 100644 --- a/stacks/base-networking/terraform/private-subnet.tf +++ b/stacks/base-networking/terraform/private-subnet.tf @@ -6,7 +6,7 @@ resource "aws_subnet" "private_a" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-private-a" + Name = "${var.environment}-gp-registrations-mi-private-a" ApplicationRole = "AwsSubnet" } ) @@ -20,7 +20,7 @@ resource "aws_subnet" "private_b" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-private-b" + Name = "${var.environment}-gp-registrations-mi-private-b" ApplicationRole = "AwsSubnet" } ) @@ -34,9 +34,9 @@ resource "aws_route" "private" { resource "aws_route_table" "private" { vpc_id = aws_vpc.vpc.id - tags = merge( + tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-private" + Name = "${var.environment}-gp-registrations-mi-private" ApplicationRole = "AwsRouteTable" } ) diff --git a/stacks/base-networking/terraform/public-subnet.tf b/stacks/base-networking/terraform/public-subnet.tf index 6edb623..c24cc1f 100644 --- a/stacks/base-networking/terraform/public-subnet.tf +++ b/stacks/base-networking/terraform/public-subnet.tf @@ -6,7 +6,7 @@ resource "aws_subnet" "public_a" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-public-a" + Name = "${var.environment}-gp-registrations-mi-public-a" ApplicationRole = "AwsSubnet" } ) @@ -20,7 +20,7 @@ resource "aws_subnet" "public_b" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-public-b" + Name = "${var.environment}-gp-registrations-mi-public-b" ApplicationRole = "AwsSubnet" } ) @@ -31,7 +31,7 @@ resource "aws_route_table" "public" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-public" + Name = "${var.environment}-gp-registrations-mi-public" ApplicationRole = "AwsRouteTable" } ) @@ -59,7 +59,7 @@ resource "aws_eip" "nat" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-nat" + Name = "${var.environment}-gp-registrations-mi-nat" ApplicationRole = "AwsEip" } ) @@ -72,7 +72,7 @@ resource "aws_nat_gateway" "nat_gateway" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-nat-gateway" + Name = 
"${var.environment}-gp-registrations-mi-nat-gateway" ApplicationRole = "AwsNatGateway" } ) diff --git a/stacks/base-networking/terraform/vpc.tf b/stacks/base-networking/terraform/vpc.tf index 8895cbb..752959b 100644 --- a/stacks/base-networking/terraform/vpc.tf +++ b/stacks/base-networking/terraform/vpc.tf @@ -4,7 +4,7 @@ resource "aws_vpc" "vpc" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-vpc" + Name = "${var.environment}-gp-registrations-mi-vpc" ApplicationRole = "AwsVpc" } ) @@ -16,7 +16,7 @@ resource "aws_internet_gateway" "internet_gateway" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi" + Name = "${var.environment}-gp-registrations-mi" ApplicationRole = "AwsInternetGateway" } ) diff --git a/stacks/container-repositories/terraform/ecr.tf b/stacks/container-repositories/terraform/ecr.tf index 3bf39b4..e8ac56d 100644 --- a/stacks/container-repositories/terraform/ecr.tf +++ b/stacks/container-repositories/terraform/ecr.tf @@ -3,8 +3,39 @@ resource "aws_ecr_repository" "gp_registrations_mi" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi" + Name = "${var.environment}-gp-registrations-mi" ApplicationRole = "AwsEcrRepository" } ) } + +data "aws_iam_policy_document" "ecr_gp_registrations_mi" { + count = var.environment == "dev" ? 1 : 0 + statement { + effect = "Allow" + + principals { + type = "AWS" + identifiers = [data.aws_ssm_parameter.prod-aws-account-id[0].value] + } + + actions = [ + "ecr:GetDownloadUrlForLayer", + "ecr:BatchGetImage", + "ecr:BatchCheckLayerAvailability", + "ecr:DescribeImages", + "ecr:ListImages", + ] + } +} + +resource "aws_ecr_repository_policy" "ecr_gp_registrations_mi" { + count = var.environment == "dev" ? 1 : 0 + repository = aws_ecr_repository.gp_registrations_mi.name + policy = data.aws_iam_policy_document.ecr_gp_registrations_mi[0].json +} + +data "aws_ssm_parameter" "prod-aws-account-id" { + count = var.environment == "dev" ? 
1 : 0 + name = "/registrations/dev/user-input/prod-aws-account-id" +} \ No newline at end of file diff --git a/stacks/container-repositories/terraform/output.tf b/stacks/container-repositories/terraform/output.tf index 3d0c70a..bb8706d 100644 --- a/stacks/container-repositories/terraform/output.tf +++ b/stacks/container-repositories/terraform/output.tf @@ -5,7 +5,7 @@ resource "aws_ssm_parameter" "gp_registrations_mi" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi" + Name = "${var.environment}-gp-registrations-mi" ApplicationRole = "AwsSsmParameter" } ) diff --git a/stacks/ecs-cluster/terraform/ecs-cluster.tf b/stacks/ecs-cluster/terraform/ecs-cluster.tf index 38c43e5..87cd77d 100644 --- a/stacks/ecs-cluster/terraform/ecs-cluster.tf +++ b/stacks/ecs-cluster/terraform/ecs-cluster.tf @@ -3,7 +3,7 @@ resource "aws_ecs_cluster" "gp_registrations_mi_cluster" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-cluster" + Name = "${var.environment}-gp-registrations-mi-cluster" ApplicationRole = "AwsEcsCluster" } ) @@ -19,7 +19,7 @@ resource "aws_cloudwatch_log_group" "gp_registrations_mi" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi" + Name = "${var.environment}-gp-registrations-mi" ApplicationRole = "AwsCloudwatchLogGroup" } ) diff --git a/stacks/ecs-cluster/terraform/output.tf b/stacks/ecs-cluster/terraform/output.tf index c9596a6..e6bdf9f 100644 --- a/stacks/ecs-cluster/terraform/output.tf +++ b/stacks/ecs-cluster/terraform/output.tf @@ -5,7 +5,7 @@ resource "aws_ssm_parameter" "cloudwatch_log_group_name" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi" + Name = "${var.environment}-gp-registrations-mi" ApplicationRole = "AwsSsmParameter" } ) @@ -18,7 +18,7 @@ resource "aws_ssm_parameter" "execution_role_arn" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi" + Name = "${var.environment}-gp-registrations-mi" ApplicationRole = "AwsSsmParameter" } ) @@ -31,7 +31,7 @@ resource "aws_ssm_parameter" "ecs_cluster_arn" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi" + Name = "${var.environment}-gp-registrations-mi" ApplicationRole = "AwsSsmParameter" } ) diff --git a/stacks/gp-registrations-mi/scripts/get_latest_ods_csv.py b/stacks/gp-registrations-mi/scripts/get_latest_ods_csv.py new file mode 100644 index 0000000..fca41e1 --- /dev/null +++ b/stacks/gp-registrations-mi/scripts/get_latest_ods_csv.py @@ -0,0 +1,116 @@ +import csv +import sys + +from utils.enums.trud import TrudItem +from utils.services.trud_api_service import TrudApiService +from utils.trud_files import ( + ICB_MONTHLY_FILE_PATH, + ICB_QUARTERLY_FILE_PATH, + ICB_MONTHLY_FILE_NAME, + ICB_QUARTERLY_FILE_NAME, + ICB_FILE_HEADERS, + GP_FILE_HEADERS, + GP_WEEKLY_FILE_NAME, + GP_WEEKLY_ZIP_FILE_PATH, +) + + +def create_modify_csv( + file_path: str, + modify_file_path: str, + headers_list: list, + modify_headers: list, + write_to_file: bool, + additional_rows=None, +): + with open(file_path, newline="") as original, open( + modify_file_path, "w", newline="" + ) as output: + reader = csv.DictReader(original, delimiter=",", fieldnames=headers_list) + csv_modified_rows = [ + {key: row[key] for key in modify_headers} for row in reader + ] + if additional_rows and write_to_file: + csv_modified_rows.extend(additional_rows) + if write_to_file: + write_to_csv(output, modify_headers, csv_modified_rows) + return None + return 
csv_modified_rows + + +def write_to_csv(file_path, headers_list: list, rows_list: list): + writer = csv.DictWriter(file_path, delimiter=",", fieldnames=headers_list) + writer.writeheader() + writer.writerows(rows_list) + + +def get_gp_latest_ods_csv(service): + release_list_response = service.get_release_list(TrudItem.NHS_ODS_WEEKLY, True) + download_file = service.get_download_file( + release_list_response[0].get("archiveFileUrl") + ) + epraccur_zip_file = service.unzipping_files( + download_file, GP_WEEKLY_ZIP_FILE_PATH, byte=True + ) + epraccur_csv_file = service.unzipping_files(epraccur_zip_file, GP_WEEKLY_FILE_NAME) + create_modify_csv( + epraccur_csv_file, + "initial_full_gps_ods.csv", + GP_FILE_HEADERS, + ["PracticeOdsCode", "PracticeName", "IcbOdsCode"], + True, + ) + + +def get_icb_latest_ods_csv(service): + release_list_response = service.get_release_list( + TrudItem.ORG_REF_DATA_MONTHLY, False + ) + download_url_by_release = service.get_download_url_by_release(release_list_response) + icb_update_changes = [] + for release, url in download_url_by_release.items(): + download_file = service.get_download_file(url) + csv_modified_rows = None + is_quarterly_release = release.endswith(".0.0") + zip_file_path = ( + ICB_MONTHLY_FILE_PATH + if not is_quarterly_release + else ICB_QUARTERLY_FILE_PATH + ) + output_name = ( + "update_icb_" + release + ".csv" + if not is_quarterly_release + else "initial_full_icb_ods.csv" + ) + csv_file_name = ( + ICB_MONTHLY_FILE_NAME + if not is_quarterly_release + else ICB_QUARTERLY_FILE_NAME + ) + + if epraccur_zip_file := service.unzipping_files( + download_file, zip_file_path, byte=True + ): + if epraccur_csv_file := service.unzipping_files( + epraccur_zip_file, csv_file_name + ): + csv_modified_rows = create_modify_csv( + epraccur_csv_file, + output_name, + ICB_FILE_HEADERS, + ["IcbOdsCode", "IcbName"], + is_quarterly_release, + icb_update_changes, + ) + if csv_modified_rows: + icb_update_changes.extend(csv_modified_rows) + + +if __name__ == "__main__": + try: + trud_service = TrudApiService(sys.argv[1], sys.argv[2]) + get_gp_latest_ods_csv(trud_service) + get_icb_latest_ods_csv(trud_service) + print("\nOds download process complete.") + except Exception as e: + print(f"\nExiting Process! 
{e}") diff --git a/stacks/gp-registrations-mi/terraform/api-gateway.tf b/stacks/gp-registrations-mi/terraform/api-gateway.tf index 24f88cc..d6b9fda 100644 --- a/stacks/gp-registrations-mi/terraform/api-gateway.tf +++ b/stacks/gp-registrations-mi/terraform/api-gateway.tf @@ -5,7 +5,7 @@ resource "aws_api_gateway_vpc_link" "vpc_link" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-api-gateway-vpc-link" + Name = "${var.environment}-gp-registrations-mi-api-gateway-vpc-link" ApplicationRole = "AwsApiGatewayVpcLink" } ) @@ -17,9 +17,9 @@ resource "aws_api_gateway_rest_api" "rest_api" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-api-gateway-rest-api" + Name = "${var.environment}-gp-registrations-mi-api-gateway-rest-api" ApplicationRole = "AwsApiGatewayRestApi" - PublicFacing = "Y" + PublicFacing = "Y" } ) } @@ -108,7 +108,7 @@ resource "aws_api_gateway_stage" "api_gateway_stage" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-api-gateway-stage" + Name = "${var.environment}-gp-registrations-mi-api-gateway-stage" ApplicationRole = "AwsApiGatewayStage" } ) @@ -132,7 +132,7 @@ resource "aws_cloudwatch_log_group" "access_logs" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi" + Name = "${var.environment}-gp-registrations-mi" ApplicationRole = "AwsCloudwatchLogGroup" } ) @@ -145,15 +145,15 @@ resource "aws_cloudwatch_log_group" "execution_logs" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi" + Name = "${var.environment}-gp-registrations-mi" ApplicationRole = "AwsCloudwatchLogGroup" } ) } resource "aws_api_gateway_usage_plan" "api_gateway_usage_plan" { - name = "${var.environment}-gp-registrations-mi-api-gateway-usage-plan-api-key" - description = "Usage plan to configure api key to connect to the apigee proxy" + name = "${var.environment}-gp-registrations-mi-api-gateway-usage-plan-api-key" + description = "Usage plan to configure api key to connect to the apigee proxy" api_stages { api_id = aws_api_gateway_rest_api.rest_api.id diff --git a/stacks/gp-registrations-mi/terraform/dynamodb-gp-ods.tf b/stacks/gp-registrations-mi/terraform/dynamodb-gp-ods.tf new file mode 100644 index 0000000..e7a9001 --- /dev/null +++ b/stacks/gp-registrations-mi/terraform/dynamodb-gp-ods.tf @@ -0,0 +1,73 @@ +resource "aws_dynamodb_table" "mi_api_gp_ods" { + name = "${var.environment}_mi_enrichment_practice_ods" + billing_mode = "PAY_PER_REQUEST" + deletion_protection_enabled = true + + hash_key = "PracticeOdsCode" + + attribute { + name = "PracticeOdsCode" + type = "S" + } + + tags = { + Name = "mi_enrichment_practice_ods" + Environment = var.environment + } + + import_table { + input_format = "CSV" + input_compression_type = "NONE" + s3_bucket_source { + bucket = aws_s3_bucket.ods_csv_files.id + key_prefix = aws_s3_object.initial_gp_ods_csv.key + } + input_format_options { + csv { + delimiter = "," + } + } + } +} + +resource "aws_iam_policy" "dynamodb_policy_ods_enrichment_lambda" { + name = "dynamodb_${aws_dynamodb_table.mi_api_gp_ods.name}_enrichment_lambda_policy" + path = "/" + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + "Effect" : "Allow", + "Action" : [ + "dynamodb:GetItem", + "dynamodb:UpdateItem", + "dynamodb:PutItem", + ], + "Resource" : [ + aws_dynamodb_table.mi_api_gp_ods.arn + ] + } + ] + }) +} + +resource "aws_iam_policy" "dynamodb_policy_bulk_ods_data_lambda" { + name = 
"dynamodb_${aws_dynamodb_table.mi_api_gp_ods.name}_bulk_update_lambda_policy" + path = "/" + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + "Effect" : "Allow", + "Action" : [ + "dynamodb:UpdateItem", + ], + "Resource" : [ + aws_dynamodb_table.mi_api_gp_ods.arn + ] + } + ] + }) +} \ No newline at end of file diff --git a/stacks/gp-registrations-mi/terraform/dynamodb-icb-ods.tf b/stacks/gp-registrations-mi/terraform/dynamodb-icb-ods.tf new file mode 100644 index 0000000..4db97a7 --- /dev/null +++ b/stacks/gp-registrations-mi/terraform/dynamodb-icb-ods.tf @@ -0,0 +1,74 @@ +resource "aws_dynamodb_table" "mi_api_icb_ods" { + name = "${var.environment}_mi_enrichment_icb_ods" + billing_mode = "PAY_PER_REQUEST" + deletion_protection_enabled = true + hash_key = "IcbOdsCode" + + attribute { + name = "IcbOdsCode" + type = "S" + } + + + import_table { + input_format = "CSV" + input_compression_type = "NONE" + s3_bucket_source { + bucket = aws_s3_bucket.ods_csv_files.id + key_prefix = aws_s3_object.initial_icb_ods_csv.key + } + + input_format_options { + csv { + delimiter = "," + } + } + } + + tags = { + Name = "mi_enrichment_icb_ods" + Environment = var.environment + } +} + +resource "aws_iam_policy" "dynamodb_policy_icb_ods_enrichment_lambda" { + name = "dynamodb_${aws_dynamodb_table.mi_api_icb_ods.name}_enrichment_lambda_policy" + path = "/" + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + "Effect" : "Allow", + "Action" : [ + "dynamodb:GetItem", + "dynamodb:UpdateItem", + "dynamodb:PutItem", + ], + "Resource" : [ + aws_dynamodb_table.mi_api_icb_ods.arn + ] + } + ] + }) +} + +resource "aws_iam_policy" "dynamodb_policy_bulk_icb_ods_data_lambda" { + name = "dynamodb_${aws_dynamodb_table.mi_api_icb_ods.name}_bulk_update_lambda_policy" + path = "/" + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + "Effect" : "Allow", + "Action" : [ + "dynamodb:UpdateItem", + ], + "Resource" : [ + aws_dynamodb_table.mi_api_icb_ods.arn + ] + } + ] + }) +} \ No newline at end of file diff --git a/stacks/gp-registrations-mi/terraform/ecs-service.tf b/stacks/gp-registrations-mi/terraform/ecs-service.tf index 5b026ec..616c7b6 100644 --- a/stacks/gp-registrations-mi/terraform/ecs-service.tf +++ b/stacks/gp-registrations-mi/terraform/ecs-service.tf @@ -13,7 +13,7 @@ resource "aws_ecs_service" "mi_service" { task_definition = aws_ecs_task_definition.gp_registrations_mi.arn desired_count = 1 launch_type = "FARGATE" - propagate_tags = "SERVICE" + propagate_tags = "SERVICE" network_configuration { subnets = split(",", data.aws_ssm_parameter.private_subnet_ids.value) diff --git a/stacks/gp-registrations-mi/terraform/error-alarm-alert-lambda.tf b/stacks/gp-registrations-mi/terraform/error-alarm-alert-lambda.tf index d47ad86..acc5b16 100644 --- a/stacks/gp-registrations-mi/terraform/error-alarm-alert-lambda.tf +++ b/stacks/gp-registrations-mi/terraform/error-alarm-alert-lambda.tf @@ -3,17 +3,17 @@ variable "error_alarm_alert_lambda_name" { } resource "aws_lambda_function" "error_alarm_alert_lambda" { - filename = "${path.cwd}/${var.error_alarm_alert_lambda_zip}" - function_name = "${var.environment}-${var.error_alarm_alert_lambda_name}" - role = aws_iam_role.error_alarm_alert_lambda_role.arn - handler = "error_alarm_alert_main.lambda_handler" - source_code_hash = filebase64sha256("${path.cwd}/${var.error_alarm_alert_lambda_zip}") - runtime = "python3.9" - timeout = 15 + filename = var.error_alarm_alert_lambda_zip + function_name = 
"${var.environment}-${var.error_alarm_alert_lambda_name}" + role = aws_iam_role.error_alarm_alert_lambda_role.arn + handler = "error_alarm_alert_main.lambda_handler" + source_code_hash = filebase64sha256(var.error_alarm_alert_lambda_zip) + runtime = "python3.12" + timeout = 15 tags = merge( local.common_tags, { - Name = "${var.environment}-error-alarm-alerts-lambda" + Name = "${var.environment}-error-alarm-alerts-lambda" ApplicationRole = "AwsLambdaFunction" } ) @@ -21,8 +21,8 @@ resource "aws_lambda_function" "error_alarm_alert_lambda" { environment { variables = { LOG_ALERTS_GENERAL_WEBHOOK_URL_PARAM_NAME = var.log_alerts_general_webhook_url_param_name, - CLOUDWATCH_ALARM_URL = "${data.aws_region.current.name}.console.aws.amazon.com/cloudwatch/home#alarmsV2:?~(alarmStateFilter~'ALARM)", - CLOUDWATCH_DASHBOARD_URL = "${data.aws_region.current.name}.console.aws.amazon.com/cloudwatch/home#dashboards:name=${aws_cloudwatch_dashboard.mi_api.dashboard_name}", + CLOUDWATCH_ALARM_URL = "${data.aws_region.current.name}.console.aws.amazon.com/cloudwatch/home#alarmsV2:?~(alarmStateFilter~'ALARM)", + CLOUDWATCH_DASHBOARD_URL = "${data.aws_region.current.name}.console.aws.amazon.com/cloudwatch/home#dashboards:name=${aws_cloudwatch_dashboard.mi_api.dashboard_name}", } } } diff --git a/stacks/gp-registrations-mi/terraform/event-enrichment-lambda.tf b/stacks/gp-registrations-mi/terraform/event-enrichment-lambda.tf index 29c2f0c..1758ad8 100644 --- a/stacks/gp-registrations-mi/terraform/event-enrichment-lambda.tf +++ b/stacks/gp-registrations-mi/terraform/event-enrichment-lambda.tf @@ -3,27 +3,29 @@ variable "event_enrichment_lambda_name" { } resource "aws_lambda_function" "event_enrichment_lambda" { - filename = "${path.cwd}/${var.event_enrichment_lambda_zip}" - function_name = "${var.environment}-${var.event_enrichment_lambda_name}" - role = aws_iam_role.event_enrichment_lambda_role.arn - handler = "event_enrichment_main.lambda_handler" - source_code_hash = filebase64sha256("${path.cwd}/${var.event_enrichment_lambda_zip}") - runtime = "python3.9" - timeout = 300 + filename = var.event_enrichment_lambda_zip + function_name = "${var.environment}-${var.event_enrichment_lambda_name}" + role = aws_iam_role.event_enrichment_lambda_role.arn + handler = "event_enrichment_main.lambda_handler" + source_code_hash = filebase64sha256(var.event_enrichment_lambda_zip) + runtime = "python3.12" + timeout = 300 tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi" + Name = "${var.environment}-gp-registrations-mi" ApplicationRole = "AwsLambdaFunction" } ) - + layers = [aws_lambda_layer_version.mi_enrichment.arn] environment { variables = { SPLUNK_CLOUD_EVENT_UPLOADER_SQS_QUEUE_URL = aws_sqs_queue.incoming_mi_events_for_splunk_cloud_event_uploader.url, - ENRICHED_EVENTS_SNS_TOPIC_ARN = aws_sns_topic.enriched_events_topic.arn, - SDS_FHIR_API_KEY_PARAM_NAME = var.sds_fhir_api_key_param_name, - SDS_FHIR_API_URL_PARAM_NAME = var.sds_fhir_api_url_param_name, + ENRICHED_EVENTS_SNS_TOPIC_ARN = aws_sns_topic.enriched_events_topic.arn, + SDS_FHIR_API_KEY_PARAM_NAME = var.sds_fhir_api_key_param_name, + SDS_FHIR_API_URL_PARAM_NAME = var.sds_fhir_api_url_param_name, + GP_ODS_DYNAMO_TABLE_NAME = aws_dynamodb_table.mi_api_gp_ods.name, + ICB_ODS_DYNAMO_TABLE_NAME = aws_dynamodb_table.mi_api_icb_ods.name, } } } @@ -31,6 +33,15 @@ resource "aws_lambda_function" "event_enrichment_lambda" { resource "aws_lambda_event_source_mapping" "sqs_queue_event_enrichment_lambda_trigger" { event_source_arn = 
aws_sqs_queue.incoming_mi_events_for_event_enrichment_lambda.arn function_name = aws_lambda_function.event_enrichment_lambda.arn + filter_criteria { + filter { + pattern = jsonencode({ + body = { + eventType : [{ "anything-but" : ["DEGRADES"] }] + } + }) + } + } } resource "aws_cloudwatch_log_group" "event_enrichment_lambda" { @@ -38,9 +49,9 @@ resource "aws_cloudwatch_log_group" "event_enrichment_lambda" { tags = merge( local.common_tags, { - Name = "${var.environment}-${var.event_enrichment_lambda_name}" + Name = "${var.environment}-${var.event_enrichment_lambda_name}" ApplicationRole = "AwsCloudwatchLogGroup" } ) retention_in_days = 60 -} +} \ No newline at end of file diff --git a/stacks/gp-registrations-mi/terraform/iam-event-enrichment.tf b/stacks/gp-registrations-mi/terraform/iam-event-enrichment.tf index ccf4bdd..de482f3 100644 --- a/stacks/gp-registrations-mi/terraform/iam-event-enrichment.tf +++ b/stacks/gp-registrations-mi/terraform/iam-event-enrichment.tf @@ -8,6 +8,19 @@ resource "aws_iam_role" "event_enrichment_lambda_role" { aws_iam_policy.event_enrichment_lambda_cloudwatch_log_access.arn, aws_iam_policy.enriched_mi_events_sns_publish_access.arn, aws_iam_policy.event_enrichment_lambda_ssm_access.arn, + aws_iam_policy.dynamodb_policy_ods_enrichment_lambda.arn, + aws_iam_policy.dynamodb_policy_icb_ods_enrichment_lambda.arn + ] +} + +resource "aws_iam_role" "bulk_ods_lambda" { + name = "${var.environment}-bulk-ods-lambda-role" + assume_role_policy = data.aws_iam_policy_document.lambda_assume_role.json + managed_policy_arns = [ + aws_iam_policy.dynamodb_policy_bulk_icb_ods_data_lambda.arn, + aws_iam_policy.dynamodb_policy_bulk_ods_data_lambda.arn, + aws_iam_policy.bulk_ods_update_lambda_cloudwatch_log_access.arn, + aws_iam_policy.ods_csv_files_data_policy.arn ] } @@ -24,7 +37,6 @@ data "aws_iam_policy_document" "event_enrichment_lambda_ssm_access" { actions = [ "ssm:GetParameter" ] - resources = [ "arn:aws:ssm:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:parameter${var.sds_fhir_api_key_param_name}", "arn:aws:ssm:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:parameter${var.sds_fhir_api_url_param_name}", @@ -66,7 +78,6 @@ data "aws_iam_policy_document" "incoming_event_enrichment_lambda_to_send_to_dlq_ actions = [ "sqs:SendMessage" ] - resources = [ aws_sqs_queue.incoming_mi_events_for_event_enrichment_lambda_dlq.arn ] @@ -91,3 +102,22 @@ data "aws_iam_policy_document" "event_enrichment_lambda_cloudwatch_log_access" { ] } } + + +resource "aws_iam_policy" "bulk_ods_update_lambda_cloudwatch_log_access" { + name = "${var.environment}-bulk-ods-update-lambda-log-access" + policy = data.aws_iam_policy_document.bulk_ods_update_lambda_cloudwatch_log_access.json +} + +data "aws_iam_policy_document" "bulk_ods_update_lambda_cloudwatch_log_access" { + statement { + sid = "CloudwatchLogs" + actions = [ + "logs:CreateLogStream", + "logs:PutLogEvents" + ] + resources = [ + "${aws_cloudwatch_log_group.bulk_ods_update_lambda.arn}:*", + ] + } +} \ No newline at end of file
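The filter_criteria added to the enrichment lambda's SQS event source mapping above stops DEGRADES events from invoking the function at all. Roughly, as a Python sketch of the matching rule (assuming the standard SQS record shape, where body is a JSON string):

import json


def passes_enrichment_filter(sqs_record: dict) -> bool:
    # "anything-but": ["DEGRADES"] matches only records whose body contains
    # an eventType field with any value other than "DEGRADES"
    body = json.loads(sqs_record["body"])
    return "eventType" in body and body["eventType"] != "DEGRADES"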
diff --git a/stacks/gp-registrations-mi/terraform/lambda-bulk-ods-update.tf b/stacks/gp-registrations-mi/terraform/lambda-bulk-ods-update.tf new file mode 100644 index 0000000..213fa01 --- /dev/null +++ b/stacks/gp-registrations-mi/terraform/lambda-bulk-ods-update.tf @@ -0,0 +1,74 @@ +resource "aws_lambda_function" "ods_bulk_update" { + filename = var.bulk_ods_update_lambda_zip + function_name = "${var.environment}-${var.ods_bulk_update_lambda_name}" + role = 
aws_iam_role.bulk_ods_lambda.arn + handler = "bulk_ods_update.lambda_handler" + source_code_hash = filebase64sha256(var.bulk_ods_update_lambda_zip) + runtime = "python3.12" + timeout = 300 + layers = [aws_lambda_layer_version.mi_enrichment.arn] + environment { + variables = { + TRUD_API_KEY_PARAM_NAME = data.aws_ssm_parameter.trud_api_key.name, + TRUD_FHIR_API_URL_PARAM_NAME = data.aws_ssm_parameter.trud_api_endpoint.value, + GP_ODS_DYNAMO_TABLE_NAME = aws_dynamodb_table.mi_api_gp_ods.name, + ICB_ODS_DYNAMO_TABLE_NAME = aws_dynamodb_table.mi_api_icb_ods.name, + ODS_S3_BUCKET_NAME = aws_s3_bucket.ods_csv_files.bucket + } + } + tags = merge( + local.common_tags, + { + Name = "${var.environment}-gp-mi-ods-bulk" + ApplicationRole = "AwsLambdaFunction" + } + ) +} + +resource "aws_cloudwatch_log_group" "bulk_ods_update_lambda" { + name = "/aws/lambda/${var.environment}-${var.ods_bulk_update_lambda_name}" + tags = merge( + local.common_tags, + { + Name = "${var.environment}-${var.ods_bulk_update_lambda_name}" + ApplicationRole = "AwsCloudwatchLogGroup" + } + ) + retention_in_days = 60 +} + +data "aws_ssm_parameter" "trud_api_key" { + name = "/registrations/${var.environment}/user-input/trud-api-key" +} + +data "aws_ssm_parameter" "trud_api_endpoint" { + name = "/registrations/${var.environment}/user-input/trud-api-url" +} + +resource "aws_cloudwatch_event_rule" "ods_bulk_update_schedule" { + name = "${var.environment}_ods_bulk_update_schedule" + description = "Schedule for ODS Update Lambda" + schedule_expression = "cron(0 2 ? * 1 *)" +} + +resource "aws_cloudwatch_event_target" "ods_bulk_update_schedule_event" { + rule = aws_cloudwatch_event_rule.ods_bulk_update_schedule.name + target_id = "ods_bulk_update_schedule" + + arn = aws_lambda_function.ods_bulk_update.arn + depends_on = [ + aws_lambda_function.ods_bulk_update, + aws_cloudwatch_event_rule.ods_bulk_update_schedule + ] +} + +resource "aws_lambda_permission" "ods_bulk_update_schedule_permission" { + statement_id = "AllowExecutionFromCloudWatch" + action = "lambda:InvokeFunction" + function_name = aws_lambda_function.ods_bulk_update.function_name + principal = "events.amazonaws.com" + source_arn = aws_cloudwatch_event_rule.ods_bulk_update_schedule.arn + depends_on = [ + aws_lambda_function.ods_bulk_update, + ] +} diff --git a/stacks/gp-registrations-mi/terraform/lambda_layer.tf b/stacks/gp-registrations-mi/terraform/lambda_layer.tf new file mode 100644 index 0000000..7024dbf --- /dev/null +++ b/stacks/gp-registrations-mi/terraform/lambda_layer.tf @@ -0,0 +1,6 @@ +resource "aws_lambda_layer_version" "mi_enrichment" { + filename = var.mi_enrichment_lambda_layer_zip + layer_name = "${var.environment}_mi_enrichment_layer" + compatible_runtimes = ["python3.12"] + compatible_architectures = ["x86_64"] +} \ No newline at end of file diff --git a/stacks/gp-registrations-mi/terraform/load-balancer.tf b/stacks/gp-registrations-mi/terraform/load-balancer.tf index 787d4d7..ed25c50 100644 --- a/stacks/gp-registrations-mi/terraform/load-balancer.tf +++ b/stacks/gp-registrations-mi/terraform/load-balancer.tf @@ -18,7 +18,7 @@ resource "aws_lb" "nlb" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-nlb" + Name = "${var.environment}-gp-registrations-mi-nlb" ApplicationRole = "AwsLb" } ) @@ -40,7 +40,7 @@ resource "aws_lb_target_group" "nlb" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-nlb-tg" + Name = "${var.environment}-gp-registrations-mi-nlb-tg" ApplicationRole = 
"AwsLbTargetGroup" } ) @@ -57,7 +57,7 @@ resource "aws_lb_listener" "nlb_listener" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-nlb-listener" + Name = "${var.environment}-gp-registrations-mi-nlb-listener" ApplicationRole = "AwsLbListener" } ) diff --git a/stacks/gp-registrations-mi/terraform/placeholder_lambda_payload.zip b/stacks/gp-registrations-mi/terraform/placeholder_lambda_payload.zip new file mode 100644 index 0000000000000000000000000000000000000000..6a5540e29caf7959e6d9e9e9a4ae4041c235a4d4 GIT binary patch literal 280 zcmWIWW@Zs#-~d7f2E{HQ0SBT$RzXf;a%x6?PD*M~d`@C+Qc9v;L8Z&tji;Cl1y~-~ z`5V?xwYI3^JH5qBY+v(}JGLQ{-fXvjoUCX_7 zrbMvsE8fVNu6c}Kx|x|FH9P9VtEbZOx>2jE6~8g=uAEnQS?SA{h1~zId+PD8^VPN?GKfs%vgH34K!Wy9CCIE4OHzSh>Gr|MNav%@Dz>-D~3#)em Syjj^ml8iv;45asiI1B)iT3G-9 literal 0 HcmV?d00001 diff --git a/stacks/gp-registrations-mi/terraform/provider.tf b/stacks/gp-registrations-mi/terraform/provider.tf index dc88521..df7478d 100644 --- a/stacks/gp-registrations-mi/terraform/provider.tf +++ b/stacks/gp-registrations-mi/terraform/provider.tf @@ -6,7 +6,7 @@ terraform { required_providers { aws = { source = "hashicorp/aws" - version = "~> 4.38" + version = "~> 5.0" } } } \ No newline at end of file diff --git a/stacks/gp-registrations-mi/terraform/queue.tf b/stacks/gp-registrations-mi/terraform/queue.tf index 171163b..dd6af81 100644 --- a/stacks/gp-registrations-mi/terraform/queue.tf +++ b/stacks/gp-registrations-mi/terraform/queue.tf @@ -1,7 +1,7 @@ # Splunk cloud uploader resource "aws_sqs_queue" "incoming_mi_events_for_splunk_cloud_event_uploader" { - name = "${var.environment}-gp-registrations-mi-events-queue-for-splunk-cloud-lambda" - sqs_managed_sse_enabled = true + name = "${var.environment}-gp-registrations-mi-events-queue-for-splunk-cloud-lambda" + sqs_managed_sse_enabled = true message_retention_seconds = 1209600 redrive_policy = jsonencode({ @@ -12,21 +12,21 @@ resource "aws_sqs_queue" "incoming_mi_events_for_splunk_cloud_event_uploader" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-sqs-queue-for-splunk-cloud" + Name = "${var.environment}-gp-registrations-mi-sqs-queue-for-splunk-cloud" ApplicationRole = "AwsSqsQueue" } ) } resource "aws_sqs_queue" "incoming_mi_events_for_splunk_cloud_event_uploader_dlq" { - name = "${var.environment}-gp-registrations-mi-events-queue-for-splunk-uploader-dlq" - sqs_managed_sse_enabled = true + name = "${var.environment}-gp-registrations-mi-events-queue-for-splunk-uploader-dlq" + sqs_managed_sse_enabled = true message_retention_seconds = 1209600 tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-sqs-queue-for-splunk-uploader-dlq" + Name = "${var.environment}-gp-registrations-mi-sqs-queue-for-splunk-uploader-dlq" ApplicationRole = "AwsSqsQueue" } ) @@ -43,7 +43,7 @@ resource "aws_sqs_queue_redrive_allow_policy" "incoming_mi_events_for_splunk_clo # Event enrichment lambda resource "aws_sqs_queue" "incoming_mi_events_for_event_enrichment_lambda" { - name = "${var.environment}-gp-registrations-mi-events-queue-for-enrichment-lambda" + name = "${var.environment}-gp-registrations-mi-events-queue-for-enrichment-lambda" sqs_managed_sse_enabled = true redrive_policy = jsonencode({ @@ -54,21 +54,21 @@ resource "aws_sqs_queue" "incoming_mi_events_for_event_enrichment_lambda" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-sqs-queue-for-enrichment" + Name = 
"${var.environment}-gp-registrations-mi-sqs-queue-for-enrichment" ApplicationRole = "AwsSqsQueue" } ) } resource "aws_sqs_queue" "incoming_mi_events_for_event_enrichment_lambda_dlq" { - name = "${var.environment}-gp-registrations-mi-events-queue-for-enrichment-dlq" - sqs_managed_sse_enabled = true + name = "${var.environment}-gp-registrations-mi-events-queue-for-enrichment-dlq" + sqs_managed_sse_enabled = true message_retention_seconds = 1209600 tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-sqs-queue-for-enrichment-dlq" + Name = "${var.environment}-gp-registrations-mi-sqs-queue-for-enrichment-dlq" ApplicationRole = "AwsSqsQueue" } ) @@ -85,7 +85,7 @@ resource "aws_sqs_queue_redrive_allow_policy" "incoming_mi_events_for_event_enri # S3 uploader resource "aws_sqs_queue" "incoming_mi_events_for_s3_event_uploader" { - name = "${var.environment}-gp-registrations-mi-events-queue-for-s3-uploader-lambda" + name = "${var.environment}-gp-registrations-mi-events-queue-for-s3-uploader-lambda" sqs_managed_sse_enabled = true redrive_policy = jsonencode({ @@ -96,21 +96,21 @@ resource "aws_sqs_queue" "incoming_mi_events_for_s3_event_uploader" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-sqs-queue-for-s3-uploader" + Name = "${var.environment}-gp-registrations-mi-sqs-queue-for-s3-uploader" ApplicationRole = "AwsSqsQueue" } ) } resource "aws_sqs_queue" "incoming_mi_events_for_s3_event_uploader_dlq" { - name = "${var.environment}-gp-registrations-mi-events-queue-for-s3-uploader-dlq" - sqs_managed_sse_enabled = true + name = "${var.environment}-gp-registrations-mi-events-queue-for-s3-uploader-dlq" + sqs_managed_sse_enabled = true message_retention_seconds = 1209600 tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-sqs-queue-for-s3-uploader-dlq" + Name = "${var.environment}-gp-registrations-mi-sqs-queue-for-s3-uploader-dlq" ApplicationRole = "AwsSqsQueue" } ) diff --git a/stacks/gp-registrations-mi/terraform/s3-event-uploader-lambda.tf b/stacks/gp-registrations-mi/terraform/s3-event-uploader-lambda.tf index 5e0b7b4..9da2104 100644 --- a/stacks/gp-registrations-mi/terraform/s3-event-uploader-lambda.tf +++ b/stacks/gp-registrations-mi/terraform/s3-event-uploader-lambda.tf @@ -3,17 +3,17 @@ variable "s3_event_uploader_lambda_name" { } resource "aws_lambda_function" "s3_event_uploader_lambda" { - filename = "${path.cwd}/${var.s3_event_uploader_lambda_zip}" - function_name = "${var.environment}-${var.s3_event_uploader_lambda_name}" - role = aws_iam_role.s3_event_uploader_role.arn - handler = "s3_event_uploader_main.lambda_handler" - source_code_hash = filebase64sha256("${path.cwd}/${var.s3_event_uploader_lambda_zip}") - runtime = "python3.9" - timeout = 15 + filename = var.s3_event_uploader_lambda_zip + function_name = "${var.environment}-${var.s3_event_uploader_lambda_name}" + role = aws_iam_role.s3_event_uploader_role.arn + handler = "s3_event_uploader_main.lambda_handler" + source_code_hash = filebase64sha256(var.s3_event_uploader_lambda_zip) + runtime = "python3.12" + timeout = 15 tags = merge( local.common_tags, { - Name = "${var.environment}-${var.s3_event_uploader_lambda_name}" + Name = "${var.environment}-${var.s3_event_uploader_lambda_name}" ApplicationRole = "AwsLambdaFunction" } ) @@ -35,7 +35,7 @@ resource "aws_cloudwatch_log_group" "s3_event_uploader_lambda" { tags = merge( local.common_tags, { - Name = "${var.environment}-${var.s3_event_uploader_lambda_name}" + Name = 
"${var.environment}-${var.s3_event_uploader_lambda_name}" ApplicationRole = "AwsCloudwatchLogGroup" } ) diff --git a/stacks/gp-registrations-mi/terraform/s3-ods-csv-updates.tf b/stacks/gp-registrations-mi/terraform/s3-ods-csv-updates.tf new file mode 100644 index 0000000..56f439c --- /dev/null +++ b/stacks/gp-registrations-mi/terraform/s3-ods-csv-updates.tf @@ -0,0 +1,88 @@ +resource "aws_s3_bucket" "ods_csv_files" { + bucket = "${var.environment}-ods-csv-files" + force_destroy = true + + tags = { + Name = "${var.environment}-ods-csv-files" + Environment = var.environment + } +} + +resource "aws_s3_bucket_lifecycle_configuration" "ods_csv_files" { + bucket = aws_s3_bucket.ods_csv_files.id + + rule { + id = "expire-ods-csv-after-3-months" + status = "Enabled" + + expiration { + days = 90 + } + } +} + +resource "aws_s3_bucket_public_access_block" "ods_csv_files" { + bucket = aws_s3_bucket.ods_csv_files.id + + block_public_acls = true + block_public_policy = true + ignore_public_acls = true + restrict_public_buckets = true +} + +resource "aws_s3_bucket_versioning" "ods_csv_files" { + bucket = aws_s3_bucket.ods_csv_files.id + + versioning_configuration { + status = "Enabled" + } +} + +resource "aws_s3_bucket_ownership_controls" "ods_csv_files" { + bucket = aws_s3_bucket.ods_csv_files.id + rule { + object_ownership = "ObjectWriter" + } +} + +resource "aws_s3_bucket_acl" "ods_csv_files" { + bucket = aws_s3_bucket.ods_csv_files.id + acl = "private" +} + +resource "aws_iam_policy" "ods_csv_files_data_policy" { + name = "${aws_s3_bucket.ods_csv_files.bucket}_get_document_data_policy" + + policy = jsonencode({ + "Version" : "2012-10-17", + "Statement" : [ + { + "Effect" : "Allow", + "Action" : [ + "s3:GetObject", + "s3:PutObject", + ], + "Resource" : ["${aws_s3_bucket.ods_csv_files.arn}/*"] + } + ] + }) +} + +resource "aws_s3_object" "initial_gp_ods_csv" { + bucket = aws_s3_bucket.ods_csv_files.id + key = "init/initial-gps-ods-csv" + source = "../scripts/initial_full_gps_ods.csv" + lifecycle { + ignore_changes = all + } +} + + +resource "aws_s3_object" "initial_icb_ods_csv" { + bucket = aws_s3_bucket.ods_csv_files.id + key = "init/initial-icb-ods-csv" + source = "../scripts/initial_full_icb_ods.csv" + lifecycle { + ignore_changes = all + } +} \ No newline at end of file diff --git a/stacks/gp-registrations-mi/terraform/s3.tf b/stacks/gp-registrations-mi/terraform/s3.tf index 0537ff2..f2d2395 100644 --- a/stacks/gp-registrations-mi/terraform/s3.tf +++ b/stacks/gp-registrations-mi/terraform/s3.tf @@ -5,24 +5,28 @@ resource "aws_s3_bucket" "mi_events_output" { prevent_destroy = true } - lifecycle_rule { - enabled = true - id = "expire-mi-objects-after-2-years" - - expiration { - days = 730 - } - } - tags = merge( local.common_tags, { - Name = "${var.environment}-prm-gp-registrations-mi-s3-mi-events" + Name = "${var.environment}-prm-gp-registrations-mi-s3-mi-events" ApplicationRole = "AwsS3Bucket" } ) } +resource "aws_s3_bucket_lifecycle_configuration" "mi_events_output" { + bucket = aws_s3_bucket.mi_events_output.id + + rule { + id = "expire-mi-objects-after-2-years" + status = "Enabled" + + expiration { + days = 730 + } + } +} + resource "aws_s3_bucket_acl" "mi_events_output" { bucket = aws_s3_bucket.mi_events_output.id acl = "private" diff --git a/stacks/gp-registrations-mi/terraform/security-group.tf b/stacks/gp-registrations-mi/terraform/security-group.tf index 4999ac9..02b4c96 100644 --- a/stacks/gp-registrations-mi/terraform/security-group.tf +++ 
b/stacks/gp-registrations-mi/terraform/security-group.tf @@ -4,7 +4,7 @@ resource "aws_security_group" "gp_registrations_mi_container" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-container" + Name = "${var.environment}-gp-registrations-mi-container" ApplicationRole = "AwsSecurityGroup" } ) diff --git a/stacks/gp-registrations-mi/terraform/splunk-cloud-event-uploader-lambda.tf b/stacks/gp-registrations-mi/terraform/splunk-cloud-event-uploader-lambda.tf index 05b6d8e..564a596 100644 --- a/stacks/gp-registrations-mi/terraform/splunk-cloud-event-uploader-lambda.tf +++ b/stacks/gp-registrations-mi/terraform/splunk-cloud-event-uploader-lambda.tf @@ -3,17 +3,17 @@ variable "splunk_cloud_event_uploader_lambda_name" { } resource "aws_lambda_function" "splunk_cloud_event_uploader_lambda" { - filename = "${path.cwd}/${var.splunk_cloud_event_uploader_lambda_zip}" - function_name = "${var.environment}-${var.splunk_cloud_event_uploader_lambda_name}" - role = aws_iam_role.splunk_cloud_event_uploader_lambda_role.arn - handler = "splunk_cloud_event_uploader_main.lambda_handler" - source_code_hash = filebase64sha256("${path.cwd}/${var.splunk_cloud_event_uploader_lambda_zip}") - runtime = "python3.9" - timeout = 15 + filename = var.splunk_cloud_event_uploader_lambda_zip + function_name = "${var.environment}-${var.splunk_cloud_event_uploader_lambda_name}" + role = aws_iam_role.splunk_cloud_event_uploader_lambda_role.arn + handler = "splunk_cloud_event_uploader_main.lambda_handler" + source_code_hash = filebase64sha256(var.splunk_cloud_event_uploader_lambda_zip) + runtime = "python3.12" + timeout = 15 tags = merge( local.common_tags, { - Name = "${var.environment}-${var.splunk_cloud_event_uploader_lambda_name}" + Name = "${var.environment}-${var.splunk_cloud_event_uploader_lambda_name}" ApplicationRole = "AwsLambdaFunction" } ) @@ -21,7 +21,7 @@ resource "aws_lambda_function" "splunk_cloud_event_uploader_lambda" { environment { variables = { SPLUNK_CLOUD_API_TOKEN = var.splunk_cloud_api_token_param_name, - SPLUNK_CLOUD_URL = var.splunk_cloud_url_param_name + SPLUNK_CLOUD_URL = var.splunk_cloud_url_param_name } } } @@ -36,7 +36,7 @@ resource "aws_cloudwatch_log_group" "splunk_cloud_event_uploader_lambda" { tags = merge( local.common_tags, { - Name = "${var.environment}-${var.splunk_cloud_event_uploader_lambda_name}" + Name = "${var.environment}-${var.splunk_cloud_event_uploader_lambda_name}" ApplicationRole = "AwsCloudwatchLogGroup" } ) diff --git a/stacks/gp-registrations-mi/terraform/task-definition.tf b/stacks/gp-registrations-mi/terraform/task-definition.tf index 6ac14e6..8cb3987 100644 --- a/stacks/gp-registrations-mi/terraform/task-definition.tf +++ b/stacks/gp-registrations-mi/terraform/task-definition.tf @@ -49,7 +49,7 @@ resource "aws_ecs_task_definition" "gp_registrations_mi" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi" + Name = "${var.environment}-gp-registrations-mi" ApplicationRole = "AwsEcsTaskDefinition" } ) diff --git a/stacks/gp-registrations-mi/terraform/topic.tf b/stacks/gp-registrations-mi/terraform/topic.tf index e0faefc..31179d1 100644 --- a/stacks/gp-registrations-mi/terraform/topic.tf +++ b/stacks/gp-registrations-mi/terraform/topic.tf @@ -1,5 +1,5 @@ resource "aws_sns_topic" "enriched_events_topic" { - name = "${var.environment}-${var.enriched_mi_events_sns_topic_name}" + name = "${var.environment}-${var.enriched_mi_events_sns_topic_name}" kms_master_key_id = "alias/aws/sns" sqs_failure_feedback_role_arn 
= aws_iam_role.sns_topic_enriched_mi_events_cloudwatch_log_access_role.arn @@ -9,7 +9,7 @@ resource "aws_sns_topic" "enriched_events_topic" { tags = merge( local.common_tags, { - Name = "${var.environment}-gp-registrations-mi-enriched-events-sns-topic" + Name = "${var.environment}-gp-registrations-mi-enriched-events-sns-topic" ApplicationRole = "AwsSnsTopic" } ) @@ -21,7 +21,7 @@ resource "aws_sns_topic" "error_alarm_alert_topic" { tags = merge( local.common_tags, { - Name = "${var.environment}-error-alarm-alert-topic" + Name = "${var.environment}-error-alarm-alert-topic" ApplicationRole = "AwsSnsTopic" } ) diff --git a/stacks/gp-registrations-mi/terraform/variables.tf b/stacks/gp-registrations-mi/terraform/variables.tf index c9015fe..005f1bb 100644 --- a/stacks/gp-registrations-mi/terraform/variables.tf +++ b/stacks/gp-registrations-mi/terraform/variables.tf @@ -84,28 +84,40 @@ variable "splunk_cloud_api_token_param_name" { description = "SSM param containing splunk cloud api token to send MI events to" } +variable "mi_enrichment_lambda_layer_zip" { + type = string + description = "Path to zipfile containing relevant packages for MI lambdas" + default = "../../../lambda/build/layers/mi-enrichment.zip" +} + variable "splunk_cloud_event_uploader_lambda_zip" { type = string description = "Path to zipfile containing lambda code for uploading events to splunk cloud" - default = "lambda/build/splunk-cloud-event-uploader.zip" + default = "../../../lambda/build/splunk-cloud-event-uploader.zip" } variable "event_enrichment_lambda_zip" { type = string description = "Path to zipfile containing lambda code for enriching MI events" - default = "lambda/build/event-enrichment.zip" + default = "../../../lambda/build/event-enrichment.zip" +} + +variable "bulk_ods_update_lambda_zip" { + type = string + description = "Path to zipfile containing lambda code for ODS update" + default = "../../../lambda/build/bulk-ods-update.zip" } variable "s3_event_uploader_lambda_zip" { type = string description = "Path to zipfile containing lambda code for uploading to s3 bucket" - default = "lambda/build/s3-event-uploader.zip" + default = "../../../lambda/build/s3-event-uploader.zip" } variable "error_alarm_alert_lambda_zip" { type = string description = "Path to zipfile containing lambda code for sending alerts to" - default = "lambda/build/error-alarm-alert.zip" + default = "../../../lambda/build/error-alarm-alert.zip" } variable "log_alerts_general_webhook_url_param_name" { @@ -127,3 +139,9 @@ variable "sds_fhir_api_url_param_name" { type = string description = "SSM param containing SDS FHIR API url to retrieve supplier details" } + +variable "ods_bulk_update_lambda_name" { + default = "ods_bulk_update_lambda" + type = string + description = "Name of the ODS bulk upload lambda" +} \ No newline at end of file diff --git a/tasks b/tasks deleted file mode 100755 index 7e2e340..0000000 --- a/tasks +++ /dev/null @@ -1,126 +0,0 @@ -#!/bin/bash - -set -Eeo pipefail - -if [ "$#" -ne 3 ]; then - echo "Usage: $0 TASK STACK STACK-ENVIRONMENT" - exit 1 -fi - -task="$1" -stack_name="$2" -stack_env="$3" -state_region="eu-west-2" -tf_dir=stacks/${stack_name}/terraform -env_name=$(jq -r .${stack_env}.name environment.json) -state_lock_table=$(jq -r .${stack_env}.dynamo_table environment.json) -state_bucket=$(jq -r .${stack_env}.bucket_name environment.json) -s3_state_key=$(jq -r .${stack_env}.state_key_prefix environment.json)${stack_name}/terraform.tfstate - -function assume_role() { - role_arn_param=$(jq -r 
.${stack_env}.role_arn_param environment.json) - if [ "$role_arn_param" != "null" ]; then - role_arn=$(aws ssm get-parameters --region ${state_region} --names ${role_arn_param} --query 'Parameters[0].Value' --output text) - session_name="gp-registrations-mi-${env_name}-session" - - - - sts=$( - aws sts assume-role \ - --role-arn $role_arn \ - --role-session-name $session_name \ - --output json - ) - - export AWS_ACCESS_KEY_ID=$(echo $sts | jq -r .Credentials.AccessKeyId) - export AWS_SECRET_ACCESS_KEY=$(echo $sts | jq -r .Credentials.SecretAccessKey) - export AWS_SESSION_TOKEN=$(echo $sts | jq -r .Credentials.SessionToken) - fi - -} - -export TF_DATA_DIR=.terraform/${stack_env}/${stack_name} - -function tf_init() { - assume_role - terraform -chdir=${tf_dir} init \ - -backend-config key=${s3_state_key} \ - -backend-config bucket=${state_bucket} \ - -backend-config dynamodb_table=${state_lock_table} \ - -backend-config region=${state_region} -} - - -function build_lambda { - lambda_name=$1 - - build_dir=lambda/build/$lambda_name - rm -rf $build_dir - mkdir -p $build_dir - - requirements_file=lambda/$lambda_name/requirements.txt - if test -f "$requirements_file"; then - pip install -r $requirements_file -t $build_dir - fi - - cp lambda/$lambda_name/*.py $build_dir - - pushd $build_dir - zip -r -X ../$lambda_name.zip . - popd -} - - -echo "--- ${task} ---" -case "${task}" in -validate) - tf_init - terraform -chdir=${tf_dir} validate - ;; -dojo-validate) - dojo "./tasks validate ${stack_name} ${stack_env}" - ;; -plan) - build_lambda error-alarm-alert - build_lambda splunk-cloud-event-uploader - build_lambda event-enrichment - build_lambda s3-event-uploader - tf_init - var_file=$(eval "pwd")/stacks/${stack_name}/vars/${stack_env}.tfvars - plan_output=$(eval "pwd")/stacks/${stack_name}/terraform/${stack_env}.tfplan - terraform -chdir=${tf_dir} plan -var environment=$env_name \ - -var-file=${var_file} \ - -out=${plan_output} - ;; -dojo-plan) - dojo "./tasks plan ${stack_name} ${stack_env}" - ;; -apply) - tf_init - terraform -chdir=${tf_dir} apply ${stack_env}.tfplan - ;; -dojo-apply) - dojo "./tasks apply ${stack_name} ${stack_env}" - ;; -format) - terraform -chdir=${tf_dir} fmt - ;; -destroy) - tf_init - var_file=$(eval "pwd")/stacks/${stack_name}/vars/${stack_env}.tfvars - terraform -chdir=${tf_dir} destroy -var environment=$env_name \ - -var-file=${var_file} - ;; -build-lambda) - build_lambda error-alarm-alert - build_lambda splunk-cloud-event-uploader - build_lambda event-enrichment - build_lambda s3-event-uploader -;; -*) - echo "Invalid task: '${task}'" - exit 1 - ;; -esac - -set +e diff --git a/tasks_github_actions.sh b/tasks_github_actions.sh new file mode 100755 index 0000000..6079094 --- /dev/null +++ b/tasks_github_actions.sh @@ -0,0 +1,61 @@ +#!/bin/bash + +set -Eeo pipefail + +task="$1" + +function build_lambda { + lambda_name=$1 + lambda_services=$2 + + build_dir=lambda/build/$lambda_name + rm -rf $build_dir + mkdir -p $build_dir + + if test "$lambda_services"; then + cp -r ./$lambda_services $build_dir + fi + cp lambda/$lambda_name/*.py $build_dir + + pushd $build_dir + zip -r -X ../$lambda_name.zip . 
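+ # The build above stages lambda/<name>/*.py plus any shared top-level package + # (e.g. utils/) into lambda/build/<name>/ and zips it as lambda/build/<name>.zip, + # matching the *_lambda_zip defaults in stacks/gp-registrations-mi/terraform/variables.tf.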
+ popd +} + +function build_lambda_layer { + layer_name=$1 + build_dir=lambda/build/layers/$layer_name + + rm -rf $build_dir/python + mkdir -p $build_dir/python + + requirements_file=lambda/$layer_name-requirements.txt + if test -f "$requirements_file"; then + python3 -m venv create_layer + source create_layer/bin/activate + pip install -r $requirements_file + fi + + cp -r create_layer/lib $build_dir/python + pushd $build_dir + zip -r -X ../$layer_name.zip . + popd +} + +echo "--- ${task} ---" +case "${task}" in +build-lambdas) + build_lambda_layer mi-enrichment + build_lambda bulk-ods-update utils + build_lambda error-alarm-alert + build_lambda splunk-cloud-event-uploader + build_lambda event-enrichment utils + build_lambda s3-event-uploader +;; +*) + echo "Invalid task: '${task}'" + exit 1 + ;; +esac + +set +e diff --git a/utils/__init__.py b/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/utils/enums/__init__.py b/utils/enums/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/utils/enums/trud.py b/utils/enums/trud.py new file mode 100644 index 0000000..2cceb3b --- /dev/null +++ b/utils/enums/trud.py @@ -0,0 +1,12 @@ +from enum import StrEnum + + +class TrudItem(StrEnum): + NHS_ODS_WEEKLY = "58" + ORG_REF_DATA_MONTHLY = "242" + + +class OdsDownloadType(StrEnum): + GP = "ODS_GP" + ICB = "ODS_ICB" + BOTH = "ODS_GP_ICB" diff --git a/utils/models/__init__.py b/utils/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/utils/models/ods_models.py b/utils/models/ods_models.py new file mode 100644 index 0000000..4001feb --- /dev/null +++ b/utils/models/ods_models.py @@ -0,0 +1,25 @@ +import os + +from pynamodb.attributes import UnicodeAttribute, UTCDateTimeAttribute +from pynamodb.models import Model + + +class PracticeOds(Model): + class Meta: + table_name = os.getenv("GP_ODS_DYNAMO_TABLE_NAME") + + practice_ods_code = UnicodeAttribute(hash_key=True, attr_name="PracticeOdsCode") + practice_name = UnicodeAttribute(attr_name="PracticeName") + icb_ods_code = UnicodeAttribute(null=True, attr_name="IcbOdsCode") + supplier_name = UnicodeAttribute(null=True, attr_name="SupplierName") + supplier_last_updated = UTCDateTimeAttribute( + null=True, attr_name="SupplierLastUpdated" + ) + + +class IcbOds(Model): + class Meta: + table_name = os.getenv("ICB_ODS_DYNAMO_TABLE_NAME") + + icb_ods_code = UnicodeAttribute(hash_key=True, attr_name="IcbOdsCode") + icb_name = UnicodeAttribute(attr_name="IcbName") diff --git a/utils/services/__init__.py b/utils/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/utils/services/ssm_service.py b/utils/services/ssm_service.py new file mode 100644 index 0000000..b0a1fe3 --- /dev/null +++ b/utils/services/ssm_service.py @@ -0,0 +1,7 @@ +class SsmSecretManager: + def __init__(self, ssm): + self._ssm = ssm + + def get_secret(self, name): + response = self._ssm.get_parameter(Name=name, WithDecryption=True) + return response["Parameter"]["Value"] \ No newline at end of file diff --git a/utils/services/trud_api_service.py b/utils/services/trud_api_service.py new file mode 100644 index 0000000..67dad0e --- /dev/null +++ b/utils/services/trud_api_service.py @@ -0,0 +1,67 @@ +import os +from io import BytesIO +from zipfile import ZipFile +import urllib3 +from urllib3.util.retry import Retry + +import logging + +from utils.enums.trud import TrudItem + +logger = logging.getLogger() +logger.setLevel(logging.INFO) + + +class TrudApiService: + def __init__(self, api_key: str, api_url: str): + 
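+ """Client for the NHS TRUD releases API. The API key is appended to api_url when building request URLs, and requests are retried up to three times with backoff on the status codes listed below."""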
self.api_key = api_key + self.api_url = api_url + + retry_strategy = Retry( + total=3, backoff_factor=1, status_forcelist=[400, 404, 500, 502, 503, 504] + ) + + self.http = urllib3.PoolManager(retries=retry_strategy) + + def get_release_list(self, item_number: TrudItem, is_latest=False): + latest = "?latest" if is_latest else "" + url_endpoint = ( + self.api_url + self.api_key + "/items/" + item_number + "/releases" + latest + ) + + try: + trud_response = self.http.request("GET", url_endpoint) + response = trud_response.json().get("releases", []) + trud_response.release_conn() + + return response + except Exception as e: + logger.info(f"An unexpected error occurred: {e}") + raise e + + def get_download_url_by_release( + self, releases_list, break_at_quarterly_release=True + ): + download_url_by_release = {} + for release in releases_list: + download_url_by_release[release["name"]] = release.get("archiveFileUrl") + if break_at_quarterly_release and release["name"].endswith(".0.0"): + break + return download_url_by_release + + def get_download_file(self, download_url): + try: + download_response = self.http.request("GET", download_url) + logger.info(download_response) + return download_response.data + except Exception as e: + logger.info(f"An unexpected error occurred: {e}") + raise e + + def unzipping_files(self, zip_file, path=None, path_to_extract=None, byte: bool = False): + myzip = ZipFile(BytesIO(zip_file) if byte else zip_file) + if path_to_extract is None: + path_to_extract = os.getcwd() + if path in myzip.namelist(): + return myzip.extract(path, path_to_extract) + return None diff --git a/utils/trud_files.py b/utils/trud_files.py new file mode 100644 index 0000000..26b5527 --- /dev/null +++ b/utils/trud_files.py @@ -0,0 +1,68 @@ +GP_WEEKLY_ZIP_FILE_PATH = "Data/epraccur.zip" +GP_WEEKLY_FILE_NAME = "epraccur.csv" + +ICB_MONTHLY_FILE_PATH = "eamendam.zip" +ICB_MONTHLY_FILE_NAME = "eccgam.csv" + +ICB_QUARTERLY_FILE_PATH = "ocsissue/data/eccg.zip" +ICB_QUARTERLY_FILE_NAME = "eccg.csv" + +GP_FILE_HEADERS = [ + "PracticeOdsCode", + "PracticeName", + "NationalGrouping", + "HighLevelHealthGeography", + "AddressLine1", + "AddressLine2", + "AddressLine3", + "AddressLine4", + "AddressLine5", + "Postcode", + "OpenDate", + "CloseDate", + "StatusCode", + "OrganisationSubTypeCode", + "IcbOdsCode", + "JoinParentDate", + "LeftParentDate", + "ContactTelephoneNumber", + "Null", + "Null2", + "Null3", + "AmendedRecordIndicator", + "Null4", + "ProviderPurchaser", + "Null5", + "PracticeType", + "Null6", +] + +ICB_FILE_HEADERS = [ + "IcbOdsCode", + "IcbName", + "NationalGrouping", + "HighLevelHealthGeography", + "AddressLine1", + "AddressLine2", + "AddressLine3", + "AddressLine4", + "AddressLine5", + "Postcode", + "OpenDate", + "CloseDate", + "Null1", + "OrganisationSubTypeCode", + "Null2", + "Null3", + "Null4", + "Null5", + "Null6", + "Null7", + "Null8", + "AmendedRecordIndicator", + "Null9", + "Null10", + "Null11", + "Null12", + "Null13", +]
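For reference, a minimal sketch (not part of the patch) of how the enrichment lambda can consult the new tables through the PynamoDB models above, falling back to the previous API method when no ODS data exists, as described in the PR summary. The fetch_supplier_from_sds helper is hypothetical, standing in for the existing SDS FHIR fallback path:

from utils.models.ods_models import IcbOds, PracticeOds


def enrich_practice(practice_ods_code: str) -> dict:
    try:
        # GetItem against the <environment>_mi_enrichment_practice_ods table by hash key
        practice = PracticeOds.get(practice_ods_code)
    except PracticeOds.DoesNotExist:
        # Fall back to the previous API method when no ODS data exists (hypothetical helper)
        return fetch_supplier_from_sds(practice_ods_code)

    icb_name = None
    if practice.icb_ods_code:
        try:
            icb_name = IcbOds.get(practice.icb_ods_code).icb_name
        except IcbOds.DoesNotExist:
            pass  # ICB table is refreshed monthly and may briefly lag the GP data

    return {
        "practiceName": practice.practice_name,
        "icbOdsCode": practice.icb_ods_code,
        "icbName": icb_name,
    }

Both models resolve their table names from the GP_ODS_DYNAMO_TABLE_NAME and ICB_ODS_DYNAMO_TABLE_NAME environment variables, which is why the event enrichment lambda's environment block now sets both.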