diff --git a/.github/workflows/aws-bootstrap-template.yml b/.github/workflows/aws-bootstrap-template.yml deleted file mode 100644 index 3f634a3d..00000000 --- a/.github/workflows/aws-bootstrap-template.yml +++ /dev/null @@ -1,134 +0,0 @@ -name: AWS Bootstrap Workflow - -on: - workflow_call: - inputs: - CONTEXT_FOLDER: - required: true - type: string - ENVIRONMENT_NAME: - required: true - type: string - TOOLKIT_STACK_NAME: - required: true - type: string - QUALIFIER: - required: true - type: string - -permissions: - id-token: write - contents: read - -jobs: - check_changes: - name: Check Changes - runs-on: ubuntu-20.04 - outputs: - infra_changed: ${{ steps.check_changes.outputs.infra_changed }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 2 - - name: Check modified folders - id: check_changes - env: - CONTEXT_FOLDER: ${{ inputs.CONTEXT_FOLDER }} - CHANGE_FOLDER_NAME: ${{ inputs.CHANGE_FOLDER_NAME }} - run: | - echo "=============== list modified files ===============" - git diff --name-only HEAD^ HEAD - - echo "========== check paths of modified files ==========" - git diff --name-only HEAD^ HEAD >> files.txt - infra_changed=false - while IFS= read -r file - do - echo $file - if [[ $file == $CHANGE_FOLDER_NAME/* ]]; then - infra_changed=true - break - fi - done < files.txt - echo "infra_changed=$infra_changed" >> "$GITHUB_OUTPUT" - - - - synth_deploy_state_components: - name: Synth and Deploy Terraform State Components - runs-on: ubuntu-20.04 - environment: ${{ inputs.ENVIRONMENT_NAME }} - needs: [check_changes, build_push_api_auth_lambda, build_push_cdc_events_lambda, build_push_cdc_auth_lambda] - # if: needs.check_changes.outputs.infra_changed == 'true' - steps: - - uses: actions/checkout@v4 - - - name: Get AWS Account ID - run: echo "CDK_DEFAULT_ACCOUNT=${{ vars.AWS_ACCOUNT }}" >> $GITHUB_ENV - - - name: Get AWS Region - run: echo "CDK_DEFAULT_REGION=${{ vars.AWS_REGION }}" >> $GITHUB_ENV - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - role-skip-session-tagging: true - aws-region: ${{ vars.AWS_REGION }} - role-to-assume: ${{ vars.AWS_ROLE_ARN }} - role-duration-seconds: 1800 - role-session-name: ci-deployment - - - - - name: Install Node.js - uses: actions/setup-node@v4 - with: - node-version: '20.12.2' - - - name: Install NPM Modules - run: "npm config set engine-strict true && npm ci" - working-directory: ${{ inputs.CONTEXT_FOLDER }} - - - name: Build - run: "npm run build" - working-directory: ${{ inputs.CONTEXT_FOLDER }} - - - name: Install AWS CDK - run: "npm install -g aws-cdk@2.85.0" - - - name: CDK Bootstrap - env: - TOOLKIT_STACK_NAME: ${{ inputs.TOOLKIT_STACK_NAME }} - QUALIFIER: ${{ inputs.QUALIFIER }} - BRANCH_NAME: ${{ inputs.BRANCH_NAME }} - ENV_NAME: ${{ inputs.ENVIRONMENT_NAME }} - run: | - echo "Running CDK Bootstrap" - npx cdk bootstrap --toolkit-stack-name $TOOLKIT_STACK_NAME --qualifier $QUALIFIER --context branch-name=$BRANCH_NAME - working-directory: ${{ inputs.CONTEXT_FOLDER }} - - - name: CDK Synth - env: - TOOLKIT_STACK_NAME: ${{ inputs.TOOLKIT_STACK_NAME }} - QUALIFIER: ${{ inputs.QUALIFIER }} - BRANCH_NAME: ${{ inputs.BRANCH_NAME }} - ENV_NAME: ${{ inputs.BRANCH_NAME }} - run: | - echo "Running CDK Synth" - npx cdk synth --toolkit-stack-name $TOOLKIT_STACK_NAME --qualifier $QUALIFIER --context branch-name=$BRANCH_NAME - working-directory: ${{ inputs.CONTEXT_FOLDER }} - - - - - name: CDK Deploy - id: cdk_deploy - env: - TOOLKIT_STACK_NAME: ${{ inputs.TOOLKIT_STACK_NAME }} - QUALIFIER: ${{ 
inputs.QUALIFIER }} - BRANCH_NAME: ${{ inputs.BRANCH_NAME }} - ENV_NAME: ${{ inputs.BRANCH_NAME }} - run: | - npx cdk deploy --toolkit-stack-name $TOOLKIT_STACK_NAME --qualifier $QUALIFIER --require-approval never --all - working-directory: ${{ inputs.CONTEXT_FOLDER }} - - \ No newline at end of file diff --git a/.github/workflows/aws-template-terraform.yml b/.github/workflows/aws-template-terraform.yml deleted file mode 100644 index f2c7081e..00000000 --- a/.github/workflows/aws-template-terraform.yml +++ /dev/null @@ -1,131 +0,0 @@ -name: AWS Template Workflow - -on: - workflow_call: - inputs: - CONTEXT_FOLDER: - required: true - type: string - ENVIRONMENT_NAME: - required: true - type: string - CHANGE_FOLDER_NAME: - required: true - type: string - APPLY_TF_CODE: - required: true - default: false - type: boolean - -permissions: - id-token: write - contents: read - -jobs: - check_changes: - name: Check Changes - runs-on: ubuntu-20.04 - outputs: - infra_changed: ${{ steps.check_changes.outputs.infra_changed }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 2 - - name: Check modified folders - id: check_changes - env: - CONTEXT_FOLDER: ${{ inputs.CONTEXT_FOLDER }} - CHANGE_FOLDER_NAME: ${{ inputs.CHANGE_FOLDER_NAME }} - run: | - echo "=============== list modified files ===============" - git diff --name-only HEAD^ HEAD - - echo "========== check paths of modified files ==========" - git diff --name-only HEAD^ HEAD >> files.txt - infra_changed=false - while IFS= read -r file - do - echo $file - if [[ $file == $CHANGE_FOLDER_NAME/* ]]; then - infra_changed=true - break - fi - done < files.txt - echo "infra_changed=$infra_changed" >> "$GITHUB_OUTPUT" - - scan: - name: Scan TF Code - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - name: tfsec - uses: aquasecurity/tfsec-sarif-action@v0.1.4 - with: - sarif_file: tfsec.sarif - working_directory: ${{ inputs.CONTEXT_FOLDER }} - tfsec_args: "--tfvars-file=${{ inputs.CONTEXT_FOLDER }}/${{ inputs.ENVIRONMENT_NAME }}.tfvars" - - name: Upload SARIF file - uses: github/codeql-action/upload-sarif@v3 - with: - sarif_file: tfsec.sarif - needs: [check_changes] - - deploy_infra: - name: Deploy Infra - runs-on: ubuntu-latest - environment: - name: ${{ inputs.ENVIRONMENT_NAME }} - env: - TF_VAR_app_name: ${{ vars.APP_NAME }} - TF_VAR_environment: ${{ vars.ENVIRONMENT_NAME }} - TF_VAR_kms_key_name: ${{ vars.KMS_KEY_NAME }} - TF_VAR_vpc_id: ${{ vars.VPC_ID }} - TF_VAR_lambda_memory_size: ${{ vars.LAMBDA_MEMORY_SIZE }} - needs: [check_changes, scan] - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - role-skip-session-tagging: true - aws-region: ${{ vars.AWS_REGION }} - role-to-assume: ${{ vars.AWS_ROLE_ARN }} - role-duration-seconds: 1800 - role-session-name: ci-deployment - - - name: Setup Terraform - uses: hashicorp/setup-terraform@v3 - with: - terraform_version: 1.9.0 - - name: Terraform Init - id: init - env: - CONTEXT_FOLDER: ${{ inputs.CONTEXT_FOLDER }} - run: | - terraform init -input=false -backend-config=backend.tfvars -var-file=${{ inputs.ENVIRONMENT_NAME }}.tfvars - working-directory: ${{ inputs.CONTEXT_FOLDER }} - - name: Terraform Plan - id: plan - env: - CONTEXT_FOLDER: ${{ inputs.CONTEXT_FOLDER }} - run: | - terraform plan -no-color -input=false -var-file=${{ inputs.ENVIRONMENT_NAME }}.tfvars - 
continue-on-error: true - working-directory: ${{ inputs.CONTEXT_FOLDER }} - - name: Terraform Plan Status - if: steps.plan.outcome == 'failure' - run: exit 1 - - name: Terraform Apply - if: inputs.APPLY_TF_CODE == true - env: - CONTEXT_FOLDER: ${{ inputs.CONTEXT_FOLDER }} - run: | - terraform apply --auto-approve -input=false -var-file=${{ inputs.ENVIRONMENT_NAME }}.tfvars - working-directory: ${{ inputs.CONTEXT_FOLDER }} diff --git a/.github/workflows/build-infra.yml b/.github/workflows/build-infra.yml index 1a6d36c4..c8b791c9 100644 --- a/.github/workflows/build-infra.yml +++ b/.github/workflows/build-infra.yml @@ -21,23 +21,89 @@ on: - test - prod -jobs: - build-initial: - uses: ./.github/workflows/aws-template-terraform.yml - with: - CONTEXT_FOLDER: "./infrastructure/cloud/environments/initial" - CHANGE_FOLDER_NAME: environments/${{ inputs.environment || 'dev' }} - ENVIRONMENT_NAME: ${{ inputs.environment || 'dev' }} - APPLY_TF_CODE: false - secrets: inherit +env: + WORKING_DIRECTORY: "./infrastructure/cloud/environments/${{ inputs.environment }}" +jobs: build: - needs: [build-initial] - uses: ./.github/workflows/aws-template-terraform.yml - with: - CONTEXT_FOLDER: "./infrastructure/cloud/environments/${{ inputs.environment || 'dev' }}" - CHANGE_FOLDER_NAME: environments/${{ inputs.environment || 'dev' }} - ENVIRONMENT_NAME: ${{ inputs.environment || 'dev' }} - APPLY_TF_CODE: false - secrets: inherit - \ No newline at end of file + runs-on: ubuntu-latest + environment: + name: ${{ inputs.environment || 'dev' }} + env: + TF_VAR_app_name: ${{ vars.APP_NAME }} + TF_VAR_environment: ${{ vars.ENVIRONMENT_NAME }} + TF_VAR_kms_key_name: ${{ vars.KMS_KEY_NAME }} + TF_VAR_vpc_id: ${{ vars.VPC_ID }} + TF_VAR_lambda_memory_size: ${{ vars.LAMBDA_MEMORY_SIZE }} + permissions: + id-token: write + actions: read + contents: read + security-events: write + steps: + - name: Determine environment + id: determine-environment + run: | + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + echo "environment=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + else + echo "environment=dev" >> $GITHUB_ENV + echo "WORKING_DIRECTORY=${{ env.WORKING_DIRECTORY }}/dev" >> $GITHUB_ENV + fi + + - name: Checkout repository + uses: actions/checkout@v4 + + - name: tfsec + uses: aquasecurity/tfsec-sarif-action@v0.1.4 + with: + sarif_file: tfsec.sarif + working_directory: ${{ env.WORKING_DIRECTORY }} + tfsec_args: "--tfvars-file=${{ env.WORKING_DIRECTORY }}/${{ env.environment }}.tfvars" + + - name: Upload SARIF file + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: tfsec.sarif + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-skip-session-tagging: true + aws-region: ${{ vars.AWS_REGION }} + role-to-assume: ${{ vars.AWS_ROLE_ARN }} + role-duration-seconds: 1800 + role-session-name: ci-deployment + + - name: Setup Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: 1.9.0 + + - name: Terraform Init + id: init + run: | + terraform init -input=false -backend-config=backend.tfvars -var-file=${{ env.environment }}.tfvars + working-directory: ${{ env.WORKING_DIRECTORY }} + + - name: Terraform Plan (Initial Stack) + id: plan-initial + run: | + terraform plan -target=module.initial -no-color -input=false -var-file=${{ env.environment }}.tfvars + continue-on-error: true + working-directory: ${{ env.WORKING_DIRECTORY }} + + - name: Terraform Plan (Initial) Status + if: steps.plan-initial.outcome == 'failure' + run: exit 1 + 
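+      # Note: the main-stack plan below covers the full environment. It assumes the resources
+      # owned by module.initial (e.g. ECR repos, KMS key, openshiftuser) already exist, hence
+      # the separate -target=module.initial plan above.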
+ - name: Terraform Plan (Main Stack) + id: plan-main + run: | + terraform plan -no-color -input=false -var-file=${{ env.environment }}.tfvars + continue-on-error: true + working-directory: ${{ env.WORKING_DIRECTORY }} + + - name: Terraform Plan (Main) Status + if: steps.plan-main.outcome == 'failure' + run: exit 1 \ No newline at end of file diff --git a/.github/workflows/deploy-infra-sandbox.yml b/.github/workflows/deploy-infra-sandbox.yml deleted file mode 100644 index d1992e21..00000000 --- a/.github/workflows/deploy-infra-sandbox.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: Deploy AWS Infra to Sandbox - -on: - workflow_dispatch: - -jobs: - infrastructure_deploy_snd: - uses: ./.github/workflows/aws-template-terraform.yml - with: - CONTEXT_FOLDER: ./infrastructure/cloud/environments/sandbox - CHANGE_FOLDER_NAME: environments/sandbox - ENVIRONMENT_NAME: sandbox - TEST_BUCKET_NAME: jasper-test-bucket - secrets: inherit - - \ No newline at end of file diff --git a/.github/workflows/publish-api.yml b/.github/workflows/publish-api.yml index 4358c940..eb7465aa 100644 --- a/.github/workflows/publish-api.yml +++ b/.github/workflows/publish-api.yml @@ -118,20 +118,19 @@ jobs: - name: Checkout uses: actions/checkout@v4 - # Uncomment when infra in AWS in TEST environment has been configured - # - name: Deploy to ${{ env.ENVIRONMENT }} - # uses: ./.github/workflows/actions/deploy-app - # with: - # environment: ${{ env.ENVIRONMENT }} - # aws_account: ${{ vars.AWS_ACCOUNT }} - # region: ${{ vars.AWS_REGION }} - # app_name: ${{ vars.APP_NAME }} - # aws_role_arn: ${{ vars.AWS_ROLE_ARN }} - # ghcr_token: ${{ secrets.GITHUB_TOKEN }} - # github_image_repo: ${{ env.GITHUB_IMAGE_REPO }} - # image_name: ${{ env.IMAGE_NAME }} - # tier_name: api - # short_sha: ${{ needs.build.outputs.short_sha }} + - name: Deploy to ${{ env.ENVIRONMENT }} + uses: ./.github/workflows/actions/deploy-app + with: + environment: ${{ env.ENVIRONMENT }} + aws_account: ${{ vars.AWS_ACCOUNT }} + region: ${{ vars.AWS_REGION }} + app_name: ${{ vars.APP_NAME }} + aws_role_arn: ${{ vars.AWS_ROLE_ARN }} + ghcr_token: ${{ secrets.GITHUB_TOKEN }} + github_image_repo: ${{ env.GITHUB_IMAGE_REPO }} + image_name: ${{ env.IMAGE_NAME }} + tier_name: api + short_sha: ${{ needs.build.outputs.short_sha }} deploy2prod: name: Deploy to PROD diff --git a/.github/workflows/publish-infra.yml b/.github/workflows/publish-infra.yml index dae1467a..39c6f787 100644 --- a/.github/workflows/publish-infra.yml +++ b/.github/workflows/publish-infra.yml @@ -20,43 +20,83 @@ on: - dev - test - prod - run_initial: - description: "Run initial Terraform setup?" 
- required: false - default: "No" - type: choice - options: - - "No" - - "Yes" + +env: + WORKING_DIRECTORY: "./infrastructure/cloud/environments/${{ inputs.environment }}" + DUMMY_IMAGE_NAME: dummy-image + GITHUB_IMAGE_REPO: ghcr.io/bcgov/jasper + APP_ECR_REPO_URL: ${{ vars.AWS_ACCOUNT }}.dkr.ecr.${{ vars.AWS_REGION }}.amazonaws.com/${{ vars.APP_NAME }}-app-repo-${{ inputs.environment }} + LAMBDA_ECR_REPO_URL: ${{ vars.AWS_ACCOUNT }}.dkr.ecr.${{ vars.AWS_REGION }}.amazonaws.com/${{ vars.APP_NAME }}-lambda-repo-${{ inputs.environment }} jobs: - initial: - if: ${{ inputs.run_initial == 'Yes' }} - uses: ./.github/workflows/aws-template-terraform.yml - with: - CONTEXT_FOLDER: "./infrastructure/cloud/environments/initial" - CHANGE_FOLDER_NAME: environments/${{ inputs.environment }} - ENVIRONMENT_NAME: ${{ inputs.environment || 'dev' }} - APPLY_TF_CODE: true - secrets: inherit - - post-initial: + deploy: runs-on: ubuntu-latest - needs: initial - if: ${{ inputs.run_initial == 'Yes' }} - environment: ${{ inputs.environment }} + environment: + name: ${{ inputs.environment }} + env: + TF_VAR_app_name: ${{ vars.APP_NAME }} + TF_VAR_environment: ${{ vars.ENVIRONMENT_NAME }} + TF_VAR_kms_key_name: ${{ vars.KMS_KEY_NAME }} + TF_VAR_vpc_id: ${{ vars.VPC_ID }} + TF_VAR_lambda_memory_size: ${{ vars.LAMBDA_MEMORY_SIZE }} permissions: id-token: write + actions: read + contents: read + security-events: write packages: write - env: - DUMMY_IMAGE_NAME: dummy-image - GITHUB_IMAGE_REPO: ghcr.io/bcgov/jasper - APP_ECR_REPO_URL: ${{ vars.AWS_ACCOUNT }}.dkr.ecr.${{ vars.AWS_REGION }}.amazonaws.com/${{ vars.APP_NAME }}-app-repo-${{ inputs.environment }} - LAMBDA_ECR_REPO_URL: ${{ vars.AWS_ACCOUNT }}.dkr.ecr.${{ vars.AWS_REGION }}.amazonaws.com/${{ vars.APP_NAME }}-lambda-repo-${{ inputs.environment }} steps: - name: Checkout repository uses: actions/checkout@v4 + + - name: tfsec + uses: aquasecurity/tfsec-sarif-action@v0.1.4 + with: + sarif_file: tfsec.sarif + working_directory: ${{ env.WORKING_DIRECTORY }} + tfsec_args: "--tfvars-file=${{ env.WORKING_DIRECTORY }}/${{ inputs.environment }}.tfvars" + + - name: Upload SARIF file + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: tfsec.sarif + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-skip-session-tagging: true + aws-region: ${{ vars.AWS_REGION }} + role-to-assume: ${{ vars.AWS_ROLE_ARN }} + role-duration-seconds: 1800 + role-session-name: ci-deployment + + - name: Setup Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: 1.9.0 + + - name: Terraform Init + id: init + run: | + terraform init -input=false -backend-config=backend.tfvars -var-file=${{ inputs.environment }}.tfvars + working-directory: ${{ env.WORKING_DIRECTORY }} + + - name: Terraform Plan (Initial Stack) + id: plan-initial + run: | + terraform plan -target=module.initial -no-color -input=false -var-file=${{ inputs.environment }}.tfvars + continue-on-error: true + working-directory: ${{ env.WORKING_DIRECTORY }} + - name: Terraform Plan (Initial) Status + if: steps.plan-initial.outcome == 'failure' + run: exit 1 + + - name: Terraform Apply (Initial Stack) + run: | + terraform apply -target=module.initial --auto-approve -input=false -var-file=${{ inputs.environment }}.tfvars + working-directory: ${{ env.WORKING_DIRECTORY }} + - name: Log in to the GHCR uses: docker/login-action@v3 with: @@ -72,7 +112,7 @@ jobs: ${{ env.GITHUB_IMAGE_REPO }}/${{ env.DUMMY_IMAGE_NAME }} tags: | type=raw,value=latest - + - name: Set 
up Docker Buildx uses: docker/setup-buildx-action@v3 with: @@ -87,39 +127,71 @@ jobs: tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - role-skip-session-tagging: true - aws-region: ${{ vars.AWS_REGION }} - role-to-assume: ${{ vars.AWS_ROLE_ARN }} - role-duration-seconds: 1800 - role-session-name: ci-deployment - - name: Login to Amazon ECR uses: aws-actions/amazon-ecr-login@v2 - - name: Push dummy images to App ECR + - name: Check dummy-image in App ECR exists + id: app-ecr-check + shell: bash + run: | + IMAGE_TAG=${{ env.DUMMY_IMAGE_NAME }} + REPOSITORY_NAME=${{ vars.APP_NAME }}-app-repo-${{ inputs.environment }} + + IMAGE_EXISTS=$(aws ecr describe-images --repository-name $REPOSITORY_NAME --query "imageDetails[?contains(imageTags, '$IMAGE_TAG')]" --output text) + + if [ -z "$IMAGE_EXISTS" ]; then + echo "Image with tag $IMAGE_TAG does not exist." + echo "exists=false" >> $GITHUB_OUTPUT + else + echo "Image with tag $IMAGE_TAG already exists." + echo "exists=true" >> $GITHUB_OUTPUT + fi + + - name: Push dummy image to App ECR + if: steps.app-ecr-check.outputs.exists == 'false' shell: bash run: | echo 'Deploying ${{ env.DUMMY_IMAGE_NAME }} to App ECR' docker tag ${{ env.GITHUB_IMAGE_REPO }}/${{ env.DUMMY_IMAGE_NAME }}:latest ${{ env.APP_ECR_REPO_URL }}:${{ env.DUMMY_IMAGE_NAME }} docker push ${{ env.APP_ECR_REPO_URL }}:${{ env.DUMMY_IMAGE_NAME }} + - name: Check dummy-image in Lambda ECR exists + id: lambda-ecr-check + shell: bash + run: | + IMAGE_TAG=${{ env.DUMMY_IMAGE_NAME }} + REPOSITORY_NAME=${{ vars.APP_NAME }}-lambda-repo-${{ inputs.environment }} + + IMAGE_EXISTS=$(aws ecr describe-images --repository-name $REPOSITORY_NAME --query "imageDetails[?contains(imageTags, '$IMAGE_TAG')]" --output text) + + if [ -z "$IMAGE_EXISTS" ]; then + echo "Image with tag $IMAGE_TAG does not exist." + echo "exists=false" >> $GITHUB_OUTPUT + else + echo "Image with tag $IMAGE_TAG already exists." 
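+            # exists=true makes the conditional push step below a no-op, leaving an already-seeded repo untouched.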
+ echo "exists=true" >> $GITHUB_OUTPUT + fi + - name: Push dummy images to Lambda ECR + if: steps.lambda-ecr-check.outputs.exists == 'false' shell: bash run: | echo 'Deploying ${{ env.env.DUMMY_IMAGE_NAME }} to Lambda ECR' docker tag ${{ env.GITHUB_IMAGE_REPO }}/${{ env.DUMMY_IMAGE_NAME }}:latest ${{ env.LAMBDA_ECR_REPO_URL }}:${{ env.DUMMY_IMAGE_NAME }} docker push ${{ env.LAMBDA_ECR_REPO_URL }}:${{ env.DUMMY_IMAGE_NAME }} - deploy: - needs: [initial, post-initial] - if: always() - uses: ./.github/workflows/aws-template-terraform.yml - with: - CONTEXT_FOLDER: "./infrastructure/cloud/environments/${{ inputs.environment || 'dev' }}" - CHANGE_FOLDER_NAME: environments/${{ inputs.environment || 'dev' }} - ENVIRONMENT_NAME: ${{ inputs.environment || 'dev' }} - APPLY_TF_CODE: true - secrets: inherit + - name: Terraform Plan (Main Stack) + id: plan-main + run: | + terraform plan -no-color -input=false -var-file=${{ inputs.environment }}.tfvars + continue-on-error: true + working-directory: ${{ env.WORKING_DIRECTORY }} + + - name: Terraform Plan (Main) Status + if: steps.plan-main.outcome == 'failure' + run: exit 1 + + - name: Terraform Apply (Main Stack) + run: | + terraform apply --auto-approve -input=false -var-file=${{ inputs.environment }}.tfvars + working-directory: ${{ env.WORKING_DIRECTORY }} \ No newline at end of file diff --git a/.github/workflows/publish-web.yml b/.github/workflows/publish-web.yml index 716bc3eb..176813c7 100644 --- a/.github/workflows/publish-web.yml +++ b/.github/workflows/publish-web.yml @@ -124,20 +124,19 @@ jobs: - name: Checkout uses: actions/checkout@v4 - # Uncomment when infra in AWS in TEST environment has been configured - # - name: Deploy to ${{ env.ENVIRONMENT }} - # uses: ./.github/workflows/actions/deploy-app - # with: - # environment: ${{ env.ENVIRONMENT }} - # aws_account: ${{ vars.AWS_ACCOUNT }} - # region: ${{ vars.AWS_REGION }} - # app_name: ${{ vars.APP_NAME }} - # aws_role_arn: ${{ vars.AWS_ROLE_ARN }} - # ghcr_token: ${{ secrets.GITHUB_TOKEN }} - # github_image_repo: ${{ env.GITHUB_IMAGE_REPO }} - # image_name: ${{ env.WEB_IMAGE_NAME }} - # tier_name: web - # short_sha: ${{ needs.build.outputs.short_sha }} + - name: Deploy to ${{ env.ENVIRONMENT }} + uses: ./.github/workflows/actions/deploy-app + with: + environment: ${{ env.ENVIRONMENT }} + aws_account: ${{ vars.AWS_ACCOUNT }} + region: ${{ vars.AWS_REGION }} + app_name: ${{ vars.APP_NAME }} + aws_role_arn: ${{ vars.AWS_ROLE_ARN }} + ghcr_token: ${{ secrets.GITHUB_TOKEN }} + github_image_repo: ${{ env.GITHUB_IMAGE_REPO }} + image_name: ${{ env.WEB_IMAGE_NAME }} + tier_name: web + short_sha: ${{ needs.build.outputs.short_sha }} deploy2prod: name: Deploy to PROD @@ -167,4 +166,4 @@ jobs: # github_image_repo: ${{ env.GITHUB_IMAGE_REPO }} # image_name: ${{ env.WEB_IMAGE_NAME }} # tier_name: web - # short_sha: ${{ needs.build.outputs.short_sha }} + # short_sha: ${{ needs.build.outputs.short_sha }} \ No newline at end of file diff --git a/api/Controllers/FilesController.cs b/api/Controllers/FilesController.cs index fd917f96..d7f31333 100644 --- a/api/Controllers/FilesController.cs +++ b/api/Controllers/FilesController.cs @@ -101,7 +101,9 @@ public async Task> GetCivilFileDet if (User.IsVcUser() && civilFileDetailResponse.SealedYN != "N") return Forbid(); - if (User.IsSupremeUser() && civilFileDetailResponse.CourtLevelCd != CivilFileDetailResponseCourtLevelCd.S) + // CourtLevel = "S" Supreme court data, CourtLevel = "P" - Province. 
+ // Only Provincial files can be accessed in JASPER + if (User.IsSupremeUser() && civilFileDetailResponse.CourtLevelCd != CivilFileDetailResponseCourtLevelCd.P) return Forbid(); return Ok(civilFileDetailResponse); @@ -133,7 +135,9 @@ public async Task> GetCivilAppearanceDetails if (civilAppearanceDetail == null) throw new NotFoundException("Couldn't find appearance detail with the provided file id and appearance id."); - if (User.IsSupremeUser() && civilAppearanceDetail.CourtLevelCd != CivilFileDetailResponseCourtLevelCd.S) + // CourtLevel = "S" Supreme court data, CourtLevel = "P" - Province. + // Only Provincial files can be accessed in JASPER + if (User.IsSupremeUser() && civilAppearanceDetail.CourtLevelCd != CivilFileDetailResponseCourtLevelCd.P) return Forbid(); return Ok(civilAppearanceDetail); @@ -223,7 +227,9 @@ public async Task> GetCriminalF if (redactedCriminalFileDetailResponse?.JustinNo == null) throw new NotFoundException("Couldn't find criminal file with this id."); - if (User.IsSupremeUser() && redactedCriminalFileDetailResponse.CourtLevelCd != CriminalFileDetailResponseCourtLevelCd.S) + // CourtLevel = "S" Supreme court data, CourtLevel = "P" - Province. + // Only Provincial files can be accessed in JASPER + if (User.IsSupremeUser() && redactedCriminalFileDetailResponse.CourtLevelCd != CriminalFileDetailResponseCourtLevelCd.P) return Forbid(); return Ok(redactedCriminalFileDetailResponse); @@ -244,7 +250,9 @@ public async Task> GetCriminalAppearanceD if (appearanceDetail == null) throw new NotFoundException("Couldn't find appearance details with the provided parameters."); - if (User.IsSupremeUser() && appearanceDetail.CourtLevelCd != CriminalFileDetailResponseCourtLevelCd.S) + // CourtLevel = "S" Supreme court data, CourtLevel = "P" - Province. + // Only Provincial files can be accessed in JASPER + if (User.IsSupremeUser() && appearanceDetail.CourtLevelCd != CriminalFileDetailResponseCourtLevelCd.P) return Forbid(); diff --git a/api/Services/Files/CivilFilesService.cs b/api/Services/Files/CivilFilesService.cs index 7885edb6..88bd2bef 100644 --- a/api/Services/Files/CivilFilesService.cs +++ b/api/Services/Files/CivilFilesService.cs @@ -72,9 +72,12 @@ public async Task SearchAsync(FilesCivilQuery fcq) { fcq.FilePermissions = "[\"A\", \"Y\", \"T\", \"F\", \"C\", \"M\", \"L\", \"R\", \"B\", \"D\", \"E\", \"G\", \"H\", \"N\", \"O\", \"P\", \"S\", \"V\"]"; // for now, use all types - TODO: determine proper list of types? + + // CourtLevel = "S" Supreme court data, CourtLevel = "P" - Province. + // Only Provincial files can be accessed in JASPER return await _filesClient.FilesCivilGetAsync(_requestAgencyIdentifierId, _requestPartId, _applicationCode, fcq.SearchMode, fcq.FileHomeAgencyId, fcq.FileNumber, fcq.FilePrefix, - fcq.FilePermissions, fcq.FileSuffixNumber, fcq.MDocReferenceTypeCode, fcq.CourtClass, fcq.CourtLevel, + fcq.FilePermissions, fcq.FileSuffixNumber, fcq.MDocReferenceTypeCode, fcq.CourtClass, CourtLevelCd.P, fcq.NameSearchType, fcq.LastName, fcq.OrgName, fcq.GivenName, fcq.Birth?.ToString("yyyy-MM-dd"), fcq.SearchByCrownPartId, fcq.SearchByCrownActiveOnly, fcq.SearchByCrownFileDesignation, fcq.MdocJustinNumberSet, fcq.PhysicalFileIdSet); @@ -109,7 +112,7 @@ public async Task> GetFilesByAgencyIdCodeA //Return the basic entry without doing a lookup. 
if (fileIdAndAppearanceDate.Count == 1) - return new List { new RedactedCivilFileDetailResponse { PhysicalFileId = fileIdAndAppearanceDate.First().PhysicalFileId }} ; + return new List { new RedactedCivilFileDetailResponse { PhysicalFileId = fileIdAndAppearanceDate.First().PhysicalFileId } }; var fileDetailTasks = new List>(); foreach (var fileId in fileIdAndAppearanceDate) @@ -152,7 +155,7 @@ public async Task FileIdAsync(string fileId, bo foreach (var document in PopulateDetailCsrsDocuments(fileDetail.Appearance)) if (!isVcUser) detail.Document.Add(document); - + detail = await PopulateBaseDetail(detail); detail.Appearances = appearances; @@ -160,7 +163,8 @@ public async Task FileIdAsync(string fileId, bo detail.Party = await PopulateDetailParties(detail.Party); detail.Document = await PopulateDetailDocuments(detail.Document, fileContentCivilFile, isVcUser, isStaff); detail.HearingRestriction = await PopulateDetailHearingRestrictions(fileDetail.HearingRestriction); - if (isVcUser) { + if (isVcUser) + { //SCV-266 - Disable comments for VC Users. foreach (var document in detail.Document) document.CommentTxt = ""; @@ -427,7 +431,7 @@ private async Task> PopulateDetailedAppe party.Representative = _mapper.Map>(courtListParty.Representative); foreach (var representative in party.Representative) { - representative.AttendanceMethodDesc = await _lookupService.GetCivilAssetsDescription(representative.AttendanceMethodCd); + representative.AttendanceMethodDesc = await _lookupService.GetCivilAssetsDescription(representative.AttendanceMethodCd); } party.LegalRepresentative = courtListParty.LegalRepresentative; } @@ -449,7 +453,7 @@ private async Task> PopulateDetailedAppe if (!counsel.AdditionalProperties.ContainsKey("counselName")) continue; - + var targetCounsel = party.Counsel?.FirstOrDefault(c => c.CounselFullName == counsel.CounselName); if (targetCounsel == null) continue; @@ -473,7 +477,7 @@ private async Task> PopulateDetailedAppeara { document.Category = _lookupService.GetDocumentCategory(document.DocumentTypeCd); document.DocumentTypeDescription = await _lookupService.GetDocumentDescriptionAsync(document.DocumentTypeCd); - document.ImageId = document.SealedYN != "N" ? null : document.ImageId; + document.ImageId = document.SealedYN != "N" ? null : document.ImageId; foreach (var issue in document.Issue) { issue.IssueTypeDesc = await _lookupService.GetCivilDocumentIssueType(issue.IssueTypeCd); diff --git a/api/Services/Files/CriminalFilesService.cs b/api/Services/Files/CriminalFilesService.cs index c7bfd4ba..d534398a 100644 --- a/api/Services/Files/CriminalFilesService.cs +++ b/api/Services/Files/CriminalFilesService.cs @@ -61,11 +61,12 @@ public async Task SearchAsync(FilesCriminalQuery fcq) fcq.FilePermissions = "[\"A\", \"Y\", \"T\", \"F\", \"C\", \"M\", \"L\", \"R\", \"B\", \"D\", \"E\", \"G\", \"H\", \"N\", \"O\", \"P\", \"S\", \"V\"]"; // for now, use all types - TODO: determine proper list of types? - //CourtLevel = "S" Supreme court data, CourtLevel = "P" - Province. + // CourtLevel = "S" Supreme court data, CourtLevel = "P" - Province. 
+ // Only Provincial files can be accessed in JASPER return await _filesClient.FilesCriminalGetAsync(_requestAgencyIdentifierId, _requestPartId, _applicationCode, fcq.SearchMode, fcq.FileHomeAgencyId, fcq.FileNumberTxt, fcq.FilePrefixTxt, fcq.FilePermissions, fcq.FileSuffixNo, fcq.MdocRefTypeCode, fcq.CourtClass, - fcq.CourtLevel, fcq.NameSearchTypeCd, fcq.LastName, fcq.OrgName, fcq.GivenName, + CourtLevelCd.P, fcq.NameSearchTypeCd, fcq.LastName, fcq.OrgName, fcq.GivenName, fcq.Birth?.ToString("yyyy-MM-dd"), fcq.SearchByCrownPartId, fcq.SearchByCrownActiveOnly, fcq.SearchByCrownFileDesignation, fcq.MdocJustinNoSet, fcq.PhysicalFileIdSet); } @@ -86,14 +87,14 @@ public async Task> GetFilesByAgencyIdCo }); var fileIdAndAppearanceDate = fileSearchResponse?.FileDetail?.Where(fd => mdocSequenceNumber == null || fd.MdocSeqNo == mdocSequenceNumber) - .SelectToList(fd => new { fd.MdocJustinNo , fd.NextApprDt }); + .SelectToList(fd => new { fd.MdocJustinNo, fd.NextApprDt }); if (fileIdAndAppearanceDate == null || fileIdAndAppearanceDate.Count == 0) return fileDetails; //Return the basic entry without doing a lookup. if (fileIdAndAppearanceDate.Count == 1) - return new List {new RedactedCriminalFileDetailResponse {JustinNo = fileIdAndAppearanceDate.First().MdocJustinNo}}; + return new List { new RedactedCriminalFileDetailResponse { JustinNo = fileIdAndAppearanceDate.First().MdocJustinNo } }; //It seems the fileSearch and the FileDetails/FileContent bring up two different participant lists //The fileSearch seems to include have extra participants. @@ -220,7 +221,7 @@ public async Task AppearanceDetailAsync(string fileId, private List GetInitiatingDocuments(ICollection documents) { return documents?.Where(doc => doc?.DocmClassification == "Initiating" && !string.IsNullOrEmpty(doc.ImageId)) - .Select(a => new CriminalDocument {IssueDate = a.IssueDate, ImageId = a.ImageId}).ToList(); + .Select(a => new CriminalDocument { IssueDate = a.IssueDate, ImageId = a.ImageId }).ToList(); } private async Task PopulateDetailsAppearancesAsync(string fileId, FutureYN? future, HistoryYN? history) @@ -379,12 +380,12 @@ private async Task PopulateAppearanceCriminalAccused(string ful FullName = fullName, PartId = partId, //partyAppearanceMethod, doesn't always have a partId on DEV at least. PartyAppearanceMethod = partyAppearanceMethod?.PartyAppearanceMethod, - PartyAppearanceMethodDesc = await _lookupService.GetCriminalAccusedAttend(partyAppearanceMethod?.PartyAppearanceMethod), + PartyAppearanceMethodDesc = await _lookupService.GetCriminalAccusedAttend(partyAppearanceMethod?.PartyAppearanceMethod), AttendanceMethodCd = attendanceMethod?.AttendanceMethodCd, AttendanceMethodDesc = await _lookupService.GetCriminalAssetsDescriptions(attendanceMethod?.AttendanceMethodCd), AppearanceMethodCd = appearanceMethod?.AppearanceMethodCd, AppearanceMethodDesc = await _lookupService.GetCriminalAssetsDescriptions(appearanceMethod?.AppearanceMethodCd) - }; + }; } private async Task PopulateAppearanceDetailAdjudicator(CfcAppearance appearanceFromAccused, ICollection attendanceMethods, ICollection appearanceMethods) diff --git a/docker/openshift/update-aws-creds.sh b/docker/openshift/update-aws-creds.sh index bd32aea4..44fac4c2 100644 --- a/docker/openshift/update-aws-creds.sh +++ b/docker/openshift/update-aws-creds.sh @@ -13,9 +13,9 @@ if [ $? 
-eq 0 ]; then currentSecretAccessKey=$(echo "$param_value" | jq -r '.current.SecretAccessKey') if [ "$AWS_ACCESS_KEY_ID" = "$pendingAccessKeyId" ] || [ "$AWS_SECRET_ACCESS_KEY" = "$pendingSecretAccessKey" ]; then - echo "Updating AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY..." + echo "Updating AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY for aws-secret-${ENVIRONMENT} secret..." - oc create secret generic aws-secret \ + oc create secret generic aws-secret-${ENVIRONMENT} \ --from-literal=AWS_ACCESS_KEY_ID=$currentAccessKeyId \ --from-literal=AWS_SECRET_ACCESS_KEY=$currentSecretAccessKey \ --dry-run=client -o yaml | oc apply -f - diff --git a/infrastructure/cloud/environments/dev/webapp.tf b/infrastructure/cloud/environments/dev/webapp.tf index efd37bcd..0f7afcdf 100644 --- a/infrastructure/cloud/environments/dev/webapp.tf +++ b/infrastructure/cloud/environments/dev/webapp.tf @@ -1,16 +1,22 @@ -# This the rest of JASPER's infra resources. -# Make sure that the "initial" stack has been deployed first. +# +# "initial" stack containing resources that main stack depends on (e.g. ECR, KMS, openshiftuser) +# +module "initial" { + source = "../../modules/initial" + openshift_iam_user = var.openshift_iam_user + iam_user_table_name = var.iam_user_table_name + test_s3_bucket_name = var.test_s3_bucket_name + region = var.region + kms_key_name = var.kms_key_name + app_name = var.app_name + environment = var.environment +} # -# Existing Resources +# The "main" stack # data "aws_caller_identity" "current" {} -# KMS Key -data "aws_kms_key" "kms_key" { - key_id = "alias/${var.kms_key_name}-${var.environment}" -} - # VPC data "aws_vpc" "vpc" { id = var.vpc_id @@ -29,16 +35,6 @@ data "aws_security_group" "data_sg" { name = "Data_sg" } -# App ECR Repo -data "aws_ecr_repository" "app_ecr_repo" { - name = "${var.app_name}-app-repo-${var.environment}" -} - -# Lambda ECR Repo -data "aws_ecr_repository" "lambda_ecr_repo" { - name = "${var.app_name}-lambda-repo-${var.environment}" -} - # # Modules # @@ -49,7 +45,7 @@ module "secrets_manager" { environment = var.environment app_name = var.app_name region = var.region - kms_key_arn = data.aws_kms_key.kms_key.arn + kms_key_arn = module.initial.kms_key_arn rotate_key_lambda_arn = module.lambda.lambda_functions["rotate-key"].arn } @@ -62,8 +58,9 @@ module "rds" { db_password = module.secrets_manager.db_password data_sg_id = data.aws_security_group.data_sg.id vpc_id = data.aws_vpc.vpc.id - kms_key_arn = data.aws_kms_key.kms_key.arn + kms_key_arn = module.initial.kms_key_arn rds_db_ca_cert = var.rds_db_ca_cert + all_subnet_ids = module.subnets.all_subnet_ids } # Create IAM Roles/Policies @@ -71,11 +68,13 @@ module "iam" { source = "../../modules/IAM" environment = var.environment app_name = var.app_name - kms_key_arn = data.aws_kms_key.kms_key.arn - app_ecr_repo_arn = data.aws_ecr_repository.app_ecr_repo.arn + kms_key_arn = module.initial.kms_key_arn + app_ecr_repo_arn = module.initial.app_ecr.ecr_repo_arn openshift_iam_user = var.openshift_iam_user iam_user_table_name = var.iam_user_table_name secrets_arn_list = module.secrets_manager.secrets_arn_list + account_id = data.aws_caller_identity.current.account_id + kms_key_id = module.initial.kms_key_arn } # Parse Subnets @@ -128,7 +127,7 @@ module "lambda" { app_name = var.app_name lambda_role_arn = module.iam.lambda_role_arn apigw_execution_arn = module.apigw.apigw_execution_arn - lambda_ecr_repo_url = data.aws_ecr_repository.lambda_ecr_repo.repository_url + lambda_ecr_repo_url = module.initial.lambda_ecr.ecr_repo_url 
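+  # The Lambda ECR repo URL is read from module.initial's outputs, since the initial stack owns the ECR repositories.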
mtls_secret_name = module.secrets_manager.mtls_secret_name lambda_memory_size = var.lambda_memory_size functions = { @@ -158,7 +157,7 @@ module "ecs_api_td_log_group" { source = "../../modules/Cloudwatch/LogGroup" environment = var.environment app_name = var.app_name - kms_key_arn = data.aws_kms_key.kms_key.arn + kms_key_arn = module.initial.kms_key_arn resource_name = "ecs" name = "api-td" } @@ -167,7 +166,7 @@ module "ecs_web_td_log_group" { source = "../../modules/Cloudwatch/LogGroup" environment = var.environment app_name = var.app_name - kms_key_arn = data.aws_kms_key.kms_key.arn + kms_key_arn = module.initial.kms_key_arn resource_name = "ecs" name = "web-td" } @@ -176,7 +175,7 @@ module "apigw_api_log_group" { source = "../../modules/Cloudwatch/LogGroup" environment = var.environment app_name = var.app_name - kms_key_arn = data.aws_kms_key.kms_key.arn + kms_key_arn = module.initial.kms_key_arn resource_name = "apigateway" name = "api" } @@ -210,10 +209,10 @@ module "ecs_web_td" { name = "web" region = var.region ecs_execution_role_arn = module.iam.ecs_execution_role_arn - ecr_repository_url = data.aws_ecr_repository.app_ecr_repo.repository_url + ecr_repository_url = module.initial.app_ecr.ecr_repo_url port = 8080 secret_env_variables = module.secrets_manager.web_secrets - kms_key_arn = data.aws_kms_key.kms_key.arn + kms_key_arn = module.initial.kms_key_arn log_group_name = module.ecs_web_td_log_group.log_group.name } @@ -225,7 +224,7 @@ module "ecs_api_td" { name = "api" region = var.region ecs_execution_role_arn = module.iam.ecs_execution_role_arn - ecr_repository_url = data.aws_ecr_repository.app_ecr_repo.repository_url + ecr_repository_url = module.initial.app_ecr.ecr_repo_url port = 5000 env_variables = [ { @@ -238,7 +237,7 @@ module "ecs_api_td" { } ] secret_env_variables = module.secrets_manager.api_secrets - kms_key_arn = data.aws_kms_key.kms_key.arn + kms_key_arn = module.initial.kms_key_arn log_group_name = module.ecs_api_td_log_group.log_group.name } diff --git a/infrastructure/cloud/environments/initial/backend.tfvars b/infrastructure/cloud/environments/initial/backend.tfvars deleted file mode 100644 index 1c0b5d64..00000000 --- a/infrastructure/cloud/environments/initial/backend.tfvars +++ /dev/null @@ -1,4 +0,0 @@ -bucket = "terraform-remote-state-b5e4f5-dev" -dynamodb_table = "terraform-remote-state-lock-b5e4f5" -key = "initial.terraform.tfstate" -region = "ca-central-1" diff --git a/infrastructure/cloud/environments/initial/dev.tfvars b/infrastructure/cloud/environments/initial/dev.tfvars deleted file mode 100644 index 1a02c1a4..00000000 --- a/infrastructure/cloud/environments/initial/dev.tfvars +++ /dev/null @@ -1,10 +0,0 @@ -region = "ca-central-1" -test_s3_bucket_name = "jasper-test-s3-bucket-dev" -web_subnet_names = ["Web_Dev_aza_net", "Web_Dev_azb_net"] -app_subnet_names = ["App_Dev_aza_net", "App_Dev_azb_net"] -data_subnet_names = ["Data_Dev_aza_net", "Data_Dev_azb_net"] -openshift_iam_user = "openshiftuserdev" -iam_user_table_name = "BCGOV_IAM_USER_TABLE" -lb_name = "default" -rds_db_ca_cert = "rds-ca-rsa2048-g1" -cert_domain_name = "*.example.ca" diff --git a/infrastructure/cloud/environments/sandbox/backend.tfvars b/infrastructure/cloud/environments/sandbox/backend.tfvars deleted file mode 100644 index e28c510f..00000000 --- a/infrastructure/cloud/environments/sandbox/backend.tfvars +++ /dev/null @@ -1,4 +0,0 @@ -bucket = "terraform-remote-state-sandbox-12345" -dynamodb_table = "terraform-remote-state-lock-12345" -key = "terraform.tfstate" -region = 
"ca-central-1" diff --git a/infrastructure/cloud/environments/sandbox/providers.tf b/infrastructure/cloud/environments/sandbox/providers.tf deleted file mode 100644 index 4cafa436..00000000 --- a/infrastructure/cloud/environments/sandbox/providers.tf +++ /dev/null @@ -1,21 +0,0 @@ -terraform { - required_version = "~> 1.9.0" - required_providers { - aws = { - source = "hashicorp/aws" - version = "~> 5.0" - } - - tls = { - source = "hashicorp/tls" - version = "4.0.5" - } - } - - backend "s3" { - } -} - -provider "aws" { - region = var.region -} diff --git a/infrastructure/cloud/environments/sandbox/sandbox.tfvars b/infrastructure/cloud/environments/sandbox/sandbox.tfvars deleted file mode 100644 index 3342cc52..00000000 --- a/infrastructure/cloud/environments/sandbox/sandbox.tfvars +++ /dev/null @@ -1,2 +0,0 @@ -region = "ca-central-1" -test_s3_bucket_name = "jasper-test-s3-bucket-snd" diff --git a/infrastructure/cloud/environments/sandbox/webapp.tf b/infrastructure/cloud/environments/sandbox/webapp.tf deleted file mode 100644 index 53319b19..00000000 --- a/infrastructure/cloud/environments/sandbox/webapp.tf +++ /dev/null @@ -1,40 +0,0 @@ -module "security" { - source = "../../modules/security" - environment = var.environment - app_name = var.app_name - kms_key_name = var.kms_key_name -} - -module "storage" { - source = "../../modules/storage" - environment = var.environment - app_name = var.app_name - kms_key_name = module.security.kms_key_alias - test_s3_bucket_name = var.test_s3_bucket_name - depends_on = [module.security] -} - -module "networking" { - source = "../../modules/networking" - environment = var.environment - app_name = var.app_name - region = var.region -} - -module "container" { - source = "../../modules/container" - environment = var.environment - app_name = var.app_name - region = var.region - ecs_execution_role_arn = module.security.ecs_execution_role_arn - subnet_ids = [module.networking.private_subnets_web[0], module.networking.private_subnets_web[1]] - sg_id = module.networking.sg_id - lb_tg_arn = module.networking.lb_tg_arn - ecs_web_log_group_name = module.monitoring.ecs_web_log_group_name -} - -module "monitoring" { - source = "../../modules/monitoring" - environment = var.environment - app_name = var.app_name -} diff --git a/infrastructure/cloud/environments/test/backend.tfvars b/infrastructure/cloud/environments/test/backend.tfvars new file mode 100644 index 00000000..fa26269d --- /dev/null +++ b/infrastructure/cloud/environments/test/backend.tfvars @@ -0,0 +1,4 @@ +bucket = "terraform-remote-state-b5e4f5-test" +dynamodb_table = "terraform-remote-state-lock-b5e4f5" +key = "terraform.tfstate" +region = "ca-central-1" diff --git a/infrastructure/cloud/environments/test/main.tf b/infrastructure/cloud/environments/test/main.tf new file mode 100644 index 00000000..0f7afcdf --- /dev/null +++ b/infrastructure/cloud/environments/test/main.tf @@ -0,0 +1,270 @@ +# +# "initial" stack containing resources that main stack depends on (e.g. 
ECR, KMS, openshiftuser) +# +module "initial" { + source = "../../modules/initial" + openshift_iam_user = var.openshift_iam_user + iam_user_table_name = var.iam_user_table_name + test_s3_bucket_name = var.test_s3_bucket_name + region = var.region + kms_key_name = var.kms_key_name + app_name = var.app_name + environment = var.environment +} + +# +# The "main" stack +# +data "aws_caller_identity" "current" {} + +# VPC +data "aws_vpc" "vpc" { + id = var.vpc_id +} + +# Security Groups +data "aws_security_group" "web_sg" { + name = "Web_sg" +} + +data "aws_security_group" "app_sg" { + name = "App_sg" +} + +data "aws_security_group" "data_sg" { + name = "Data_sg" +} + +# +# Modules +# + +# Create Secrets placeholder for Secrets Manager +module "secrets_manager" { + source = "../../modules/SecretsManager" + environment = var.environment + app_name = var.app_name + region = var.region + kms_key_arn = module.initial.kms_key_arn + rotate_key_lambda_arn = module.lambda.lambda_functions["rotate-key"].arn +} + +# Create RDS Database +module "rds" { + source = "../../modules/RDS" + environment = var.environment + app_name = var.app_name + db_username = module.secrets_manager.db_username + db_password = module.secrets_manager.db_password + data_sg_id = data.aws_security_group.data_sg.id + vpc_id = data.aws_vpc.vpc.id + kms_key_arn = module.initial.kms_key_arn + rds_db_ca_cert = var.rds_db_ca_cert + all_subnet_ids = module.subnets.all_subnet_ids +} + +# Create IAM Roles/Policies +module "iam" { + source = "../../modules/IAM" + environment = var.environment + app_name = var.app_name + kms_key_arn = module.initial.kms_key_arn + app_ecr_repo_arn = module.initial.app_ecr.ecr_repo_arn + openshift_iam_user = var.openshift_iam_user + iam_user_table_name = var.iam_user_table_name + secrets_arn_list = module.secrets_manager.secrets_arn_list + account_id = data.aws_caller_identity.current.account_id + kms_key_id = module.initial.kms_key_arn +} + +# Parse Subnets +module "subnets" { + source = "../../modules/Subnets" + web_subnet_names = var.web_subnet_names + app_subnet_names = var.app_subnet_names + data_subnet_names = var.data_subnet_names + vpc_id = data.aws_vpc.vpc.id +} + +# Create Target Groups +module "tg_web" { + source = "../../modules/TargetGroup" + environment = var.environment + app_name = var.app_name + name = "web" + port = 8080 + health_check_path = "/" + vpc_id = data.aws_vpc.vpc.id + protocol = "HTTPS" +} + +module "tg_api" { + source = "../../modules/TargetGroup" + environment = var.environment + app_name = var.app_name + name = "api" + port = 5000 + health_check_path = "/api/test/headers" + vpc_id = data.aws_vpc.vpc.id + protocol = "HTTP" +} + +# Setup ALB Listeners +module "alb" { + source = "../../modules/ALB" + environment = var.environment + app_name = var.app_name + lb_name = var.lb_name + cert_domain_name = var.cert_domain_name + tg_web_arn = module.tg_web.tg_arn + tg_api_arn = module.tg_api.tg_arn +} + +# Create Lambda Functions +module "lambda" { + source = "../../modules/Lambda" + environment = var.environment + app_name = var.app_name + lambda_role_arn = module.iam.lambda_role_arn + apigw_execution_arn = module.apigw.apigw_execution_arn + lambda_ecr_repo_url = module.initial.lambda_ecr.ecr_repo_url + mtls_secret_name = module.secrets_manager.mtls_secret_name + lambda_memory_size = var.lambda_memory_size + functions = { + "authorizer" = { + http_method = "*" + resource_path = "" + env_variables = { + VERIFY_SECRET_NAME = module.secrets_manager.api_authorizer_secret.name + } + }, + 
"rotate-key" = { + http_method = "POST" + resource_path = "/*" + statement_id_prefix = "AllowSecretsManagerInvoke" + source_arn = module.secrets_manager.api_authorizer_secret.arn + principal = "secretsmanager.amazonaws.com" + env_variables = { + VERIFY_SECRET_NAME = module.secrets_manager.api_authorizer_secret.name + CLUSTER_NAME = module.ecs_cluster.ecs_cluster.name + } + } + } +} + +# Create Cloudwatch LogGroups +module "ecs_api_td_log_group" { + source = "../../modules/Cloudwatch/LogGroup" + environment = var.environment + app_name = var.app_name + kms_key_arn = module.initial.kms_key_arn + resource_name = "ecs" + name = "api-td" +} + +module "ecs_web_td_log_group" { + source = "../../modules/Cloudwatch/LogGroup" + environment = var.environment + app_name = var.app_name + kms_key_arn = module.initial.kms_key_arn + resource_name = "ecs" + name = "web-td" +} + +module "apigw_api_log_group" { + source = "../../modules/Cloudwatch/LogGroup" + environment = var.environment + app_name = var.app_name + kms_key_arn = module.initial.kms_key_arn + resource_name = "apigateway" + name = "api" +} + +# Create API Gateway +module "apigw" { + source = "../../modules/APIGateway" + environment = var.environment + app_name = var.app_name + region = var.region + account_id = data.aws_caller_identity.current.account_id + lambda_functions = module.lambda.lambda_functions + ecs_execution_role_arn = module.iam.ecs_execution_role_arn + log_group_arn = module.apigw_api_log_group.log_group.arn + apigw_logging_role_arn = module.iam.apigw_logging_role_arn +} + +# Create ECS Cluster +module "ecs_cluster" { + source = "../../modules/ECS/Cluster" + environment = var.environment + app_name = var.app_name + name = "app" +} + +# Create Web ECS Task Definition +module "ecs_web_td" { + source = "../../modules/ECS/TaskDefinition" + environment = var.environment + app_name = var.app_name + name = "web" + region = var.region + ecs_execution_role_arn = module.iam.ecs_execution_role_arn + ecr_repository_url = module.initial.app_ecr.ecr_repo_url + port = 8080 + secret_env_variables = module.secrets_manager.web_secrets + kms_key_arn = module.initial.kms_key_arn + log_group_name = module.ecs_web_td_log_group.log_group.name +} + +# Create API ECS Task Definition +module "ecs_api_td" { + source = "../../modules/ECS/TaskDefinition" + environment = var.environment + app_name = var.app_name + name = "api" + region = var.region + ecs_execution_role_arn = module.iam.ecs_execution_role_arn + ecr_repository_url = module.initial.app_ecr.ecr_repo_url + port = 5000 + env_variables = [ + { + name = "CORS_DOMAIN" + value = module.alb.default_lb_dns_name + }, + { + name = "AWS_API_GATEWAY_URL" + value = module.apigw.apigw_invoke_url + } + ] + secret_env_variables = module.secrets_manager.api_secrets + kms_key_arn = module.initial.kms_key_arn + log_group_name = module.ecs_api_td_log_group.log_group.name +} + +# Create Web ECS Service +module "ecs_web_service" { + source = "../../modules/ECS/Service" + environment = var.environment + app_name = var.app_name + name = "web" + ecs_cluster_id = module.ecs_cluster.ecs_cluster.id + ecs_td_arn = module.ecs_web_td.ecs_td_arn + tg_arn = module.tg_web.tg_arn + sg_id = data.aws_security_group.app_sg.id + subnet_ids = module.subnets.web_subnets_ids + port = module.ecs_web_td.port +} + +# Create Api ECS Service +module "ecs_api_service" { + source = "../../modules/ECS/Service" + environment = var.environment + app_name = var.app_name + name = "api" + ecs_cluster_id = module.ecs_cluster.ecs_cluster.id + 
ecs_td_arn = module.ecs_api_td.ecs_td_arn + tg_arn = module.tg_api.tg_arn + sg_id = data.aws_security_group.app_sg.id + subnet_ids = module.subnets.app_subnets_ids + port = module.ecs_api_td.port +} diff --git a/infrastructure/cloud/environments/initial/providers.tf b/infrastructure/cloud/environments/test/providers.tf similarity index 100% rename from infrastructure/cloud/environments/initial/providers.tf rename to infrastructure/cloud/environments/test/providers.tf diff --git a/infrastructure/cloud/environments/test/test.tfvars b/infrastructure/cloud/environments/test/test.tfvars new file mode 100644 index 00000000..c1ccbded --- /dev/null +++ b/infrastructure/cloud/environments/test/test.tfvars @@ -0,0 +1,10 @@ +region = "ca-central-1" +test_s3_bucket_name = "jasper-test-s3-bucket-test" +web_subnet_names = ["Web_Test_aza_net", "Web_Test_azb_net"] +app_subnet_names = ["App_Test_aza_net", "App_Test_azb_net"] +data_subnet_names = ["Data_Test_aza_net", "Data_Test_azb_net"] +openshift_iam_user = "openshiftusertest" +iam_user_table_name = "BCGOV_IAM_USER_TABLE" +lb_name = "default" +rds_db_ca_cert = "rds-ca-rsa2048-g1" +cert_domain_name = "*.example.ca" diff --git a/infrastructure/cloud/environments/initial/variables.tf b/infrastructure/cloud/environments/test/variables.tf similarity index 92% rename from infrastructure/cloud/environments/initial/variables.tf rename to infrastructure/cloud/environments/test/variables.tf index 007c64be..5acbe981 100644 --- a/infrastructure/cloud/environments/initial/variables.tf +++ b/infrastructure/cloud/environments/test/variables.tf @@ -68,3 +68,7 @@ variable "cert_domain_name" { type = string } +variable "lambda_memory_size" { + description = "The Lambda Function default Memory Size" + type = number +} diff --git a/infrastructure/cloud/modules/APIGateway/main.tf b/infrastructure/cloud/modules/APIGateway/main.tf index 358815ea..d5f65afe 100644 --- a/infrastructure/cloud/modules/APIGateway/main.tf +++ b/infrastructure/cloud/modules/APIGateway/main.tf @@ -77,8 +77,6 @@ resource "aws_api_gateway_rest_api_policy" "apigw_rest_api_policy" { resource "aws_api_gateway_account" "apigateway_account" { cloudwatch_role_arn = var.apigw_logging_role_arn - - depends_on = [aws_api_gateway_stage.apigw_stage] } resource "aws_api_gateway_usage_plan" "apigw_usage_plan" { diff --git a/infrastructure/cloud/modules/DynamoDb/main.tf b/infrastructure/cloud/modules/DynamoDb/main.tf new file mode 100644 index 00000000..581c056c --- /dev/null +++ b/infrastructure/cloud/modules/DynamoDb/main.tf @@ -0,0 +1,48 @@ +# +# Openshift +# +# https://developer.gov.bc.ca/docs/default/component/public-cloud-techdocs/design-build-and-deploy-an-application/iam-user-service/ +# Step 1: Add opeshiftuser if not exist +data "aws_dynamodb_table" "iam_user_table" { + name = var.iam_user_table_name +} + +resource "null_resource" "check_and_insert_openshiftuser_record" { + triggers = { + always_run = timestamp() + } + + provisioner "local-exec" { + command = <