diff --git a/.env b/.env index 42caf8cf4..ccab03ec6 100644 --- a/.env +++ b/.env @@ -10,4 +10,6 @@ MHS_SECRET_CA_CERTS="" INBOUND_BUILD_TAG=latest OUTBOUND_BUILD_TAG=latest ROUTE_BUILD_TAG=latest -WEB_SERVICE_BUILD_TAG=latest \ No newline at end of file +WEB_SERVICE_BUILD_TAG=latest + +ENVIRONMENT_ID=build diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 000000000..dddf02765 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,251 @@ +name: "Build" + +on: + pull_request: + types: [opened, synchronize, reopened] + branches: + - main + push: + branches: + - main + - TEST-GITHUB-ACTIONS + +jobs: + build_common: + name: "Common & MHS Common Tests" + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + - name: Install pipenv + run: python3 -m pip install pipenv + - name: "Install Common directory dependencies" + working-directory: ./common + run: pipenv install --dev + - name: "Run Common directory tests" + working-directory: ./common + run: pipenv run unittests-cov + - name: "Install MHS Common directory dependencies" + working-directory: ./mhs/common + run: pipenv install --dev + - name: "Run MHS Common directory tests" + working-directory: ./mhs/common + run: pipenv run unittests-cov + - name: Upload Test Reports + if: always() + uses: actions/upload-artifact@v4 + with: + name: Common-Test-Reports + path: | + common/test-reports/** + mhs/common/test-reports/** + + inbound_tests: + name: "Inbound Tests" + uses: ./.github/workflows/test.yml + with: + name: Inbound + path: ./mhs/inbound + secrets: inherit + + outbound_tests: + name: "Outbound Tests" + uses: ./.github/workflows/test.yml + with: + name: Outbound + path: ./mhs/outbound + secrets: inherit + + route_tests: + name: "Route Tests" + uses: ./.github/workflows/test.yml + with: + name: Route + path: ./mhs/spineroutelookup + secrets: inherit + + generate-build-id: + name: "Generate Build Id" + needs: [build_common, inbound_tests, outbound_tests, route_tests] + runs-on: ubuntu-latest + outputs: + build-id: ${{ steps.generate.outputs.buildId }} + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + - id: generate + run: | + BUILD_ID=$(python3 pipeline/scripts/tag.py ${{ github.ref }} ${{ github.run_number }} ${{ github.sha }}) + echo "Generated the build tag: $BUILD_ID" + echo "buildId=$BUILD_ID" >> "$GITHUB_OUTPUT" + + + publish-docker-images: + name: "Publish Docker Images" + needs: [generate-build-id] + strategy: + matrix: + config: + - directory: mhs/inbound + repository: mhs/inbound + dockerfile: docker/inbound/Dockerfile + - directory: mhs/outbound + repository: mhs/outbound + dockerfile: docker/outbound/Dockerfile + - directory: mhs/spineroutelookup + repository: mhs/route + dockerfile: docker/spineroutelookup/Dockerfile + - directory: integration-tests/fake_spine + repository: fake-spine + dockerfile: integration-tests/fake_spine/Dockerfile + uses: ./.github/workflows/publish.yml + with: + directory: ${{ matrix.config.directory }} + repository: ${{ matrix.config.repository }} + dockerfile: ${{ matrix.config.dockerfile }} + build-id: ${{ needs.generate-build-id.outputs.build-id }} + secrets: + AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }} + AWS_ROLE_TO_ASSUME: ${{ secrets.AWS_ROLE_TO_ASSUME }} + AWS_REGION: ${{ secrets.AWS_REGION }} + + component_tests: + name: "Component Tests" + needs: [ publish-docker-images, generate-build-id ] + runs-on: 
ubuntu-latest + env: + BUILD_TAG: ${{ needs.generate-build-id.outputs.build-id }} + strategy: + matrix: + component: [ 'SpineRouteLookup' ] + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + + - name: Check Docker Compose Version + run: docker compose version + + - name: Install other dependencies + run: sudo apt-get update + + - name: Set Lowercase Build Tag + run: | + echo "BUILD_TAG_LOWER=$(echo -n ${{ env.BUILD_TAG }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV + + - name: Build Docker Images + run: | + chmod +x ./build.sh + ./build.sh + + - name: Setup Docker Environment + run: | + ./integration-tests/setup_component_test_env.sh + if [ -f component-test-source.sh ]; then + source component-test-source.sh + fi + + if [[ "${{ matrix.component }}" == "SpineRouteLookup" ]]; then + docker compose -f docker-compose.yml -f docker-compose.component.override.yml -p ${{ env.BUILD_TAG_LOWER }} up --wait -d + elif [[ "${{ matrix.component }}" == "SDS API" ]]; then + docker compose -f docker-compose.yml -f docker-compose.component.override.yml -f docker-compose.component-sds.override.yml -p ${{ env.BUILD_TAG_LOWER }} up --wait -d + fi + docker ps + + - name: Docker Setup debug + run: | + docker network ls + + - name: Ensure Network Exists + run: | + BUILD_TAG_LOWER_NETWORK=${{ env.BUILD_TAG_LOWER }}_default + if ! docker network ls --format '{{.Name}}' | grep -q "^$BUILD_TAG_LOWER_NETWORK$"; then + echo "Network $BUILD_TAG_LOWER_NETWORK not found, creating it." + docker network create $BUILD_TAG_LOWER_NETWORK + fi + + - name: Run Component Tests + run: | + docker build -t local/mhs-componenttest:${{ env.BUILD_TAG }} -f ./component-test.Dockerfile . + docker run --rm --network "${{ env.BUILD_TAG_LOWER }}_default" \ + --env "MHS_ADDRESS=http://outbound" \ + --env "AWS_ACCESS_KEY_ID=test" \ + --env "AWS_SECRET_ACCESS_KEY=test" \ + --env "MHS_DB_ENDPOINT_URL=http://dynamodb:8000" \ + --env "FAKE_SPINE_ADDRESS=http://fakespine" \ + --env "MHS_INBOUND_QUEUE_BROKERS=amqp://rabbitmq:5672" \ + --env "MHS_INBOUND_QUEUE_NAME=inbound" \ + --env "SCR_ADDRESS=http://scradaptor" \ + local/mhs-componenttest:${{ env.BUILD_TAG }} + + - name: Dump Logs and Cleanup + if: always() + run: | + mkdir -p logs + docker logs ${{ env.BUILD_TAG_LOWER }}-outbound-1 > logs/outbound_1.log + docker logs ${{ env.BUILD_TAG_LOWER }}-inbound-1 > logs/inbound_1.log + docker logs ${{ env.BUILD_TAG_LOWER }}-fakespine-1 > logs/fakespine_1.log + docker logs ${{ env.BUILD_TAG_LOWER }}-rabbitmq-1 > logs/rabbitmq_1.log + docker logs ${{ env.BUILD_TAG_LOWER }}-redis-1 > logs/redis_1.log + docker logs ${{ env.BUILD_TAG_LOWER }}-dynamodb-1 > logs/dynamodb_1.log + docker logs ${{ env.BUILD_TAG_LOWER }}-mongodb-1 > logs/mongodb_1.log + docker compose -f docker-compose.yml -f docker-compose.component.override.yml -p ${{ env.BUILD_TAG_LOWER }} down -v + + rm -f .integration-tests/component-test.source.sh + + - name: Archive Logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.component }} Component Test Logs + path: logs/ + +# integration_tests: +# name: "Integration Tests" +# needs: [component_tests] +# runs-on: ubuntu-latest +# steps: +# - name: Checkout Repository +# uses: actions/checkout@v4 +# - name: Setup Terraform +# uses: hashicorp/setup-terraform@v3 +# with: +# terraform_version: 1.0.0 +# - name: Configure AWS Credentials +# uses: aws-actions/configure-aws-credentials@v4 +# with: +# role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_TO_ASSUME }} +# 
aws-region: ${{ secrets.AWS_REGION }} +# - name: Terraform Init +# run: terraform init -backend-config="bucket=${{ secrets.TF_STATE_BUCKET }}" -backend-config="region=${{ secrets.TF_STATE_BUCKET_REGION }}" -backend-config="key=${{ env.ENVIRONMENT_ID }}-mhs.tfstate" +# working-directory: ./pipeline/terraform/mhs-environment +# - name: Terraform Apply +# run: | +# terraform apply -auto-approve \ +# -var environment_id=build \ +# -var build_id=${{ needs.generate-build-id.outputs.build-id }} +# working-directory: ./pipeline/terraform/mhs-environment +# - name: Run Integration Tests +# run: | +# LB_DNS=$(terraform output -raw outbound_lb_domain_name) +# echo "MHS_ADDRESS=https://$LB_DNS" >> $GITHUB_ENV +# # Execute your integration test script here +# working-directory: ./pipeline/terraform/mhs-environment +# - name: Clean up Terraform +# if: always() +# run: terraform destroy -auto-approve +# working-directory: ./pipeline/terraform/mhs-environment + + post-build-cleanup: + name: "Post-build Cleanup" + needs: [build_common, inbound_tests, outbound_tests, route_tests, generate-build-id, publish-docker-images, component_tests ] + if: always() + runs-on: ubuntu-latest + steps: + - name: "Prune Docker Images" + run: | + docker system prune --force + docker volume prune --force diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 000000000..1e43bfe8b --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,52 @@ +name: "Publish" + +on: + workflow_call: + inputs: + directory: + required: true + type: string + repository: + required: true + type: string + dockerfile: + required: true + type: string + build-id: + required: true + type: string + secrets: + AWS_ACCOUNT_ID: + required: true + AWS_ROLE_TO_ASSUME: + required: true + AWS_REGION: + required: true + +jobs: + publish_image: + name: "Publish ${{ inputs.repository }} Image" + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + submodules: 'false' + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_TO_ASSUME }} + aws-region: ${{ secrets.AWS_REGION }} + - name: Login to ECR + run: | + DOCKER_REGISTRY="https://${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.${{ secrets.AWS_REGION }}.amazonaws.com" + aws ecr get-login-password --region ${{ secrets.AWS_REGION }} | docker login --username AWS --password-stdin $DOCKER_REGISTRY + - name: Build and Push Docker Image + run: | + ECR_REGISTRY="${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.${{ secrets.AWS_REGION }}.amazonaws.com" + DOCKER_IMAGE="$ECR_REGISTRY/${{ inputs.repository }}:${{ inputs.build-id }}" + docker build -t $DOCKER_IMAGE -f ${{ inputs.dockerfile }} . 
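+          # Push the image tagged with the generated build id to the ECR repository derived above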
+ docker push $DOCKER_IMAGE \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..0198c80cc --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,31 @@ +name: "Test" + +on: + workflow_call: + inputs: + name: + required: true + type: string + path: + required: true + type: string + +jobs: + unit_tests: + name: ${{ inputs.name }} Unit Tests + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + - name: Install pipenv + run: python3 -m pip install pipenv + - name: Install Dependencies + working-directory: ${{ inputs.path }} + run: pipenv install --dev + - name: Execute Unit Tests + working-directory: ${{ inputs.path }} + run: pipenv run unittests-cov \ No newline at end of file diff --git a/Jenkinsfile b/Jenkinsfile index d18f5c307..be9e4c66f 100755 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,131 +1,7 @@ -pipeline { - agent{ - label 'jenkins-workers' - } - environment { - BUILD_TAG = sh label: 'Generating build tag', returnStdout: true, script: 'python3 pipeline/scripts/tag.py ${GIT_BRANCH} ${BUILD_NUMBER} ${GIT_COMMIT}' - BUILD_TAG_LOWER = sh label: 'Lowercase build tag', returnStdout: true, script: "echo -n ${BUILD_TAG} | tr '[:upper:]' '[:lower:]'" - ENVIRONMENT_ID = "build" - MHS_INBOUND_QUEUE_NAME = "${ENVIRONMENT_ID}-inbound" - LOCAL_INBOUND_IMAGE_NAME = "local/mhs-inbound:${BUILD_TAG}" - LOCAL_OUTBOUND_IMAGE_NAME = "local/mhs-outbound:${BUILD_TAG}" - LOCAL_ROUTE_IMAGE_NAME = "local/mhs-route:${BUILD_TAG}" - LOCAL_FAKE_SPINE_IMAGE_NAME = "local/fake-spine:${BUILD_TAG}" - INBOUND_IMAGE_NAME = "${DOCKER_REGISTRY}/mhs/inbound:${BUILD_TAG}" - OUTBOUND_IMAGE_NAME = "${DOCKER_REGISTRY}/mhs/outbound:${BUILD_TAG}" - ROUTE_IMAGE_NAME = "${DOCKER_REGISTRY}/mhs/route:${BUILD_TAG}" - FAKE_SPINE_IMAGE_NAME = "${DOCKER_REGISTRY}/fake-spine:${BUILD_TAG}" - } - stages { - stage('Build & test Common directory') { - steps { - dir('common') { - buildModules('Installing common dependencies') - executeUnitTestsWithCoverage() - } - } - } - stage('Build & test MHS Common directory') { - steps { - dir('mhs/common') { - buildModules('Installing mhs common dependencies') - executeUnitTestsWithCoverage() - } - } - } - stage('Build MHS') { - parallel { - stage('Inbound') { - stages { - stage('Build') { - steps { - dir('mhs/inbound') { - buildModules('Installing inbound dependencies') - } - } - } - stage('Unit test') { - steps { - dir('mhs/inbound') { - executeUnitTestsWithCoverage() - } - } - } - stage('Build and Push image') { - when { - expression { currentBuild.resultIsBetterOrEqualTo('SUCCESS') } - } - steps { - buildAndPushImage('${LOCAL_INBOUND_IMAGE_NAME}', '${INBOUND_IMAGE_NAME}', 'docker/inbound/Dockerfile') - } - } - } - } - stage('Outbound') { - stages { - stage('Build') { - steps { - dir('mhs/outbound') { - buildModules('Installing outbound dependencies') - } - } - } - stage('Unit test') { - steps { - dir('mhs/outbound') { - executeUnitTestsWithCoverage() - } - } - } - stage('Build and Push image') { - when { - expression { currentBuild.resultIsBetterOrEqualTo('SUCCESS') } - } - steps { - buildAndPushImage('${LOCAL_OUTBOUND_IMAGE_NAME}', '${OUTBOUND_IMAGE_NAME}', 'docker/outbound/Dockerfile') - } - } - } - } - stage('Route') { - stages { - stage('Build') { - steps { - dir('mhs/spineroutelookup') { - buildModules('Installing route lookup dependencies') - } - } - } - stage('Unit test') { - steps { 
- dir('mhs/spineroutelookup') { - executeUnitTestsWithCoverage() - } - } - } - stage('Build and Push image') { - when { - expression { currentBuild.resultIsBetterOrEqualTo('SUCCESS') } - } - steps { - buildAndPushImage('${LOCAL_ROUTE_IMAGE_NAME}', '${ROUTE_IMAGE_NAME}', 'docker/spineroutelookup/Dockerfile') - } - } - } - } - stage('Fake Spine') { - stages { - stage('Build and Push image') { - steps { - buildAndPushImage('${LOCAL_FAKE_SPINE_IMAGE_NAME}', '${FAKE_SPINE_IMAGE_NAME}', 'integration-tests/fake_spine/Dockerfile') - } - } - } - } - } - } + + stage('Run Tests') { stages { @@ -182,269 +58,280 @@ pipeline { } } - stage('Run Component Tests (SDS API)') { - stages { - stage('Deploy component locally (SDS API)') { - steps { - sh label: 'Setup component test environment', script: './integration-tests/setup_component_test_env.sh' - sh label: 'Start containers', script: ''' - docker-compose -f docker-compose.yml -f docker-compose.component.override.yml -f docker-compose.component-sds.override.yml down -v - docker-compose -f docker-compose.yml -f docker-compose.component.override.yml -f docker-compose.component-sds.override.yml -p custom_network down -v - . ./component-test-source.sh - docker-compose -f docker-compose.yml -f docker-compose.component.override.yml -f docker-compose.component-sds.override.yml build - docker-compose -f docker-compose.yml -f docker-compose.component.override.yml -f docker-compose.component-sds.override.yml -p ${BUILD_TAG_LOWER} up -d - ''' - } - } - stage('Component Tests (SDS API)') { - steps { - sh label: 'Run component tests', script: ''' - docker build -t local/mhs-componenttest:$BUILD_TAG -f ./component-test.Dockerfile . - docker run --rm --network "${BUILD_TAG_LOWER}_default" \ - --env "MHS_ADDRESS=http://outbound" \ - --env "AWS_ACCESS_KEY_ID=test" \ - --env "AWS_SECRET_ACCESS_KEY=test" \ - --env "MHS_DB_ENDPOINT_URL=http://dynamodb:8000" \ - --env "FAKE_SPINE_ADDRESS=http://fakespine" \ - --env "MHS_INBOUND_QUEUE_BROKERS=amqp://rabbitmq:5672" \ - --env "MHS_INBOUND_QUEUE_NAME=inbound" \ - --env "SCR_ADDRESS=http://scradaptor" \ - local/mhs-componenttest:$BUILD_TAG - ''' - } - } - } - post { - always { - sh label: 'Docker status', script: 'docker ps --all' - sh label: 'Docker inspect network', script: 'docker network inspect ${BUILD_TAG_LOWER}_default' - sh label: 'Dump container logs to files', script: ''' - mkdir -p logs - docker logs ${BUILD_TAG_LOWER}_outbound_1 > logs/outbound_2.log - docker logs ${BUILD_TAG_LOWER}_inbound_1 > logs/inbound_2.log - docker logs ${BUILD_TAG_LOWER}_fakespine_1 > logs/fakespine_2.log - docker logs ${BUILD_TAG_LOWER}_rabbitmq_1 > logs/rabbitmq_2.log - docker logs ${BUILD_TAG_LOWER}_dynamodb_1 > logs/dynamodb_2.log - docker logs ${BUILD_TAG_LOWER}_sds-api-mock_1 > logs/sdsapimock_2.log - ''' - archiveArtifacts artifacts: 'logs/*.log', fingerprint: true - sh label: 'Docker compose down', script: 'docker-compose -f docker-compose.yml -f docker-compose.component.override.yml -f docker-compose.component-sds.override.yml -p ${BUILD_TAG_LOWER} down -v' - } - } - } - stage('Run Integration Tests (SpineRouteLookup)') { - options { - lock('exemplar-test-environment') - } - stages { - stage('Deploy MHS (SpineRouteLookup)') { - steps { - dir('pipeline/terraform/mhs-environment') { - script { - terraformBinPath = tfEnv() - } - sh label: 'Initialising Terraform', script: """ - ${terraformBinPath} init \ - -backend-config="bucket=${TF_STATE_BUCKET}" \ - -backend-config="region=${TF_STATE_BUCKET_REGION}" \ - 
-backend-config="key=${ENVIRONMENT_ID}-mhs.tfstate" \ - -backend-config="dynamodb_table=${ENVIRONMENT_ID}-${TF_MHS_LOCK_TABLE_NAME}" \ - -input=false -no-color - """ - sh label: 'Applying Terraform configuration', script: """ - ${terraformBinPath} apply -no-color -auto-approve \ - -var environment_id=${ENVIRONMENT_ID} \ - -var build_id=${BUILD_TAG} \ - -var supplier_vpc_id=${SUPPLIER_VPC_ID} \ - -var opentest_vpc_id=${OPENTEST_VPC_ID} \ - -var internal_root_domain=${INTERNAL_ROOT_DOMAIN} \ - -var mhs_outbound_service_minimum_instance_count=3 \ - -var mhs_outbound_service_maximum_instance_count=9 \ - -var mhs_inbound_service_minimum_instance_count=3 \ - -var mhs_inbound_service_maximum_instance_count=9 \ - -var mhs_route_service_minimum_instance_count=3 \ - -var mhs_route_service_maximum_instance_count=9 \ - -var task_role_arn=${TASK_ROLE} \ - -var execution_role_arn=${TASK_EXECUTION_ROLE} \ - -var task_scaling_role_arn=${TASK_SCALING_ROLE} \ - -var ecr_address=${DOCKER_REGISTRY} \ - -var mhs_outbound_validate_certificate=${MHS_OUTBOUND_VALIDATE_CERTIFICATE} \ - -var mhs_log_level=DEBUG \ - -var mhs_outbound_spineroutelookup_verify_certificate="False" \ - -var mhs_outbound_http_proxy=${MHS_OUTBOUND_HTTP_PROXY} \ - -var mhs_state_table_read_capacity=5 \ - -var mhs_state_table_write_capacity=5 \ - -var mhs_sync_async_table_read_capacity=5 \ - -var mhs_sync_async_table_write_capacity=5 \ - -var mhs_spine_org_code=${SPINE_ORG_CODE} \ - -var inbound_queue_brokers="${MHS_INBOUND_QUEUE_BROKERS}" \ - -var inbound_queue_name="${MHS_INBOUND_QUEUE_NAME}" \ - -var inbound_queue_username_arn=${INBOUND_QUEUE_USERNAME_ARN} \ - -var inbound_queue_password_arn=${INBOUND_QUEUE_PASSWORD_ARN} \ - -var party_key_arn=${PARTY_KEY_ARN} \ - -var client_cert_arn=${CLIENT_CERT_ARN} \ - -var client_key_arn=${CLIENT_KEY_ARN} \ - -var ca_certs_arn=${CA_CERTS_ARN} \ - -var route_ca_certs_arn=${ROUTE_CA_CERTS_ARN} \ - -var outbound_alb_certificate_arn=${OUTBOUND_ALB_CERT_ARN} \ - -var route_alb_certificate_arn=${ROUTE_ALB_CERT_ARN} \ - -var mhs_resynchroniser_max_retries=${MHS_RESYNC_RETRIES} \ - -var mhs_resynchroniser_interval=${MHS_RESYNC_INTERVAL} \ - -var spineroutelookup_service_sds_url=${SPINEROUTELOOKUP_SERVICE_LDAP_URL} \ - -var spineroutelookup_service_search_base=${SPINEROUTELOOKUP_SERVICE_SEARCH_BASE} \ - -var spineroutelookup_service_disable_sds_tls=${SPINEROUTELOOKUP_SERVICE_DISABLE_TLS} \ - -var elasticache_node_type="cache.t2.micro" \ - -var mhs_forward_reliable_endpoint_url=${MHS_FORWARD_RELIABLE_ENDPOINT_URL} \ - -var mhs_outbound_routing_lookup_method="SPINE_ROUTE_LOOKUP" \ - -var mhs_sds_api_url="" \ - -var mhs_sds_api_key_arn=${MHS_SDS_API_KEY_ARN} \ - """ - script { - env.MHS_ADDRESS = sh ( - label: 'Obtaining outbound LB DNS name', - returnStdout: true, - script: "echo \"https://\$(${terraformBinPath} output outbound_lb_domain_name)\"" - ).trim() - env.MHS_OUTBOUND_TARGET_GROUP = sh ( - label: 'Obtaining outbound LB target group ARN', - returnStdout: true, - script: "${terraformBinPath} output outbound_lb_target_group_arn" - ).trim() - env.MHS_INBOUND_TARGET_GROUP = sh ( - label: 'Obtaining inbound LB target group ARN', - returnStdout: true, - script: "${terraformBinPath} output inbound_lb_target_group_arn" - ).trim() - env.MHS_ROUTE_TARGET_GROUP = sh ( - label: 'Obtaining route LB target group ARN', - returnStdout: true, - script: "${terraformBinPath} output route_lb_target_group_arn" - ).trim() - env.MHS_STATE_TABLE_NAME = sh ( - label: 'Obtaining the table name used for the MHS state', - 
returnStdout: true, - script: "${terraformBinPath} output mhs_state_table_name" - ).trim() - env.MHS_SYNC_ASYNC_TABLE_NAME = sh ( - label: 'Obtaining the table name used for the MHS sync/async state', - returnStdout: true, - script: "${terraformBinPath} output mhs_sync_async_table_name" - ).trim() - } - } - } - } - } - } - stage('Run Integration Tests (SDS API)') { - options { - lock('exemplar-test-environment') - } - stages { - stage('Deploy MHS (SDS API)') { - steps { - dir('pipeline/terraform/mhs-environment') { - script { - terraformBinPath = tfEnv() - } - sh label: 'Initialising Terraform', script: """ - ${terraformBinPath} init \ - -backend-config="bucket=${TF_STATE_BUCKET}" \ - -backend-config="region=${TF_STATE_BUCKET_REGION}" \ - -backend-config="key=${ENVIRONMENT_ID}-mhs.tfstate" \ - -backend-config="dynamodb_table=${ENVIRONMENT_ID}-${TF_MHS_LOCK_TABLE_NAME}" \ - -input=false -no-color - """ - sh label: 'Applying Terraform configuration', script: """ - ${terraformBinPath} apply -no-color -auto-approve \ - -var environment_id=${ENVIRONMENT_ID} \ - -var build_id=${BUILD_TAG} \ - -var supplier_vpc_id=${SUPPLIER_VPC_ID} \ - -var opentest_vpc_id=${OPENTEST_VPC_ID} \ - -var internal_root_domain=${INTERNAL_ROOT_DOMAIN} \ - -var mhs_outbound_service_minimum_instance_count=3 \ - -var mhs_outbound_service_maximum_instance_count=9 \ - -var mhs_inbound_service_minimum_instance_count=3 \ - -var mhs_inbound_service_maximum_instance_count=9 \ - -var mhs_route_service_minimum_instance_count=3 \ - -var mhs_route_service_maximum_instance_count=9 \ - -var task_role_arn=${TASK_ROLE} \ - -var execution_role_arn=${TASK_EXECUTION_ROLE} \ - -var task_scaling_role_arn=${TASK_SCALING_ROLE} \ - -var ecr_address=${DOCKER_REGISTRY} \ - -var mhs_outbound_validate_certificate=${MHS_OUTBOUND_VALIDATE_CERTIFICATE} \ - -var mhs_log_level=DEBUG \ - -var mhs_outbound_spineroutelookup_verify_certificate="False" \ - -var mhs_outbound_http_proxy=${MHS_OUTBOUND_HTTP_PROXY} \ - -var mhs_state_table_read_capacity=5 \ - -var mhs_state_table_write_capacity=5 \ - -var mhs_sync_async_table_read_capacity=5 \ - -var mhs_sync_async_table_write_capacity=5 \ - -var mhs_spine_org_code=${SPINE_ORG_CODE} \ - -var inbound_queue_brokers="${MHS_INBOUND_QUEUE_BROKERS}" \ - -var inbound_queue_name="${MHS_INBOUND_QUEUE_NAME}" \ - -var inbound_queue_username_arn=${INBOUND_QUEUE_USERNAME_ARN} \ - -var inbound_queue_password_arn=${INBOUND_QUEUE_PASSWORD_ARN} \ - -var party_key_arn=${PARTY_KEY_ARN} \ - -var client_cert_arn=${CLIENT_CERT_ARN} \ - -var client_key_arn=${CLIENT_KEY_ARN} \ - -var ca_certs_arn=${CA_CERTS_ARN} \ - -var route_ca_certs_arn=${ROUTE_CA_CERTS_ARN} \ - -var outbound_alb_certificate_arn=${OUTBOUND_ALB_CERT_ARN} \ - -var route_alb_certificate_arn=${ROUTE_ALB_CERT_ARN} \ - -var mhs_resynchroniser_max_retries=${MHS_RESYNC_RETRIES} \ - -var mhs_resynchroniser_interval=${MHS_RESYNC_INTERVAL} \ - -var spineroutelookup_service_sds_url=${SPINEROUTELOOKUP_SERVICE_LDAP_URL} \ - -var spineroutelookup_service_search_base=${SPINEROUTELOOKUP_SERVICE_SEARCH_BASE} \ - -var spineroutelookup_service_disable_sds_tls=${SPINEROUTELOOKUP_SERVICE_DISABLE_TLS} \ - -var elasticache_node_type="cache.t2.micro" \ - -var mhs_forward_reliable_endpoint_url=${MHS_FORWARD_RELIABLE_ENDPOINT_URL} \ - -var mhs_outbound_routing_lookup_method="SDS_API" \ - -var mhs_sds_api_url=${MHS_SDS_API_URL} \ - -var mhs_sds_api_key_arn=${MHS_SDS_API_KEY_ARN} \ - """ - script { - env.MHS_ADDRESS = sh ( - label: 'Obtaining outbound LB DNS name', - returnStdout: true, - 
script: "echo \"https://\$(${terraformBinPath} output outbound_lb_domain_name)\"" - ).trim() - env.MHS_OUTBOUND_TARGET_GROUP = sh ( - label: 'Obtaining outbound LB target group ARN', - returnStdout: true, - script: "${terraformBinPath} output outbound_lb_target_group_arn" - ).trim() - env.MHS_INBOUND_TARGET_GROUP = sh ( - label: 'Obtaining inbound LB target group ARN', - returnStdout: true, - script: "${terraformBinPath} output inbound_lb_target_group_arn" - ).trim() - env.MHS_ROUTE_TARGET_GROUP = sh ( - label: 'Obtaining route LB target group ARN', - returnStdout: true, - script: "${terraformBinPath} output route_lb_target_group_arn" - ).trim() - env.MHS_STATE_TABLE_NAME = sh ( - label: 'Obtaining the table name used for the MHS state', - returnStdout: true, - script: "${terraformBinPath} output mhs_state_table_name" - ).trim() - env.MHS_SYNC_ASYNC_TABLE_NAME = sh ( - label: 'Obtaining the table name used for the MHS sync/async state', - returnStdout: true, - script: "${terraformBinPath} output mhs_sync_async_table_name" - ).trim() - } - } - } - } - } - } - } - } - } + +// +// +// +// stage('Run Component Tests (SDS API)') { +// stages { +// stage('Deploy component locally (SDS API)') { +// steps { +// sh label: 'Setup component test environment', script: './integration-tests/setup_component_test_env.sh' +// sh label: 'Start containers', script: ''' +// docker-compose -f docker-compose.yml -f docker-compose.component.override.yml -f docker-compose.component-sds.override.yml down -v +// docker-compose -f docker-compose.yml -f docker-compose.component.override.yml -f docker-compose.component-sds.override.yml -p custom_network down -v +// . ./component-test-source.sh +// docker-compose -f docker-compose.yml -f docker-compose.component.override.yml -f docker-compose.component-sds.override.yml build +// docker-compose -f docker-compose.yml -f docker-compose.component.override.yml -f docker-compose.component-sds.override.yml -p ${BUILD_TAG_LOWER} up -d +// ''' +// } +// } +// stage('Component Tests (SDS API)') { +// steps { +// sh label: 'Run component tests', script: ''' +// docker build -t local/mhs-componenttest:$BUILD_TAG -f ./component-test.Dockerfile . 
+// docker run --rm --network "${BUILD_TAG_LOWER}_default" \ +// --env "MHS_ADDRESS=http://outbound" \ +// --env "AWS_ACCESS_KEY_ID=test" \ +// --env "AWS_SECRET_ACCESS_KEY=test" \ +// --env "MHS_DB_ENDPOINT_URL=http://dynamodb:8000" \ +// --env "FAKE_SPINE_ADDRESS=http://fakespine" \ +// --env "MHS_INBOUND_QUEUE_BROKERS=amqp://rabbitmq:5672" \ +// --env "MHS_INBOUND_QUEUE_NAME=inbound" \ +// --env "SCR_ADDRESS=http://scradaptor" \ +// local/mhs-componenttest:$BUILD_TAG +// ''' +// } +// } +// } +// post { +// always { +// sh label: 'Docker status', script: 'docker ps --all' +// sh label: 'Docker inspect network', script: 'docker network inspect ${BUILD_TAG_LOWER}_default' +// sh label: 'Dump container logs to files', script: ''' +// mkdir -p logs +// docker logs ${BUILD_TAG_LOWER}_outbound_1 > logs/outbound_2.log +// docker logs ${BUILD_TAG_LOWER}_inbound_1 > logs/inbound_2.log +// docker logs ${BUILD_TAG_LOWER}_fakespine_1 > logs/fakespine_2.log +// docker logs ${BUILD_TAG_LOWER}_rabbitmq_1 > logs/rabbitmq_2.log +// docker logs ${BUILD_TAG_LOWER}_dynamodb_1 > logs/dynamodb_2.log +// docker logs ${BUILD_TAG_LOWER}_sds-api-mock_1 > logs/sdsapimock_2.log +// +// ''' +// archiveArtifacts artifacts: 'logs/*.log', fingerprint: true +// sh label: 'Docker compose down', script: 'docker-compose -f docker-compose.yml -f docker-compose.component.override.yml -f docker-compose.component-sds.override.yml -p ${BUILD_TAG_LOWER} down -v' +// } +// } +// } +// +// +// +// +// +// +// stage('Run Integration Tests (SpineRouteLookup)') { +// options { +// lock('exemplar-test-environment') +// } +// stages { +// stage('Deploy MHS (SpineRouteLookup)') { +// steps { +// dir('pipeline/terraform/mhs-environment') { +// script { +// terraformBinPath = tfEnv() +// } +// sh label: 'Initialising Terraform', script: """ +// ${terraformBinPath} init \ +// -backend-config="bucket=${TF_STATE_BUCKET}" \ +// -backend-config="region=${TF_STATE_BUCKET_REGION}" \ +// -backend-config="key=${ENVIRONMENT_ID}-mhs.tfstate" \ +// -backend-config="dynamodb_table=${ENVIRONMENT_ID}-${TF_MHS_LOCK_TABLE_NAME}" \ +// -input=false -no-color +// """ +// sh label: 'Applying Terraform configuration', script: """ +// ${terraformBinPath} apply -no-color -auto-approve \ +// -var environment_id=${ENVIRONMENT_ID} \ +// -var build_id=${BUILD_TAG} \ +// -var supplier_vpc_id=${SUPPLIER_VPC_ID} \ +// -var opentest_vpc_id=${OPENTEST_VPC_ID} \ +// -var internal_root_domain=${INTERNAL_ROOT_DOMAIN} \ +// -var mhs_outbound_service_minimum_instance_count=3 \ +// -var mhs_outbound_service_maximum_instance_count=9 \ +// -var mhs_inbound_service_minimum_instance_count=3 \ +// -var mhs_inbound_service_maximum_instance_count=9 \ +// -var mhs_route_service_minimum_instance_count=3 \ +// -var mhs_route_service_maximum_instance_count=9 \ +// -var task_role_arn=${TASK_ROLE} \ +// -var execution_role_arn=${TASK_EXECUTION_ROLE} \ +// -var task_scaling_role_arn=${TASK_SCALING_ROLE} \ +// -var ecr_address=${DOCKER_REGISTRY} \ +// -var mhs_outbound_validate_certificate=${MHS_OUTBOUND_VALIDATE_CERTIFICATE} \ +// -var mhs_log_level=DEBUG \ +// -var mhs_outbound_spineroutelookup_verify_certificate="False" \ +// -var mhs_outbound_http_proxy=${MHS_OUTBOUND_HTTP_PROXY} \ +// -var mhs_state_table_read_capacity=5 \ +// -var mhs_state_table_write_capacity=5 \ +// -var mhs_sync_async_table_read_capacity=5 \ +// -var mhs_sync_async_table_write_capacity=5 \ +// -var mhs_spine_org_code=${SPINE_ORG_CODE} \ +// -var inbound_queue_brokers="${MHS_INBOUND_QUEUE_BROKERS}" \ +// -var 
inbound_queue_name="${MHS_INBOUND_QUEUE_NAME}" \ +// -var inbound_queue_username_arn=${INBOUND_QUEUE_USERNAME_ARN} \ +// -var inbound_queue_password_arn=${INBOUND_QUEUE_PASSWORD_ARN} \ +// -var party_key_arn=${PARTY_KEY_ARN} \ +// -var client_cert_arn=${CLIENT_CERT_ARN} \ +// -var client_key_arn=${CLIENT_KEY_ARN} \ +// -var ca_certs_arn=${CA_CERTS_ARN} \ +// -var route_ca_certs_arn=${ROUTE_CA_CERTS_ARN} \ +// -var outbound_alb_certificate_arn=${OUTBOUND_ALB_CERT_ARN} \ +// -var route_alb_certificate_arn=${ROUTE_ALB_CERT_ARN} \ +// -var mhs_resynchroniser_max_retries=${MHS_RESYNC_RETRIES} \ +// -var mhs_resynchroniser_interval=${MHS_RESYNC_INTERVAL} \ +// -var spineroutelookup_service_sds_url=${SPINEROUTELOOKUP_SERVICE_LDAP_URL} \ +// -var spineroutelookup_service_search_base=${SPINEROUTELOOKUP_SERVICE_SEARCH_BASE} \ +// -var spineroutelookup_service_disable_sds_tls=${SPINEROUTELOOKUP_SERVICE_DISABLE_TLS} \ +// -var elasticache_node_type="cache.t2.micro" \ +// -var mhs_forward_reliable_endpoint_url=${MHS_FORWARD_RELIABLE_ENDPOINT_URL} \ +// -var mhs_outbound_routing_lookup_method="SPINE_ROUTE_LOOKUP" \ +// -var mhs_sds_api_url="" \ +// -var mhs_sds_api_key_arn=${MHS_SDS_API_KEY_ARN} \ +// """ +// script { +// env.MHS_ADDRESS = sh ( +// label: 'Obtaining outbound LB DNS name', +// returnStdout: true, +// script: "echo \"https://\$(${terraformBinPath} output outbound_lb_domain_name)\"" +// ).trim() +// env.MHS_OUTBOUND_TARGET_GROUP = sh ( +// label: 'Obtaining outbound LB target group ARN', +// returnStdout: true, +// script: "${terraformBinPath} output outbound_lb_target_group_arn" +// ).trim() +// env.MHS_INBOUND_TARGET_GROUP = sh ( +// label: 'Obtaining inbound LB target group ARN', +// returnStdout: true, +// script: "${terraformBinPath} output inbound_lb_target_group_arn" +// ).trim() +// env.MHS_ROUTE_TARGET_GROUP = sh ( +// label: 'Obtaining route LB target group ARN', +// returnStdout: true, +// script: "${terraformBinPath} output route_lb_target_group_arn" +// ).trim() +// env.MHS_STATE_TABLE_NAME = sh ( +// label: 'Obtaining the table name used for the MHS state', +// returnStdout: true, +// script: "${terraformBinPath} output mhs_state_table_name" +// ).trim() +// env.MHS_SYNC_ASYNC_TABLE_NAME = sh ( +// label: 'Obtaining the table name used for the MHS sync/async state', +// returnStdout: true, +// script: "${terraformBinPath} output mhs_sync_async_table_name" +// ).trim() +// } +// } +// } +// } +// } +// } +// stage('Run Integration Tests (SDS API)') { +// options { +// lock('exemplar-test-environment') +// } +// stages { +// stage('Deploy MHS (SDS API)') { +// steps { +// dir('pipeline/terraform/mhs-environment') { +// script { +// terraformBinPath = tfEnv() +// } +// sh label: 'Initialising Terraform', script: """ +// ${terraformBinPath} init \ +// -backend-config="bucket=${TF_STATE_BUCKET}" \ +// -backend-config="region=${TF_STATE_BUCKET_REGION}" \ +// -backend-config="key=${ENVIRONMENT_ID}-mhs.tfstate" \ +// -backend-config="dynamodb_table=${ENVIRONMENT_ID}-${TF_MHS_LOCK_TABLE_NAME}" \ +// -input=false -no-color +// """ +// sh label: 'Applying Terraform configuration', script: """ +// ${terraformBinPath} apply -no-color -auto-approve \ +// -var environment_id=${ENVIRONMENT_ID} \ +// -var build_id=${BUILD_TAG} \ +// -var supplier_vpc_id=${SUPPLIER_VPC_ID} \ +// -var opentest_vpc_id=${OPENTEST_VPC_ID} \ +// -var internal_root_domain=${INTERNAL_ROOT_DOMAIN} \ +// -var mhs_outbound_service_minimum_instance_count=3 \ +// -var mhs_outbound_service_maximum_instance_count=9 \ +// 
-var mhs_inbound_service_minimum_instance_count=3 \ +// -var mhs_inbound_service_maximum_instance_count=9 \ +// -var mhs_route_service_minimum_instance_count=3 \ +// -var mhs_route_service_maximum_instance_count=9 \ +// -var task_role_arn=${TASK_ROLE} \ +// -var execution_role_arn=${TASK_EXECUTION_ROLE} \ +// -var task_scaling_role_arn=${TASK_SCALING_ROLE} \ +// -var ecr_address=${DOCKER_REGISTRY} \ +// -var mhs_outbound_validate_certificate=${MHS_OUTBOUND_VALIDATE_CERTIFICATE} \ +// -var mhs_log_level=DEBUG \ +// -var mhs_outbound_spineroutelookup_verify_certificate="False" \ +// -var mhs_outbound_http_proxy=${MHS_OUTBOUND_HTTP_PROXY} \ +// -var mhs_state_table_read_capacity=5 \ +// -var mhs_state_table_write_capacity=5 \ +// -var mhs_sync_async_table_read_capacity=5 \ +// -var mhs_sync_async_table_write_capacity=5 \ +// -var mhs_spine_org_code=${SPINE_ORG_CODE} \ +// -var inbound_queue_brokers="${MHS_INBOUND_QUEUE_BROKERS}" \ +// -var inbound_queue_name="${MHS_INBOUND_QUEUE_NAME}" \ +// -var inbound_queue_username_arn=${INBOUND_QUEUE_USERNAME_ARN} \ +// -var inbound_queue_password_arn=${INBOUND_QUEUE_PASSWORD_ARN} \ +// -var party_key_arn=${PARTY_KEY_ARN} \ +// -var client_cert_arn=${CLIENT_CERT_ARN} \ +// -var client_key_arn=${CLIENT_KEY_ARN} \ +// -var ca_certs_arn=${CA_CERTS_ARN} \ +// -var route_ca_certs_arn=${ROUTE_CA_CERTS_ARN} \ +// -var outbound_alb_certificate_arn=${OUTBOUND_ALB_CERT_ARN} \ +// -var route_alb_certificate_arn=${ROUTE_ALB_CERT_ARN} \ +// -var mhs_resynchroniser_max_retries=${MHS_RESYNC_RETRIES} \ +// -var mhs_resynchroniser_interval=${MHS_RESYNC_INTERVAL} \ +// -var spineroutelookup_service_sds_url=${SPINEROUTELOOKUP_SERVICE_LDAP_URL} \ +// -var spineroutelookup_service_search_base=${SPINEROUTELOOKUP_SERVICE_SEARCH_BASE} \ +// -var spineroutelookup_service_disable_sds_tls=${SPINEROUTELOOKUP_SERVICE_DISABLE_TLS} \ +// -var elasticache_node_type="cache.t2.micro" \ +// -var mhs_forward_reliable_endpoint_url=${MHS_FORWARD_RELIABLE_ENDPOINT_URL} \ +// -var mhs_outbound_routing_lookup_method="SDS_API" \ +// -var mhs_sds_api_url=${MHS_SDS_API_URL} \ +// -var mhs_sds_api_key_arn=${MHS_SDS_API_KEY_ARN} \ +// """ +// script { +// env.MHS_ADDRESS = sh ( +// label: 'Obtaining outbound LB DNS name', +// returnStdout: true, +// script: "echo \"https://\$(${terraformBinPath} output outbound_lb_domain_name)\"" +// ).trim() +// env.MHS_OUTBOUND_TARGET_GROUP = sh ( +// label: 'Obtaining outbound LB target group ARN', +// returnStdout: true, +// script: "${terraformBinPath} output outbound_lb_target_group_arn" +// ).trim() +// env.MHS_INBOUND_TARGET_GROUP = sh ( +// label: 'Obtaining inbound LB target group ARN', +// returnStdout: true, +// script: "${terraformBinPath} output inbound_lb_target_group_arn" +// ).trim() +// env.MHS_ROUTE_TARGET_GROUP = sh ( +// label: 'Obtaining route LB target group ARN', +// returnStdout: true, +// script: "${terraformBinPath} output route_lb_target_group_arn" +// ).trim() +// env.MHS_STATE_TABLE_NAME = sh ( +// label: 'Obtaining the table name used for the MHS state', +// returnStdout: true, +// script: "${terraformBinPath} output mhs_state_table_name" +// ).trim() +// env.MHS_SYNC_ASYNC_TABLE_NAME = sh ( +// label: 'Obtaining the table name used for the MHS sync/async state', +// returnStdout: true, +// script: "${terraformBinPath} output mhs_sync_async_table_name" +// ).trim() +// } +// } +// } +// } +// } +// } +// } +// } +// } post { always { cobertura coberturaReportFile: '**/coverage.xml' @@ -458,34 +345,4 @@ pipeline { } } -String 
tfEnv(String tfEnvRepo="https://github.com/tfutils/tfenv.git", String tfEnvPath="~/.tfenv") { - sh(label: "Get tfenv" , script: "git clone ${tfEnvRepo} ${tfEnvPath}", returnStatus: true) - sh(label: "Install TF", script: "${tfEnvPath}/bin/tfenv install" , returnStatus: true) - return "${tfEnvPath}/bin/terraform" -} -void executeUnitTestsWithCoverage() { - sh label: 'Running unit tests', script: 'pipenv run unittests-cov' - sh label: 'Displaying code coverage report', script: 'pipenv run coverage-report' - sh label: 'Exporting code coverage report', script: 'pipenv run coverage-report-xml' -// SonarQube disabled as atm it's not set up on AWS -// sh label: 'Running SonarQube analysis', script: "sonar-scanner -Dsonar.host.url=${SONAR_HOST} -Dsonar.login=${SONAR_TOKEN}" -} - -void buildModules(String action) { - sh label: action, script: 'pipenv install --dev --deploy --ignore-pipfile' -} - -int ecrLogin(String aws_region) { - String dockerLogin = "aws ecr get-login-password --region ${aws_region} | docker login -u AWS --password-stdin \"https://\$(aws sts get-caller-identity --query 'Account' --output text).dkr.ecr.${aws_region}.amazonaws.com\"" - return sh(label: "Logging in with Docker", script: dockerLogin, returnStatus: true) -} - -void buildAndPushImage(String localImageName, String imageName, String dockerFile, String context = '.') { - sh label: 'Running docker build', script: 'docker build -t ' + localImageName + ' -f ' + dockerFile + ' ' + context - if (ecrLogin(TF_STATE_BUCKET_REGION) != 0 ) { error("Docker login to ECR failed") } - sh label: 'Tag ecr image', script: 'docker tag ' + localImageName + ' ' + imageName - String dockerPushCommand = "docker push " + imageName - if (sh (label: "Pushing image", script: dockerPushCommand, returnStatus: true) !=0) { error("Docker push image failed") } - sh label: 'Deleting local ECR image', script: 'docker rmi ' + imageName -} diff --git a/docker-compose.component.override.yml b/docker-compose.component.override.yml index a3c7e21bd..a66607126 100644 --- a/docker-compose.component.override.yml +++ b/docker-compose.component.override.yml @@ -1,5 +1,3 @@ -version: '3' - services: route: build: diff --git a/docker-compose.release.image.override.yml b/docker-compose.release.image.override.yml index 4d58c5ade..1fda33440 100644 --- a/docker-compose.release.image.override.yml +++ b/docker-compose.release.image.override.yml @@ -1,5 +1,3 @@ -version: '3' - services: inbound: image: nhsdev/nia-mhs-inbound:${BUILD_TAG} diff --git a/docker-compose.yml b/docker-compose.yml index 8ec8238e0..bc97728a3 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,8 @@ -version: '3' - services: inbound: + build: + context: . + dockerfile: ./docker/inbound/Dockerfile image: local/mhs-inbound:${BUILD_TAG} ports: - "443" @@ -20,8 +21,6 @@ services: - MHS_SYNC_ASYNC_STATE_TABLE_NAME=sync_async_state - MHS_DB_ENDPOINT_URL=http://dynamodb:8000 - MHS_INBOUND_QUEUE_MESSAGE_TTL_IN_SECONDS=0 - # boto3 requires some AWS creds to be provided, even - # when connecting to local DynamoDB - AWS_ACCESS_KEY_ID=test - AWS_SECRET_ACCESS_KEY=test - MHS_INBOUND_USE_SSL=True @@ -29,6 +28,9 @@ services: - SERVICE_PORTS=443,80 - SUPPORTED_FILE_TYPES outbound: + build: + context: . 
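+      # Build from the repository root, presumably so the Dockerfile can copy the shared common/ and mhs/common packages, mirroring the inbound service above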
+ dockerfile: ./docker/outbound/Dockerfile image: local/mhs-outbound:${BUILD_TAG} ports: - "80" @@ -47,19 +49,18 @@ services: - MAX_RESYNC_RETRIES=20 - MHS_SPINE_ROUTE_LOOKUP_URL=http://route - MHS_SPINE_ORG_CODE=YES - - MHS_SPINE_REQUEST_MAX_SIZE=4999600 # 5 000 000 - 400 - # Note that this endpoint URL is Opentest-specific + - MHS_SPINE_REQUEST_MAX_SIZE=4999600 - MHS_FORWARD_RELIABLE_ENDPOINT_URL=https://192.168.128.11/reliablemessaging/forwardreliable - # This is for disabling hostname validation so OpenTest ip address will work - MHS_OUTBOUND_VALIDATE_CERTIFICATE=${MHS_OUTBOUND_VALIDATE_CERTIFICATE:-False} - SERVICE_PORTS=80 - MHS_OUTBOUND_ROUTING_LOOKUP_METHOD - MHS_SDS_API_URL - MHS_SDS_API_KEY route: + build: ./docker/spineroutelookup image: local/mhs-route:${BUILD_TAG} ports: - - "8080:80" + - "8080:80" environment: - MHS_LOG_LEVEL=NOTSET - MHS_SDS_URL=ldap://192.168.128.11 diff --git a/integration-tests/integration_tests/integration_tests/component_tests/component_asynchronous_express_message_pattern_tests.py b/integration-tests/integration_tests/integration_tests/component_tests/x_component_asynchronous_express_message_pattern_tests.py similarity index 100% rename from integration-tests/integration_tests/integration_tests/component_tests/component_asynchronous_express_message_pattern_tests.py rename to integration-tests/integration_tests/integration_tests/component_tests/x_component_asynchronous_express_message_pattern_tests.py diff --git a/integration-tests/integration_tests/integration_tests/component_tests/component_asynchronous_reliable_message_pattern_tests.py b/integration-tests/integration_tests/integration_tests/component_tests/x_component_asynchronous_reliable_message_pattern_tests.py similarity index 100% rename from integration-tests/integration_tests/integration_tests/component_tests/component_asynchronous_reliable_message_pattern_tests.py rename to integration-tests/integration_tests/integration_tests/component_tests/x_component_asynchronous_reliable_message_pattern_tests.py diff --git a/integration-tests/integration_tests/integration_tests/component_tests/component_persistence_adaptor_tests.py b/integration-tests/integration_tests/integration_tests/component_tests/x_component_persistence_adaptor_tests.py similarity index 100% rename from integration-tests/integration_tests/integration_tests/component_tests/component_persistence_adaptor_tests.py rename to integration-tests/integration_tests/integration_tests/component_tests/x_component_persistence_adaptor_tests.py diff --git a/integration-tests/integration_tests/integration_tests/component_tests/component_summary_care_record_adaptor_tests.py b/integration-tests/integration_tests/integration_tests/component_tests/x_component_summary_care_record_adaptor_tests.py similarity index 100% rename from integration-tests/integration_tests/integration_tests/component_tests/component_summary_care_record_adaptor_tests.py rename to integration-tests/integration_tests/integration_tests/component_tests/x_component_summary_care_record_adaptor_tests.py diff --git a/integration-tests/integration_tests/integration_tests/component_tests/component_synchronous_messaging_pattern_tests.py b/integration-tests/integration_tests/integration_tests/component_tests/x_component_synchronous_messaging_pattern_tests.py similarity index 100% rename from integration-tests/integration_tests/integration_tests/component_tests/component_synchronous_messaging_pattern_tests.py rename to 
integration-tests/integration_tests/integration_tests/component_tests/x_component_synchronous_messaging_pattern_tests.py diff --git a/integration-tests/integration_tests/integration_tests/component_tests/test_tests.py b/integration-tests/integration_tests/integration_tests/component_tests/x_test_tests.py similarity index 100% rename from integration-tests/integration_tests/integration_tests/component_tests/test_tests.py rename to integration-tests/integration_tests/integration_tests/component_tests/x_test_tests.py diff --git a/integration-tests/setup_component_test_env.sh b/integration-tests/setup_component_test_env.sh index baa4960b7..c6b39f315 100755 --- a/integration-tests/setup_component_test_env.sh +++ b/integration-tests/setup_component_test_env.sh @@ -23,4 +23,4 @@ echo -e "export INBOUND_CA_STORE=\"$(cat ./generated-certs/fake-spine/cert.pem)\ echo -e "export MHS_SECRET_PARTY_KEY=\"test-party-key\"" >> component-test-source.sh echo -e "export MHS_OUTBOUND_VALIDATE_CERTIFICATE=\"False\"" >> component-test-source.sh -rm -rf ./generated-certs \ No newline at end of file +#rm -rf ./generated-certs \ No newline at end of file diff --git a/mhs-config.yaml b/mhs-config.yaml index 6dc7cd17e..24b5a3e1f 100644 --- a/mhs-config.yaml +++ b/mhs-config.yaml @@ -26,6 +26,7 @@ AWS_SECRET_ACCESS_KEY: test MHS_RESYNC_INTERVAL: "1" MAX_RESYNC_RETRIES: "20" MHS_SPINE_ROUTE_LOOKUP_URL: http://localhost:8088 +SPINE_ROUTE_LOOKUP_URL: http://localhost:8088 MHS_SPINE_ORG_CODE: "YES" MHS_SPINE_REQUEST_MAX_SIZE: "4999600" diff --git a/mhs/common/mhs_common/routing/spine_route_lookup_client.py b/mhs/common/mhs_common/routing/spine_route_lookup_client.py index 8d2162328..4fe058549 100644 --- a/mhs/common/mhs_common/routing/spine_route_lookup_client.py +++ b/mhs/common/mhs_common/routing/spine_route_lookup_client.py @@ -64,6 +64,8 @@ async def get_end_point(self, interaction_id: str, ods_code: str = None) -> Dict logger.info("Proxy setup: {proxy_host} & {proxy_port}.", fparams={"proxy_host": self._proxy_host, "proxy_port": self._proxy_port}) + logger.info("URL: {url}.", fparams={"url": url}) + http_response = await common_https.CommonHttps.make_request(url=url, method="GET", headers=build_tracking_headers(), body=None, diff --git a/mhs/outbound/pycurl b/mhs/outbound/pycurl new file mode 160000 index 000000000..b0afc76fc --- /dev/null +++ b/mhs/outbound/pycurl @@ -0,0 +1 @@ +Subproject commit b0afc76fc251ebb0e784328ce0f6c915a16fdf42