Commit 3c72a5e2 authored by Matt Wise

rebase dev onto GL master

parent 5666b511
@@ -35,43 +35,34 @@
##### Authentication / Secrets #####
# Replace the placeholder text with your own AWS access keys
# and rename this file to `.env` - do NOT check in .env with your credentials! Leave it in .gitignore
AWS_ACCESS_KEY_ID=
AWS_SECRET_KEY=
AWS_ACCOUNT_ID=
AWS_ACCESS_KEY_ID=<YOUR_ACCESS_KEY_ID>
AWS_SECRET_KEY=<YOUR_SECRET_KEY>
##### URLs / Ports #####
APPLICATION_PORT=
ENTITLEMENTS_DOMAIN=
DOMAIN=
INDEXER_HOST=
SEARCH_HOST=
CACHE_CLUSTER_ENDPOINT=
CACHE_CLUSTER_PORT=
ELASTIC_HOST=
ELASTIC_PORT=
APPLICATION_PORT=8080
CACHE_CLUSTER_ENDPOINT=127.0.0.1
CACHE_CLUSTER_PORT=6379
ELASTIC_HOST=localhost
ELASTIC_PORT=9200
##### Other environment variables ##########################################################
JAVA_HEAP_MEMORY=
SNS_TOPIC_NAME=
ENVIRONMENT=
AWS_REGION=
LOG_LEVEL=
ENVIRONMENT=dev
AWS_REGION=us-east-1
LOG_LEVEL=DEBUG
SSM_ENABLED=True
##### Integration test-specific - these are only used for integration tests, not the app ###
OTHER_RELEVANT_DATA_COUNTRIES=
LEGAL_TAG=
DEFAULT_DATA_PARTITION_ID_TENANT1=
DEFAULT_DATA_PARTITION_ID_TENANT2=
ENTITLEMENTS_DOMAIN=
AWS_COGNITO_CLIENT_ID=
AWS_COGNITO_AUTH_FLOW=
AWS_COGNITO_AUTH_PARAMS_PASSWORD=
AWS_COGNITO_AUTH_PARAMS_USER=
AWS_COGNITO_AUTH_PARAMS_USER_NO_ACCESS=
ELASTIC_HOST=
DEFAULT_ELASTIC_USER_NAME=
DEFAULT_ELASTIC_PASSWORD=
ELASTIC_PORT=
SEARCH_HOST=
STORAGE_HOST=
\ No newline at end of file
AWS_COGNITO_CLIENT_ID=<YOUR_COGNITO_CLIENT_ID>
AWS_COGNITO_AUTH_FLOW=USER_PASSWORD_AUTH
AWS_COGNITO_AUTH_PARAMS_USER=<YOUR_AUTHORIZED_USER>
AWS_COGNITO_AUTH_PARAMS_USER_NO_ACCESS=test-user-without-access@testing.com
AWS_COGNITO_AUTH_PARAMS_PASSWORD=<YOUR_AUTHORIZED_USER_PASSWORD>
SEARCH_HOST=<YOUR_API_URL>/api/search/v2/
STORAGE_HOST=<YOUR_API_URL>/api/storage/v2/
OTHER_RELEVANT_DATA_COUNTRIES=US
LEGAL_TAG=opendes-public-usa-dataset-1
DEFAULT_DATA_PARTITION_ID_TENANT1=opendes
DEFAULT_DATA_PARTITION_ID_TENANT2=common
ENTITLEMENTS_DOMAIN=testing.com
ELASTIC_HOST=localhost
ELASTIC_PORT=9200
\ No newline at end of file
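The header of the template above says to fill in the placeholders and rename the file to `.env`; a minimal sketch for loading it into the current shell before a local run (assuming a POSIX shell and that the renamed file sits in the working directory):

# illustrative only: export every KEY=value pair from the renamed .env file
set -a        # auto-export all variables assigned from here on
. ./.env      # source the file the template above was renamed to
set +a        # stop auto-exporting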
import boto3
import json
import os
import argparse
# Create the build-info.json
parser = argparse.ArgumentParser(description="Create the build-info.json artifact for a CodeBuild run")
parser.add_argument("--branch", type=str, help="branch ref (env: CODEBUILD_SOURCE_VERSION)")
parser.add_argument("--commit", type=str, help="commit id (env: CODEBUILD_RESOLVED_SOURCE_VERSION)")
parser.add_argument("--buildid", type=str, help="build id (env: CODEBUILD_BUILD_ID)")
parser.add_argument("--buildnumber", type=str, help="build number (env: CODEBUILD_BUILD_NUMBER)")
parser.add_argument("--reponame", type=str, help="repository name (taken from the directory name)")
parser.add_argument("--outdir", type=str, help="output directory (env: OUTPUT_DIR)")
parser.add_argument("--artifact", type=str, action="append", help="full ECR image and tag, plus any other artifacts (repeatable)")
args = parser.parse_args()
branch = args.branch
commitId = args.commit
buildId = args.buildid
buildNumber = args.buildnumber
repoName = args.reponame
outputDir = args.outdir
artifacts = args.artifact
buildInfoFilePath = os.path.join(".", outputDir, "build-info.json")
print(buildInfoFilePath)
commitArgs = {
"repositoryName": repoName,
"commitId": commitId
}
commitDetail = {
"commit": ""
}
# get the commit detail from CodeCommit; fall back to the empty record above if the call fails
try:
    codecommit = boto3.client("codecommit")
    commitDetail = codecommit.get_commit(**commitArgs)
except Exception as e:
    print("Getting commit information from CodeCommit failed: {}".format(e))
buildInfo = {
"branch": branch,
"build-id": buildId,
"build-number": buildNumber,
"repo": repoName,
"artifacts": artifacts,
"commit": commitDetail["commit"]
}
print(json.dumps(buildInfo, sort_keys=True, indent=4))
# write the build-info.json file to the output (dist) directory
with open(buildInfoFilePath, "w") as f:
    f.write(json.dumps(buildInfo, sort_keys=True, indent=4))
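A hedged example of invoking the script locally (mirroring the call in buildspec.yaml further down; every value below is a placeholder, not real build output). The resulting build-info.json carries the branch, build-id, build-number, repo, artifacts, and commit keys assembled above:

# illustrative invocation with placeholder values
python build-info.py \
  --branch refs/heads/dev \
  --commit 0123456789abcdef0123456789abcdef01234567 \
  --buildid example-project:00000000-0000-0000-0000-000000000000 \
  --buildnumber 1 \
  --reponame os-search \
  --outdir dist \
  --artifact 123456789012.dkr.ecr.us-east-1.amazonaws.com/os-search:example-tag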
# Copyright © Amazon Web Services
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# https://docs.aws.amazon.com/codebuild/latest/userguide/build-spec-ref.html
# https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-env-vars.html
version: 0.2
phases:
install:
runtime-versions:
java: openjdk8
commands:
- if [ $(echo $CODEBUILD_SOURCE_VERSION | grep -c ^refs/heads.*) -eq 1 ]; then echo "Branch name found"; else echo "This build only supports branch builds" && exit 1; fi
- apt-get update -y
- apt-get install -y maven
- java -version
- mvn -version
- echo $JAVA_HOME # show which JAVA_HOME the build container is using
- mkdir -p /root/.m2
- aws s3 sync s3://$M2_REPO_S3_BUCKET /root/.m2 # copy previous state of the shared libraries' .m2 folder from S3 to local
- cp ./provider/search-aws/maven/settings.xml /root/.m2/settings.xml # copy the AWS-specific settings.xml to the CodeBuild instance's .m2 folder
- cat /root/.m2/settings.xml
pre_build:
commands:
- echo "Logging in to Amazon ECR..."
- $(aws ecr get-login --no-include-email --region $AWS_REGION) # authenticate with ECR via the AWS CLI
build:
commands:
- export REPO_NAME=${PWD##*/}
- export OUTPUT_DIR="dist"
- export BRANCH_NAME=`echo ${CODEBUILD_SOURCE_VERSION} | awk '{gsub("refs/heads/","");gsub("\\.","-");gsub("[[:space:]]","-")}1' | sed 's/\//-/g' | awk '{print tolower($0)}'`
- export ECR_TAG=`echo build.${BRANCH_NAME}.${CODEBUILD_BUILD_NUMBER}.${CODEBUILD_RESOLVED_SOURCE_VERSION} | cut -c 1-120`
- export ECR_IMAGE=${ECR_REGISTRY}:${ECR_TAG}
- export ECR_IMAGE_BRANCH_LATEST=${ECR_REGISTRY}:${BRANCH_NAME}
- export INTEGRATION_TEST_OUTPUT=${OUTPUT_DIR}/testing/integration
- export INTEGRATION_TEST_OUTPUT_BIN=${INTEGRATION_TEST_OUTPUT}/bin
- mkdir -p ${OUTPUT_DIR}/bin
- mkdir -p ${OUTPUT_DIR}/testing && mkdir -p ${INTEGRATION_TEST_OUTPUT} && mkdir -p ${INTEGRATION_TEST_OUTPUT}/bin
- echo "Placeholder" >> ${OUTPUT_DIR}/build-info.json # touched so that the output directory has some content incase the build fails so that testing reports are uploaded
- printenv
- echo "Building primary service assemblies..."
- mvn -B test install -pl search-core,provider/search-aws -Ddeployment.environment=prod
# Suspended until further notice
# - echo "Copying assemblies to dist..."
# - cp ./provider/search-aws/target/*spring-boot.jar ${OUTPUT_DIR}/bin # copy aws jars
# - cp ./search-core/target/*.jar ${OUTPUT_DIR}/bin # copy core jar
- echo "Building integration testing assemblies and gathering artifacts..."
- ./testing/integration-tests/search-test-aws/build-aws/prepare-dist.sh
- echo "Building docker image..."
- docker build -f provider/search-aws/build-aws/Dockerfile -t ${ECR_IMAGE} .
- docker tag ${ECR_IMAGE} ${ECR_IMAGE_BRANCH_LATEST}
- echo "Pushing docker image..."
- docker push ${ECR_IMAGE}
- docker push ${ECR_IMAGE_BRANCH_LATEST}
- echo "Generate build-info.json"
- |
python provider/search-aws/build-aws/build-info.py --branch ${CODEBUILD_SOURCE_VERSION} --commit ${CODEBUILD_RESOLVED_SOURCE_VERSION} \
--buildid ${CODEBUILD_BUILD_ID} --buildnumber ${CODEBUILD_BUILD_NUMBER} --reponame ${REPO_NAME} --outdir ${OUTPUT_DIR} \
--artifact ${ECR_IMAGE}
reports:
SurefireReports: # CodeBuild will create a report group called "SurefireReports".
files: #Store all of the files
- "search-core/target/surefire-reports/**/*"
- "provider/search-aws/target/surefire-reports/**/*"
base-directory: "." # Location of the reports
artifacts:
files:
- "**/*"
base-directory: "dist"
name: ${REPO_NAME}_${BRANCH_NAME}_$(date +%F)_${CODEBUILD_BUILD_NUMBER}.zip
cache:
paths:
- "/root/.m2/**/*"
\ No newline at end of file
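The branch/tag derivation in the build phase above is easiest to follow with a concrete value; a hedged walk-through with a made-up branch name and build number:

# CODEBUILD_SOURCE_VERSION=refs/heads/feature/My.Branch    (example input)
# awk/sed pipeline: strip "refs/heads/", turn ".", "/" and whitespace into "-", lowercase
#   BRANCH_NAME=feature-my-branch
#   ECR_TAG=build.feature-my-branch.17.<full-commit-sha>    (then truncated to 120 characters)
#   ECR_IMAGE=${ECR_REGISTRY}:build.feature-my-branch.17.<full-commit-sha>
#   ECR_IMAGE_BRANCH_LATEST=${ECR_REGISTRY}:feature-my-branch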
@@ -52,7 +52,7 @@
<dependency>
<groupId>org.opengroup.osdu.core.aws</groupId>
<artifactId>os-core-lib-aws</artifactId>
<version>0.2.0</version>
<version>0.3.1</version>
</dependency>
<dependency>
<groupId>org.opengroup.osdu</groupId>
......
# This script prepares the dist directory for the integration tests.
# Must be run from the root of the repository
set -e
OUTPUT_DIR="${OUTPUT_DIR:-dist}"
INTEGRATION_TEST_OUTPUT_DIR=${INTEGRATION_TEST_OUTPUT_DIR:-$OUTPUT_DIR}/testing/integration
INTEGRATION_TEST_OUTPUT_BIN_DIR=${INTEGRATION_TEST_OUTPUT_DIR:-$INTEGRATION_TEST_OUTPUT_DIR}/bin
INTEGRATION_TEST_SOURCE_DIR=testing/integration-tests
INTEGRATION_TEST_SOURCE_DIR_AWS="$INTEGRATION_TEST_SOURCE_DIR"/search-test-aws
INTEGRATION_TEST_SOURCE_DIR_CORE="$INTEGRATION_TEST_SOURCE_DIR"/search-test-core
echo "--Source directories variables--"
echo $INTEGRATION_TEST_SOURCE_DIR_AWS
echo $INTEGRATION_TEST_SOURCE_DIR_CORE
echo "--Output directories variables--"
echo $OUTPUT_DIR
echo $INTEGRATION_TEST_OUTPUT_DIR
echo $INTEGRATION_TEST_OUTPUT_BIN_DIR
rm -rf "$INTEGRATION_TEST_OUTPUT_DIR"
mkdir -p "$INTEGRATION_TEST_OUTPUT_DIR" && mkdir -p "$INTEGRATION_TEST_OUTPUT_BIN_DIR"
echo "Building integration testing assemblies and gathering artifacts..."
mvn install -f "$INTEGRATION_TEST_SOURCE_DIR_CORE"/pom.xml
mvn install dependency:copy-dependencies -DskipTests -f "$INTEGRATION_TEST_SOURCE_DIR_AWS"/pom.xml -DincludeGroupIds=org.opengroup.osdu -Dmdep.copyPom
cp "$INTEGRATION_TEST_SOURCE_DIR_AWS"/target/dependency/* "${INTEGRATION_TEST_OUTPUT_BIN_DIR}"
(cd "${INTEGRATION_TEST_OUTPUT_BIN_DIR}" && ls *.jar | sed -e 's/\.jar$//' | xargs -I {} echo mvn install:install-file -Dfile={}.jar -DpomFile={}.pom >> install-deps.sh)
chmod +x "${INTEGRATION_TEST_OUTPUT_BIN_DIR}"/install-deps.sh
mvn clean -f "$INTEGRATION_TEST_SOURCE_DIR_AWS"/pom.xml
cp -R "$INTEGRATION_TEST_SOURCE_DIR_AWS"/* "${INTEGRATION_TEST_OUTPUT_DIR}"/
# This script executes the test and copies reports to the provided output directory
# To call this script from the service working directory
# ./dist/testing/integration/build-aws/run-tests.sh "./reports/"
SCRIPT_SOURCE_DIR=$(dirname "$0")
echo "Script source location"
echo "$SCRIPT_SOURCE_DIR"
(cd "$SCRIPT_SOURCE_DIR"/../bin && ./install-deps.sh)
#### ADD REQUIRED ENVIRONMENT VARIABLES HERE ###############################################
# The following variables are automatically populated from the environment during integration testing
# see os-deploy-aws/build-aws/integration-test-env-variables.py for an updated list
# AWS_COGNITO_CLIENT_ID
# ELASTIC_HOST
# ELASTIC_PORT
# FILE_URL
# LEGAL_URL
# SEARCH_URL
# STORAGE_URL
export SEARCH_HOST=$SEARCH_URL
export STORAGE_HOST=$STORAGE_URL
export OTHER_RELEVANT_DATA_COUNTRIES=US
export LEGAL_TAG=opendes-public-usa-dataset-1
export DEFAULT_DATA_PARTITION_ID_TENANT1=opendes
export DEFAULT_DATA_PARTITION_ID_TENANT2=common
export ENTITLEMENTS_DOMAIN=testing.com
export AWS_COGNITO_AUTH_FLOW=USER_PASSWORD_AUTH
export AWS_COGNITO_AUTH_PARAMS_PASSWORD=$ADMIN_PASSWORD
export AWS_COGNITO_AUTH_PARAMS_USER=$ADMIN_USER
export AWS_COGNITO_AUTH_PARAMS_USER_NO_ACCESS=$USER_NO_ACCESS
#### RUN INTEGRATION TEST #########################################################################
mvn test -f "$SCRIPT_SOURCE_DIR"/../pom.xml -Dcucumber.options="--plugin junit:target/junit-report.xml"
TEST_EXIT_CODE=$?
#### COPY TEST REPORTS #########################################################################
if [ -n "$1" ]
then
cp "$SCRIPT_SOURCE_DIR"/../target/junit-report.xml "$1"/os-search-junit-report.xml
fi
exit $TEST_EXIT_CODE
@@ -39,7 +39,7 @@
<dependency>
<groupId>org.opengroup.osdu.core.aws</groupId>
<artifactId>os-core-lib-aws</artifactId>
<version>0.2.0</version>
<version>0.3.1</version>
</dependency>
<!-- Third party Apache 2.0 license packages -->
<dependency>
......
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<include resource="org/springframework/boot/logging/logback/base.xml" />
<logger name="org.springframework" level="WARN"/>
<include resource="org/springframework/boot/logging/logback/base.xml" />
<root level="INFO" />
<logger name="org.springframework" level="INFO"/>
</configuration>
\ No newline at end of file