Skip to content
Snippets Groups Projects
Commit 76ae5917 authored by Matt Wise's avatar Matt Wise
Browse files

rebase GL master onto dev

parent b347bb54
No related branches found
No related tags found
2 merge requests!19Aws integration,!18Aws integration
...@@ -35,44 +35,37 @@ ...@@ -35,44 +35,37 @@
##### Authentication / Secrets ##### ##### Authentication / Secrets #####
# Replace placeholder text with your own AWS secret access keys # Replace placeholder text with your own AWS secret access keys
# and rename to `.env` - do NOT check-in .env with your credentials! Leave it in .gitignore # and rename to `.env` - do NOT check-in .env with your credentials! Leave it in .gitignore
AWS_ACCESS_KEY_ID= AWS_ACCESS_KEY_ID=<YOUR_ACCESS_KEY_ID>
AWS_SECRET_KEY= AWS_SECRET_KEY=<YOUR_SECRET_KEY>
AWS_ACCOUNT_ID=
#### Urls/Ports ############# #### Urls/Ports #############
STORAGE_HOST= STORAGE_HOST=<YOUR_API_URL>
APPLICATION_PORT=8080
APPLICATION_PORT= CACHE_CLUSTER_ENDPOINT=127.0.0.1
CACHE_CLUSTER_INDEX_ENDPOINT= CACHE_CLUSTER_PORT=6379
CACHE_CLUSTER_INDEX_PORT= ELASTIC_HOST=localhost
CACHE_CLUSTER_CURSOR_ENDPOINT= ELASTIC_PORT=9200
CACHE_CLUSTER_CURSOR_PORT= SSM_ENABLED=True
ELASTIC_HOST=
ELASTIC_PORT=
##### Other environment variables ########################################################## ##### Other environment variables ##########################################################
JAVA_HEAP_MEMORY= JAVA_HEAP_MEMORY=4096
SNS_TOPIC_NAME= ENVIRONMENT=dev
SNS_STORAGE_TOPIC_NAME= AWS_REGION=us-east-1
ENVIRONMENT= LOG_LEVEL=DEBUG
AWS_REGION=
LOG_LEVEL=
##### Integration test-specific - these are only used for integration tests, not the app ### ##### Integration test-specific - these are only used for integration tests, not the app ###
OTHER_RELEVANT_DATA_COUNTRIES= AWS_COGNITO_CLIENT_ID=<YOUR_COGNITO_CLIENT_ID>
LEGAL_TAG= AWS_COGNITO_AUTH_FLOW=USER_PASSWORD_AUTH
DEFAULT_DATA_PARTITION_ID_TENANT1= AWS_COGNITO_AUTH_PARAMS_USER=<YOUR_AUTHORIZED_USER>
DEFAULT_DATA_PARTITION_ID_TENANT2= AWS_COGNITO_AUTH_PARAMS_USER_NO_ACCESS=test-user-without-access@testing.com
ENTITLEMENTS_DOMAIN= AWS_COGNITO_AUTH_PARAMS_PASSWORD=<YOUR_AUTHORIZED_USER_PASSWORD>
AWS_COGNITO_CLIENT_ID= OTHER_RELEVANT_DATA_COUNTRIES=US
AWS_COGNITO_AUTH_FLOW= LEGAL_TAG=opendes-public-usa-dataset-1
AWS_COGNITO_AUTH_PARAMS_PASSWORD= DEFAULT_DATA_PARTITION_ID_TENANT1=opendes
AWS_COGNITO_AUTH_PARAMS_USER= DEFAULT_DATA_PARTITION_ID_TENANT2=common
AWS_COGNITO_AUTH_PARAMS_USER_NO_ACCESS= ENTITLEMENTS_DOMAIN=testing.com
ELASTIC_HOST= ELASTIC_HOST=localhost
DEFAULT_ELASTIC_USER_NAME= ELASTIC_PORT=9200
DEFAULT_ELASTIC_PASSWORD= SEARCH_HOST=<YOUR_API_URL_HERE>/api/search/v2/
ELASTIC_PORT= STORAGE_HOST=<YOUR_API_URL_HERE>/api/storage/v2/
SEARCH_HOST= INDEXER_HOST=<YOUR_API_URL_HERE>/indexer/v2/
STORAGE_HOST= \ No newline at end of file
INDEXER_HOST=
\ No newline at end of file
"""Generate build-info.json describing the current CI (AWS CodeBuild) build.

All inputs arrive as CLI flags, typically populated from CodeBuild
environment variables (CODEBUILD_SOURCE_VERSION, CODEBUILD_BUILD_ID, ...).
"""
import argparse
import json
import os


def parse_args():
    """Parse the CLI flags describing the build context."""
    parser = argparse.ArgumentParser(
        description="Create build-info.json for the current CI build.")
    # env - CODEBUILD_SOURCE_VERSION
    parser.add_argument("--branch", type=str, help="branch ref being built")
    # env - CODEBUILD_RESOLVED_SOURCE_VERSION
    parser.add_argument("--commit", type=str, help="resolved commit SHA")
    # env - CODEBUILD_BUILD_ID
    parser.add_argument("--buildid", type=str, help="CodeBuild build id")
    # env - CODEBUILD_BUILD_NUMBER
    parser.add_argument("--buildnumber", type=str, help="CodeBuild build number")
    # Get from directory name
    parser.add_argument("--reponame", type=str, help="repository name")
    # env OUTPUT_DIR
    parser.add_argument("--outdir", type=str, help="output directory for build-info.json")
    # full ecr image and tag, and any other artifacts
    parser.add_argument("--artifact", type=str, action="append",
                        help="artifact identifier; may be repeated")
    return parser.parse_args()


def get_commit_detail(repo_name, commit_id):
    """Best-effort lookup of commit metadata from AWS CodeCommit.

    Returns the CodeCommit get_commit response, or ``{"commit": ""}`` when
    the lookup fails (e.g. no credentials, repo not in CodeCommit) so the
    build-info file can still be written.
    """
    try:
        # Imported lazily so the rest of the script works without boto3
        # installed (the lookup then simply falls back to the default).
        import boto3
        codecommit = boto3.client("codecommit")
        return codecommit.get_commit(repositoryName=repo_name, commitId=commit_id)
    except Exception:
        print("Getting commit information from codecommit failed")
        return {"commit": ""}


def build_info(branch, build_id, build_number, repo_name, artifacts, commit_detail):
    """Assemble the dictionary serialized into build-info.json."""
    return {
        "branch": branch,
        "build-id": build_id,
        "build-number": build_number,
        "repo": repo_name,
        "artifacts": artifacts,
        "commit": commit_detail.get("commit", ""),
    }


def main():
    """Entry point: gather build metadata and write <outdir>/build-info.json."""
    args = parse_args()
    build_info_file_path = os.path.join(".", args.outdir, "build-info.json")
    print(build_info_file_path)
    commit_detail = get_commit_detail(args.reponame, args.commit)
    info = build_info(args.branch, args.buildid, args.buildnumber,
                      args.reponame, args.artifact, commit_detail)
    payload = json.dumps(info, sort_keys=True, indent=4)
    print(payload)
    # with-statement guarantees the file is closed even if the write fails
    with open(build_info_file_path, "w") as f:
        f.write(payload)


if __name__ == "__main__":
    main()
# Copyright © Amazon Web Services
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# https://docs.aws.amazon.com/codebuild/latest/userguide/build-spec-ref.html
# https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-env-vars.html
# AWS CodeBuild buildspec: compiles the indexer service with Maven, builds and
# pushes a Docker image to ECR, prepares integration-test assemblies, and
# writes build metadata into ./dist.
version: 0.2
phases:
  install:
    runtime-versions:
      java: openjdk8
    commands:
      # Only branch builds (refs/heads/*) are supported; fail fast otherwise.
      - if [ $(echo $CODEBUILD_SOURCE_VERSION | grep -c ^refs/heads.*) -eq 1 ]; then echo "Branch name found"; else echo "This build only supports branch builds" && exit 1; fi
      - apt-get update -y
      - apt-get install -y maven
      # Log toolchain versions for build diagnostics.
      - java -version
      - mvn -version
      - echo $JAVA_HOME # print JAVA_HOME for the build log
      - mkdir -p /root/.m2
      - aws s3 sync s3://$M2_REPO_S3_BUCKET /root/.m2 # copy previous state of the shared libraries' .m2 folder from S3 to local
      - cp ./provider/indexer-aws/maven/settings.xml /root/.m2/settings.xml # copy the AWS-specific settings.xml to the CodeBuild instance's .m2 folder
      - cat /root/.m2/settings.xml
  pre_build:
    commands:
      - echo "Logging in to Amazon ECR..."
      - $(aws ecr get-login --no-include-email --region $AWS_REGION) # authenticate with ECR via the AWS CLI
  build:
    commands:
      # Derive naming/tagging variables from the CodeBuild environment.
      - export REPO_NAME=${PWD##*/}
      - export OUTPUT_DIR="dist"
      # Sanitize the branch name: strip refs/heads/, replace dots/spaces/slashes, lowercase.
      - export BRANCH_NAME=`echo ${CODEBUILD_SOURCE_VERSION} | awk '{gsub("refs/heads/","");gsub("\\.","-");gsub("[[:space:]]","-")}1' | sed 's/\//-/g' | awk '{print tolower($0)}'`
      # ECR tags are limited in length; truncate to 120 characters.
      - export ECR_TAG=`echo build.${BRANCH_NAME}.${CODEBUILD_BUILD_NUMBER}.${CODEBUILD_RESOLVED_SOURCE_VERSION} | cut -c 1-120`
      - export ECR_IMAGE=${ECR_REGISTRY}:${ECR_TAG}
      - export ECR_IMAGE_BRANCH_LATEST=${ECR_REGISTRY}:${BRANCH_NAME}
      - export INTEGRATION_TEST_OUTPUT=${OUTPUT_DIR}/testing/integration
      - export INTEGRATION_TEST_OUTPUT_BIN=${INTEGRATION_TEST_OUTPUT}/bin
      - mkdir -p ${OUTPUT_DIR}/bin
      - mkdir -p ${OUTPUT_DIR}/testing && mkdir -p ${INTEGRATION_TEST_OUTPUT} && mkdir -p ${INTEGRATION_TEST_OUTPUT}/bin
      - echo "Placeholder" >> ${OUTPUT_DIR}/build-info.json # touched so that the output directory has some content incase the build fails so that testing reports are uploaded
      - printenv
      - echo "Building primary service assemblies..."
      - mvn -B test install -P indexer-core,indexer-aws -Ddeployment.environment=prod
      # - echo "Copying assemblies to dist..."
      # - cp ./provider/indexer-aws/target/*spring-boot.jar ${OUTPUT_DIR}/bin # copy aws jars
      # - cp ./indexer-core/target/*.jar ${OUTPUT_DIR}/bin # copy core jar
      - echo "Building integration testing assemblies and gathering artifacts..."
      - ./testing/indexer-test-aws/build-aws/prepare-dist.sh
      - echo "Building docker image..."
      - docker build -f provider/indexer-aws/build-aws/Dockerfile -t ${ECR_IMAGE} .
      - docker tag ${ECR_IMAGE} ${ECR_IMAGE_BRANCH_LATEST}
      - echo "Pushing docker image..."
      - docker push ${ECR_IMAGE}
      - docker push ${ECR_IMAGE_BRANCH_LATEST}
      - echo "Generate build-info.json"
      - |
        python provider/indexer-aws/build-aws/build-info.py --branch ${CODEBUILD_SOURCE_VERSION} --commit ${CODEBUILD_RESOLVED_SOURCE_VERSION} \
          --buildid ${CODEBUILD_BUILD_ID} --buildnumber ${CODEBUILD_BUILD_NUMBER} --reponame ${REPO_NAME} --outdir ${OUTPUT_DIR} \
          --artifact ${ECR_IMAGE}
reports:
  SurefireReports: # CodeBuild will create a report group called "SurefireReports".
    files: # Store all of the surefire report files
      - "indexer-core/target/surefire-reports/**/*"
      - "provider/indexer-aws/target/surefire-reports/**/*"
    base-directory: "." # Location of the reports
artifacts:
  files:
    - "**/*"
  base-directory: "dist"
  name: ${REPO_NAME}_${BRANCH_NAME}_$(date +%F)_${CODEBUILD_BUILD_NUMBER}.zip
cache:
  paths:
    # Cache the local Maven repository between builds.
    - "/root/.m2/**/*"
\ No newline at end of file
...@@ -49,7 +49,7 @@ ...@@ -49,7 +49,7 @@
<dependency> <dependency>
<groupId>org.opengroup.osdu.core.aws</groupId> <groupId>org.opengroup.osdu.core.aws</groupId>
<artifactId>os-core-lib-aws</artifactId> <artifactId>os-core-lib-aws</artifactId>
<version>0.2.0</version> <version>0.3.1</version>
</dependency> </dependency>
<!-- AWS managed packages --> <!-- AWS managed packages -->
......
...@@ -14,17 +14,14 @@ ...@@ -14,17 +14,14 @@
package org.opengroup.osdu.indexer.aws.util; package org.opengroup.osdu.indexer.aws.util;
import com.amazonaws.services.sns.AmazonSNS;
import com.amazonaws.services.sns.model.MessageAttributeValue;
import com.amazonaws.services.sns.model.PublishRequest;
import com.amazonaws.services.sqs.AmazonSQS; import com.amazonaws.services.sqs.AmazonSQS;
import org.opengroup.osdu.core.aws.sqs.AmazonSQSConfig;
import com.amazonaws.services.sqs.model.MessageAttributeValue;
import com.amazonaws.services.sqs.model.SendMessageRequest; import com.amazonaws.services.sqs.model.SendMessageRequest;
import com.google.gson.Gson; import com.google.gson.Gson;
import org.opengroup.osdu.core.aws.sns.AmazonSNSConfig;
import org.opengroup.osdu.core.aws.ssm.ParameterStorePropertySource; import org.opengroup.osdu.core.aws.ssm.ParameterStorePropertySource;
import org.opengroup.osdu.core.aws.ssm.SSMConfig; import org.opengroup.osdu.core.aws.ssm.SSMConfig;
import org.opengroup.osdu.core.common.model.http.DpsHeaders; import org.opengroup.osdu.core.common.model.http.DpsHeaders;
import org.opengroup.osdu.core.aws.sqs.AmazonSQSConfig;
import org.opengroup.osdu.core.common.model.search.RecordChangedMessages; import org.opengroup.osdu.core.common.model.search.RecordChangedMessages;
import org.opengroup.osdu.indexer.util.IndexerQueueTaskBuilder; import org.opengroup.osdu.indexer.util.IndexerQueueTaskBuilder;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
...@@ -39,31 +36,34 @@ import java.util.Map; ...@@ -39,31 +36,34 @@ import java.util.Map;
@Component @Component
public class IndexerQueueTaskBuilderAws extends IndexerQueueTaskBuilder { public class IndexerQueueTaskBuilderAws extends IndexerQueueTaskBuilder {
private AmazonSNS snsClient; private static final int INITIAL_RETRY_DELAY_SECONDS = 5;
private static final int MAX_RETRY_DELAY_SECONDS = 900; // 15 minutes (900 seconds) is the hard limit SQS sets of message delays
private AmazonSQS sqsClient;
private ParameterStorePropertySource ssm; private ParameterStorePropertySource ssm;
private String amazonSNSTopic; private String amazonSQSQueueUrl;
private String retryString = "retry"; private final String retryString = "retry";
private Gson gson; private Gson gson;
@Value("${aws.region}") @Value("${aws.region}")
private String region; private String region;
@Value("${aws.storage.sns.topic.arn}") @Value("${aws.storage.sqs.queue.url}")
String parameter; String sqsStorageQueueParameter;
@Inject @Inject
public void init() { public void init() {
AmazonSNSConfig config = new AmazonSNSConfig(region); AmazonSQSConfig config = new AmazonSQSConfig(region);
snsClient = config.AmazonSNS(); sqsClient = config.AmazonSQS();
gson =new Gson(); gson =new Gson();
SSMConfig ssmConfig = new SSMConfig(); SSMConfig ssmConfig = new SSMConfig();
ssm = ssmConfig.amazonSSM(); ssm = ssmConfig.amazonSSM();
amazonSNSTopic = ssm.getProperty(parameter).toString(); amazonSQSQueueUrl = ssm.getProperty(sqsStorageQueueParameter).toString();
} }
@Override @Override
...@@ -97,20 +97,45 @@ public class IndexerQueueTaskBuilderAws extends IndexerQueueTaskBuilder { ...@@ -97,20 +97,45 @@ public class IndexerQueueTaskBuilderAws extends IndexerQueueTaskBuilder {
.withStringValue(headers.getAuthorization())); .withStringValue(headers.getAuthorization()));
RecordChangedMessages message = gson.fromJson(payload, RecordChangedMessages.class); RecordChangedMessages message = gson.fromJson(payload, RecordChangedMessages.class);
int retryCount; int retryCount;
if(message.getAttributes().containsKey(retryString)){ int retryDelay;
if (message.getAttributes().containsKey(retryString)) {
retryCount = Integer.parseInt(message.getAttributes().get(retryString)); retryCount = Integer.parseInt(message.getAttributes().get(retryString));
retryCount++; retryCount++;
retryDelay = Math.min(getWaitTimeExp(retryCount), MAX_RETRY_DELAY_SECONDS);
} else { } else {
// This will be the first retry; initialize the retry counter and set the delay to the initial constant value
retryCount = 1; retryCount = 1;
retryDelay = INITIAL_RETRY_DELAY_SECONDS;
} }
System.out.println("Re-queuing for retry attempt #: " + retryCount);
System.out.println("Delay (in seconds) before next retry: " + retryDelay);
// Append the retry count to the message attributes
messageAttributes.put(retryString, new MessageAttributeValue() messageAttributes.put(retryString, new MessageAttributeValue()
.withDataType("String") .withDataType("String")
.withStringValue(String.valueOf(retryCount))); .withStringValue(String.valueOf(retryCount))
);
PublishRequest publishRequest = new PublishRequest(amazonSNSTopic, message.getData())
// Send a message with an attribute and a delay
final SendMessageRequest sendMessageRequest = new SendMessageRequest()
.withQueueUrl(amazonSQSQueueUrl)
.withMessageBody(message.getData())
.withDelaySeconds(new Integer(retryDelay))
.withMessageAttributes(messageAttributes); .withMessageAttributes(messageAttributes);
sqsClient.sendMessage(sendMessageRequest);
}
/*
* Returns the next wait interval based on the current number of retries,
* in seconds, using an exponential backoff algorithm.
*/
public static int getWaitTimeExp(int retryCount) {
if (0 == retryCount) {
return 0;
}
snsClient.publish(publishRequest); return ((int) Math.pow(2, retryCount) * 4);
} }
} }
# This script prepares the dist directory for the integration tests.
# Must be run from the root of the repository
# Fail fast: abort on the first command that returns non-zero.
set -e
# Output locations; OUTPUT_DIR is overridable from the environment (defaults to ./dist).
OUTPUT_DIR="${OUTPUT_DIR:-dist}"
INTEGRATION_TEST_OUTPUT_DIR=${INTEGRATION_TEST_OUTPUT_DIR:-$OUTPUT_DIR}/testing/integration
# NOTE(review): ${INTEGRATION_TEST_OUTPUT_DIR:-$INTEGRATION_TEST_OUTPUT_DIR} is a
# no-op default (falls back to itself) — possibly a different fallback was intended; confirm.
INTEGRATION_TEST_OUTPUT_BIN_DIR=${INTEGRATION_TEST_OUTPUT_DIR:-$INTEGRATION_TEST_OUTPUT_DIR}/bin
# Source locations of the core and AWS-specific integration test modules.
INTEGRATION_TEST_SOURCE_DIR=testing
INTEGRATION_TEST_SOURCE_DIR_AWS="$INTEGRATION_TEST_SOURCE_DIR"/indexer-test-aws
INTEGRATION_TEST_SOURCE_DIR_CORE="$INTEGRATION_TEST_SOURCE_DIR"/indexer-test-core
# Echo the resolved paths for build-log diagnostics.
echo "--Source directories variables--"
echo $INTEGRATION_TEST_SOURCE_DIR_AWS
echo $INTEGRATION_TEST_SOURCE_DIR_CORE
echo "--Output directories variables--"
echo $OUTPUT_DIR
echo $INTEGRATION_TEST_OUTPUT_DIR
echo $INTEGRATION_TEST_OUTPUT_BIN_DIR
# Start from a clean output tree.
rm -rf "$INTEGRATION_TEST_OUTPUT_DIR"
mkdir -p "$INTEGRATION_TEST_OUTPUT_DIR" && mkdir -p "$INTEGRATION_TEST_OUTPUT_BIN_DIR"
echo "Building integration testing assemblies and gathering artifacts..."
# Install the core test module, then build the AWS test module and copy its
# org.opengroup.osdu dependencies (jars + poms) into its target/dependency dir.
mvn install -f "$INTEGRATION_TEST_SOURCE_DIR_CORE"/pom.xml
mvn install dependency:copy-dependencies -DskipTests -f "$INTEGRATION_TEST_SOURCE_DIR_AWS"/pom.xml -DincludeGroupIds=org.opengroup.osdu -Dmdep.copyPom
cp "$INTEGRATION_TEST_SOURCE_DIR_AWS"/target/dependency/* "${INTEGRATION_TEST_OUTPUT_BIN_DIR}"
# Generate install-deps.sh: one mvn install:install-file line per copied jar/pom
# pair, so the dependencies can be re-installed on the test machine later.
(cd "${INTEGRATION_TEST_OUTPUT_BIN_DIR}" && ls *.jar | sed -e 's/\.jar$//' | xargs -I {} echo mvn install:install-file -Dfile={}.jar -DpomFile={}.pom >> install-deps.sh)
chmod +x "${INTEGRATION_TEST_OUTPUT_BIN_DIR}"/install-deps.sh
# Clean the AWS test module's build output, then ship its sources to dist.
mvn clean -f "$INTEGRATION_TEST_SOURCE_DIR_AWS"/pom.xml
cp -R "$INTEGRATION_TEST_SOURCE_DIR_AWS"/* "${INTEGRATION_TEST_OUTPUT_DIR}"/
# This script executes the test and copies reports to the provided output directory
# To call this script from the service working directory
# ./dist/testing/integration/build-aws/run-tests.sh "./reports/"
# Resolve the directory this script lives in so relative paths work
# regardless of the caller's working directory.
SCRIPT_SOURCE_DIR=$(dirname "$0")
echo "Script source location"
echo "$SCRIPT_SOURCE_DIR"
# Re-install the pre-built test dependency jars generated by prepare-dist.sh.
(cd "$SCRIPT_SOURCE_DIR"/../bin && ./install-deps.sh)
#### ADD REQUIRED ENVIRONMENT VARIABLES HERE ###############################################
# The following variables are automatically populated from the environment during integration testing
# see os-deploy-aws/build-aws/integration-test-env-variables.py for an updated list
# AWS_COGNITO_CLIENT_ID
# ELASTIC_HOST
# ELASTIC_PORT
# FILE_URL
# LEGAL_URL
# SEARCH_URL
# STORAGE_URL
# Test-run configuration derived from the deployment environment.
export AWS_COGNITO_AUTH_FLOW=USER_PASSWORD_AUTH
export AWS_COGNITO_AUTH_PARAMS_PASSWORD=$ADMIN_PASSWORD
export AWS_COGNITO_AUTH_PARAMS_USER=$ADMIN_USER
export DEFAULT_DATA_PARTITION_ID_TENANT1=opendes
export DEFAULT_DATA_PARTITION_ID_TENANT2=common
export ENTITLEMENTS_DOMAIN=testing.com
export OTHER_RELEVANT_DATA_COUNTRIES=US
export STORAGE_HOST=$STORAGE_URL
#### RUN INTEGRATION TEST #########################################################################
mvn test -f "$SCRIPT_SOURCE_DIR"/../pom.xml -Dcucumber.options="--plugin junit:target/junit-report.xml"
# NOTE(review): there is no `set -e` here, so the script continues past a test
# failure; $? must be captured immediately after mvn (as done below) — do not
# insert commands between the mvn call and this assignment.
TEST_EXIT_CODE=$?
#### COPY TEST REPORTS #########################################################################
# If a report output directory was supplied as $1, copy the junit report there.
if [ -n "$1" ]
then
  mkdir -p "$1"
  cp "$SCRIPT_SOURCE_DIR"/../target/junit-report.xml "$1"/os-indexer-junit-report.xml
fi
# Propagate the test result so CI marks the build red on test failure.
exit $TEST_EXIT_CODE
...@@ -62,7 +62,7 @@ ...@@ -62,7 +62,7 @@
<dependency> <dependency>
<groupId>org.opengroup.osdu.core.aws</groupId> <groupId>org.opengroup.osdu.core.aws</groupId>
<artifactId>os-core-lib-aws</artifactId> <artifactId>os-core-lib-aws</artifactId>
<version>0.2.0</version> <version>0.3.1</version>
</dependency> </dependency>
<!-- Testing --> <!-- Testing -->
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment