Commit 41377e41 authored by Spencer Sutton, committed by Sutton

Ingestion workflow AWS integration tests impl

commit 9488665c 
Author: Spencer Sutton <spencer.sutton@parivedasolutions.com> 
Date: Mon Aug 31 2020 14:26:57 GMT-0500 (Central Daylight Time) 

    Missed a few more copyrights


commit f0546e61 
Author: Spencer Sutton <spencer.sutton@parivedasolutions.com> 
Date: Mon Aug 31 2020 14:15:17 GMT-0500 (Central Daylight Time) 

    Fixing licenses


commit c072a243 
Author: Spencer Sutton <spencer.sutton@parivedasolutions.com> 
Date: Mon Aug 31 2020 11:50:07 GMT-0500 (Central Daylight Time) 

    Updated pom to generate *spring-boot.jar so that dockerfile will work


commit c5f7f64c 
Author: Spencer Sutton <spencer.sutton@parivedasolutions.com> 
Date: Mon Aug 31 2020 10:26:47 GMT-0500 (Central Daylight Time) 

    Adding specific line to install root pom in prepare-dist.sh


commit 8adfe23c 
Author: Spencer Sutton <spencer.sutton@parivedasolutions.com> 
Date: Mon Aug 31 2020 09:30:56 GMT-0500 (Central Daylight Time) 

    Testing the removal of repositories from workflow-test pom.xml


commit cbb50d2c 
Author: Spencer Sutton <spencer.sutton@parivedasolutions.com> 
Date: Mon Aug 31 2020 09:13:26 GMT-0500 (Central Daylight Time) 

    Adding relative path to workflow test core's parent


commit eb105c97 
Author: Spencer Sutton <spencer.sutton@parivedasolutions.com> 
Date: Mon Aug 31 2020 09:03:30 GMT-0500 (Central Daylight Time) 

    Changing workflow test aws pom to point to parent pom with relative path


commit 95ecc083 
Author: Spencer Sutton <spencer.sutton@parivedasolutions.com> 
Date: Mon Aug 31 2020 08:48:11 GMT-0500 (Central Daylight Time) 

    Adding some java docs, extending the int test core code to check for forbidden instead of unauthorized


commit 5ebd3ce6 
Author: Spencer Sutton <spencer.sutton@parivedasolutions.com> 
Date: Fri Aug 28 2020 16:16:35 GMT-0500 (Central Daylight Time) 

    AWS Integration tests functional and passing


commit ba11339d 
Author: Kyle Longhurst <kyle.longhurst@parivedasolutions.com> 
Date: Wed Aug 26 2020 13:59:43 GMT-0500 (Central Daylight Time) 

    WIP Integration Tests
parent d54e4b7a
# Copyright © 2020 Amazon Web Services
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##### Sample os-workflow .env file #########################################################
#
# Basic use: duplicate this file, make sure the new copy is in the root of the AWS
# 'provider' folder, and name it `.env`. Note that on macOS, files starting with a period
# (`.`) are considered hidden system files and are not displayed by default in Finder or the
# file selector (which you will need to use when adding the environment file(s) to the run
# configuration(s)). While you can change a setting to show hidden files and folders by
# default, there is also a keyboard shortcut to quickly toggle between hide/show. With
# Finder as the active application ("Finder" appears next to the Apple logo in the Menu Bar),
# press: command + shift + . (period). You can store configurations for multiple environments
# by adding more duplicates following a naming scheme of your choosing, for example:
# `staging.env`, `uat.env`, or `local.env`.
#
# This requires installing a plugin for your IDE that allows you to use a .env
# file in your repository folder. The .env file does NOT get checked into source control;
# only the sample environment configuration (sample.env) should be committed.
#
# Download links for .env file plugins:
# IntelliJ - https://github.com/Ashald/EnvFile
##### Authentication / Secrets #####
# Replace placeholder text with your own AWS secret access keys
# and rename to `.env` - do NOT check in .env with your credentials! Leave it in .gitignore
AWS_ACCESS_KEY_ID=<YOUR_ACCESS_KEY_ID>
AWS_SECRET_KEY=<YOUR_SECRET_KEY>
#### Urls/Ports #############
APPLICATION_PORT=8080
CACHE_CLUSTER_ENDPOINT=127.0.0.1
CACHE_CLUSTER_PORT=6379
LEGALTAG_BASE_URL=<YOUR_API_URL>
##### Other environment variables ##########################################################
ENVIRONMENT=dev
AWS_REGION=us-east-1
LOG_LEVEL=DEBUG
SSM_ENABLED=True
AIRFLOW_BASEURL=<YOUR_URL>
OSDU_ENTITLEMENTS_URL={{entitlements_url}}/api/entitlements/v1/groups
##### Integration test-specific - these are only used for integration tests, not the app ###
AWS_COGNITO_CLIENT_ID=<YOUR_COGNITO_CLIENT_ID>
AWS_COGNITO_AUTH_FLOW=USER_PASSWORD_AUTH
AWS_COGNITO_AUTH_PARAMS_USER=<YOUR_AUTHORIZED_USER>
AWS_COGNITO_AUTH_PARAMS_USER_NO_ACCESS=<YOUR_USER_WITH_NO_ACCESS>
AWS_COGNITO_AUTH_PARAMS_PASSWORD=<YOUR_AUTHORIZED_USER_PASSWORD>
TENANT_NAME=<YOUR_TENANT>
DOMAIN=<YOUR_DOMAIN>
WORKFLOW_HOST=<YOUR_API_URL>/api/workflow/v1/
DYNAMO_DB_REGION=<YOUR_REGION>
DYNAMO_DB_ENDPOINT=<YOUR_ENDPOINT>
ENVIRONMENT=<YOUR_ENVIRONMENT>
......@@ -33,7 +33,6 @@
<description>AWS implementation of Workflow service APIs</description>
<properties>
<spring-boot.repackage.skip>true</spring-boot.repackage.skip>
<version.number>0.0.1-SNAPSHOT</version.number>
</properties>
......@@ -103,10 +102,17 @@
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<configuration>
<classifier>spring-boot</classifier>
<mainClass>org.opengroup.osdu.workflow.WorkflowApplication</mainClass>
</configuration>
<executions>
<execution>
<goals>
<goal>repackage</goal>
</goals>
<configuration>
<classifier>spring-boot</classifier>
<mainClass>org.opengroup.osdu.workflow.WorkflowApplication</mainClass>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
......
......@@ -47,4 +47,14 @@ JAVA_HEAP_MEMORY=4096
ENVIRONMENT=dev
AWS_REGION=us-east-1
ENTITLEMENTS_BASE_URL=http://localhost
OSDU_ENTITLEMENTS_URL=http://localhost/api/entitlements/v1
##### Integration test-specific - these are only used for integration tests, not the app ###
AWS_COGNITO_CLIENT_ID=<YOUR_COGNITO_CLIENT_ID>
AWS_COGNITO_AUTH_FLOW=USER_PASSWORD_AUTH
AWS_COGNITO_AUTH_PARAMS_USER=<YOUR_AUTHORIZED_USER>
AWS_COGNITO_AUTH_PARAMS_USER_NO_ACCESS=test-user-without-access@testing.com
AWS_COGNITO_AUTH_PARAMS_PASSWORD=<YOUR_AUTHORIZED_USER_PASSWORD>
DEFAULT_DATA_PARTITION_ID_TENANT1=<default-tenant>
WORKFLOW_HOST=http://localhost:8080
FINISHED_WORKFLOW_ID=finished-workflow-id
......@@ -26,7 +26,6 @@ import org.springframework.stereotype.Repository;
import javax.annotation.PostConstruct;
// TODO Will be moved to registry service
@Repository
@Slf4j
@RequiredArgsConstructor
......@@ -40,11 +39,21 @@ public class IngestionStrategyRepositoryImpl implements IIngestionStrategyReposi
String dynamoDbEndpoint;
private DynamoDBQueryHelper queryHelper;
/**
* Spring Boot post-construct hook that creates the DynamoDB query helper used to interact with DynamoDB
*/
@PostConstruct
public void init() {
queryHelper = new DynamoDBQueryHelper(dynamoDbEndpoint, dynamoDbRegion, tablePrefix);
}
/**
* Interacts with the ingestion strategy dynamo table to retrieve ingestion strategies
* @param workflowType type of workflow
* @param dataType data type
* @param userId user id
* @return the matching ingestion strategy
*/
@Override
public IngestionStrategy findByWorkflowTypeAndDataTypeAndUserId(WorkflowType workflowType, String dataType, String userId) {
......
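The body of findByWorkflowTypeAndDataTypeAndUserId is collapsed in the hunk above. Purely for illustration, a rough sketch of how the lookup could go through DynamoDBQueryHelper; IngestionStrategyDoc, the composite key format, and mapDocToStrategy are assumed names, not the actual implementation (loadByPrimaryKey is the same helper call WorkflowStatusRepositoryImpl uses below):

    // Hypothetical sketch - the doc class, key format, and mapper are assumptions, not the committed code.
    @Override
    public IngestionStrategy findByWorkflowTypeAndDataTypeAndUserId(WorkflowType workflowType, String dataType, String userId) {
        // Assumed composite key; the real table layout may differ.
        String strategyKey = String.format("%s:%s:%s", workflowType, dataType, userId);
        IngestionStrategyDoc doc = queryHelper.loadByPrimaryKey(IngestionStrategyDoc.class, strategyKey);
        return doc == null ? null : mapDocToStrategy(doc);
    }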
......@@ -16,8 +16,9 @@ package org.opengroup.osdu.workflow.aws.repository;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.HttpStatus;
import org.opengroup.osdu.core.aws.dynamodb.DynamoDBQueryHelper;
import org.opengroup.osdu.workflow.aws.util.DateTime;
import org.opengroup.osdu.core.common.model.http.AppException;
import org.opengroup.osdu.workflow.aws.util.dynamodb.converters.WorkflowStatusDoc;
import org.opengroup.osdu.workflow.model.WorkflowStatus;
import org.opengroup.osdu.workflow.model.WorkflowStatusType;
......@@ -27,7 +28,6 @@ import org.springframework.stereotype.Repository;
import javax.annotation.PostConstruct;
// TODO Will be moved to registry service
@Repository
@Slf4j
@RequiredArgsConstructor
......@@ -40,15 +40,27 @@ public class WorkflowStatusRepositoryImpl implements IWorkflowStatusRepository {
@Value("${aws.dynamodb.endpoint}")
String dynamoDbEndpoint;
private static String FINISHED_WORKFLOW_BAD_REQUEST_REASON = "Unable to update finished workflow";
// see integration test "should_returnBadRequest_when_givenFinishedWorkflowId"
private static String FINISHED_WORKFLOW_BAD_REQUEST_MSG = "Workflow status for workflow id: %s already has status:%s and can not be updated";
private DynamoDBQueryHelper queryHelper;
private DateTime dateTime;
/**
* Spring Boot post-construct hook that creates the DynamoDB query helper and the
* DateTime helper used to timestamp submitted workflows
*/
@PostConstruct
public void init() {
queryHelper = new DynamoDBQueryHelper(dynamoDbEndpoint, dynamoDbRegion, tablePrefix);
dateTime = new DateTime();
}
/**
* Simple lookup on the workflow dynamo table
* @param workflowId workflow id
* @return the stored workflow status for the given id
*/
@Override
public WorkflowStatus findWorkflowStatus(String workflowId) {
WorkflowStatusDoc doc = queryHelper.loadByPrimaryKey(WorkflowStatusDoc.class, workflowId);
......@@ -65,6 +77,11 @@ public class WorkflowStatusRepositoryImpl implements IWorkflowStatusRepository {
}
}
/**
* Simple save of a new workflow to the workflow dynamo table
* @param workflowStatus the workflow status to save
* @return the saved workflow status
*/
@Override
public WorkflowStatus saveWorkflowStatus(WorkflowStatus workflowStatus) {
if (workflowStatus != null) {
......@@ -74,10 +91,23 @@ public class WorkflowStatusRepositoryImpl implements IWorkflowStatusRepository {
return workflowStatus;
}
/**
* Simple update of an existing workflow against the workflow dynamo table.
* This also throws a bad request exception if the workflow is already in a finished state.
* @param workflowId workflow id
* @param workflowStatusType the new status to set
* @return the updated workflow status
*/
@Override
public WorkflowStatus updateWorkflowStatus(String workflowId, WorkflowStatusType workflowStatusType) {
if (workflowId != null && workflowStatusType != null) {
WorkflowStatus workflowStatus = findWorkflowStatus(workflowId);
if(workflowStatus.getWorkflowStatusType().equals(WorkflowStatusType.FINISHED)){
throw new AppException(HttpStatus.SC_BAD_REQUEST, FINISHED_WORKFLOW_BAD_REQUEST_REASON,
String.format(FINISHED_WORKFLOW_BAD_REQUEST_MSG, workflowId, workflowStatus.getWorkflowStatusType().toString().toUpperCase()));
}
workflowStatus.setWorkflowStatusType(workflowStatusType);
WorkflowStatusDoc doc = mapWorkflowStatusToDoc(workflowStatus);
queryHelper.save(doc);
......@@ -88,12 +118,17 @@ public class WorkflowStatusRepositoryImpl implements IWorkflowStatusRepository {
}
}
/**
* Helper function for converting to dynamo friendly object
* @param workflowStatus the workflow status to convert
* @return the DynamoDB document representation
*/
private WorkflowStatusDoc mapWorkflowStatusToDoc(WorkflowStatus workflowStatus) {
WorkflowStatusDoc doc = new WorkflowStatusDoc();
doc.setWorkflowId(workflowStatus.getWorkflowId());
doc.setAirflowRunId(workflowStatus.getAirflowRunId());
doc.setWorkflowStatusType(workflowStatus.getWorkflowStatusType().toString());
doc.setSubmittedAt(dateTime.getCurrentDate());
doc.setSubmittedAt(workflowStatus.getSubmittedAt());
doc.setSubmittedBy(workflowStatus.getSubmittedBy());
return doc;
}
......
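The finished-workflow guard in updateWorkflowStatus above is the behaviour the commented integration test name ("should_returnBadRequest_when_givenFinishedWorkflowId") refers to. A minimal unit-test sketch of the same check, written as if it lived in WorkflowStatusRepositoryImplTest (shown further down) with its mocked queryHelper; it assumes findWorkflowStatus maps the stored status string back onto WorkflowStatusType:

    // Illustrative sketch only - not part of this commit.
    @Test(expected = AppException.class)
    public void updateWorkflowStatus_returnsBadRequest_whenWorkflowAlreadyFinished() {
        // Arrange: the stored status is already FINISHED
        WorkflowStatusDoc finishedDoc = new WorkflowStatusDoc();
        finishedDoc.setWorkflowId("finished-workflow-id");
        finishedDoc.setWorkflowStatusType(WorkflowStatusType.FINISHED.toString());
        Mockito.when(queryHelper.loadByPrimaryKey(WorkflowStatusDoc.class, "finished-workflow-id"))
            .thenReturn(finishedDoc);

        // Act: any further update should be rejected with HTTP 400
        CUT.updateWorkflowStatus("finished-workflow-id", WorkflowStatusType.SUBMITTED);
    }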
......@@ -28,6 +28,13 @@ import java.util.Map;
@Slf4j
public class AirflowClient {
/**
* Sends a request to Airflow to trigger the named DAG
* @param airflowDagURL URL of the Airflow DAG endpoint
* @param body JSON request body to send
* @param dagName name of the DAG being triggered
* @throws IOException if the request cannot be sent or the response cannot be read
*/
public void makeRequestToAirflow(String airflowDagURL, String body, String dagName) throws IOException {
Map<String, String> headers = new HashMap<>();
headers.put("Content-Type", "application/json");
......@@ -44,6 +51,14 @@ public class AirflowClient {
}
}
/**
* Helper for opening and configuring an HTTP connection
* @param body request body
* @param headers request headers to set on the connection
* @param targetURL URL to open the connection to
* @return the configured HttpURLConnection
* @throws IOException if the connection cannot be opened
*/
private HttpURLConnection getConnection(String body, Map<String, String> headers, String targetURL) throws IOException {
URL url = new URL(targetURL);
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
......@@ -60,6 +75,12 @@ public class AirflowClient {
return connection;
}
/**
* Helper that writes the request body to the open connection to Airflow
* @param connection open connection to Airflow
* @param body request body to write
* @throws IOException if the body cannot be written
*/
public void sendRequest(HttpURLConnection connection, String body) throws IOException {
DataOutputStream writer = new DataOutputStream (
connection.getOutputStream());
......@@ -67,6 +88,12 @@ public class AirflowClient {
writer.close();
}
/**
* Helper that reads the response back from Airflow
* @param connection connection the request was sent on
* @return the response body
* @throws IOException if the response cannot be read
*/
public StringBuilder getResponse(HttpURLConnection connection) throws IOException {
StringBuilder response = new StringBuilder();
InputStream is = connection.getInputStream();
......
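For context, a minimal sketch of how a caller could drive this client; the endpoint layout, DAG name, and body below are illustrative assumptions, not values taken from the service:

    // Illustrative usage only - URL layout, DAG name, and body are assumptions.
    AirflowClient airflowClient = new AirflowClient();
    String dagName = "osdu_ingest";  // assumed DAG name
    String airflowDagURL = System.getenv("AIRFLOW_BASEURL")
        + "/api/experimental/dags/" + dagName + "/dag_runs";  // assumed endpoint layout
    String body = "{\"conf\": {\"runId\": \"example-run\"}}"; // assumed payload shape
    try {
        airflowClient.makeRequestToAirflow(airflowDagURL, body, dagName);
    } catch (IOException e) {
        // connection and response failures surface as IOException
        throw new RuntimeException("Failed to trigger DAG " + dagName, e);
    }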
......@@ -41,11 +41,21 @@ public class SubmitIngestServiceImpl implements ISubmitIngestService {
private final static String PARSE_ERROR_MSG = "Unable to parse data for dag kickoff";
/**
* Spring Boot post-construct hook that creates the Airflow client
*/
@PostConstruct
public void init(){
airflowClient = new AirflowClient();
}
/**
* Called by the core workflow code to kick off a DAG on an Airflow instance
* @param dagName name of the DAG to trigger
* @param data payload to pass to the DAG run
* @return whether the DAG was submitted successfully
*/
@Override
public boolean submitIngest(String dagName, Map<String, Object> data) {
String serializedData = serializeData(data);
......@@ -58,6 +68,11 @@ public class SubmitIngestServiceImpl implements ISubmitIngestService {
}
}
/**
* Helper that serializes the data map into a string to be sent to Airflow
* @param data key/value payload to serialize
* @return the serialized payload
*/
private String serializeData(Map<String, Object> data){
String serializedData;
try {
......
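The serializeData body is collapsed above. A plausible sketch assuming Jackson is the serializer; the ObjectMapper choice and the exact AppException arguments are assumptions, while PARSE_ERROR_MSG is the constant declared earlier in this class:

    // Sketch only - assumes Jackson; the committed implementation may differ.
    private String serializeData(Map<String, Object> data) {
        try {
            return new com.fasterxml.jackson.databind.ObjectMapper().writeValueAsString(data);
        } catch (com.fasterxml.jackson.core.JsonProcessingException e) {
            // Assumed error handling: surface the failure as a 400 using PARSE_ERROR_MSG
            throw new AppException(HttpStatus.SC_BAD_REQUEST, PARSE_ERROR_MSG, e.getMessage());
        }
    }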
package org.opengroup.osdu.workflow.aws.util;
import java.util.Date;
// This class makes testing easier: callers can mock the return value of its method.
// For example, it is used (and mocked) in WorkflowStatusRepositoryImplTest.
public class DateTime {
public Date getCurrentDate() {
return new Date();
}
}
\ No newline at end of file
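The point of the wrapper: tests can pin the timestamp by mocking DateTime instead of depending on the system clock. A small self-contained sketch (the class and method names here are illustrative):

    import java.util.Date;
    import org.junit.Assert;
    import org.junit.Test;
    import org.mockito.Mockito;
    import org.opengroup.osdu.workflow.aws.util.DateTime;

    // Illustrative sketch: pin the clock by mocking the DateTime wrapper.
    public class DateTimeMockingSketch {
        @Test
        public void returnsTheMockedDate() {
            DateTime dateTime = Mockito.mock(DateTime.class);
            Date fixed = new Date(0L); // deterministic timestamp for assertions
            Mockito.when(dateTime.getCurrentDate()).thenReturn(fixed);
            Assert.assertEquals(fixed, dateTime.getCurrentDate());
        }
    }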
AUTHORIZE_API=${ENTITLEMENTS_BASE_URL}/api/entitlements/v1
# Copyright 2020 Amazon Web Services
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
AUTHORIZE_API=${OSDU_ENTITLEMENTS_URL}
server.servlet.contextPath=/api/workflow/v1/
## AWS DynamoDB configuration
aws.dynamodb.key=kind
......
......@@ -24,17 +24,14 @@ import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import org.opengroup.osdu.core.aws.dynamodb.DynamoDBQueryHelper;
import org.opengroup.osdu.workflow.aws.WorkflowAwsApplication;
import org.opengroup.osdu.workflow.aws.util.DateTime;
import org.opengroup.osdu.workflow.aws.util.dynamodb.converters.WorkflowStatusDoc;
import org.opengroup.osdu.workflow.model.WorkflowStatus;
import org.opengroup.osdu.workflow.model.WorkflowStatusType;
import org.springframework.boot.test.context.SpringBootTest;
import java.util.Date;
import static org.mockito.MockitoAnnotations.initMocks;
@RunWith(MockitoJUnitRunner.class)
@SpringBootTest(classes={WorkflowAwsApplication.class})
public class WorkflowStatusRepositoryImplTest {
......@@ -42,14 +39,11 @@ public class WorkflowStatusRepositoryImplTest {
@InjectMocks
WorkflowStatusRepositoryImpl CUT = new WorkflowStatusRepositoryImpl();
@Mock
private DateTime dateTime;
@Mock
private DynamoDBQueryHelper queryHelper;
@Before
public void setUp() {
public void setUp() throws Exception {
initMocks(this);
}
......@@ -92,20 +86,19 @@ public class WorkflowStatusRepositoryImplTest {
public void saveWorkflowStatus()
{
// Arrange
Date d = new Date();
WorkflowStatus expected = new WorkflowStatus();
expected.setWorkflowId("TestWorkflowId");
expected.setAirflowRunId("TestAirflowRunId");
expected.setWorkflowStatusType(WorkflowStatusType.SUBMITTED);
Date testDate = new Date();
Mockito.when(dateTime.getCurrentDate()).thenReturn(testDate);
expected.setSubmittedAt(d);
WorkflowStatusDoc expectedDoc = new WorkflowStatusDoc();
expectedDoc.setWorkflowId(expected.getWorkflowId());
expectedDoc.setAirflowRunId(expected.getAirflowRunId());
expectedDoc.setWorkflowStatusType(expected.getWorkflowStatusType().toString());
expectedDoc.setSubmittedBy(expected.getSubmittedBy());
expectedDoc.setSubmittedAt(testDate);
expectedDoc.setSubmittedAt(d);
// Act
WorkflowStatus actual = CUT.saveWorkflowStatus(expected);
......@@ -121,7 +114,6 @@ public class WorkflowStatusRepositoryImplTest {
String workflowId = "6893fab0-38eb-4aed-96e9-c667f1e771c8";
WorkflowStatusType updatedWorkflowStatusType = WorkflowStatusType.FINISHED;
Date testDate = new Date();
Mockito.when(dateTime.getCurrentDate()).thenReturn(testDate);
WorkflowStatus original = new WorkflowStatus();
original.setWorkflowId(workflowId);
......
......@@ -34,6 +34,7 @@
</licenses>
<modules>
<module>workflow-test-aws</module>
<module>workflow-test-core</module>
<module>workflow-test-gcp</module>
<module>workflow-test-azure</module>
......
# Copyright © 2020 Amazon Web Services
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script prepares the dist directory for the integration tests.
# Must be run from the root of the repository
set -e
OUTPUT_DIR="${OUTPUT_DIR:-dist}"
INTEGRATION_TEST_OUTPUT_DIR=${INTEGRATION_TEST_OUTPUT_DIR:-$OUTPUT_DIR}/testing/integration
INTEGRATION_TEST_OUTPUT_BIN_DIR=${INTEGRATION_TEST_OUTPUT_DIR:-$INTEGRATION_TEST_OUTPUT_DIR}/bin
INTEGRATION_TEST_SOURCE_DIR=testing
INTEGRATION_TEST_SOURCE_DIR_AWS="$INTEGRATION_TEST_SOURCE_DIR"/workflow-test-aws
INTEGRATION_TEST_SOURCE_DIR_CORE="$INTEGRATION_TEST_SOURCE_DIR"/workflow-test-core
echo "--Source directories variables--"
echo $INTEGRATION_TEST_SOURCE_DIR_AWS
echo $INTEGRATION_TEST_SOURCE_DIR_CORE
echo "--Output directories variables--"
echo $OUTPUT_DIR
echo $INTEGRATION_TEST_OUTPUT_DIR
echo $INTEGRATION_TEST_OUTPUT_BIN_DIR
rm -rf "$INTEGRATION_TEST_OUTPUT_DIR"
mkdir -p "$INTEGRATION_TEST_OUTPUT_DIR" && mkdir -p "$INTEGRATION_TEST_OUTPUT_BIN_DIR"
echo "Building integration testing assemblies and gathering artifacts..."
mvn install -DskipTests -f "$INTEGRATION_TEST_SOURCE_DIR"/pom.xml
mvn install -f "$INTEGRATION_TEST_SOURCE_DIR_CORE"/pom.xml
mvn install dependency:copy-dependencies -DskipTests -f "$INTEGRATION_TEST_SOURCE_DIR_AWS"/pom.xml -DincludeGroupIds=org.opengroup.osdu -Dmdep.copyPom
cp "$INTEGRATION_TEST_SOURCE_DIR_AWS"/target/dependency/* "${INTEGRATION_TEST_OUTPUT_BIN_DIR}"
(cd "${INTEGRATION_TEST_OUTPUT_BIN_DIR}" && ls *.jar | sed -e 's/\.jar$//' | xargs -I {} echo mvn install:install-file -Dfile={}.jar -DpomFile={}.pom >> install-deps.sh)
chmod +x "${INTEGRATION_TEST_OUTPUT_BIN_DIR}"/install-deps.sh
mvn clean -f "$INTEGRATION_TEST_SOURCE_DIR_AWS"/pom.xml
cp -R "$INTEGRATION_TEST_SOURCE_DIR_AWS"/* "${INTEGRATION_TEST_OUTPUT_DIR}"/
# Copyright © 2020 Amazon Web Services
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script executes the tests and copies reports to the provided output directory.
# To call this script from the service working directory:
# ./dist/testing/integration/build-aws/run-tests.sh "./reports/"
SCRIPT_SOURCE_DIR=$(dirname "$0")
echo "Script source location"
echo "$SCRIPT_SOURCE_DIR"
(cd "$SCRIPT_SOURCE_DIR"/../bin && ./install-deps.sh)
#### ADD REQUIRED ENVIRONMENT VARIABLES HERE ###############################################
# The following variables are automatically populated from the environment during integration testing
# see os-deploy-aws/build-aws/integration-test-env-variables.py for an updated list
# AWS_COGNITO_CLIENT_ID
# ELASTIC_HOST
# ELASTIC_PORT
# FILE_URL
# LEGAL_URL
# SEARCH_URL
# STORAGE_URL
export OTHER_RELEVANT_DATA_COUNTRIES=US
export DEPLOY_ENV=empty
export LEGAL_TAG=opendes-public-usa-dataset-1
export TENANT_NAME=int-test-workflow
export AWS_COGNITO_AUTH_FLOW=USER_PASSWORD_AUTH
export AWS_COGNITO_AUTH_PARAMS_PASSWORD=$ADMIN_PASSWORD
export AWS_COGNITO_AUTH_PARAMS_USER=$ADMIN_USER
export AWS_COGNITO_AUTH_PARAMS_USER_NO_ACCESS=$USER_NO_ACCESS
export STORAGE_URL=$STORAGE_URL
export DOMAIN=testing.com
export LEGAL_URL=$LEGAL_URL
#### RUN INTEGRATION TEST #########################################################################
mvn test -f "$SCRIPT_SOURCE_DIR"/../pom.xml
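# Capture the mvn exit code so that copying the reports below does not mask a test failure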
TEST_EXIT_CODE=$?
#### COPY TEST REPORTS #########################################################################
if [ -n "$1" ]
then
mkdir -p "$1"
cp -R "$SCRIPT_SOURCE_DIR"/../target/surefire-reports "$1"
fi
exit $TEST_EXIT_CODE
<!--
Copyright © 2020 Amazon Web Services
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.opengroup.osdu</groupId>
<artifactId>workflow-test</artifactId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<artifactId>workflow-test-aws</artifactId>