Commit 179bebf9 authored by harshit aggarwal's avatar harshit aggarwal
Browse files

Code Refactoring

parent 8af9ff7d
Pipeline #4589 failed with stages
in 3 minutes and 43 seconds
......@@ -19,7 +19,11 @@
*.tar.gz
*.rar
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### STS ###
target/
......
......@@ -42,6 +42,7 @@
<modules>
<module>wks-core</module>
<module>provider/wks-gcp</module>
<module>provider/wks-azure</module>
</modules>
<repositories>
......
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.opengroup.osdu</groupId>
<artifactId>os-wks</artifactId>
<version>0.0.1</version>
</parent>
<artifactId>wks-test-azure</artifactId>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<exclusions>
<exclusion>
<artifactId>log4j-api</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-devtools</artifactId>
<scope>runtime</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<artifactId>asm</artifactId>
<groupId>org.ow2.asm</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.retry</groupId>
<artifactId>spring-retry</artifactId>
</dependency>
<dependency>
<groupId>com.github.berkesa</groupId>
<artifactId>datatree-core</artifactId>
<version>1.0.10</version>
</dependency>
<dependency>
<groupId>com.github.berkesa</groupId>
<artifactId>datatree-adapters</artifactId>
<version>1.0.11</version>
</dependency>
<dependency>
<groupId>io.cucumber</groupId>
<artifactId>cucumber-java8</artifactId>
<version>5.4.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.cucumber</groupId>
<artifactId>cucumber-junit</artifactId>
<version>5.4.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.cucumber</groupId>
<artifactId>cucumber-guice</artifactId>
<version>5.4.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
<version>4.2.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<version>3.0.0</version>
</dependency>
</dependencies>
<repositories>
<repository>
<id>${gitlab-server}</id>
<url>https://community.opengroup.org/api/v4/groups/17/-/packages/maven</url>
</repository>
</repositories>
<distributionManagement>
<repository>
<id>${gitlab-server}</id>
<url>https://community.opengroup.org/api/v4/projects/191/packages/maven</url>
</repository>
<snapshotRepository>
<id>${gitlab-server}</id>
<url>https://community.opengroup.org/api/v4/projects/191/packages/maven</url>
</snapshotRepository>
</distributionManagement>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>3.0.0-M3</version>
<executions>
<execution>
<id>integration-test</id>
<goals>
<goal>integration-test</goal>
</goals>
<configuration>
<skipTests>${skipItTests}</skipTests>
<includes>
<include>org.opengroup.osdu.wks.runner.IntegrationTestRunner</include>
</includes>
</configuration>
</execution>
<execution>
<id>verify</id>
<goals>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>Default</id>
<properties>
<skipUnitTests>true</skipUnitTests>
<skipItTests>false</skipItTests>
<preIntegrationPhase>pre-integration-test</preIntegrationPhase>
<postIntegrationPhase>post-integration-test</postIntegrationPhase>
<mainClass>org.opengroup.osdu.wks.WksServiceApplication</mainClass>
</properties>
</profile>
</profiles>
</project>
\ No newline at end of file
package org.opengroup.osdu.wks.constants;
/**
 * String constants shared across the WKS transformation service: HTTP header
 * names, record-kind markers, separators, and message templates used in logs
 * and error reporting.
 *
 * NOTE(review): several names are misspelled ("TRANFORMATION_*" should read
 * "TRANSFORMATION_*") and "Bearer" does not follow UPPER_SNAKE_CASE. They are
 * left unchanged here because renaming public constants would break callers.
 */
public class Constants {
// Suffix appended to log/exception messages before a location detail.
public static final String EXCEPTION_OCCURRED_AT = " , Exception occurred at: ";
// Standard HTTP header names used on outbound service calls.
public static final String CONTENT_TYPE = "Content-Type";
public static final String DATA_PARTITION_ID = "data-partition-id";
public static final String AUTHORIZATION = "Authorization";
public static final String ON_BEHALF_OF = "on-behalf-of";
public static final String CORRELATION_ID = "correlation-id";
public static final String APPLICATION_JSON = "application/json";
public static final String APP_KEY = "AppKey";
// Kind markers identifying already-transformed records (wks) and wke records.
public static final String WKS_KIND = "wks";
public static final String WKE_KIND = "wke";
// Message templates; %s placeholders are filled via String.format by callers.
public static final String NO_MAPPING_FOR_KIND = "Mapping not found for %s";
public static final String NO_MAPPING_FOR_ANY_RECORD = "Mapping not found for any record";
public static final String ERROR_WHILE_GETTING_RAW_RECORD = "Error retrieving raw record.";
public static final String TRANFORMATION_SUCCESSFUL = "Transformed successfully raw record id: %s, wks record id: %s";
public static final String TRANFORMATION_ALREADY_DONE = "Transformation already done.";
public static final String RAW_RECORD_NOT_PRESENT = "Raw record not present";
public static final String MAPPING_NOT_PRESENT = "Mapping not present";
// Separator characters used when composing record ids and kinds.
public static final String COLON_SEPARATOR = ":";
// Prefix for the Authorization header value (note the trailing space).
public static final String Bearer = "Bearer ";
public static final String MAPPING_MULTIPLE_FILE_FOUND = "Expected single mapping file";
public static final String LAST_MODIFIED_DATE = "last_modified_date";
public static final String DASH = "-";
public static final String POINT = ".";
public static final String MISSING_RAW_RECORD_DETAILS = "Missing raw records details";
public static final String DATA_PARTITION_ID_NEEDED = "Data Partition Id is needed";
public static final String CORRELATION_ID_NEEDED = "Correlation Id is needed";
public static final String TRANFORMATION_FAILED = "Transformation failed for raw record id: %s, reason: %s";
public static final String COULD_NOT_TRANSFORM = "Could not transform";
public static final String TRANFORMATION_PROCESS_COMPLETED = "Transformation process completed";
public static final String EXPECTED_CREATE_OR_UPDATE_OPERATION = "Expected create or update operation";
}
package org.opengroup.osdu.wks.model;
/**
 * Constants used by the WKS integration (automation) tests: HTTP headers,
 * Storage-service endpoints, expected status codes, and test fixtures.
 *
 * NOTE(review): several values (header names, COLON_SEPARATOR, DASH,
 * MAPPING_* messages) duplicate entries in
 * org.opengroup.osdu.wks.constants.Constants — consider consolidating.
 */
public class AutomationConstants {
// Standard HTTP header names sent with test requests.
public static final String CONTENT_TYPE = "Content-Type";
public static final String DATA_PARTITION_ID = "data-partition-id";
public static final String AUTHORIZATION = "Authorization";
public static final String ON_BEHALF_OF = "on-behalf-of";
public static final String CORRELATION_ID = "correlation-id";
public static final String APPLICATION_JSON = "application/json";
public static final String APP_KEY = "AppKey";
// Data partition the tests run against.
public static final String TENANT = "opendes";
// Storage service record endpoints (GET/DELETE take a record id suffix).
public static final String POST_ENDPOINT = "/records";
public static final String GET_ENDPOINT = "/records/";
public static final String DELETE_ENDPOINT = "/records/";
// Maximum time (seconds) to poll for a transformed record to appear.
public static final long RECORD_SEARCH_MAX_TIMEOUT_SEC = 30;
// Expected HTTP status codes, kept as strings for comparison with responses.
public static final String RECORD_CREATED = "201";
public static final String REQUEST_SUCCESS = "200";
public static final String RECORD_NOT_FOUND = "404";
public static final String REQUEST_SUCCESS_NO_CONTENT = "204";
public static final String MAPPING_MULTIPLE_FILE_FOUND = "Expected single mapping file";
public static final String MAPPING_NOT_PRESENT = "Mapping not present";
public static final String COLON_SEPARATOR = ":";
public static final String DASH = "-";
// Kind expected for transformed (WKS) wellbore records in this tenant.
public static final String OS_WKS_SCHEMA_KIND = "opendes:wks:wellbore:1.0.0";
}
package org.opengroup.osdu.wks.model;
import java.util.List;
import java.util.Map;
import org.opengroup.osdu.de.automation.model.HttpResponse;
import org.opengroup.osdu.de.automation.util.FileUtils;
import com.google.inject.Inject;
import io.cucumber.guice.ScenarioScoped;
import lombok.Data;
@ScenarioScoped
/**
 * Mutable state shared between Cucumber step definitions within a single
 * scenario (one instance per scenario via {@code @ScenarioScoped}).
 * Plain holder: every field is exposed through a getter/setter pair.
 */
public class WksIntegrationTestScope {

    // Helper for loading payload fixtures from the classpath (injected).
    @Inject
    private FileUtils fileUtils;
    // Bearer token used on requests issued during the scenario.
    private String token;
    // Raw JSON payload sent to the Storage service.
    private String inputPayload;
    // Most recent HTTP response captured by a step.
    private HttpResponse httpResponse;
    // Headers (auth, partition, correlation) reused across requests.
    private Map<String, String> authHeaders;
    // Ids and values tracked across steps to verify transformation results.
    private String wksRecordIdForCustomRawRecord;
    private String customRecordId;
    private String customValToVerifyUpdateOperation;
    private String rawRecordWithVersionForAncestry;
    private List<String> bulkRawRecordIdList;

    public FileUtils getFileUtils() {
        return this.fileUtils;
    }

    public void setFileUtils(FileUtils fileUtils) {
        this.fileUtils = fileUtils;
    }

    public String getToken() {
        return this.token;
    }

    public void setToken(String token) {
        this.token = token;
    }

    public String getInputPayload() {
        return this.inputPayload;
    }

    public void setInputPayload(String inputPayload) {
        this.inputPayload = inputPayload;
    }

    public HttpResponse getHttpResponse() {
        return this.httpResponse;
    }

    public void setHttpResponse(HttpResponse httpResponse) {
        this.httpResponse = httpResponse;
    }

    public Map<String, String> getAuthHeaders() {
        return this.authHeaders;
    }

    public void setAuthHeaders(Map<String, String> authHeaders) {
        this.authHeaders = authHeaders;
    }

    public String getWksRecordIdForCustomRawRecord() {
        return this.wksRecordIdForCustomRawRecord;
    }

    public void setWksRecordIdForCustomRawRecord(String wksRecordIdForCustomRawRecord) {
        this.wksRecordIdForCustomRawRecord = wksRecordIdForCustomRawRecord;
    }

    public String getCustomRecordId() {
        return this.customRecordId;
    }

    public void setCustomRecordId(String customRecordId) {
        this.customRecordId = customRecordId;
    }

    public String getCustomValToVerifyUpdateOperation() {
        return this.customValToVerifyUpdateOperation;
    }

    public void setCustomValToVerifyUpdateOperation(String customValToVerifyUpdateOperation) {
        this.customValToVerifyUpdateOperation = customValToVerifyUpdateOperation;
    }

    public String getRawRecordWithVersionForAncestry() {
        return this.rawRecordWithVersionForAncestry;
    }

    public void setRawRecordWithVersionForAncestry(String rawRecordWithVersionForAncestry) {
        this.rawRecordWithVersionForAncestry = rawRecordWithVersionForAncestry;
    }

    public List<String> getBulkRawRecordIdList() {
        return this.bulkRawRecordIdList;
    }

    public void setBulkRawRecordIdList(List<String> bulkRawRecordIdList) {
        this.bulkRawRecordIdList = bulkRawRecordIdList;
    }
}
package org.opengroup.osdu.wks.runner;
import org.junit.runner.RunWith;
import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
/**
 * JUnit entry point for the WKS Cucumber integration suite. Runs all
 * features under classpath:features tagged {@code @WksService}, binding
 * them to step definitions in org.opengroup.osdu.wks.stepdefs, and writes
 * a JUnit XML report under target/cucumber-reports.
 */
@RunWith(Cucumber.class)
@CucumberOptions(features = "classpath:features", glue = { "classpath:org.opengroup.osdu.wks.stepdefs" }, tags = {
"@WksService" }, plugin = { "pretty", "junit:target/cucumber-reports/wks-service-test-report.xml" })
public class IntegrationTestRunner {
}
package org.opengroup.osdu.wks.util;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import org.opengroup.osdu.wks.constants.Constants;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Map;
/**
 * Acquires an Azure AD OAuth2 access token for the integration-test service
 * principal via the client_credentials grant.
 *
 * NOTE(review): the class name is spelled "Principle" (should be
 * "Principal"); it is kept as-is to avoid breaking existing references.
 */
public class AzureServicePrinciple {

    /**
     * Builds the Authorization header value for the test service principal.
     * Credentials are read from system properties, falling back to
     * environment variables: INTEGRATION_TESTER, TESTER_SERVICEPRINCIPAL_SECRET,
     * AZURE_AD_TENANT_ID, AZURE_AD_APP_RESOURCE_ID.
     *
     * @return "Bearer &lt;token&gt;" on success; the literal "Bearer null" when
     *         token generation fails (best-effort behavior preserved from the
     *         original, but the failure is now logged instead of swallowed).
     */
    public String getIdToken() {
        String spId = System.getProperty("INTEGRATION_TESTER", System.getenv("INTEGRATION_TESTER"));
        String spSecret = System.getProperty("TESTER_SERVICEPRINCIPAL_SECRET", System.getenv("TESTER_SERVICEPRINCIPAL_SECRET"));
        String tenantId = System.getProperty("AZURE_AD_TENANT_ID", System.getenv("AZURE_AD_TENANT_ID"));
        String appResourceId = System.getProperty("AZURE_AD_APP_RESOURCE_ID", System.getenv("AZURE_AD_APP_RESOURCE_ID"));
        String token = null;
        try {
            token = generateIdToken(spId, spSecret, tenantId, appResourceId);
        } catch (Exception e) {
            // Fix: the original empty catch hid all failures. Keep the
            // best-effort "Bearer null" fallback but surface the cause so
            // test failures are diagnosable. (Do not log the secret.)
            System.err.println("Failed to generate Azure AD token: " + e);
        }
        return Constants.Bearer + token;
    }

    /**
     * Requests an access token from the Azure AD v1 token endpoint for the
     * given tenant using the client_credentials grant.
     *
     * @param sp_id           service principal (client) id
     * @param sp_secret       service principal secret
     * @param tenant_id       Azure AD tenant id
     * @param app_resource_id resource (audience) the token is requested for
     * @return the access_token value from the AAD response
     * @throws Exception on connection failure, non-2xx response, or a
     *                   response body without an access_token field
     */
    public String generateIdToken(String sp_id, String sp_secret, String tenant_id, String app_resource_id) throws Exception {
        String aadEndpoint = String.format("https://login.microsoftonline.com/%s/oauth2/token", tenant_id);
        URL url = new URL(aadEndpoint);
        HttpURLConnection con = (HttpURLConnection) url.openConnection();
        con.setRequestMethod("POST");
        con.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        Map<String, String> parameters = new HashMap<>();
        parameters.put("grant_type", "client_credentials");
        parameters.put("client_id", sp_id);
        parameters.put("client_secret", sp_secret);
        parameters.put("resource", app_resource_id);
        con.setDoOutput(true);
        StringBuilder content = new StringBuilder();
        try {
            // Fix: try-with-resources — the original leaked both streams when
            // an exception was thrown mid-request.
            try (DataOutputStream out = new DataOutputStream(con.getOutputStream())) {
                out.writeBytes(getParamsString(parameters));
                out.flush();
            }
            // Fix: explicit UTF-8 — the original used the platform default charset.
            try (BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream(), "UTF-8"))) {
                String inputLine;
                while ((inputLine = in.readLine()) != null) {
                    content.append(inputLine);
                }
            }
        } finally {
            con.disconnect();
        }
        Gson gson = new Gson();
        JsonObject jobj = gson.fromJson(content.toString(), JsonObject.class);
        return jobj.get("access_token").getAsString();
    }

    /**
     * URL-encodes the map into an application/x-www-form-urlencoded body
     * ("k1=v1&k2=v2"); returns the empty string for an empty map.
     */
    private static String getParamsString(Map<String, String> params)
            throws UnsupportedEncodingException {
        StringBuilder result = new StringBuilder();
        for (Map.Entry<String, String> entry : params.entrySet()) {
            if (result.length() > 0) {
                result.append('&');
            }
            result.append(URLEncoder.encode(entry.getKey(), "UTF-8"))
                    .append('=')
                    .append(URLEncoder.encode(entry.getValue(), "UTF-8"));
        }
        return result.toString();
    }
}
Feature: Covers all positive and negative test cases around WKS transformation service
# Pre-requisite test steps for all scenarios
Background: Generate Access token
Given I generate user token and prepare request header
@WksService
Scenario Outline: WKS Transformation should not be attempted if record is already a WKS record
When I hit Storage service put end point with <payload> which is already a WKS record
Then No corresponding transformed record should be created for this wks record in storage
Examples:
| payload |
| "/input_payloads/payload_for_wks_record.json" |
@WksService
Scenario Outline: WKS Transformation should not be attempted if record is a WKE record
When I hit Storage service put end point with <payload> which is already a WKE record
Then No corresponding transformed record should be created for this wke record in storage
Examples:
| payload |
| "/input_payloads/payload_for_wke_record.json" |
@WksService
Scenario Outline: WKS Transformation flow should be skipped if mapping is not present
When I hit Storage service put end point with <payload> which has invalid source so that mapping service does not return any mapping
Then No corresponding transformed record should be created for this record in storage
Examples:
| payload |
| "/input_payloads/payload_with_invalid_source.json" |
@WksService
Scenario Outline: Wks record to be created on raw record creation. WKS record should be updated on corresponding raw record update.
When I hit Storage service put end point with <payload>
Then Transformed record should be created in storage
And Wks record should be created in the intended kind
When I hit Storage service put end point to update raw record with <updatePayload>
Then Existing WKS record in storage should get updated
Examples:
| payload | updatePayload |
| "/input_payloads/payload_for_create_operation.json" | "/input_payloads/payload_for_update_operation.json" |
@WksService
Scenario Outline: Metablock should get updated as per mapping file and ancestry should be correctly updated.
When I hit Storage service put end point with <payload>
Then Raw Record should be created in Storage
And Transformed record should be created in storage
And Data block in wks should be transformed as defined in <expectedWksRecord> as per <MappingFile>
And Ancestry block should be updated correctly
# And Metablock references in the transformed record should be updated as defined in <expectedWksRecord> as per <MappingFile>
Examples:
| payload | expectedWksRecord | MappingFile |
| "/input_payloads/payload_for_meta_and_ancestry.json" | "/input_payloads/expected_transformed_record_draft.json" | "/input_payloads/mappingFile.json" |
@WksService
Scenario Outline: Transformation of all record in request payload should be attempted regardless of transformation failure for few records.
When I hit Storage service put end point with <payload> having multiple records where few records are already Wks records
Then Transformation of Wks records should be skipped
And All other valid records should get transformed
Examples:
| payload |
| "/input_payloads/payload_for_bulk_records.json" |
{
"data": {
"spudDate": "atspud",
"dlWGS84": {
"latitude": "NaN",
"longitude": "NaN"
}
},
"meta": [
{
"kind": "Unit",
"name": "m",
"persistableReference": "{\"scaleOffset\":{\"scale\":1.0,\"offset\":0.0},\"symbol\":\"m\",\"baseMeasurement\":{\"ancestry\":\"Length\",\"type\":\"UM\"},\"type\":\"USO\"}",
"propertyNames": [
"spudDate"
]
},
{
"kind": "CRS",
"name": "ED50 * EPSG-Nor N62 2001 / UTM zone 31N [23031,1612]",
"persistableReference": "{\"lateBoundCRS\":{\"wkt\":\"PROJCS[\\\"ED_1950_UTM_Zone_31N\\\",GEOGCS[\\\"GCS_European_1950\\\",DATUM[\\\"D_European_1950\\\",SPHEROID[\\\"International_1924\\\",6378388.0,297.0]],PRIMEM[\\\"Greenwich\\\",0.0],UNIT[\\\"Degree\\\",0.0174532925199433]],PROJECTION[\\\"Transverse_Mercator\\\"],PARAMETER[\\\"False_Easting\\\",500000.0],PARAMETER[\\\"False_Northing\\\",0.0],PARAMETER[\\\"Central_Meridian\\\",3.0],PARAMETER[\\\"Scale_Factor\\\",0.9996],PARAMETER[\\\"Latitude_Of_Origin\\\",0.0],UNIT[\\\"Meter\\\",1.0],AUTHORITY[\\\"EPSG\\\",23031]]\",\"ver\":\"PE_10_3_1\",\"name\":\"ED_1950_UTM_Zone_31N\",\"authCode\":{\"auth\":\"EPSG\",\"code\":\"23031\"},\"type\":\"LBC\"},\"singleCT\":{\"wkt\":\"GEOGTRAN[\\\"ED_1950_To_WGS_1984_23\\\",GEOGCS[\\\"GCS_European_1950\\\",DATUM[\\\"D_European_1950\\\",SPHEROID[\\\"International_1924\\\",6378388.0,297.0]],PRIMEM[\\\"Greenwich\\\",0.0],UNIT[\\\"Degree\\\",0.0174532925199433]],GEOGCS[\\\"GCS_WGS_1984\\\",DATUM[\\\"D_WGS_1984\\\",SPHEROID[\\\"WGS_1984\\\",6378137.0,298.257223563]],PRIMEM[\\\"Greenwich\\\",0.0],UNIT[\\\"Degree\\\",0.0174532925199433]],METHOD[\\\"Position_Vector\\\"],PARAMETER[\\\"X_Axis_Translation\\\",-116.641],PARAMETER[\\\"Y_Axis_Translation\\\",-56.931],PARAMETER[\\\"Z_Axis_Translation\\\",-110.559],PARAMETER[\\\"X_Axis_Rotation\\\",0.893],PARAMETER[\\\"Y_Axis_Rotation\\\",0.921],PARAMETER[\\\"Z_Axis_Rotation\\\",-0.917],PARAMETER[\\\"Scale_Difference\\\",-3.52],AUTHORITY[\\\"EPSG\\\",1612]]\",\"ver\":\"PE_10_3_1\",\"name\":\"ED_1950_To_WGS_1984_23\",\"authCode\":{\"auth\":\"EPSG\",\"code\":\"1612\"},\"type\":\"ST\"},\"ver\":\"PE_10_3_1\",\"name\":\"ED50 * EPSG-Nor N62 2001 / UTM zone 31N [23031,1612]\",\"authCode\":{\"auth\":\"\",\"code\":\"23031023\"},\"type\":\"EBC\"}",
"propertyNames": [
"dlWGS84.latitude",
"dlWGS84.longitude"
]
}
],
"id": "<WksRecordId>",
"ancestry": {
"parents": [
"<rawRecordId>:<rawRecordVersion>"
]
},
"kind": "opendes:wks:wellbore:1.0.0",
"acl": {
"viewers": [
"data.test1@opendes.contoso.com"
],
"owners": [
"data.test1@opendes.contoso.com"