Commit 43207501 authored by David Diederich

Merge remote-tracking branch 'origin/master' into release/0.4

parents a2c39939 7d986c0c
Pipeline #20730 failed with stages in 141 minutes and 8 seconds
......@@ -17,11 +17,12 @@ variables:
OSDU_GCP_APPLICATION_NAME: os-indexer
OSDU_GCP_SERVICE: indexer
OSDU_GCP_VENDOR: gcp
OSDU_GCP_QUEUE_SA_EMAIL: pub-sub-indexer-queue@nice-etching-277309.iam.gserviceaccount.com
OSDU_GCP_SERVICE_ACCOUNT: osdu-gcp-sa@nice-etching-277309.iam.gserviceaccount.com
OSDU_SECURITY_HTTPS_CERTIFICATE_TRUST: 'true'
OSDU_GCP_STORAGE_RECORDS_BATCH_SIZE: 20
OSDU_GCP_DATA_GROUP: osdu
OSDU_GCP_ENV_VARS: AUTHORIZE_API=$OSDU_GCP_ENTITLEMENTS_URL,GOOGLE_CLOUD_PROJECT=$OSDU_GCP_PROJECT,REDIS_SEARCH_HOST=$REDIS_SEARCH_HOST,REDIS_GROUP_HOST=$REDIS_GROUP_HOST,SECURITY_HTTPS_CERTIFICATE_TRUST=$OSDU_SECURITY_HTTPS_CERTIFICATE_TRUST,INDEXER_HOST=$OSDU_GCP_INDEXER_HOST,STORAGE_QUERY_RECORD_HOST=$OSDU_GCP_STORAGE_QUERY_RECORD_HOST,STORAGE_SCHEMA_HOST=$OSDU_GCP_STORAGE_SCHEMA_HOST,STORAGE_QUERY_RECORD_FOR_CONVERSION_HOST=$OSDU_GCP_STORAGE_QUERY_RECORD_FOR_CONVERSION_HOST,STORAGE_HOSTNAME=$OSDU_GCP_STORAGE_HOSTNAME,STORAGE_RECORDS_BATCH_SIZE=$OSDU_GCP_STORAGE_RECORDS_BATCH_SIZE,INDEXER_QUEUE_HOST=$OSDU_GCP_INDEXER_QUEUE_HOST,LEGALTAG_API=$OSDU_GCP_LEGALTAG_API,CRS_API=$OSDU_GCP_CRS_API,DATA_GROUP=$OSDU_GCP_DATA_GROUP,GOOGLE_AUDIENCES=$GOOGLE_AUDIENCE,INDEXER_QUE_SERVICE_MAIL=$OSDU_GCP_SERVICE_ACCOUNT --vpc-connector=$OSDU_GCP_VPC_CONNECTOR
OSDU_GCP_ENV_VARS: AUTHORIZE_API=$OSDU_GCP_ENTITLEMENTS_URL,GOOGLE_CLOUD_PROJECT=$OSDU_GCP_PROJECT,REDIS_SEARCH_HOST=$REDIS_SEARCH_HOST,REDIS_GROUP_HOST=$REDIS_GROUP_HOST,SECURITY_HTTPS_CERTIFICATE_TRUST=$OSDU_SECURITY_HTTPS_CERTIFICATE_TRUST,INDEXER_HOST=$OSDU_GCP_INDEXER_HOST,STORAGE_QUERY_RECORD_HOST=$OSDU_GCP_STORAGE_QUERY_RECORD_HOST,STORAGE_SCHEMA_HOST=$OSDU_GCP_STORAGE_SCHEMA_HOST,STORAGE_QUERY_RECORD_FOR_CONVERSION_HOST=$OSDU_GCP_STORAGE_QUERY_RECORD_FOR_CONVERSION_HOST,STORAGE_HOSTNAME=$OSDU_GCP_STORAGE_HOSTNAME,STORAGE_RECORDS_BATCH_SIZE=$OSDU_GCP_STORAGE_RECORDS_BATCH_SIZE,INDEXER_QUEUE_HOST=$OSDU_GCP_INDEXER_QUEUE_HOST,LEGALTAG_API=$OSDU_GCP_LEGALTAG_API,CRS_API=$OSDU_GCP_CRS_API,DATA_GROUP=$OSDU_GCP_DATA_GROUP,GOOGLE_AUDIENCES=$GOOGLE_AUDIENCE,INDEXER_QUE_SERVICE_MAIL=$OSDU_GCP_QUEUE_SA_EMAIL --vpc-connector=$OSDU_GCP_VPC_CONNECTOR
IBM_BUILD_SUBDIR: provider/indexer-ibm
IBM_INT_TEST_SUBDIR: testing/indexer-test-ibm
......
......@@ -694,6 +694,7 @@ CPL-1.0
========================================================================
The following software have components provided under the terms of this license:
- JUnit (from http://junit.org)
- System Rules (from http://stefanbirkner.github.io/system-rules/)
========================================================================
......@@ -701,7 +702,6 @@ EPL-1.0
========================================================================
The following software have components provided under the terms of this license:
- JUnit (from http://junit.org)
- Logback Classic Module (from )
- Logback Core Module (from )
- Microsoft Application Insights Java SDK Core (from https://github.com/Microsoft/ApplicationInsights-Java)
......@@ -1008,6 +1008,7 @@ The following software have components provided under the terms of this license:
- Bouncy Castle Provider (from http://www.bouncycastle.org/java.html)
- Bouncy Castle Provider (from http://www.bouncycastle.org/java.html)
- Byte Buddy (without dependencies) (from )
- JUnit (from http://junit.org)
- JavaBeans Activation Framework API jar (from )
- Servlet Specification 2.5 API (from )
- Spongy Castle (from http://rtyley.github.io/spongycastle/)
......
# Introduction
os-indexer-azure is a [Spring Boot](https://spring.io/projects/spring-boot) service responsible for indexing Records, which enables the `os-search` service to execute OSDU R2 domain searches against Elasticsearch.
## Azure Implementation
The [os-indexer-azure README.md](./provider/indexer-azure/README.md) has all the information needed to get started
......@@ -9,3 +11,4 @@ running the `os-indexer` Azure implementation
All documentation for the GCP implementation of `os-indexer` lives [here](./provider/indexer-gcp/README.md)
......@@ -91,6 +91,8 @@ spec:
value: http://entitlements-azure/entitlements/v1
- name: entitlements_service_api_key
value: "OBSOLETE"
- name: schema_service_url
value: http://schema-service/api/schema-service/v1
- name: storage_service_url
value: http://storage/api/storage/v2
- name: STORAGE_SCHEMA_HOST
......
......@@ -50,14 +50,16 @@ variables:
value: $[ resources.repositories['FluxRepo'].name ]
- name: SKIP_TESTS
value: 'false'
- name: 'MAVEN_CACHE_FOLDER'
value: $(Pipeline.Workspace)/.m2/repository
stages:
- template: /devops/build-stage.yml@TemplateRepo
parameters:
mavenGoal: 'package'
mavenPublishJUnitResults: true
serviceCoreMavenOptions: '-P indexer-core'
mavenOptions: '-P indexer-azure'
serviceCoreMavenOptions: '-P indexer-core --settings .mvn/community-maven.settings.xml -Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
mavenOptions: '-P indexer-azure --settings .mvn/community-maven.settings.xml -Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
copyFileContents: |
pom.xml
provider/indexer-azure/maven/settings.xml
......
......@@ -50,14 +50,16 @@ variables:
value: $[ resources.repositories['FluxRepo'].name ]
- name: SKIP_TESTS
value: 'false'
- name: 'MAVEN_CACHE_FOLDER'
value: $(Pipeline.Workspace)/.m2/repository
stages:
- template: /devops/build-stage.yml@TemplateRepo
parameters:
mavenGoal: 'package'
mavenPublishJUnitResults: true
serviceCoreMavenOptions: '-P indexer-core'
mavenOptions: '-P indexer-azure'
serviceCoreMavenOptions: '-P indexer-core --settings .mvn/community-maven.settings.xml -Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
mavenOptions: '-P indexer-azure --settings .mvn/community-maven.settings.xml -Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
copyFileContents: |
pom.xml
provider/indexer-azure/maven/settings.xml
......
......@@ -152,6 +152,8 @@ spec:
value: http://entitlements-azure/entitlements/v1
- name: entitlements_service_api_key
value: "OBSOLETE"
- name: schema_service_url
value: http://schema-service/api/schema-service/v1
- name: storage_service_url
value: http://storage/api/storage/v2
- name: STORAGE_SCHEMA_HOST
......
......@@ -7,6 +7,7 @@
- [Reindex <a name="reindex"></a>](#reindex)
- [Copy Index <a name="copy-index"></a>](#copy-index)
- [Get task status <a name="get-task-status"></a>](#get-task-status)
- [Schema Service adoption <a name="schema-service-adoption"></a>](#schema-service-adoption)
## Introduction <a name="introduction"></a>
......@@ -250,3 +251,12 @@ API will respond with status of task.
[Back to table of contents](#TOC)
## Schema Service adoption <a name="schema-service-adoption"></a>
The Indexer service is being adapted to use schemas from the Schema Service instead of the Storage Service.
The Indexer service retrieves a schema from the Schema Service whenever the schema is not found on the Storage Service.
So far this change affects only the Azure implementation.
The call to the Storage Service will later be deprecated and then removed once the deprecation period ends.
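A minimal sketch of that lookup order, using a hypothetical `SchemaClient` interface rather than the Indexer's actual classes:

```java
// Hypothetical sketch of the fallback described above; SchemaClient is an
// illustrative interface, not part of the Indexer codebase.
interface SchemaClient {
    String getSchema(String kind); // returns null when the schema is not found
}

class SchemaResolver {
    private final SchemaClient storageService; // legacy source, to be deprecated
    private final SchemaClient schemaService;  // new source of truth

    SchemaResolver(SchemaClient storageService, SchemaClient schemaService) {
        this.storageService = storageService;
        this.schemaService = schemaService;
    }

    String resolveSchema(String kind) {
        String schema = storageService.getSchema(kind);
        return schema != null ? schema : schemaService.getSchema(kind);
    }
}
```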
[Back to table of contents](#TOC)
......@@ -47,6 +47,7 @@ public class IndexerConfigurationProperties {
private String storageQueryRecordHost;
private Integer storageRecordsBatchSize;
private String storageSchemaHost;
private String schemaHost;
private String entitlementsHost;
private String entitlementTargetAudience;
private String indexerQueueHost;
......
......@@ -27,6 +27,7 @@ public class AuditEvents {
private static final String INDEX_CREATE_RECORDS_SUCCESS = "Successfully created record in index";
private static final String INDEX_CREATE_RECORDS_FAILURE = "Failed creating record in index";
private static final String INDEX_UPDATE_RECORD_ACTION_ID = "IN002";
private static final String INDEX_UPDATE_RECORDS_SUCCESS = "Successfully updated record in index";
private static final String INDEX_UPDATE_RECORDS_FAILURE = "Failed updating record in index";
......
// Copyright 2017-2020, Schlumberger
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.opengroup.osdu.indexer.schema.converter;
import org.apache.http.HttpStatus;
import org.opengroup.osdu.core.common.logging.JaxRsDpsLog;
import org.opengroup.osdu.core.common.model.http.AppException;
import org.opengroup.osdu.core.common.search.Preconditions;
import org.opengroup.osdu.indexer.schema.converter.config.SchemaConverterConfig;
import org.opengroup.osdu.indexer.schema.converter.config.SchemaConverterPropertiesConfig;
import org.opengroup.osdu.indexer.schema.converter.tags.AllOfItem;
import org.opengroup.osdu.indexer.schema.converter.tags.Definition;
import org.opengroup.osdu.indexer.schema.converter.tags.Definitions;
import org.opengroup.osdu.indexer.schema.converter.tags.TypeProperty;
import java.util.*;
import java.util.function.Supplier;
import java.util.stream.Stream;
/**
 * Converts the properties of a Schema Service definition into Storage Service
 * schema items (kind/path pairs).
 */
public class PropertiesProcessor {
private JaxRsDpsLog log;
private SchemaConverterConfig schemaConverterConfig;
private static final String DEF_PREFIX = "#/definitions/";
private static final String LINK_PREFIX = "^srn";
private static final String LINK_TYPE = "link";
private final Definitions definitions;
private final String pathPrefix;
private final String pathPrefixWithDot;
public PropertiesProcessor(Definitions definitions, JaxRsDpsLog log, SchemaConverterConfig schemaConverterConfig) {
this(definitions, null, log, schemaConverterConfig);
}
public PropertiesProcessor(Definitions definitions, String pathPrefix, JaxRsDpsLog log, SchemaConverterConfig schemaConverterConfig) {
this.log = log;
this.definitions = definitions;
this.pathPrefix = pathPrefix;
this.pathPrefixWithDot = Objects.isNull(pathPrefix) || pathPrefix.isEmpty() ? "" : pathPrefix + ".";
this.schemaConverterConfig = schemaConverterConfig;
}
public Stream<Map<String, Object>> processItem(AllOfItem allOfItem) {
Preconditions.checkNotNull(allOfItem, "allOfItem cannot be null");
String ref = allOfItem.getRef();
return Objects.isNull(ref) ?
allOfItem.getProperties().entrySet().stream().flatMap(this::processPropertyEntry) : processRef(ref);
}
public Stream<Map<String, Object>> processRef(String ref) {
Preconditions.checkNotNull(ref, "reference cannot be null");
if (!ref.contains(DEF_PREFIX)) {
log.warning("Unknown definition:" + ref);
return Stream.empty();
}
String definitionSubRef = ref.substring(DEF_PREFIX.length());
if (schemaConverterConfig.getSkippedDefinitions().contains(definitionSubRef)) {
return Stream.empty();
}
if (Objects.nonNull(schemaConverterConfig.getSpecialDefinitionsMap().get(definitionSubRef))) {
return storageSchemaEntry(schemaConverterConfig.getSpecialDefinitionsMap().get(definitionSubRef), pathPrefix);
}
Definition definition = definitions.getDefinition(definitionSubRef);
if (Objects.isNull(definition)) {
    throw new AppException(HttpStatus.SC_NOT_FOUND, "Failed to find definition: " + definitionSubRef,
            "Unknown definition: " + definitionSubRef);
}
return definition.getProperties().entrySet().stream().flatMap(this::processPropertyEntry);
}
private Stream<Map<String, Object>> processPropertyEntry(Map.Entry<String, TypeProperty> entry) {
Preconditions.checkNotNull(entry, "entry cannot be null");
if ("object".equals(entry.getValue().getType())
&& Objects.isNull(entry.getValue().getItems())
&& Objects.isNull(entry.getValue().getRef())
&& Objects.isNull(entry.getValue().getProperties())) {
return Stream.empty();
}
if ("array".equals(entry.getValue().getType())) {
if (schemaConverterConfig.getSupportedArrayTypes().contains(entry.getValue().getItems().getType())) {
return storageSchemaEntry("[]" + getTypeByDefinitionProperty(entry.getValue()), pathPrefixWithDot + entry.getKey());
}
return Stream.empty();
}
if (Objects.nonNull(entry.getValue().getProperties())) {
PropertiesProcessor propertiesProcessor = new PropertiesProcessor(definitions, pathPrefixWithDot + entry.getKey()
, log, new SchemaConverterPropertiesConfig());
return entry.getValue().getProperties().entrySet().stream().flatMap(propertiesProcessor::processPropertyEntry);
}
if (Objects.nonNull(entry.getValue().getRef())) {
return new PropertiesProcessor(definitions, pathPrefixWithDot + entry.getKey(), log, new SchemaConverterPropertiesConfig())
.processRef(entry.getValue().getRef());
}
return storageSchemaEntry(getTypeByDefinitionProperty(entry.getValue()), pathPrefixWithDot + entry.getKey());
}
private Stream<Map<String, Object>> storageSchemaEntry(String kind, String path) {
Preconditions.checkNotNullOrEmpty(kind, "kind cannot be null or empty");
Preconditions.checkNotNullOrEmpty(path, "path cannot be null or empty");
Map<String, Object> map = new HashMap<>();
map.put("kind", kind);
map.put("path", path);
return Stream.of(map);
}
private String getTypeByDefinitionProperty(TypeProperty definitionProperty) {
Preconditions.checkNotNull(definitionProperty, "definitionProperty cannot be null");
return Stream.of(
getFromPattern(definitionProperty.getPattern()),
getFromItemsPattern(() -> definitionProperty.getItems() != null ? definitionProperty.getItems().getPattern() : null),
getFromFormat(definitionProperty::getFormat),
getFromItemsType(() -> definitionProperty.getItems() != null ? definitionProperty.getItems().getType() : null))
.filter(x -> x.get() != null)
.findFirst()
.orElse(getFromType(definitionProperty::getType)).get();
}
private Supplier<String> getFromPattern(String pattern) {
return () -> Objects.nonNull(pattern) && pattern.startsWith(LINK_PREFIX) ? LINK_TYPE : null;
}
private Supplier<String> getFromItemsPattern(Supplier<String> itemsPatternSupplier) {
return () -> {
String itemsPattern = itemsPatternSupplier.get();
return Objects.nonNull(itemsPattern) && itemsPattern.startsWith(LINK_PREFIX) ? LINK_TYPE : null;
};
}
private Supplier<String> getFromType(Supplier<String> typeSupplier) {
return () -> {
String type = typeSupplier.get();
return schemaConverterConfig.getPrimitiveTypesMap().getOrDefault(type, type);
};
}
private Supplier<String> getFromFormat(Supplier<String> formatSupplier) {
    return () -> {
        String format = formatSupplier.get();
        return Objects.nonNull(format) ? schemaConverterConfig.getPrimitiveTypesMap().getOrDefault(format, format) : null;
    };
}
private Supplier<String> getFromItemsType(Supplier<String> itemsTypeSupplier) {
return () -> {
String itemsType = itemsTypeSupplier.get();
return Objects.nonNull(itemsType) ? schemaConverterConfig.getPrimitiveTypesMap().getOrDefault(itemsType, itemsType) : null;
};
}
}
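For orientation, the type-resolution precedence implemented by `getTypeByDefinitionProperty` above is: link pattern, then items pattern, then format, then items type, then the plain type, with `^srn` patterns mapped to `link` and primitives remapped through the primitive-types map. A condensed, self-contained sketch of that order (`resolveType` is illustrative only, not part of the converter's API, and the `items.*` branches are folded into the pattern/type arguments for brevity):

```java
import java.util.Map;

// Condensed sketch of getTypeByDefinitionProperty's precedence.
public class TypeResolutionSketch {
    static final Map<String, String> PRIMITIVES = Map.of(
            "boolean", "bool", "number", "double", "date-time", "datetime",
            "integer", "int", "int64", "long");

    static String resolveType(String pattern, String format, String type) {
        if (pattern != null && pattern.startsWith("^srn")) return "link";    // link pattern wins
        if (format != null) return PRIMITIVES.getOrDefault(format, format); // then format
        return PRIMITIVES.getOrDefault(type, type);                         // finally the declared type
    }

    public static void main(String[] args) {
        System.out.println(resolveType(null, null, "number"));        // double
        System.out.println(resolveType("^srn.*", null, "string"));    // link
        System.out.println(resolveType(null, "date-time", "string")); // datetime
    }
}
```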
// Copyright 2017-2020, Schlumberger
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.opengroup.osdu.indexer.schema.converter;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.http.HttpStatus;
import org.opengroup.osdu.core.common.logging.JaxRsDpsLog;
import org.opengroup.osdu.core.common.model.http.AppException;
import org.opengroup.osdu.core.common.search.Preconditions;
import org.opengroup.osdu.indexer.schema.converter.config.SchemaConverterConfig;
import org.opengroup.osdu.indexer.schema.converter.interfaces.SchemaToStorageFormat;
import org.opengroup.osdu.indexer.schema.converter.tags.PropertiesData;
import org.opengroup.osdu.indexer.schema.converter.tags.SchemaRoot;
import org.springframework.stereotype.Component;
import javax.inject.Inject;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Converts schemas from the Schema Service format to the Storage Service format.
*/
@Component
public class SchemaToStorageFormatImpl implements SchemaToStorageFormat {
private ObjectMapper objectMapper;
private JaxRsDpsLog log;
private SchemaConverterConfig schemaConverterConfig;
@Inject
public SchemaToStorageFormatImpl(ObjectMapper objectMapper, JaxRsDpsLog log, SchemaConverterConfig schemaConverterConfig) {
Preconditions.checkNotNull(objectMapper, "objectMapper cannot be null");
this.objectMapper = objectMapper;
this.log = log;
this.schemaConverterConfig = schemaConverterConfig;
}
@Override
public String convertToString(final String schemaServiceFormat, String kind) {
    Preconditions.checkNotNullOrEmpty(schemaServiceFormat, "schemaServiceFormat cannot be null or empty");
    Preconditions.checkNotNullOrEmpty(kind, "kind cannot be null or empty");
    return saveJsonToString(convert(parseJsonString(schemaServiceFormat), kind));
}
public Map<String, Object> convertToMap(final String schemaServiceFormat, String kind) {
    Preconditions.checkNotNullOrEmpty(schemaServiceFormat, "schemaServiceFormat cannot be null or empty");
    Preconditions.checkNotNullOrEmpty(kind, "kind cannot be null or empty");
    return convert(parseJsonString(schemaServiceFormat), kind);
}
private SchemaRoot parseJsonString(final String schemaServiceFormat) {
    try {
        return objectMapper.readValue(schemaServiceFormat, SchemaRoot.class);
    } catch (JsonProcessingException e) {
        throw new AppException(HttpStatus.SC_BAD_REQUEST, "Schema loading error", "Failed to load schema", e);
    }
}
private String saveJsonToString(final Map<String, Object> schemaServiceFormat) {
try {
return objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(schemaServiceFormat);
} catch (JsonProcessingException e) {
throw new AppException(HttpStatus.SC_UNPROCESSABLE_ENTITY, "Saving JSON error", "Failed to save a JSON file", e);
}
}
private Map<String, Object> convert(SchemaRoot schemaServiceSchema, String kind) {
    Preconditions.checkNotNull(schemaServiceSchema, "schemaServiceSchema cannot be null");
    Preconditions.checkNotNullOrEmpty(kind, "kind cannot be null or empty");
PropertiesProcessor propertiesProcessor = new PropertiesProcessor(schemaServiceSchema.getDefinitions(), log, schemaConverterConfig);
final List<Map<String, Object>> storageSchemaItems = new ArrayList<>();
if (schemaServiceSchema.getProperties() != null) {
    PropertiesData schemaData = schemaServiceSchema.getProperties().getData();
    if (Objects.nonNull(schemaData)) {
        if (schemaData.getAllOf() != null) {
            storageSchemaItems.addAll(schemaData.getAllOf().stream()
                    .flatMap(propertiesProcessor::processItem)
                    .collect(Collectors.toList()));
        }
        if (schemaData.getRef() != null) {
            storageSchemaItems.addAll(propertiesProcessor.processRef(schemaData.getRef())
                    .collect(Collectors.toList()));
        }
    }
} else {
    log.warning("Schema doesn't have properties, kind: " + kind);
}
final Map<String, Object> result = new LinkedHashMap<>();
result.put("kind", kind);
result.put("schema", storageSchemaItems);
return result;
}
}
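A minimal usage sketch of the converter above, assuming a `JaxRsDpsLog` instance named `log` is in scope (Spring injects these beans in the service) and that the `tags` classes bind the standard JSON Schema field names; the kind used here is only an example:

```java
ObjectMapper mapper = new ObjectMapper();
SchemaToStorageFormat converter =
        new SchemaToStorageFormatImpl(mapper, log, new SchemaConverterPropertiesConfig());

// A trimmed-down Schema Service payload with a single string property.
String schemaServiceSchema =
        "{\"properties\":{\"data\":{\"allOf\":[{\"properties\":"
      + "{\"FacilityName\":{\"type\":\"string\"}}}]}}}";

// Expected storage-format result:
// {"kind":"osdu:wks:well:1.0.0","schema":[{"kind":"string","path":"FacilityName"}]}
String storageSchema = converter.convertToString(schemaServiceSchema, "osdu:wks:well:1.0.0");
```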
package org.opengroup.osdu.indexer.schema.converter.config;
import java.util.Map;
import java.util.Set;
/*
Provides configuration for the schema converter
*/
public interface SchemaConverterConfig {
Set<String> getSkippedDefinitions();
Set<String> getSupportedArrayTypes();
Map<String, String> getSpecialDefinitionsMap();
Map<String, String> getPrimitiveTypesMap();
}
package org.opengroup.osdu.indexer.schema.converter.config;
import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import java.util.*;
@Configuration
@ConfigurationProperties(prefix = "schema.converter")
@Getter
@Setter
public class SchemaConverterPropertiesConfig implements SchemaConverterConfig {
private Set<String> skippedDefinitions = getDefaultSkippedDefinitions();
private Set<String> supportedArrayTypes = getDefaultSupportedArrayTypes();
private Map<String, String> specialDefinitionsMap = getDefaultSpecialDefinitionsMap();
private Map<String, String> primitiveTypesMap = getDefaultPrimitiveTypesMap();
private Set<String> getDefaultSkippedDefinitions() {
return new HashSet<>(Arrays.asList("AbstractAnyCrsFeatureCollection.1.0.0",
"anyCrsGeoJsonFeatureCollection"));
}
private Set<String> getDefaultSupportedArrayTypes() {
return new HashSet<>(Arrays.asList("boolean", "integer", "number", "string"));
}
private Map<String, String> getDefaultSpecialDefinitionsMap() {
Map<String, String> defaultSpecialDefinitions = new HashMap<>();
defaultSpecialDefinitions.put("AbstractFeatureCollection.1.0.0", "core:dl:geoshape:1.0.0");
defaultSpecialDefinitions.put("core_dl_geopoint", "core:dl:geopoint:1.0.0");
defaultSpecialDefinitions.put("geoJsonFeatureCollection", "core:dl:geoshape:1.0.0");
return defaultSpecialDefinitions;
}
private Map<String, String> getDefaultPrimitiveTypesMap() {
Map<String, String> defaultPrimitiveTypesMap = new HashMap<>();
defaultPrimitiveTypesMap.put("boolean", "bool");
defaultPrimitiveTypesMap.put("number", "double");
defaultPrimitiveTypesMap.put("date-time", "datetime");
defaultPrimitiveTypesMap.put("date", "datetime");
defaultPrimitiveTypesMap.put("time", "datetime");
defaultPrimitiveTypesMap.put("int32", "int");
defaultPrimitiveTypesMap.put("integer", "int");
defaultPrimitiveTypesMap.put("int64", "long");
return defaultPrimitiveTypesMap;
}
}
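Since the class is bound with `@ConfigurationProperties(prefix = "schema.converter")`, the defaults above should be overridable through standard Spring Boot relaxed binding; a hypothetical `application.properties` sketch (keys mirror the field names, values are examples, not shipped defaults):

```properties
# Hypothetical overrides; keys follow Spring Boot relaxed binding.
schema.converter.skipped-definitions=AbstractAnyCrsFeatureCollection.1.0.0,anyCrsGeoJsonFeatureCollection
schema.converter.primitive-types-map.number=double
schema.converter.primitive-types-map.int32=int
```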
// Copyright 2017-2020, Schlumberger
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.opengroup.osdu.indexer.schema.converter.interfaces;
public interface SchemaToStorageFormat {
String convertToString(String schemaServiceFormat, String kind);
}
Schema Service schema conversion
=================================
Purpose
-------
The purpose of this document is to describe schema conversion from the
Schema Service format to the Storage Service format.
The Storage Service schema has the following JSON format
---------------------------------------------------------
```json
{
"kind": "<kind>",
"schema": [
{
"kind": "<type>",
"path": "<path>"
},