Commit e20b6f24 authored by Luc Yriarte

SLB code push 2

parent 4d3f9806
......@@ -174,4 +174,4 @@
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
\ No newline at end of file
END OF TERMS AND CONDITIONS
......@@ -4,7 +4,7 @@ is a Python client library for the open data ecosystem storage service.
Storage service which handles the metadata ingestion in the Data Ecosystem
This Python package is automatically generated with fastapi_client
from the Swagger API definition, converted to an OpenAPI 3.0 specification using swagger2openapi
## Requirements
Python 3.7
......@@ -61,4 +61,4 @@ rm -rf ./odes_storage & \
## Author
clallement@slb.com
\ No newline at end of file
clallement@slb.com
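A quick orientation sketch for the generated client described in the README above, assuming odes_storage has already been installed locally (the install step is not shown in this excerpt); the host and token values are placeholders, not working credentials.

from odes_storage import AsyncApis, AuthApiClient

# Placeholder configuration for illustration only; a real host and token are
# required for any actual request to succeed.
client = AsyncApis(AuthApiClient(host="https://example.invalid/api/storage", token="<access-token>"))

# The generated client groups endpoints by tag, e.g. client.records_api as used
# in the example script further down in this commit.
print(type(client.records_api))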
......@@ -873,6 +873,12 @@
}
}
},
"Map":{
"type":"object",
"additionalProperties":{
"type":"object"
}
},
"Record" : {
"type" : "object",
"required" : [ "acl", "data", "kind", "legal" ],
......@@ -900,6 +906,12 @@
"description" : "Attributes which represent the legal constraints associated with the record.",
"$ref" : "#/definitions/Legal"
},
"meta" : {
"type":"array",
"items":{
"$ref":"#/definitions/Map"
}
},
"data" : {
"type" : "object",
"description" : "Record payload represented as a list of key-value pairs.",
......@@ -1058,4 +1070,4 @@
"allowCors" : "true",
"name" : "os-storage-dot-opendes.appspot.com"
} ]
}
\ No newline at end of file
}
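The hunks above add a generic Map definition (an object with arbitrary object values) and an optional meta array of Map entries to Record. Below is a purely illustrative Python fragment showing only the new field; the keys inside each meta entry are hypothetical, since Map allows any object, and the required acl/data/kind/legal fields of a full record are omitted.

# Fragment illustrating the new optional "meta" field only; not a complete Record.
meta_fragment = {
    "meta": [
        # Hypothetical frame-of-reference entry; any key/value object is allowed by "Map".
        {"kind": "Unit", "persistableReference": "<reference-json>", "propertyNames": ["depth"]},
    ]
}
print(meta_fragment)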
from odes_storage import AsyncApis as StorageApi, AuthApiClient as StorageClient
import asyncio
from pprint import pprint
from odes_storage import AsyncApis as StorageApi
from odes_storage import AuthApiClient as StorageClient
from odes_storage.models import (
CreateUpdateRecordsResponse,
DatastoreQueryResult,
......@@ -7,11 +11,6 @@ from odes_storage.models import (
StorageAcl,
)
import asyncio
from pprint import pprint
HOSTNAME = "https://os-storage-dot-opendes.appspot.com/api/storage"
TOKEN = "eyJhbGciOiJSUzI1NiIsImtpZCI6ImY5ZDk3YjRjYWU5MGJjZDc2YWViMjAwMjZmNmI3NzBjYWMyMjE3ODMiLCJ0eXAiOiJKV1QifQ.eyJpc3MiOiJodHRwczovL2FjY291bnRzLmdvb2dsZS5jb20iLCJhenAiOiI0ODk1NzIxMDc2OTUtb2FjajI0ZnRlNWExN3JtM2VsZ2lsaGJuNDljaWVnaG8uYXBwcy5nb29nbGV1c2VyY29udGVudC5jb20iLCJhdWQiOiI0ODk1NzIxMDc2OTUtb2FjajI0ZnRlNWExN3JtM2VsZ2lsaGJuNDljaWVnaG8uYXBwcy5nb29nbGV1c2VyY29udGVudC5jb20iLCJzdWIiOiIxMDM0MzY5NjA1NjQ3Njk2ODQzMzUiLCJoZCI6Im9wZW5kZXMucDRkLmNsb3VkLnNsYi1kcy5jb20iLCJlbWFpbCI6ImNsYWxsZW1lbnRAb3BlbmRlcy5wNGQuY2xvdWQuc2xiLWRzLmNvbSIsImVtYWlsX3ZlcmlmaWVkIjp0cnVlLCJhdF9oYXNoIjoiV1RBb1lVLTI5YTU3QmpOaV9IajNndyIsImlhdCI6MTU4NzU0MDI3NCwiZXhwIjoxNTg3NTQzODc0fQ.l3OljSD9N5FhNRbj8E8Gd3dyDj8Qi40r6W4e473tfTvGHzuE6QLpm5Yhp-l43HHHcOt_Gnbm-hEq6l9XXyx4yGAhna8Wp9vgz2HRlQDcXAB1WeYkBhzJ2PCu8QrT_cqahPzH8pqN8mNuTpEzJTVKZQJIm0Crs9DVz2XyIvCaUxX9lEqHKIzCVeRYjdoBKundRw4WKVXtrLd4ZX56fP9s06aUmHdrdN8BFmSn1JS2I8XNtPHarv_kKiItWbZIjWkjvM_OOCwx38oZUztTBq9W-uMh-nIT59PGj7e_RxtGUiEBlvkZroKvHg1wF1WcPOBQ5-EvW1oU19fjWkhyfbuQFQ"
DATA_PARTITION_ID = "opendes"
......@@ -31,21 +30,16 @@ async def main():
data={"msg": "hello world, from Data Ecosystem"},
)
client = StorageApi(
StorageClient(host=HOSTNAME, token=TOKEN)
)
client = StorageApi(StorageClient(host=HOSTNAME, token=TOKEN))
created_record_response = await client.records_api.create_or_update_records(
data_partition_id=DATA_PARTITION_ID,
skipdupes=False,
record=[record],
data_partition_id=DATA_PARTITION_ID, skipdupes=False, record=[record],
)
assert isinstance(created_record_response, CreateUpdateRecordsResponse)
assert created_record_response.record_ids[0]
computed_record = await client.records_api.get_record(
data_partition_id=DATA_PARTITION_ID,
id=created_record_response.record_ids[0],
data_partition_id=DATA_PARTITION_ID, id=created_record_response.record_ids[0],
)
assert isinstance(computed_record, Record)
......@@ -54,7 +48,6 @@ async def main():
assert record.legal == computed_record.legal
assert record.data == computed_record.data
pprint(computed_record)
......
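The example above stops inside the body of main(); the script's entry point is not visible in this diff. A minimal, self-contained sketch of driving such a coroutine on Python 3.7 (the stated requirement) follows. The query_api attribute name is an assumption modelled on the records_api attribute used above, and the host/token values are placeholders.

import asyncio

from odes_storage import AsyncApis as StorageApi, AuthApiClient as StorageClient

# Placeholder configuration; real values are required for the call to succeed.
HOSTNAME = "https://example.invalid/api/storage"
TOKEN = "<access-token>"
DATA_PARTITION_ID = "opendes"


async def main() -> None:
    client = StorageApi(StorageClient(host=HOSTNAME, token=TOKEN))
    # List the kinds visible in the partition (see the query API later in this commit).
    kinds = await client.query_api.get_all_kinds(data_partition_id=DATA_PARTITION_ID)
    print(kinds)


if __name__ == "__main__":
    # asyncio.run is available from Python 3.7 onwards.
    asyncio.run(main())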
#!/bin/bash
# Install Swagger Generator
npm install -g swagger2openapi
# Clone the repository for the client generator by Christophe Lallement
git clone https://github.com/ChristopheLallement/fastapi_client.git
# Convert from swagger spec to openapi spec
swagger2openapi curated-swagger-storage.json -y --outfile openapi-storage.yaml
# Generate
rm -rf ./odes_storage && \
./fastapi_client/scripts/generate.sh \
--include-auth \
-i openapi-storage.yaml \
-p odes_storage \
-n odes_storage \
-o ./
# For now the formatter for the generator doesn't work
# Uncomment the following lines to format the generated files
# pip install isort black
# isort */**.py
# black .
\ No newline at end of file
......@@ -25,7 +25,11 @@ class _QueryApi:
body = jsonable_encoder(multi_record_ids)
return self.api_client.request(
type_=m.MultiRecordInfo, method="POST", url="/v2/query/records", headers=headers, json=body
type_=m.MultiRecordInfo,
method="POST",
url="/v2/query/records",
headers=headers,
json=body,
)
def _build_for_fetch_records_with_optional_conversion(
......@@ -39,7 +43,11 @@ class _QueryApi:
body = jsonable_encoder(multi_record_request)
return self.api_client.request(
type_=m.MultiRecordResponse, method="POST", url="/v2/query/records:batch", headers=headers, json=body
type_=m.MultiRecordResponse,
method="POST",
url="/v2/query/records:batch",
headers=headers,
json=body,
)
def _build_for_get_all_kinds(
......@@ -57,11 +65,19 @@ class _QueryApi:
headers = {"data-partition-id": str(data_partition_id)}
return self.api_client.request(
type_=m.DatastoreQueryResult, method="GET", url="/v2/query/kinds", params=query_params, headers=headers,
type_=m.DatastoreQueryResult,
method="GET",
url="/v2/query/kinds",
params=query_params,
headers=headers,
)
def _build_for_get_all_record_from_kind(
self, data_partition_id: str, cursor: str = None, limit: int = None, kind: str = None
self,
data_partition_id: str,
cursor: str = None,
limit: int = None,
kind: str = None,
) -> Awaitable[m.DatastoreQueryResult]:
"""
The API returns a list of all record ids which belong to the specified kind. Required roles: 'users.datalake.ops'.
......@@ -77,11 +93,21 @@ class _QueryApi:
headers = {"data-partition-id": str(data_partition_id)}
return self.api_client.request(
type_=m.DatastoreQueryResult, method="GET", url="/v2/query/records", params=query_params, headers=headers,
type_=m.DatastoreQueryResult,
method="GET",
url="/v2/query/records",
params=query_params,
headers=headers,
)
class AsyncQueryApi(_QueryApi):
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.api_client.close()
async def fetch_records(
self, data_partition_id: str, multi_record_ids: m.MultiRecordIds = None
) -> m.MultiRecordInfo:
......@@ -99,7 +125,8 @@ class AsyncQueryApi(_QueryApi):
Fetch records and do corresponding conversion as user requested, no more than 20 records per request.
"""
return await self._build_for_fetch_records_with_optional_conversion(
data_partition_id=data_partition_id, multi_record_request=multi_record_request
data_partition_id=data_partition_id,
multi_record_request=multi_record_request,
)
async def get_all_kinds(
......@@ -108,10 +135,16 @@ class AsyncQueryApi(_QueryApi):
"""
The API returns a list of all kinds in the specific {Data-Partition-Id}. Required roles: 'users.datalake.editors' or 'users.datalake.admins'.
"""
return await self._build_for_get_all_kinds(data_partition_id=data_partition_id, cursor=cursor, limit=limit)
return await self._build_for_get_all_kinds(
data_partition_id=data_partition_id, cursor=cursor, limit=limit
)
async def get_all_record_from_kind(
self, data_partition_id: str, cursor: str = None, limit: int = None, kind: str = None
self,
data_partition_id: str,
cursor: str = None,
limit: int = None,
kind: str = None,
) -> m.DatastoreQueryResult:
"""
The API returns a list of all record ids which belong to the specified kind. Required roles: 'users.datalake.ops'.
......@@ -122,7 +155,9 @@ class AsyncQueryApi(_QueryApi):
class SyncQueryApi(_QueryApi):
def fetch_records(self, data_partition_id: str, multi_record_ids: m.MultiRecordIds = None) -> m.MultiRecordInfo:
def fetch_records(
self, data_partition_id: str, multi_record_ids: m.MultiRecordIds = None
) -> m.MultiRecordInfo:
"""
The API fetches multiple records at once. Required roles: 'users.datalake.viewers' or 'users.datalake.editors' or 'users.datalake.admins'.
"""
......@@ -138,19 +173,28 @@ class SyncQueryApi(_QueryApi):
Fetch records and do corresponding conversion as user requested, no more than 20 records per request.
"""
coroutine = self._build_for_fetch_records_with_optional_conversion(
data_partition_id=data_partition_id, multi_record_request=multi_record_request
data_partition_id=data_partition_id,
multi_record_request=multi_record_request,
)
return get_event_loop().run_until_complete(coroutine)
def get_all_kinds(self, data_partition_id: str, cursor: str = None, limit: int = None) -> m.DatastoreQueryResult:
def get_all_kinds(
self, data_partition_id: str, cursor: str = None, limit: int = None
) -> m.DatastoreQueryResult:
"""
The API returns a list of all kinds in the specific {Data-Partition-Id}. Required roles: 'users.datalake.editors' or 'users.datalake.admins'.
"""
coroutine = self._build_for_get_all_kinds(data_partition_id=data_partition_id, cursor=cursor, limit=limit)
coroutine = self._build_for_get_all_kinds(
data_partition_id=data_partition_id, cursor=cursor, limit=limit
)
return get_event_loop().run_until_complete(coroutine)
def get_all_record_from_kind(
self, data_partition_id: str, cursor: str = None, limit: int = None, kind: str = None
self,
data_partition_id: str,
cursor: str = None,
limit: int = None,
kind: str = None,
) -> m.DatastoreQueryResult:
"""
The API returns a list of all record ids which belong to the specified kind. Required roles: 'users.datalake.ops'.
......
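This commit also adds __aenter__/__aexit__ to the async API classes, closing the underlying api_client when the async with block exits. A rough usage sketch follows; the module path odes_storage.api.query_api is an assumption about the generated layout, and _QueryApi is assumed to take the ApiClient in its constructor, as _RecordsApi does in the hunks below.

import asyncio

from odes_storage import AuthApiClient
from odes_storage.api.query_api import AsyncQueryApi  # module path assumed

# Placeholder configuration; a real host and token are required for the call to succeed.
HOSTNAME = "https://example.invalid/api/storage"
TOKEN = "<access-token>"


async def list_kinds() -> None:
    # The new __aenter__/__aexit__ pair closes the underlying api_client on exit.
    async with AsyncQueryApi(AuthApiClient(host=HOSTNAME, token=TOKEN)) as query_api:
        result = await query_api.get_all_kinds(data_partition_id="opendes")
        print(result)


asyncio.run(list_kinds())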
......@@ -15,7 +15,10 @@ class _RecordsApi:
self.api_client = api_client
def _build_for_create_or_update_records(
self, data_partition_id: str, skipdupes: bool = None, record: List[m.Record] = None
self,
data_partition_id: str,
skipdupes: bool = None,
record: List[m.Record] = None,
) -> Awaitable[m.CreateUpdateRecordsResponse]:
"""
The API represents the main injection mechanism into the Data Ecosystem. It allows records creation and/or update. When no record id is provided or when the provided id is not already present in the Data Ecosystem then a new record is created. If the id is related to an existing record in the Data Ecosystem then an update operation takes place and a new version of the record is created. Required roles: 'users.datalake.editors' or 'users.datalake.admins'.
......@@ -37,7 +40,9 @@ class _RecordsApi:
json=body,
)
def _build_for_delete_record(self, id: str, data_partition_id: str, body: Any = None) -> Awaitable[None]:
def _build_for_delete_record(
self, id: str, data_partition_id: str, body: Any = None
) -> Awaitable[None]:
"""
The API performs a logical deletion of the given record. This operation can be reverted later. Required roles: 'users.datalake.editors' or 'users.datalake.admins'.
"""
......@@ -56,7 +61,9 @@ class _RecordsApi:
json=body,
)
def _build_for_get_all_record_versions(self, id: str, data_partition_id: str) -> Awaitable[m.RecordVersions]:
def _build_for_get_all_record_versions(
self, id: str, data_partition_id: str
) -> Awaitable[m.RecordVersions]:
"""
The API returns a list containing all versions for the given record id. Required roles: 'users.datalake.viewers' or 'users.datalake.editors' or 'users.datalake.admins'.
"""
......@@ -82,7 +89,9 @@ class _RecordsApi:
query_params = {}
if attribute is not None:
query_params["attribute"] = [str(attribute_item) for attribute_item in attribute]
query_params["attribute"] = [
str(attribute_item) for attribute_item in attribute
]
headers = {"data-partition-id": str(data_partition_id)}
......@@ -105,7 +114,9 @@ class _RecordsApi:
query_params = {}
if attribute is not None:
query_params["attribute"] = [str(attribute_item) for attribute_item in attribute]
query_params["attribute"] = [
str(attribute_item) for attribute_item in attribute
]
headers = {"data-partition-id": str(data_partition_id)}
......@@ -118,7 +129,9 @@ class _RecordsApi:
headers=headers,
)
def _build_for_purge_record(self, id: str, data_partition_id: str) -> Awaitable[None]:
def _build_for_purge_record(
self, id: str, data_partition_id: str
) -> Awaitable[None]:
"""
The API performs the physical deletion of the given record and all of its versions. This operation cannot be undone. Required roles: 'users.datalake.ops'.
"""
......@@ -127,13 +140,26 @@ class _RecordsApi:
headers = {"data-partition-id": str(data_partition_id)}
return self.api_client.request(
type_=None, method="DELETE", url="/v2/records/{id}", path_params=path_params, headers=headers,
type_=None,
method="DELETE",
url="/v2/records/{id}",
path_params=path_params,
headers=headers,
)
class AsyncRecordsApi(_RecordsApi):
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.api_client.close()
async def create_or_update_records(
self, data_partition_id: str, skipdupes: bool = None, record: List[m.Record] = None
self,
data_partition_id: str,
skipdupes: bool = None,
record: List[m.Record] = None,
) -> m.CreateUpdateRecordsResponse:
"""
The API represents the main injection mechanism into the Data Ecosystem. It allows records creation and/or update. When no record id is provided or when the provided id is not already present in the Data Ecosystem then a new record is created. If the id is related to an existing record in the Data Ecosystem then an update operation takes place and a new version of the record is created. Required roles: 'users.datalake.editors' or 'users.datalake.admins'.
......@@ -142,23 +168,35 @@ class AsyncRecordsApi(_RecordsApi):
data_partition_id=data_partition_id, skipdupes=skipdupes, record=record
)
async def delete_record(self, id: str, data_partition_id: str, body: Any = None) -> None:
async def delete_record(
self, id: str, data_partition_id: str, body: Any = None
) -> None:
"""
The API performs a logical deletion of the given record. This operation can be reverted later. Required roles: 'users.datalake.editors' or 'users.datalake.admins'.
"""
return await self._build_for_delete_record(id=id, data_partition_id=data_partition_id, body=body)
return await self._build_for_delete_record(
id=id, data_partition_id=data_partition_id, body=body
)
async def get_all_record_versions(self, id: str, data_partition_id: str) -> m.RecordVersions:
async def get_all_record_versions(
self, id: str, data_partition_id: str
) -> m.RecordVersions:
"""
The API returns a list containing all versions for the given record id. Required roles: 'users.datalake.viewers' or 'users.datalake.editors' or 'users.datalake.admins'.
"""
return await self._build_for_get_all_record_versions(id=id, data_partition_id=data_partition_id)
return await self._build_for_get_all_record_versions(
id=id, data_partition_id=data_partition_id
)
async def get_record(self, id: str, data_partition_id: str, attribute: List[str] = None) -> m.Record:
async def get_record(
self, id: str, data_partition_id: str, attribute: List[str] = None
) -> m.Record:
"""
This API returns the latest version of the given record. Required roles: 'users.datalake.viewers' or 'users.datalake.editors' or 'users.datalake.admins'.
"""
return await self._build_for_get_record(id=id, data_partition_id=data_partition_id, attribute=attribute)
return await self._build_for_get_record(
id=id, data_partition_id=data_partition_id, attribute=attribute
)
async def get_record_version(
self, id: str, version: int, data_partition_id: str, attribute: List[str] = None
......@@ -167,19 +205,27 @@ class AsyncRecordsApi(_RecordsApi):
The API retrieves the specific version of the given record. Required roles: 'users.datalake.viewers' or 'users.datalake.editors' or 'users.datalake.admins'.
"""
return await self._build_for_get_record_version(
id=id, version=version, data_partition_id=data_partition_id, attribute=attribute
id=id,
version=version,
data_partition_id=data_partition_id,
attribute=attribute,
)
async def purge_record(self, id: str, data_partition_id: str) -> None:
"""
The API performs the physical deletion of the given record and all of its versions. This operation cannot be undone. Required roles: 'users.datalake.ops'.
"""
return await self._build_for_purge_record(id=id, data_partition_id=data_partition_id)
return await self._build_for_purge_record(
id=id, data_partition_id=data_partition_id
)
class SyncRecordsApi(_RecordsApi):
def create_or_update_records(
self, data_partition_id: str, skipdupes: bool = None, record: List[m.Record] = None
self,
data_partition_id: str,
skipdupes: bool = None,
record: List[m.Record] = None,
) -> m.CreateUpdateRecordsResponse:
"""
The API represents the main injection mechanism into the Data Ecosystem. It allows records creation and/or update. When no record id is provided or when the provided id is not already present in the Data Ecosystem then a new record is created. If the id is related to an existing record in the Data Ecosystem then an update operation takes place and a new version of the record is created. Required roles: 'users.datalake.editors' or 'users.datalake.admins'.
......@@ -193,21 +239,31 @@ class SyncRecordsApi(_RecordsApi):
"""
The API performs a logical deletion of the given record. This operation can be reverted later. Required roles: 'users.datalake.editors' or 'users.datalake.admins'.
"""
coroutine = self._build_for_delete_record(id=id, data_partition_id=data_partition_id, body=body)
coroutine = self._build_for_delete_record(
id=id, data_partition_id=data_partition_id, body=body
)
return get_event_loop().run_until_complete(coroutine)
def get_all_record_versions(self, id: str, data_partition_id: str) -> m.RecordVersions:
def get_all_record_versions(
self, id: str, data_partition_id: str
) -> m.RecordVersions:
"""
The API returns a list containing all versions for the given record id. Required roles: 'users.datalake.viewers' or 'users.datalake.editors' or 'users.datalake.admins'.
"""
coroutine = self._build_for_get_all_record_versions(id=id, data_partition_id=data_partition_id)
coroutine = self._build_for_get_all_record_versions(
id=id, data_partition_id=data_partition_id
)
return get_event_loop().run_until_complete(coroutine)
def get_record(self, id: str, data_partition_id: str, attribute: List[str] = None) -> m.Record:
def get_record(
self, id: str, data_partition_id: str, attribute: List[str] = None
) -> m.Record:
"""
This API returns the latest version of the given record. Required roles: 'users.datalake.viewers' or 'users.datalake.editors' or 'users.datalake.admins'.
"""
coroutine = self._build_for_get_record(id=id, data_partition_id=data_partition_id, attribute=attribute)
coroutine = self._build_for_get_record(
id=id, data_partition_id=data_partition_id, attribute=attribute
)
return get_event_loop().run_until_complete(coroutine)
def get_record_version(
......@@ -217,7 +273,10 @@ class SyncRecordsApi(_RecordsApi):
The API retrieves the specific version of the given record. Required roles: 'users.datalake.viewers' or 'users.datalake.editors' or 'users.datalake.admins'.
"""
coroutine = self._build_for_get_record_version(
id=id, version=version, data_partition_id=data_partition_id, attribute=attribute
id=id,
version=version,
data_partition_id=data_partition_id,
attribute=attribute,
)
return get_event_loop().run_until_complete(coroutine)
......@@ -225,5 +284,7 @@ class SyncRecordsApi(_RecordsApi):
"""
The API performs the physical deletion of the given record and all of its versions. This operation cannot be undone. Required roles: 'users.datalake.ops'.
"""
coroutine = self._build_for_purge_record(id=id, data_partition_id=data_partition_id)
coroutine = self._build_for_purge_record(
id=id, data_partition_id=data_partition_id
)
return get_event_loop().run_until_complete(coroutine)
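The Sync* classes above wrap the same coroutine builders and block on them with get_event_loop().run_until_complete. A blocking sketch of the records API, with the module path assumed and placeholder identifiers:

from odes_storage import AuthApiClient
from odes_storage.api.records_api import SyncRecordsApi  # module path assumed

# Placeholder values; a real host, token and record id are required for the call to succeed.
client = AuthApiClient(host="https://example.invalid/api/storage", token="<access-token>")
records_api = SyncRecordsApi(client)

# Blocking call: SyncRecordsApi drives the coroutine with get_event_loop().run_until_complete.
versions = records_api.get_all_record_versions(
    id="opendes:demo:example-record-id",  # hypothetical record id
    data_partition_id="opendes",
)
print(versions)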
......@@ -14,7 +14,9 @@ class _SchemasApi:
def __init__(self, api_client: "ApiClient"):
self.api_client = api_client
def _build_for_create_schema(self, data_partition_id: str, schema: m.Schema = None) -> Awaitable[None]:
def _build_for_create_schema(
self, data_partition_id: str, schema: m.Schema = None
) -> Awaitable[None]:
"""
The API allows the creation of a new schema for the given kind. Required roles: 'users.datalake.editors' or 'users.datalake.admins'.
"""
......@@ -22,9 +24,13 @@ class _SchemasApi:
body = jsonable_encoder(schema)
return self.api_client.request(type_=None, method="POST", url="/v2/schemas", headers=headers, json=body)
return self.api_client.request(
type_=None, method="POST", url="/v2/schemas", headers=headers, json=body
)
def _build_for_delete_a_schema(self, kind: str, data_partition_id: str) -> Awaitable[None]:
def _build_for_delete_a_schema(
self, kind: str, data_partition_id: str
) -> Awaitable[None]:
"""
The API deletes the schema of the given kind, which must follow the naming convention {Data-Partition-Id}:{dataset}:{type}:{version}. This operation cannot be undone. Required roles: 'users.datalake.ops'.
"""
......@@ -33,10 +39,16 @@ class _SchemasApi:
headers = {"data-partition-id": str(data_partition_id)}
return self.api_client.request(
type_=None, method="DELETE", url="/v2/schemas/{kind}", path_params=path_params, headers=headers,
type_=None,
method="DELETE",
url="/v2/schemas/{kind}",
path_params=path_params,
headers=headers,
)
def _build_for_get_schema(self, kind: str, data_partition_id: str) -> Awaitable[m.Schema]:
def _build_for_get_schema(
self, kind: str, data_partition_id: str
) -> Awaitable[m.Schema]:
"""
The API returns the schema specified by the given kind, which must follow the naming convention {Data-Partition-Id}:{dataset}:{type}:{version}. Required roles: 'users.datalake.viewers' or 'users.datalake.editors' or 'users.datalake.admins'.
"""
......@@ -45,28 +57,46 @@ class _SchemasApi:
headers = {"data-partition-id": str(data_partition_id)}
return self.api_client.request(
type_=m.Schema, method="GET", url="/v2/schemas/{kind}", path_params=path_params, headers=headers,
type_=m.Schema,
method="GET",
url="/v2/schemas/{kind}",
path_params=path_params,
headers=headers,
)
class AsyncSchemasApi(_SchemasApi):
async def create_schema(self, data_partition_id: str, schema: m.Schema = None) -> None:
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.api_client.close()
async def create_schema(
self, data_partition_id: str, schema: m.Schema = None
) -> None:
"""
The API allows the creation of a new schema for the given kind. Required roles: 'users.datalake.editors' or 'users.datalake.admins'.
"""
return await self._build_for_create_schema(data_partition_id=data_partition_id, schema=schema)
return await self._build_for_create_schema(
data_partition_id=data_partition_id, schema=schema
)
async def delete_a_schema(self, kind: str, data_partition_id: str) -> None:
"""
The API deletes the schema of the given kind, which must follow the naming convention {Data-Partition-Id}:{dataset}:{type}:{version}. This operation cannot be undone. Required roles: 'users.datalake.ops'.
"""
return await self._build_for_delete_a_schema(kind=kind, data_partition_id=data_partition_id)
return await self._build_for_delete_a_schema(
kind=kind, data_partition_id=data_partition_id
)
async def get_schema(self, kind: str, data_partition_id: str) -> m.Schema:
"""
The API returns the schema specified by the given kind, which must follow the naming convention {Data-Partition-Id}:{dataset}:{type}:{version}. Required roles: 'users.datalake.viewers' or 'users.datalake.editors' or 'users.datalake.admins'.
"""
return await self._build_for_get_schema(kind=kind, data_partition_id=data_partition_id)
return await self._build_for_get_schema(
kind=kind, data_partition_id=data_partition_id
)
class SyncSchemasApi(_SchemasApi):
......@@ -74,19 +104,25 @@ class SyncSchemasApi(_SchemasApi):
"""
The API allows the creation of a new schema for the given kind. Required roles: 'users.datalake.editors' or 'users.datalake.admins'.
"""
coroutine = self._build_for_create_schema(data_partition_id=data_partition_id, schema=schema)
coroutine = self._build_for_create_schema(
data_partition_id=data_partition_id, schema=schema
)
return get_event_loop().run_until_complete(coroutine)
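As with the other async classes in this commit, AsyncSchemasApi gains __aenter__/__aexit__. A rough sketch of fetching a schema through it; the module path is assumed, the kind is a made-up example following the {Data-Partition-Id}:{dataset}:{type}:{version} convention, and the host/token are placeholders.

import asyncio

from odes_storage import AuthApiClient
from odes_storage.api.schemas_api import AsyncSchemasApi  # module path assumed

# Placeholder configuration; a real host and token are required for the call to succeed.
HOSTNAME = "https://example.invalid/api/storage"
TOKEN = "<access-token>"
KIND = "opendes:demo:example:1.0.0"  # hypothetical kind following the convention above


async def show_schema() -> None:
    # The new __aenter__/__aexit__ pair closes the underlying api_client on exit.
    async with AsyncSchemasApi(AuthApiClient(host=HOSTNAME, token=TOKEN)) as schemas_api:
        schema = await schemas_api.get_schema(kind=KIND, data_partition_id="opendes")
        print(schema)


asyncio.run(show_schema())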