Commit 14654201 authored by Luc Yriarte

Merge branch 'slb_code_push-wip' into 'master'

Slb code push

See merge request !3
parents c65a23c5 230c14bc
[bumpversion]
current_version = 0.2.4
commit = True
tag = False
[bumpversion:file:osdu_gcp/__init__.py]
default:
image: python:3.7-slim-buster
stages:
- test
- deploy
build:
stage: test
script:
- echo ---- ---- ---- SYSTEM DEPENDENCIES ---- ---- ----
- apt update
- apt install -y --no-install-recommends git
- echo ---- ---- ---- BUILD IMAGE ---- ---- ----
- pip3 install -r requirements.txt
- pip3 install -r requirements_opengroup.txt
- pip3 install -r requirements_dev.txt
- pytest tests --junitxml=report.xml
artifacts:
when: always
reports:
junit: report.xml
# This job only runs on master; it builds the lib and pushes it to the package feed
deploylib:
stage: deploy
script:
- echo ---- ---- ---- SYSTEM DEPENDENCIES ---- ---- ----
- apt update
- apt install -y --no-install-recommends git
- echo ---- ---- ---- BUILD IMAGE ---- ---- ----
- pip3 install -r requirements.txt
- pip3 install -r requirements_opengroup.txt
- pip3 install twine
- python3 setup.py sdist bdist_wheel
- TWINE_PASSWORD=${CI_JOB_TOKEN} TWINE_USERNAME=${CI_REGISTRY_USER} python -m twine upload --repository-url ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi dist/*
rules:
- if: $CI_COMMIT_BRANCH == 'master'
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
@@ -173,29 +174,4 @@
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2020 Open Subsurface Data Universe Software / Platform / Domain Data Mgmt Services / Wellbore / Lib / Wellbore-cloud
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
END OF TERMS AND CONDITIONS
\ No newline at end of file
# Copyright 2020 Schlumberger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.2.4'
# MUST NOT BE CHANGED MANUALLY
# The patch number (the last one) should only be updated by the build/deploy script, never manually.
# To increase the minor or major version:
# $> pip install -U bumpversion
# $> bumpversion major | minor
# then $> git push --tag
# (may fail on Windows; if so, try without commit: bumpversion <part> --no-commit)
__version__ = '0.3.0'
# Copyright 2020 Schlumberger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -12,6 +12,7 @@ from aiohttp import ClientResponseError
from osdu.core.api.storage.blob_storage_base import BlobStorageBase
from osdu.core.api.storage.blob import Blob
from .auth_gcp_sa import GoogleAccountAuth
from osdu.core.api.storage.tenant import Tenant
try:
@@ -57,9 +58,53 @@ class GCloudAioStorage(BlobStorageBase):
return await tenant_access_token.token()
async def delete(self, project: str, bucket: str, object_name: str,
*, auth: Optional = None, timeout: int = 10, params: dict = None, **kwargs):
@classmethod
def build_URI(cls, bucket: str, object_name: str) -> str:
return f'gs://{bucket}/{object_name}'
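For reference, build_URI simply composes the canonical gs:// path; a minimal sketch of hypothetical usage (bucket and object names are examples, and GCloudAioStorage is assumed imported as in the tests below):
# Editor's sketch, not part of the diff.
uri = GCloudAioStorage.build_URI('testing-osdu-core', 'records/1234.json')
assert uri == 'gs://testing-osdu-core/records/1234.json'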
async def list_objects(self, tenant: Tenant,
*args, auth: Optional = None, prefix: str = '', page_token: Optional[str] = None,
max_result: Optional[int] = None, timeout: int = 10, **kwargs) -> List[str]:
"""
list all object within a bucket
:param auth: auth obj to perform the operation
:param tenant: tenant info
:param prefix: Filter results to objects whose names begin with this prefix
:param page_token: A previously-returned page token representing part of the larger set of results to view.
:param max_result: Maximum number of items to return.
:param timeout: timeout
:param kwargs:
:return: list of blob names
"""
project = tenant.project_id
bucket = tenant.bucket_name
url = f'{API_ROOT}/{bucket}/o'
params = {'prefix': prefix}
if page_token is not None:
params['pageToken'] = page_token
if max_result is not None:
params['maxResults'] = max_result
data, _ = await self._client.get(url, response_type=ResponseType.JSON, params=params or {}, timeout=timeout,
auth_token=await self._get_access_token(project, bucket, auth))
return [x['name'] for x in data.get('items', list())]
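A minimal sketch of calling list_objects, assuming an initialized GCloudAioStorage `storage` and a Tenant `tenant` like the fixture in the tests below; note that only names are returned and the response's nextPageToken is not surfaced, so a page_token would have to be obtained out of band:

async def print_names(storage: GCloudAioStorage, tenant: Tenant) -> None:
    # Editor's sketch; the prefix and max_result values are examples.
    names = await storage.list_objects(tenant, prefix='records/', max_result=100)
    for name in names:
        print(name)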
async def delete(self, tenant: Tenant, object_name: str,
*args, auth: Optional = None, timeout: int = 10, params: dict = None, **kwargs):
"""
delete an object
:param auth: auth obj to perform the operation
:param tenant: tenant info
:param object_name:
:param timeout:
:param kwargs:
:return:
"""
# https://cloud.google.com/storage/docs/json_api/#encoding
project = tenant.project_id
bucket = tenant.bucket_name
encoded_object_name = quote(object_name, safe='')
url = f'{API_ROOT}/{bucket}/o/{encoded_object_name}'
token = await self._get_access_token(project, bucket, auth)
@@ -68,12 +113,35 @@ class GCloudAioStorage(BlobStorageBase):
timeout=timeout,
auth_token=token)
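The encoding rule referenced above matters because object names may contain '/', which must be percent-encoded in the JSON API path; a quick illustration (the object name is an example):

from urllib.parse import quote

print(quote('folder/file 1.txt', safe=''))  # -> folder%2Ffile%201.txt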
async def download(self, project: str, bucket: str, object_name: str,
*, auth: Optional = None, timeout: int = 10, **kwargs) -> bytes:
async def download(self, tenant: Tenant, object_name: str,
*args, auth: Optional = None, timeout: int = 10, **kwargs) -> bytes:
"""
download blob data
:param auth: auth obj to perform the operation
:param tenant: tenant info
:param object_name:
:param timeout:
:param kwargs:
:return:
"""
project = tenant.project_id
bucket = tenant.bucket_name
return await self._download(project, bucket, object_name, auth=auth, timeout=timeout, params={'alt': 'media'})
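The alt=media query parameter makes the JSON API return the object's content rather than its resource metadata; a sketch of hypothetical usage, assuming `storage` and `tenant` as in the tests below:

async def read_text(storage: GCloudAioStorage, tenant: Tenant, name: str) -> str:
    data = await storage.download(tenant, name)  # raw object bytes (alt=media)
    return data.decode('utf-8')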
async def download_metadata(self, project: str, bucket: str, object_name: str,
*, auth: Optional = None, timeout: int = 10, ** kwargs) -> Blob:
async def download_metadata(self, tenant: Tenant, object_name: str,
*args, auth: Optional = None, timeout: int = 10, **kwargs) -> Blob:
"""
download blob data
:param auth: auth obj to perform the operation
:param tenant: tenant info
:param object_name:
:param timeout:
:param kwargs:
:return: blob
"""
project = tenant.project_id
bucket = tenant.bucket_name
data = await self._download(project, bucket, object_name, auth=auth, timeout=timeout)
metadata: dict = json.loads(data.decode())
return Blob(identifier=metadata.get('id', None),
@@ -87,29 +155,24 @@ class GCloudAioStorage(BlobStorageBase):
size=metadata.get('size', -1)
)
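A sketch of reading the parsed metadata, assuming Blob exposes its constructor kwargs (identifier, size, ...) as attributes; `storage` and `tenant` are assumed as above:

async def show_blob(storage: GCloudAioStorage, tenant: Tenant, name: str) -> None:
    blob = await storage.download_metadata(tenant, name)
    print(blob.identifier, blob.size)  # size falls back to -1 when absent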
async def list_objects(self, project: str, bucket: str, *,
auth: Optional = None, prefix: str = '', page_token: Optional[str] = None,
max_result: Optional[int] = None, timeout: int = 10, **kwargs) -> List[str]:
url = f'{API_ROOT}/{bucket}/o'
params = {'prefix': prefix}
if page_token is not None:
params['pageToken'] = page_token
if max_result is not None:
params['maxResults'] = max_result
data, _ = await self._client.get(url, response_type=ResponseType.JSON, params=params or {}, timeout=timeout,
auth_token=await self._get_access_token(project, bucket, auth))
return [x['name'] for x in data.get('items', list())]
@classmethod
def build_URI(cls, bucket: str, object_name: str) -> str:
return f'gs://{bucket}/{object_name}'
# TODO: if `metadata` is set, use multipart upload:
# https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload
async def upload(self, project: str, bucket: str, object_name: str, file_data: Any, *,
auth: Optional = None, content_type: str = None, metadata: dict = None,
timeout: int = 30, **kwargs) -> dict:
async def upload(self, tenant: Tenant, object_name: str, file_data: Any,
*args, auth: Optional = None, content_type: str = None, metadata: dict = None,
timeout: int = 30, **kwargs):
"""
upload blob data
:param tenant: tenant info
:param object_name: maps to file name
:param file_data: Any, *,
:param auth: Optional = None,
:param content_type: str = None,
:param metadata: dict = None,
:param timeout: int = 30, **kwargs
:param return: blob id
"""
project = tenant.project_id
bucket = tenant.bucket_name
url = f'{API_ROOT_UPLOAD}/{bucket}/o'
stream = self._preprocess_data(file_data)
......
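A minimal sketch of uploading a text object, assuming `storage` and `tenant` as before (the tests below exercise the other accepted file_data types):

async def save_text(storage: GCloudAioStorage, tenant: Tenant) -> None:
    # Object name and content are illustrative.
    await storage.upload(tenant, 'hello.txt', 'hello world', content_type='text/plain')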
# osdu core lib main python
osdu-core-lib-python>=0.3.0, <0.4
osdu-core-lib-python>=0.4.0, <0.5
# for google provider
aiohttp
cryptography
......
git+https://community.opengroup.org/osdu/platform/domain-data-mgmt-services/wellbore/lib/wellbore-core/wellbore-core-lib.git@slb-code-push#egg=osdu-core-lib-python
import os
import re
from setuptools import setup, find_packages
from osdu_gcp import __version__ as osdu_gcp_version
requirements_file = os.path.abspath(
os.path.join(os.path.dirname(__file__), 'requirements.txt')
@@ -10,19 +12,6 @@ test_requirements_file = os.path.abspath(
os.path.join(os.path.dirname(__file__), 'requirements_dev.txt')
)
# version is unified in osdu_gcp/__init__.py
def read_version():
regexp = re.compile(r"^__version__\W*=\W*'([\d.abrc]+)'")
init_py = os.path.join(os.path.dirname(__file__), 'osdu_gcp', '__init__.py')
with open(init_py) as f:
for line in f:
match = regexp.match(line)
if match is not None:
return match.group(1)
raise RuntimeError('Cannot find version in {}'.format(init_py))
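The regex above only admits versions built from digits, dots, and the letters a/b/r/c (e.g. release candidates like 1.2.0rc1); a quick illustration with a hypothetical value:

import re

regexp = re.compile(r"^__version__\W*=\W*'([\d.abrc]+)'")
match = regexp.match("__version__ = '0.3.0'")
print(match.group(1))  # -> 0.3.0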
with open(requirements_file) as f:
requirements = f.read().splitlines()
@@ -32,7 +21,7 @@ with open(test_requirements_file) as f:
setup(
name='osdu-core-lib-python-gcp',
version=read_version(),
version=osdu_gcp_version,
packages=find_packages(),
......
# Copyright 2020 Schlumberger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2020 Schlumberger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -5,13 +5,10 @@ from osdu_gcp.storage.blob_storage_gcp import GCloudAioStorage
import pytest
import aiohttp
import uuid
from osdu.core.api.storage.tenant import Tenant
class _TESTING_CFG:
project_id = TESTING_GCP_DATA_PROJECT_ID
credentials = TESTING_GCP_DATA_PROJECT_CREDENTIALS
bucket_name = 'testing-osdu-core'
TEST_DATA = {
'initial_files': [
@@ -32,6 +29,10 @@ async def storage_client(request):
await session.close()
@pytest.fixture
async def test_tenant():
return Tenant(project_id=TESTING_GCP_DATA_PROJECT_ID, bucket_name='testing-osdu-core', data_partition_id='testing-partition-name')
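The fixture above captures the shape of the refactor in this MR: the (project, bucket) argument pair is replaced by a single Tenant value object. A sketch of the change in call pattern (identifiers are illustrative):

tenant = Tenant(project_id='my-project', bucket_name='my-bucket',
                data_partition_id='my-partition')
# old (removed): await storage_client.download(project_id, bucket_name, name)
# new:           await storage_client.download(tenant, name)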
@pytest.fixture
async def temp_directory() -> str:
tmpdir = tempfile.mkdtemp()
@@ -41,72 +42,87 @@ async def temp_directory() -> str:
shutil.rmtree(tmpdir, ignore_errors=True)
@pytest.mark.asyncio
async def test_list_objects(storage_client):
result = await storage_client.list_objects(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name)
async def test_list_objects(storage_client, test_tenant):
result = await storage_client.list_objects(test_tenant)
for file_name, _ in TEST_DATA['initial_files']:
assert file_name in result
@pytest.mark.asyncio
async def test_download(storage_client):
async def test_download(storage_client, test_tenant):
name, expected_content = TEST_DATA['initial_files'][0]
data = await storage_client.download(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, name)
data = await storage_client.download(test_tenant, name)
result = data.decode('utf-8')
assert result == expected_content
@pytest.mark.asyncio
async def test_upload_check_delete(storage_client):
async def test_upload_check_delete(storage_client, test_tenant):
name = str(uuid.uuid4())
content = str(uuid.uuid4())
# upload
await storage_client.upload(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, name, content, content_type='text/plain')
await storage_client.upload(test_tenant, name, content, content_type='text/plain')
# check single object with this name
result = await storage_client.list_objects(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, prefix=name)
result = await storage_client.list_objects(test_tenant, prefix=name)
assert result == [name]
# check its content
data = await storage_client.download(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, name)
data = await storage_client.download(test_tenant, name)
assert data.decode('utf-8') == content
# delete it
await storage_client.delete(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, name)
await storage_client.delete(test_tenant, name)
# check nothing remains
result = await storage_client.list_objects(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, prefix=name)
result = await storage_client.list_objects(test_tenant, prefix=name)
assert len(result) == 0
@pytest.mark.asyncio
async def test_upload_from_various_type(storage_client, temp_directory):
async def test_upload_file_bin_input(storage_client, temp_directory, test_tenant):
file_c = temp_directory + '/testing.file'
content_bin = b'expected content 123456789'
content_str = content_bin.decode('utf-8')
with open(file_c, 'w') as f:
f.write('expected content 123456789')
content_bin = b'expected content 123456789'
with open(file_c, 'rb') as file_bin_input:
await storage_client.upload(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, 'file_bin_input', file_bin_input)
assert await storage_client.download(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, 'file_bin_input') == content_bin
await storage_client.upload(test_tenant, 'file_bin_input', file_bin_input)
assert await storage_client.download(test_tenant, 'file_bin_input') == content_bin
@pytest.mark.asyncio
async def test_upload_file_txt_input(storage_client, temp_directory, test_tenant):
file_c = temp_directory + '/testing.file'
with open(file_c, 'w') as f:
f.write('expected content 123456789')
content_bin = b'expected content 123456789'
with open(file_c, 'r') as file_txt_input:
await storage_client.upload(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, 'file_txt_input', file_txt_input)
assert await storage_client.download(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, 'file_txt_input') == content_bin
await storage_client.upload(test_tenant, 'file_txt_input', file_txt_input)
assert await storage_client.download(test_tenant, 'file_txt_input') == content_bin
await storage_client.upload(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, 'str_input', content_str)
assert await storage_client.download(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, 'str_input') == content_bin
@pytest.mark.asyncio
async def test_upload_str_input(storage_client, test_tenant):
content_bin = b'expected content 123456789'
content_str = content_bin.decode('utf-8')
await storage_client.upload(test_tenant, 'str_input', content_str)
assert await storage_client.download(test_tenant, 'str_input') == content_bin
await storage_client.upload(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, 'bin_input', content_bin)
assert await storage_client.download(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, 'bin_input') == content_bin
@pytest.mark.asyncio
async def test_upload_bin_input(storage_client, test_tenant):
content_bin = b'expected content 123456789'
await storage_client.upload(test_tenant, 'bin_input', content_bin)
assert await storage_client.download(test_tenant, 'bin_input') == content_bin
await storage_client.upload(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, 'empty_input', None)
actual_data = await storage_client.download(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, 'empty_input')
@pytest.mark.asyncio
async def test_upload_empty_input(storage_client, test_tenant):
await storage_client.upload(test_tenant, 'empty_input', None)
actual_data = await storage_client.download(test_tenant, 'empty_input')
assert len(actual_data) == 0
@pytest.mark.asyncio
async def test_upload_int_input(storage_client, test_tenant):
with pytest.raises(TypeError):
await storage_client.upload(_TESTING_CFG.project_id, _TESTING_CFG.bucket_name, 'int_input', 123456)
await storage_client.upload(test_tenant, 'int_input', 123456)
......