
Commit d21c9025 authored by Yannick

Use constant for SAS token duration

Add fallback case if account_key is not available
Add unit tests
parent c4a88d65
Pipeline #50827 passed with stage in 42 seconds
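The constant introduced below keeps the previous behaviour: the old code hard-coded a 10-minute expiry, and 600 seconds is the same duration. A quick sanity check, as a sketch rather than part of the commit:

# Sketch: confirm the new constant matches the former hard-coded 10-minute expiry.
from datetime import timedelta

assert timedelta(seconds=600) == timedelta(minutes=10)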
 from typing import Optional
 from datetime import datetime, timedelta
+import logging
 import adlfs
 import fsspec
@@ -10,6 +11,10 @@ from osdu.core.api.storage.tenant import Tenant
 from .blob_storage_az import AzureAioBlobStorage
 from osdu_az.partition.partition_service import PartitionService, STORAGE_ACCOUNT_NAME, STORAGE_ACCOUNT_KEY
+
+SAS_TOKEN_DURATION_IN_SECONDS = 600  # 10 minutes
+
+_LOGGER = logging.getLogger(__name__)

 class AzureBlobFileSystemWithDefaultCredentials(adlfs.AzureBlobFileSystem):
     """ Wrap the azure file system to add credentials if not present
@@ -25,6 +30,7 @@ class AzureBlobFileSystemWithDefaultCredentials(adlfs.AzureBlobFileSystem):
             or "sas_token" in kwargs
         )
         if not has_credential:
+            _LOGGER.debug('Dask set to use DefaultCredential')
             kwargs["credential"] = AzureAioBlobStorage._get_credentials()
         super().__init__(*args, **kwargs)
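The wrapper only takes effect once it is registered with fsspec. The body of register_azure_fsspec() is not shown in this diff; a minimal sketch of what such a registration typically looks like, assuming the usual adlfs protocol names ('abfs' / 'az'):

# Sketch only: the real register_azure_fsspec() may differ; protocol names are assumptions.
import fsspec

def register_azure_fsspec():
    fsspec.register_implementation('abfs', AzureBlobFileSystemWithDefaultCredentials, clobber=True)
    fsspec.register_implementation('az', AzureBlobFileSystemWithDefaultCredentials, clobber=True)

With this in place, any fsspec/Dask access that passes only an account_name gets DefaultAzureCredential injected by the wrapper above.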
@@ -37,19 +43,21 @@ def register_azure_fsspec():
 async def get_dask_storage_parameters(tenant: Tenant, directory: Optional[str] = None) -> DaskStorageParameters:
     partition_info = await PartitionService.get_partition(tenant.data_partition_id)
     storage_account_name = partition_info.get_value(STORAGE_ACCOUNT_NAME)
-    storage_account_key = partition_info.get_value(STORAGE_ACCOUNT_KEY)
-    sas_token = generate_account_sas(
-        storage_account_name,
-        account_key=storage_account_key,
-        resource_types=ResourceTypes(object=True, container=True, service=False),
-        permission=AccountSasPermissions(read=True, write=True, delete=True,
-                                         list=True, add=True, create=True, update=True,
-                                         process=True, delete_previous_version=True),
-        expiry=datetime.utcnow() + timedelta(minutes=10)
-    )
-    storage_options = {'account_name': storage_account_name, 'sas_token': sas_token}
+    storage_options = {'account_name': storage_account_name}
+    storage_account_key = partition_info.get_value(STORAGE_ACCOUNT_KEY)
+    if storage_account_key:  # in some environments the account_key may not be available
+        sas_token = generate_account_sas(
+            storage_account_name,
+            account_key=storage_account_key,
+            resource_types=ResourceTypes(object=True, container=True, service=False),
+            permission=AccountSasPermissions(read=True, write=True, delete=True,
+                                             list=True, add=True, create=True, update=True,
+                                             process=True, delete_previous_version=True),
+            expiry=datetime.utcnow() + timedelta(seconds=SAS_TOKEN_DURATION_IN_SECONDS)
+        )
+        storage_options['sas_token'] = sas_token
     base_directory = f'{tenant.bucket_name}/{directory}' if directory else tenant.bucket_name
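In practice the returned options feed straight into fsspec/Dask: with an account key they carry a short-lived SAS token, without one the wrapper above falls back to DefaultAzureCredential. A usage sketch; the 'abfs' protocol name and the DaskStorageParameters fields are assumptions based on the surrounding code and tests:

# Sketch: open the tenant's storage through fsspec using the computed options.
# Assumes register_azure_fsspec() has run and that DaskStorageParameters exposes
# 'storage_options' and 'base_directory'.
import fsspec

async def list_tenant_files(tenant):
    params = await get_dask_storage_parameters(tenant)
    fs = fsspec.filesystem('abfs', **params.storage_options)
    return fs.ls(params.base_directory)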
......
+import base64
+import uuid
 import pytest
 from mock import patch
 import fsspec
-import uuid
-from osdu.core.api.storage.tenant import Tenant
+from azure.identity.aio import DefaultAzureCredential
+from osdu.core.api.storage.tenant import Tenant
 from osdu_az.partition.partition_info import PartitionInfo
 from osdu_az.partition.partition_service import PartitionService
 from osdu_az.storage.blob_storage_az import AzureAioBlobStorage
-from osdu_az.storage.dask_storage_parameters import get_dask_storage_parameters
+from osdu_az.storage.dask_storage_parameters import (get_dask_storage_parameters,
+                                                     AzureBlobFileSystemWithDefaultCredentials)
 from tests.conftest import Config
@@ -58,3 +59,31 @@ async def test_read_write(with_azurite_credentials, test_tenant):
     fs.rm(file_path)

     assert content == 'test content'
+
+
+@pytest.mark.asyncio
+async def test_get_dask_parameter_no_account_key(test_tenant):
+    with patch.object(PartitionService, 'get_partition', return_value=PartitionInfo({
+        "storage-account-name": {
+            "sensitive": False, "value": "opendes-storage"
+        }
+    })):
+        # when
+        parameters = await get_dask_storage_parameters(test_tenant)
+
+        # then no 'sas_token' is put in the storage options
+        assert 'sas_token' not in parameters.storage_options
+
+        # then the default credentials are used
+        azfs = AzureBlobFileSystemWithDefaultCredentials(**parameters.storage_options)
+        assert isinstance(azfs.credential, DefaultAzureCredential)
+
+
+@pytest.mark.asyncio
+async def test_get_dask_parameter_with_account_key(with_azurite_credentials, test_tenant):
+    # when
+    parameters = await get_dask_storage_parameters(test_tenant)
+
+    # then 'sas_token' is put in the storage options
+    assert 'sas_token' in parameters.storage_options
+
+    azfs = AzureBlobFileSystemWithDefaultCredentials(**parameters.storage_options)
+    assert parameters.storage_options['sas_token'] in azfs.sas_token
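The with-key branch could also be exercised without Azurite by patching the partition with a fake, base64-encoded account key, since generate_account_sas only needs a decodable key to sign. A sketch, not part of this commit; the "storage-account-key" property name mirrors STORAGE_ACCOUNT_KEY and is an assumption:

# Sketch (not in the commit): drive the SAS branch with a fake key so no real
# storage account is needed. generate_account_sas only requires valid base64.
FAKE_ACCOUNT_KEY = base64.b64encode(b'not-a-real-key').decode()


@pytest.mark.asyncio
async def test_get_dask_parameter_fake_account_key(test_tenant):
    with patch.object(PartitionService, 'get_partition', return_value=PartitionInfo({
        "storage-account-name": {"sensitive": False, "value": "opendes-storage"},
        # kept non-sensitive here so no secret-store lookup is involved in the sketch
        "storage-account-key": {"sensitive": False, "value": FAKE_ACCOUNT_KEY},
    })):
        parameters = await get_dask_storage_parameters(test_tenant)
        assert 'sas_token' in parameters.storage_options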