Commit d6f47c5c authored by Luc Yriarte

Applying drop 3 patch on app folder

parent 3a6a098e
......@@ -18,3 +18,4 @@ from .dataframe_serializer import DataframeSerializer
from .json_orient import JSONOrient
from .mime_types import MimeTypes
from .tenant_provider import resolve_tenant
from .exceptions import UnknownChannelsException, InvalidBulkException, NoBulkException, NoDataException, RecordNotFoundException
class RecordNotFoundException(Exception):
""" Raised when specified Record does not exist """
pass
class NoDataException(Exception):
""" Raised when asking data for a Record that doesn't have any data """
pass
class NoBulkException(Exception):
""" Raised when asking data for a Record that doesn't have bulkURI """
pass
class InvalidBulkException(Exception):
""" Raised when asking data for a Record that have an invalid bulkURI """
pass
class UnknownChannelsException(Exception):
""" Raised when unknown channel """
pass
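A hedged sketch of how these exceptions could surface as HTTP errors through FastAPI exception handlers; the handler functions and status codes below are illustrative assumptions, not part of this commit.

from fastapi import FastAPI, Request
from starlette.responses import JSONResponse

from app.bulk_persistence import NoDataException, RecordNotFoundException

app = FastAPI()

@app.exception_handler(RecordNotFoundException)
async def record_not_found_handler(request: Request, exc: RecordNotFoundException):
    # A missing record maps naturally to a 404, so callers can tell it apart from server faults
    return JSONResponse(status_code=404, content={"detail": str(exc) or "record not found"})

@app.exception_handler(NoDataException)
async def no_data_handler(request: Request, exc: NoDataException):
    return JSONResponse(status_code=404, content={"detail": "record has no bulk data"})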
......@@ -32,13 +32,6 @@ async def resolve_tenant(data_partition_id: str) -> Tenant:
bucket_name='wdms-osdu'
)
if Config.cloud_provider.value == 'ibm':
return Tenant(
data_partition_id=data_partition_id,
project_id=Config.default_data_tenant_project_id.value,
bucket_name='logstore-osdu-ibm'
)
return Tenant(
data_partition_id=data_partition_id,
project_id='undefined',
......
......@@ -16,7 +16,7 @@ from opencensus.trace.span import SpanKind
from app import conf
from app.utils import Context
from app.helper import traces
from app.helper import utils, traces
def _before_tracing_attributes(ctx, request):
......@@ -24,16 +24,16 @@ def _before_tracing_attributes(ctx, request):
Add request attributes + correlation id to the current tracer's span
"""
ctx.tracer.add_attribute_to_current_span(
attribute_key=traces.HTTP_HOST,
attribute_key=utils.HTTP_HOST,
attribute_value=request.url.host)
ctx.tracer.add_attribute_to_current_span(
attribute_key=traces.HTTP_METHOD,
attribute_key=utils.HTTP_METHOD,
attribute_value=request.method)
ctx.tracer.add_attribute_to_current_span(
attribute_key=traces.HTTP_PATH,
attribute_key=utils.HTTP_PATH,
attribute_value=str(request.url.path))
ctx.tracer.add_attribute_to_current_span(
attribute_key=traces.HTTP_URL,
attribute_key=utils.HTTP_URL,
attribute_value=str(request.url))
ctx.tracer.add_attribute_to_current_span(
attribute_key=conf.CORRELATION_ID_HEADER_NAME,
......@@ -60,6 +60,6 @@ async def client_middleware(request, call_next):
request.headers[conf.APP_KEY_HEADER_NAME] = ctx.app_key
result = await call_next(request)
span.add_attribute(traces.HTTP_STATUS_CODE, result.status_code)
span.add_attribute(utils.HTTP_STATUS_CODE, result.status_code)
return result
......@@ -89,12 +89,18 @@ class ConfigurationContainer:
use env_var.printable_value instead of env_var.value when the goal is to log/display it.
"""
service_name: EnvVar = EnvVar(
key='SERVICE_NAME',
description='Display name of the service when exporting entries for logging and tracing',
default='os-wellbore-ddms---local'
)
cloud_provider: EnvVar = EnvVar(
key='CLOUD_PROVIDER',
description='Short name of the current cloud provider environment, must be "gcp" or "az" or "ibm',
description='Short name of the current cloud provider environment, must be "gcp" or "az"',
default=None,
is_mandatory=True,
allowed_values=['gcp', 'az', 'local', 'ibm'],
allowed_values=['gcp', 'az', 'local'],
factory=lambda x: x.lower()
)
......@@ -279,13 +285,6 @@ def cloud_provider_additional_environment(config: ConfigurationContainer):
override=True,
validator=validator_path_must_exist)
if provider == 'ibm':
config.add_from_env(attribute_name='default_data_tenant_project_id',
env_var_key='OS_WELLBORE_DDMS_DATA_PROJECT_ID',
description='IBM data tenant ID',
default='logstore-ibm',
is_mandatory=True,
override=True)
# Global config instance
Config = ConfigurationContainer.with_load_all(contextual_loader=cloud_provider_additional_environment)
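A hedged sketch of how the container is read elsewhere in the code base; the attribute names come from the declarations above, while the print statements are purely illustrative.

from app.conf import Config

# Each setting is an EnvVar: .value holds the parsed value, .printable_value is safe to log
provider = Config.cloud_provider.value  # e.g. 'gcp', 'az' or 'local'
print('cloud provider:', Config.cloud_provider.printable_value)
print('service name:', Config.service_name.value)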
......
......@@ -15,15 +15,16 @@
import logging
import traceback
import sys
from app.conf import Config
from app.utils import get_or_create_ctx
import rapidjson
import structlog
from structlog.contextvars import merge_contextvars, bind_contextvars
from structlog.contextvars import merge_contextvars
from opencensus.ext.azure.log_exporter import AzureLogHandler
from opencensus.trace import config_integration
import rapidjson
from app.conf import Config
from app.utils import get_or_create_ctx
from app.helper.utils import rename_cloud_role_func
_LOGGER = None
......@@ -32,19 +33,6 @@ def get_logger():
return _LOGGER
def add_fields(**kwargs):
"""
Add key-value pairs to our homemade logger
e.g.
>>> bind_contextvars(a=1, b=2)
>>> # Then use loggers as per normal
>>> log.msg("hello")
a=1 b=2 event='hello'
Full documentation: https://www.structlog.org/en/stable/contextvars.html
"""
bind_contextvars(**kwargs)
class StackDriverRenderer(object):
def __init__(self, service_name=None):
self.service_name = service_name
......@@ -100,13 +88,13 @@ class AzureContextLoggerAdapter(logging.LoggerAdapter):
return msg, kwargs
def init_logger():
def init_logger(service_name):
global _LOGGER
if Config.cloud_provider.value == 'az':
_LOGGER = create_azure_logger()
_LOGGER = create_azure_logger(service_name)
elif Config.cloud_provider.value == 'gcp':
_LOGGER = create_gcp_logger()
_LOGGER = create_gcp_logger(service_name)
else:
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)
_LOGGER = logging.getLogger(__name__)
......@@ -114,7 +102,7 @@ def init_logger():
return _LOGGER
def create_azure_logger():
def create_azure_logger(service_name):
"""
Create a logger with two handlers:
- AzureLogHandler: to see Dependencies, Requests, Traces and Exceptions in Azure monitoring
......@@ -133,12 +121,13 @@ def create_azure_logger():
logger_level = Config.get('az_logger_level')
handler = AzureLogHandler(connection_string=f'InstrumentationKey={key}')
handler.setLevel(logging.getLevelName(logger_level))
handler.add_telemetry_processor(rename_cloud_role_func(service_name))
logger.addHandler(handler)
return AzureContextLoggerAdapter(logger, extra=dict())
def create_gcp_logger(service_name='wellbore-ddms'):
def create_gcp_logger(service_name):
"""
Initialize structlog with the following configuration:
- Make logs compatible with Stackdriver
......
......@@ -13,16 +13,14 @@
# limitations under the License.
from app.conf import Config
from app.helper.utils import rename_cloud_role_func, COMPONENT
from opencensus.common.transports.async_ import AsyncTransport
from opencensus.trace.attributes_helper import COMMON_ATTRIBUTES
from opencensus.trace import base_exporter
from opencensus.ext.stackdriver.trace_exporter import StackdriverExporter
from opencensus.ext.azure.trace_exporter import AzureExporter
from opencensus.trace.propagation.trace_context_http_header_format import TraceContextPropagator
from starlette.requests import Request
import http
"""
How to add specific span in a method
......@@ -39,6 +37,14 @@ How to add specific span in a method
"""
def get_trace_propagator() -> TraceContextPropagator:
"""
Returns the implementation of standard tracing propagation as defined
by W3C: https://www.w3.org/TR/trace-context/
"""
return TraceContextPropagator()
def _create_azure_exporter(key: str):
return AzureExporter(connection_string=f'InstrumentationKey={key}')
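The W3C propagator added above can be exercised on its own. A minimal sketch, assuming opencensus's from_headers/to_headers interface; the traceparent value is the example from the W3C spec.

from app.helper.traces import get_trace_propagator

propagator = get_trace_propagator()
# Rebuild the span context carried by an incoming W3C 'traceparent' header
span_context = propagator.from_headers(
    {"traceparent": "00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01"}
)
# Serialize the same context back into headers for an outgoing call
outgoing_headers = propagator.to_headers(span_context)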
......@@ -65,6 +71,7 @@ def create_exporter(service_name):
key = Config.get('az_ai_instrumentation_key')
try:
az_exporter = _create_azure_exporter(key)
az_exporter.add_telemetry_processor(rename_cloud_role_func(service_name))
combined_exporter.add_exporter(az_exporter)
except ValueError as e:
print('Unable to create AzureExporter:', str(e))
......@@ -94,59 +101,3 @@ class CombinedExporter(base_exporter.Exporter):
for e in self.exporters:
e.export(span_datas)
def get_trace_propagator() -> TraceContextPropagator:
"""
Returns the implementation of standard tracing propagation as defined
by W3C: https://www.w3.org/TR/trace-context/
"""
return TraceContextPropagator()
def _get_status_phrase(status_code):
try:
return http.HTTPStatus(status_code).phrase
except ValueError:
return str()
STATUS_PHRASES = {
status_code: _get_status_phrase(status_code) for status_code in range(100, 600)
}
def process_message(request: Request, status_code: int):
"""
Returns a pretty-printed string to be logged, from the Starlette request and status code.
E.g. Request from: 127.0.0.1:55353 - "GET /api/os-wellbore-ddms/ddms/v2/about" 200 OK
"""
reason = STATUS_PHRASES[status_code]
return f'Request from: {_get_client_str(request.client)} - "{request.method}' \
f' {request.url.path}" {status_code} {reason}'
def _get_client_str(client) -> str:
"""
Returns a string containing host:port from the given Starlette client
"""
host, port = client.host, client.port
if not host:
return ""
return f'{host}:{port}'
"""
The attributes helper is used similarly to other examples,
e.g. another middleware: https://github.com/census-instrumentation/opencensus-python/blob/master/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py
https://github.com/census-instrumentation/opencensus-python/blob/master/opencensus/trace/attributes_helper.py
"""
HTTP_HOST = COMMON_ATTRIBUTES['HTTP_HOST']
HTTP_METHOD = COMMON_ATTRIBUTES['HTTP_METHOD']
HTTP_PATH = COMMON_ATTRIBUTES['HTTP_PATH']
HTTP_ROUTE = COMMON_ATTRIBUTES['HTTP_ROUTE']
HTTP_URL = COMMON_ATTRIBUTES['HTTP_URL']
HTTP_STATUS_CODE = COMMON_ATTRIBUTES['HTTP_STATUS_CODE']
HTTP_REQUEST_SIZE = COMMON_ATTRIBUTES['HTTP_REQUEST_SIZE']
HTTP_RESPONSE_SIZE = COMMON_ATTRIBUTES['HTTP_RESPONSE_SIZE']
COMPONENT = COMMON_ATTRIBUTES['COMPONENT']
\ No newline at end of file
from structlog.contextvars import bind_contextvars
from opencensus.trace.attributes_helper import COMMON_ATTRIBUTES
from starlette.requests import Request
import http
def rename_cloud_role_func(service_name):
"""
Return a processor function that sets the 'Cloud Role Name' in Application Insights to the given service_name.
It's used by AzureLogHandler and AzureExporter.
https://docs.microsoft.com/en-us/azure/azure-monitor/app/api-filtering-sampling#opencensus-python-telemetry-processors
"""
def callback_func(envelope):
envelope.tags['ai.cloud.role'] = service_name
return True
return callback_func
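A minimal sketch of wiring the processor onto an Azure handler, mirroring how add_telemetry_processor is used earlier in this diff; the instrumentation key and role name are placeholders.

from opencensus.ext.azure.log_exporter import AzureLogHandler

from app.helper.utils import rename_cloud_role_func

handler = AzureLogHandler(connection_string='InstrumentationKey=00000000-0000-0000-0000-000000000000')
# Every telemetry item sent through this handler now reports the given cloud role name
handler.add_telemetry_processor(rename_cloud_role_func('os-wellbore-ddms'))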
def add_fields(**kwargs):
"""
Add key-value pairs to our homemade logger
e.g.
>>> bind_contextvars(a=1, b=2)
>>> # Then use loggers as per normal
>>> log.msg("hello")
a=1 b=2 event='hello'
Full documentation: https://www.structlog.org/en/stable/contextvars.html
"""
bind_contextvars(**kwargs)
def _get_status_phrase(status_code):
try:
return http.HTTPStatus(status_code).phrase
except ValueError:
return str()
STATUS_PHRASES = {
status_code: _get_status_phrase(status_code) for status_code in range(100, 600)
}
def process_message(request: Request, status_code: int):
"""
Returns a pretty-printed string to be logged, from the Starlette request and status code.
E.g. Request from: 127.0.0.1:55353 - "GET /api/os-wellbore-ddms/ddms/v2/about" 200 OK
"""
reason = STATUS_PHRASES[status_code]
return f'Request from: {_get_client_str(request.client)} - "{request.method}' \
f' {request.url.path}" {status_code} {reason}'
def _get_client_str(client) -> str:
"""
Returns a string containing host:port from the given Starlette client
"""
host, port = client.host, client.port
if not host:
return ""
return f'{host}:{port}'
"""
The attributes helper is used similarly to other examples,
e.g. another middleware: https://github.com/census-instrumentation/opencensus-python/blob/master/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py
https://github.com/census-instrumentation/opencensus-python/blob/master/opencensus/trace/attributes_helper.py
"""
HTTP_HOST = COMMON_ATTRIBUTES['HTTP_HOST']
HTTP_METHOD = COMMON_ATTRIBUTES['HTTP_METHOD']
HTTP_PATH = COMMON_ATTRIBUTES['HTTP_PATH']
HTTP_ROUTE = COMMON_ATTRIBUTES['HTTP_ROUTE']
HTTP_URL = COMMON_ATTRIBUTES['HTTP_URL']
HTTP_STATUS_CODE = COMMON_ATTRIBUTES['HTTP_STATUS_CODE']
HTTP_REQUEST_SIZE = COMMON_ATTRIBUTES['HTTP_REQUEST_SIZE']
HTTP_RESPONSE_SIZE = COMMON_ATTRIBUTES['HTTP_RESPONSE_SIZE']
COMPONENT = COMMON_ATTRIBUTES['COMPONENT']
from osdu.core.api.storage.blob_storage_base import BlobStorageBase
from app.utils import get_http_client_session
from osdu_ibm.storage.blob_storage_ibm import IBMObjectStorage
from .app_injector import AppInjector, AppInjectorModule
from app.utils import Context
from app.bulk_persistence import resolve_tenant
class IBMInjector(AppInjectorModule):
def configure(self, app_injector: AppInjector):
app_injector.register(BlobStorageBase, IBMInjector.build_ibm_blob_storage)
@staticmethod
async def build_ibm_blob_storage(*args, **kwargs) -> BlobStorageBase:
ctx: Context = Context.current()
# TODO to be reviewed
tenant = await resolve_tenant(ctx.partition_id)
return IBMObjectStorage(
session=get_http_client_session(),
service_account_file=tenant.credentials
)
......@@ -24,7 +24,6 @@ from app.clients.search_service_client import SearchServiceClient
from app.clients import make_search_client, make_storage_record_client
from osdu.core.api.storage.blob_storage_local_fs import LocalFSBlobStorage
from app.helper.logger import get_logger
from app.injector.ibm_injector import IBMInjector
class MainInjector(AppInjectorModule):
......@@ -57,10 +56,6 @@ class MainInjector(AppInjectorModule):
logger.info('using gcp injector')
GCPInjector().configure(app_injector)
if Config.cloud_provider.value == 'ibm':
logger.info('using ibm injector')
IBMInjector().configure(app_injector)
# run overriders
self.overriders(app_injector)
......
......@@ -19,7 +19,7 @@ from fastapi.security.api_key import APIKeyHeader
from starlette.middleware.base import BaseHTTPMiddleware
from structlog.contextvars import clear_contextvars as clear_logger_contextvars
from app.helper import logger
import app.helper.utils as logger_utils
from app import conf
from app.injector.app_injector import AppInjector
from app.model.user import User
......@@ -31,14 +31,14 @@ class CreateBasicContextMiddleware(BaseHTTPMiddleware):
def __init__(self, injector: AppInjector, **kwargs):
super().__init__(**kwargs)
self._app_injector = injector
@staticmethod
def _add_csp_header(request, response):
"""
Returns the response with the additional CSP headers added to allow for swagger js and css files from the given domains
Returns the response with the additional CSP headers added to allow for swagger js and css files from the given domains.
"""
if "/docs" in request.url.path:
response.headers["Content-Security-Policy"] = "default-src 'self'; script-src 'self' *.jsdelivr.net 'unsafe-inline'; style-src 'self' *.jsdelivr.net; img-src 'self' *.tiangolo.com;"
response.headers["Content-Security-Policy"] = "default-src 'self'; script-src 'self' *.jsdelivr.net 'unsafe-inline'; style-src 'self' *.jsdelivr.net; img-src 'self' *.tiangolo.com data:;"
async def dispatch(self, request, call_next):
api_key = request.headers.get('x-api-key', None)
......@@ -49,11 +49,11 @@ class CreateBasicContextMiddleware(BaseHTTPMiddleware):
anonymous_user = User(email='anonymous', authenticated=False)
clear_logger_contextvars()
logger.add_fields(correlation_id=correlation_id,
request_id=request_id,
partition_id=partition_id,
app_key=app_key,
api_key=api_key)
logger_utils.add_fields(correlation_id=correlation_id,
request_id=request_id,
partition_id=partition_id,
app_key=app_key,
api_key=api_key)
ctx = get_or_create_ctx()
ctx.set_current_with_value(logger=get_logger(),
......@@ -71,6 +71,7 @@ class CreateBasicContextMiddleware(BaseHTTPMiddleware):
self._add_csp_header(request, response)
return response
async def require_data_partition_id(
data_partition_id: str = Header(default=None,
title='data partition id',
......
......@@ -19,10 +19,11 @@ from starlette.requests import Request
from starlette.responses import Response
from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR
from app.helper import traces
from opencensus.trace import tracer as open_tracer
from opencensus.trace.samplers import AlwaysOnSampler
from opencensus.trace.span import SpanKind
from app.helper import traces, utils
from app.utils import get_or_create_ctx
from app import conf
from inspect import isfunction as is_function
......@@ -64,20 +65,20 @@ class TracingMiddleware(BaseHTTPMiddleware):
@staticmethod
def _before_request(request: Request, tracer: open_tracer.Tracer):
tracer.add_attribute_to_current_span(
attribute_key=traces.HTTP_HOST,
attribute_key=utils.HTTP_HOST,
attribute_value=request.url.hostname)
tracer.add_attribute_to_current_span(
attribute_key=traces.HTTP_METHOD,
attribute_key=utils.HTTP_METHOD,
attribute_value=request.method)
tracer.add_attribute_to_current_span(
attribute_key=traces.HTTP_ROUTE,
attribute_key=utils.HTTP_ROUTE,
attribute_value=request.url.path)
tracer.add_attribute_to_current_span(
attribute_key=traces.HTTP_PATH,
attribute_key=utils.HTTP_PATH,
attribute_value=str(request.url.path))
tracer.add_attribute_to_current_span(
attribute_key=traces.HTTP_URL,
attribute_key=utils.HTTP_URL,
attribute_value=str(request.url))
ctx_correlation_id = get_or_create_ctx().correlation_id
......@@ -90,13 +91,13 @@ class TracingMiddleware(BaseHTTPMiddleware):
@staticmethod
def _after_successful_request(response: Response, tracer):
tracer.add_attribute_to_current_span(
attribute_key=traces.HTTP_STATUS_CODE,
attribute_key=utils.HTTP_STATUS_CODE,
attribute_value=response.status_code)
@staticmethod
def _after_request(request, tracer):
tracer.add_attribute_to_current_span(
attribute_key=traces.HTTP_ROUTE,
attribute_key=utils.HTTP_ROUTE,
attribute_value=TracingMiddleware._retrieve_raw_path(request))
async def dispatch(self, request: Request, call_next: Any) -> Response:
......@@ -125,5 +126,5 @@ class TracingMiddleware(BaseHTTPMiddleware):
finally:
status = response.status_code if response else HTTP_500_INTERNAL_SERVER_ERROR
ctx.logger.info(traces.process_message(request, status))
ctx.logger.info(utils.process_message(request, status))
self._after_request(request, tracer)
from __future__ import annotations
from typing import List, Optional
from pydantic import Field
from app.model.model_curated import DDMSBaseModel
class AboutResponseUser(DDMSBaseModel):
tenant: Optional[str] = None
email: Optional[str] = None
class V1DmsInfo(DDMSBaseModel):
kinds: Optional[List[str]] = None
class V1AboutResponse(DDMSBaseModel):
user: Optional[AboutResponseUser] = None
dmsInfo: Optional[V1DmsInfo] = None
class FastSearchResponse(DDMSBaseModel):
results: Optional[List[str]] = None
#unused after revert on bug 602935
class WriteDataResponse(DDMSBaseModel):
rowCount: Optional[int] = Field(..., description="Row count")
columnCount: Optional[int] = Field(..., description="Column count")
......@@ -49,19 +49,6 @@ class DDMSBaseModelWithExtra(BaseModel):
extra = Extra.allow
class AboutResponseUser(DDMSBaseModel):
tenant: Optional[str] = None
email: Optional[str] = None
class V1DmsInfo(DDMSBaseModel):
kinds: Optional[List[str]] = None
class FastSearchResponse(DDMSBaseModel):
results: Optional[List[str]] = None
class Point(DDMSBaseModel):
latitude: Optional[confloat(ge=-90.0, le=90.0)] = Field(
None, description='Latitude of point.'
......@@ -337,7 +324,7 @@ class projectedPosition(DDMSBaseModel):
)
class wellborerelationships(DDMSBaseModel):
class wellborerelationships(DDMSBaseModelWithExtra):
definitiveTimeDepthRelation: Optional[ToOneRelationship] = Field(
None,
description='The definitive time-depth relation providing the MD to seismic travel-time transformation.',
......@@ -452,7 +439,7 @@ class Format(Enum):
float128 = 'float128'
class logsetrelationships(DDMSBaseModel):
class logsetrelationships(DDMSBaseModelWithExtra):
well: Optional[ToOneRelationship] = Field(
None,
description='The well to which this logSet belongs. Only required if the wellbore is unknown.',
......@@ -468,7 +455,7 @@ class logsetrelationships(DDMSBaseModel):
)
class dipsetrelationships(DDMSBaseModel):
class dipsetrelationships(DDMSBaseModelWithExtra):
well: Optional[ToOneRelationship] = Field(
None,
description='The well to which this dipSet belongs. Only required if the wellbore is unknown.',
......@@ -646,7 +633,7 @@ class trajectorychannel(DDMSBaseModel):