Commit ad060824 authored by Victor Dosda

Fixed e2e tests on import wdms function

parent 09999f52
Pipeline #118996 failed with stages in 102 minutes and 31 seconds
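The diff below switches the e2e import tests from passing flat lists of column names to passing a mapping of curve name to column count (e.g. {'MD': 1, '2D': 3}), which the record-creation request builders now take directly; the bulk DataFrames are still generated from the expanded column names. As a minimal, hypothetical sketch (the helper below is illustrative only and not part of the commit), the mapping relates to the expanded names seen in the tests like this, where an array curve '2D' with 3 columns corresponds to '2D[0]', '2D[1]', '2D[2]':

from typing import Dict, List


def expand_curves(col_and_nb_col: Dict[str, int]) -> List[str]:
    # Hypothetical helper (illustrative only): expand {'2D': 3} into
    # '2D[0]', '2D[1]', '2D[2]'; single-column curves keep their plain name.
    columns: List[str] = []
    for name, count in col_and_nb_col.items():
        if count == 1:
            columns.append(name)
        else:
            columns.extend(f"{name}[{i}]" for i in range(count))
    return columns


assert expand_curves({'MD': 1, 'X': 1}) == ['MD', 'X']
assert expand_curves({'MD': 1, 'X': 1, 'Y': 1, 'Z': 1, '2D': 3}) == \
    ['MD', 'X', 'Y', 'Z', '2D[0]', '2D[1]', '2D[2]']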
@@ -22,7 +22,6 @@ import pandas as pd
 import pytest
 from typing import List
-from app.bulk_persistence import DataConsistencyChecks
 from ..generate_dataframe import generate_df
 from .fixtures import with_wdms_env
@@ -53,7 +52,6 @@ def build_base_url_without_dask(entity_type: str) -> str:
 @contextmanager
 def create_record(env, entity_type: str, curves: List[str]):
     if entity_type == "well_log":
-        curves = DataConsistencyChecks._get_curve_name_and_column_count(curves)
         result = build_request_create_osdu_welllog(False, curves).call(env)
     elif entity_type == "wellbore_trajectory":
         result = build_request_create_osdu_wellboretrajectory(False, curves).call(env)
@@ -164,9 +162,9 @@ WELLLOG_URL_PREFIX = 'alpha/ddms/v3/welllogs'
 @pytest.mark.parametrize('entity_type', ["well_log", "wellbore_trajectory", "log"])
 @pytest.mark.parametrize('serializer', [ParquetSerializer(), JsonSerializer()])
 def test_send_one_chunk_without_session(with_wdms_env, entity_type, serializer):
-    col = ['MD', 'X']
-    with create_record(with_wdms_env, entity_type, col) as record_id:
-        data = generate_df(col, range(8))
+    col_and_nb_col = {'MD': 1, 'X': 1}
+    with create_record(with_wdms_env, entity_type, col_and_nb_col) as record_id:
+        data = generate_df(col_and_nb_col.keys(), range(8))
         data_to_send = serializer.dump(data)
         headers = {'Content-Type': serializer.mime_type, 'Accept': serializer.mime_type}
@@ -182,9 +180,9 @@ def test_send_one_chunk_without_session(with_wdms_env, entity_type, serializer):
 @pytest.mark.parametrize('serializer', [ParquetSerializer(), JsonSerializer()])
 def test_send_one_chunk_with_session_commit(with_wdms_env, entity_type, serializer):
-    col = ['MD', 'X']
-    with create_record(with_wdms_env, entity_type, col) as record_id:
-        expected = generate_df(col, range(8))
+    col_and_nb_col = {'MD': 1, 'X': 1}
+    with create_record(with_wdms_env, entity_type, col_and_nb_col) as record_id:
+        expected = generate_df(col_and_nb_col.keys(), range(8))
         # create session
         session_id = create_session(with_wdms_env, entity_type, record_id, True) # mode overwrite
@@ -215,9 +213,9 @@ def test_send_multiple_chunks_with_session_commit(with_wdms_env, shuffle):
     # well log on parquet
     entity_type = "well_log"
     serializer = ParquetSerializer()
-    col = ['MD', 'X', 'Y', 'Z']
-    with create_record(with_wdms_env, entity_type, col) as record_id:
-        data = generate_df(col, range(1000))
+    col_and_nb_col = {'MD': 1, 'X': 1, 'Y': 1, 'Z': 1}
+    with create_record(with_wdms_env, entity_type, col_and_nb_col) as record_id:
+        data = generate_df(col_and_nb_col.keys(), range(1000))
         headers = {'Content-Type': serializer.mime_type, 'Accept': serializer.mime_type}
         # create session
@@ -259,10 +257,10 @@ def test_get_data_with_offset_filter(with_wdms_env):
     # well log on parquet
     entity_type = "well_log"
     serializer = ParquetSerializer()
-    col = ['MD', 'X']
-    with create_record(with_wdms_env, entity_type,col) as record_id:
+    col_and_nb_col = {'MD': 1, 'X': 1}
+    with create_record(with_wdms_env, entity_type,col_and_nb_col) as record_id:
         size = 100
-        data = generate_df(col, range(size))
+        data = generate_df(col_and_nb_col.keys(), range(size))
         data_to_send = serializer.dump(data)
         headers = {'Content-Type': serializer.mime_type, 'Accept': serializer.mime_type}
@@ -298,8 +296,8 @@ def test_get_data_with_column_filter(with_wdms_env):
     # well log on parquet
     entity_type = "well_log"
    serializer = ParquetSerializer()
-    with create_record(with_wdms_env, entity_type, ['MD', 'X', 'Y', 'Z', '2D[0]', '2D[1]', '2D[2]']) as record_id:
+    col_and_nb_col = {'MD': 1, 'X': 1, 'Y': 1, 'Z': 1, '2D': 3}
+    with create_record(with_wdms_env, entity_type, col_and_nb_col) as record_id:
         size = 100
         data = generate_df(['MD', 'X', 'Y', 'Z', '2D[0]', '2D[1]', '2D[2]'], range(size))
         data_to_send = serializer.dump(data)
@@ -334,10 +332,10 @@ def test_get_data_with_limit_filter(with_wdms_env):
     entity_type = "well_log"
     serializer = ParquetSerializer()
-    col = ['MD', 'X']
-    with create_record(with_wdms_env, entity_type, col) as record_id:
+    col_and_nb_col = {'MD': 1, 'X': 1}
+    with create_record(with_wdms_env, entity_type, col_and_nb_col) as record_id:
         size = 100
-        data = generate_df(col, range(size))
+        data = generate_df(col_and_nb_col.keys(), range(size))
         data_to_send = serializer.dump(data)
         headers = {'Content-Type': serializer.mime_type, 'Accept': serializer.mime_type}
@@ -370,10 +368,10 @@ def test_get_data_with_limit_filter(with_wdms_env):
 @pytest.mark.parametrize('entity_type', ["well_log", "wellbore_trajectory", "log"])
 def test_get_data_with_limit_and_offset_filter(with_wdms_env, entity_type):
     serializer = ParquetSerializer()
-    col = ['MD', 'X']
-    with create_record(with_wdms_env, entity_type, ['MD', 'X']) as record_id:
+    col_and_nb_col = {'MD': 1, 'X': 1}
+    with create_record(with_wdms_env, entity_type, col_and_nb_col) as record_id:
         size = 100
-        data = generate_df(['MD', 'X'], range(size))
+        data = generate_df(col_and_nb_col.keys(), range(size))
         data_to_send = serializer.dump(data)
         headers = {'Content-Type': serializer.mime_type, 'Accept': serializer.mime_type}
@@ -398,12 +396,12 @@ def test_get_data_with_limit_and_offset_filter(with_wdms_env, entity_type):
 @pytest.mark.parametrize('entity_type', ["well_log", "wellbore_trajectory", "log"])
 @pytest.mark.parametrize('serializer', [ParquetSerializer(), JsonSerializer()])
 def test_multiple_overwrite_sessions_in_parallel_then_commit(with_wdms_env, entity_type, serializer):
-    col = ['MD', 'X']
-    with create_record(with_wdms_env, entity_type, col) as record_id:
+    col_and_nb_col = {'MD': 1, 'X': 1}
+    with create_record(with_wdms_env, entity_type, col_and_nb_col) as record_id:
         # create session
         sessions = [{
             'id': create_session(with_wdms_env, entity_type, record_id, True),
-            'df': generate_df(col, range(8))
+            'df': generate_df(col_and_nb_col.keys(), range(8))
         } for _i in range(5)] # mode overwrite
         for session in sessions:
@@ -436,10 +434,10 @@ def test_multiple_overwrite_sessions_in_parallel_then_commit(with_wdms_env, entity_type, serializer):
 @pytest.mark.parametrize('serializer', [ParquetSerializer(), JsonSerializer()])
 def test_multiple_update_sessions_in_parallel_then_commit(with_wdms_env, entity_type, serializer):
-    col = ['MD', 'X']
-    with create_record(with_wdms_env, entity_type, ['MD', 'X']) as record_id:
+    col_and_nb_col = {'MD': 1, 'X': 1}
+    with create_record(with_wdms_env, entity_type, col_and_nb_col) as record_id:
         # post data
-        data = generate_df(col, range(10))
+        data = generate_df(col_and_nb_col.keys(), range(10))
         data_to_send = serializer.dump(data)
         build_request_post_data(entity_type, record_id, data_to_send).call(
             with_wdms_env, headers={'Content-Type': serializer.mime_type}).assert_ok()
@@ -447,7 +445,7 @@ def test_multiple_update_sessions_in_parallel_then_commit(with_wdms_env, entity_type, serializer):
         # create session
         sessions = [{
             'id': create_session(with_wdms_env, entity_type, record_id, False),
-            'df': generate_df(['MD', 'X'], range(10, 20))
+            'df': generate_df(col_and_nb_col.keys(), range(10, 20))
         } for _i in range(5)] # mode overwrite
         for session in sessions:
@@ -544,8 +542,9 @@ def test_data_without_dask_update_session(with_wdms_env, entity_type, serializer):
 @pytest.mark.parametrize('entity_type', ["well_log", "wellbore_trajectory"])
 @pytest.mark.parametrize('serializer', [ParquetSerializer(), JsonSerializer()])
 def test_send_arrayd_without_session(with_wdms_env, entity_type, serializer):
-    with create_record(with_wdms_env, entity_type, ['MD', 'array_10_A']) as record_id:
-        data = generate_df(['MD', 'array_10_A'], range(8))
+    col_and_nb_col = {'MD': 1, 'array_10_A': 1}
+    with create_record(with_wdms_env, entity_type, col_and_nb_col) as record_id:
+        data = generate_df(col_and_nb_col.keys(), range(8))
         data_to_send = serializer.dump(data)
         headers = {'Content-Type': serializer.mime_type, 'Accept': serializer.mime_type}
@@ -559,10 +558,10 @@ def test_send_arrayd_without_session(with_wdms_env, entity_type, serializer):
 @pytest.mark.parametrize('entity_type', ["well_log"])
 @pytest.mark.parametrize('serializer', [ParquetSerializer()])
 def test_describe(with_wdms_env, entity_type, serializer):
-    columns = ['BOB', 'MD']
-    with create_record(with_wdms_env, entity_type, columns) as record_id:
+    col_and_nb_col = {'BOB': 1, 'MD': 1}
+    with create_record(with_wdms_env, entity_type, col_and_nb_col) as record_id:
         number_of_rows = 8
-        data = generate_df(columns, range(number_of_rows))
+        data = generate_df(col_and_nb_col.keys(), range(number_of_rows))
         data_to_send = serializer.dump(data)
         headers = {'Content-Type': serializer.mime_type, 'Accept': serializer.mime_type}
@@ -14,7 +14,6 @@
 import pytest
-from app.bulk_persistence import DataConsistencyChecks
 from wdms_client.request_builders.wdms.crud.osdu_wellboretrajectory import build_request_create_osdu_wellboretrajectory
 from wdms_client.request_builders.wdms.crud.osdu_welllog import build_request_create_osdu_welllog
 from .fixtures import with_wdms_env
@@ -34,17 +33,16 @@ def build_request_post_data(entity_type: str, record_id: str, payload) -> Reques
 def create_record_with_data(with_wdms_env, entity_type, serializer, nb_version):
-    col = ['MD', 'X']
+    col_and_nb_col = {'MD': 1, 'X': 1}
     if entity_type == 'welllog':
-        col = DataConsistencyChecks._get_curve_name_and_column_count(col)
-        result = build_request_create_osdu_welllog(False, col).call(with_wdms_env)
+        result = build_request_create_osdu_welllog(False, col_and_nb_col).call(with_wdms_env)
     elif entity_type == 'wellboretrajectory':
-        result = build_request_create_osdu_wellboretrajectory(False, col).call(with_wdms_env)
+        result = build_request_create_osdu_wellboretrajectory(False, col_and_nb_col.keys()).call(with_wdms_env)
     result.assert_ok()
     resobj = result.get_response_obj()
-    data = generate_df(col, range(8))
+    data = generate_df(col_and_nb_col.keys(), range(8))
     data_to_send = serializer.dump(data)
     headers = {'Content-Type': serializer.mime_type, 'Accept': serializer.mime_type}
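For context on the removed DataConsistencyChecks._get_curve_name_and_column_count call: in the old tests it converted a flat list of column names into the curve-name/column-count form before creating a well log, which the tests now build by hand as col_and_nb_col. A rough, hypothetical reconstruction of that conversion follows (illustrative only, not the actual app.bulk_persistence implementation):

import re
from typing import Dict, Iterable

_INDEXED_COLUMN = re.compile(r"^(?P<name>.+)\[\d+\]$")


def get_curve_name_and_column_count(columns: Iterable[str]) -> Dict[str, int]:
    # Hypothetical reconstruction (not the actual DataConsistencyChecks code):
    # group indexed columns such as '2D[0]', '2D[1]', '2D[2]' under one curve
    # name with a column count, leaving scalar columns with a count of 1.
    counts: Dict[str, int] = {}
    for column in columns:
        match = _INDEXED_COLUMN.match(column)
        name = match.group("name") if match else column
        counts[name] = counts.get(name, 0) + 1
    return counts


assert get_curve_name_and_column_count(['MD', 'X']) == {'MD': 1, 'X': 1}
assert get_curve_name_and_column_count(['MD', '2D[0]', '2D[1]', '2D[2]']) == {'MD': 1, '2D': 3}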