Commit 902246e5 authored by Jeremie Hallal

add unit test

parent cbf989c7
Pipeline #71124 failed with stages in 16 minutes and 24 seconds
@@ -18,6 +18,7 @@ import time
from contextlib import suppress
from functools import wraps
from operator import attrgetter
from typing import List
import fsspec
import pandas as pd
......
@@ -16,7 +16,6 @@ import asyncio
from fastapi import APIRouter, Depends, HTTPException, Request, status
from app.bulk_persistence import JSONOrient, get_dataframe
from app.bulk_persistence.bulk_id import BulkId
from app.bulk_persistence.dask.dask_bulk_storage import DaskBulkStorage
from app.bulk_persistence.dask.errors import BulkError, BulkNotFound
......
@@ -13,15 +13,15 @@
# limitations under the License.
import io
from enum import Enum
from contextlib import contextmanager
import random
import numpy as np
import numpy.testing as npt
import pandas as pd
import pytest
from tests.unit.generate_data import generate_df
from .fixtures import with_wdms_env
from ..request_builders.wdms.crud.log import build_request_create_log, build_request_delete_log
from ..request_builders.wdms.session import build_delete_session
@@ -36,13 +36,6 @@ from ..request_builders.wdms.crud.osdu_wellboretrajectory import (
    build_request_delete_osdu_wellboretrajectory)
def generate_df(columns, index):
    nbrows = len(index)
    df = pd.DataFrame(
        np.random.randint(-100, 1000, size=(nbrows, len(columns))), index=index)
    df.columns = columns
    return df
entity_type_dict = {
    "well_log": {"entity": "welllogs", "version": "v3"},
    "wellbore_trajectory": {"entity": "wellboretrajectories", "version": "v3"},
......
@@ -9,7 +9,7 @@ from app.bulk_persistence import JSONOrient
from app.model.model_chunking import GetDataParams
from app.routers.bulk.bulk_routes import DataFrameRender
from app.routers.bulk.utils import get_df_from_request
from tests.unit.generate_data import generate_df
@pytest.mark.parametrize("requested, df_columns, expected", [
(["X"], {"X"}, ["X"]),
@@ -76,7 +76,7 @@ def basic_dataframe():
async def test_df_render_accept_parquet(default_get_params, basic_dataframe, accept):
    response = await DataFrameRender.df_render(basic_dataframe, default_get_params, accept)
    assert 'application/x-parquet' == response.headers.get('Content-Type')
    assert response.headers.get('Content-Type') == "application/x-parquet"
    assert_df_in_parquet(basic_dataframe, response.body)
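For reference, assert_df_in_parquet is defined elsewhere in this test module; a minimal equivalent sketch (assumption, not the module's actual helper), assuming a parquet engine such as pyarrow is installed, could look like:
import io
import pandas as pd

def assert_df_in_parquet(expected_df: pd.DataFrame, body: bytes):
    # Hypothetical round-trip check: parse the response body as parquet and compare.
    actual = pd.read_parquet(io.BytesIO(body))
    pd.testing.assert_frame_equal(expected_df, actual)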
@@ -84,11 +84,22 @@ async def test_df_render_accept_parquet(default_get_params, basic_dataframe, accept):
@pytest.mark.parametrize("orient", [JSONOrient.split, JSONOrient.columns])
async def test_df_render_accept_json(default_get_params, basic_dataframe, orient):
    response = await DataFrameRender.df_render(basic_dataframe, default_get_params, "application/json", orient)
    assert 'application/json' == response.headers.get('Content-Type')
    assert response.headers.get('Content-Type') == "application/json"
    actual = pd.read_json(response.body, orient=orient)
    assert_frame_equal(basic_dataframe, actual)
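For context (standard pandas behaviour, illustration only, not part of this diff), the two orients serialize a small frame roughly as follows:
import pandas as pd

df = pd.DataFrame({"X": [1, 2]}, index=[10, 20])
df.to_json(orient="split")    # '{"columns":["X"],"index":[10,20],"data":[[1],[2]]}'
df.to_json(orient="columns")  # '{"X":{"10":1,"20":2}}'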
@pytest.mark.asyncio
async def test_df_render_describe():
    columns = [f'var_{i}' for i in range(10)]
    data = generate_df(columns, index=range(100))
    response = await DataFrameRender.df_render(data, GetDataParams(
        describe=True, limit=None, curves=None, offset=None))
    assert response['columns'] == columns
    assert response['numberOfRows'] == 100
class RequestMock:
    def __init__(self, headers: dict = {}, body=None):
        self.headers = headers
......
import numpy as np
import pandas as pd
def generate_df(columns, index):
    def gen_values(col_name, size):
        if col_name.startswith('float'):
            return np.random.random_sample(size=size)
        if col_name.startswith('str'):
            return [f'string_value_{i}' for i in range(size)]
        if col_name.startswith('date'):
            return (np.datetime64('2021-01-01') + days for days in range(size))
        if col_name.startswith('array_'):
            array_size = int(col_name.split('_')[1])
            return [np.array(np.random.random_sample(size=array_size)) for _i in range(size)]
        return np.random.randint(-100, 1000, size=size)

    df = pd.DataFrame({c: gen_values(c, len(index))
                       for c in columns}, index=index)
    return df
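A hypothetical usage sketch of this helper (illustration only, not part of the commit); the column-name prefix selects the generated dtype, and any other name falls back to random integers:
from tests.unit.generate_data import generate_df

df = generate_df(['float_a', 'str_b', 'array_3', 'MD'], index=range(5))
assert list(df.columns) == ['float_a', 'str_b', 'array_3', 'MD']
assert df.shape == (5, 4)  # 'MD' has no known prefix, so it gets random integers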
@@ -20,11 +20,11 @@ import pandas as pd
import pytest
from tests.unit.test_utils import ctx_fixture, nope_logger_fixture
from tests.unit.generate_data import generate_df
import mock
from app.utils import DaskException
from app.utils import DaskClient
from dask.utils import parse_bytes
from app.helper import logger
from app.bulk_persistence.dask.dask_bulk_storage import (BulkNotFound,
                                                         BulkNotProcessable,
@@ -34,20 +34,6 @@ from app.persistence.sessions_storage import (Session, SessionState,
                                              SessionUpdateMode)
def generate_df(columns, index):
    def gen_values(col_name, size):
        if col_name.startswith('float'):
            return np.random.random_sample(size=size)
        if col_name.startswith('str'):
            return [f'string_value_{i}' for i in range(size)]
        if col_name.startswith('date'):
            return (np.datetime64('2021-01-01') + days for days in range(size))
        return np.random.randint(-100, 1000, size=size)

    df = pd.DataFrame({c: gen_values(c, len(index))
                       for c in columns}, index=index)
    return df
@pytest.fixture(scope="module")
def event_loop(): # all tests will share the same loop
......