Commit 8268c867 authored by Jeremie Hallal

fix unit tests

parent efe0e358
Pipeline #86860 failed with stages in 26 minutes
@@ -23,6 +23,7 @@ import pyarrow.parquet as pa
 from app.helper.logger import get_logger
 from app.utils import capture_timings
+WDMS_INDEX_NAME = '_wdms_index_'
 def worker_make_log_captured_timing_handler(level=INFO):
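The new WDMS_INDEX_NAME constant looks like a sentinel column name for the dataframe index. A minimal sketch of how such a sentinel can be used to round-trip an index through Parquet, assuming hypothetical helper names (write_with_index, read_with_index) that are not part of this commit:

    import pandas as pd

    WDMS_INDEX_NAME = '_wdms_index_'

    def write_with_index(df: pd.DataFrame, path: str) -> None:
        # move the index into a named column so it survives the Parquet round trip
        df.rename_axis(WDMS_INDEX_NAME).reset_index().to_parquet(path)

    def read_with_index(path: str) -> pd.DataFrame:
        # restore the sentinel column as the index on the way back
        return pd.read_parquet(path).set_index(WDMS_INDEX_NAME)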
@@ -208,8 +208,8 @@ async def get_data_version(
         df = await get_dataframe(ctx, bulk_id)
         auto_cast_columns_to_string(df)
     else:
-        # future_index = await DataFrameRender.load_index(record_id, bulk_id, dask_blob_storage)
-        future_index = await dask_blob_storage.load_index(record_id, bulk_id)
+        if data_param.offset or data_param.limit:
+            future_index = await DataFrameRender.load_index(record_id, bulk_id, dask_blob_storage)
         df, filters, stat = await _process_request_v1(record_id, bulk_id, data_param, filters, dask_blob_storage)
         df = await DataFrameRender.process_params(df, data_param, filters, dask_blob_storage, future_index)
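The hunk above appears to make index loading lazy: the index future is now fetched only when the request actually paginates (offset or limit set) instead of on every read. A minimal, self-contained sketch of that pattern, with hypothetical names (DataParam, load_index_stub, resolve_index) standing in for the real code:

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class DataParam:
        offset: Optional[int] = None
        limit: Optional[int] = None

    async def load_index_stub(record_id: str, bulk_id: str):
        # placeholder for the real loader (DataFrameRender.load_index in this repo)
        return list(range(10))

    async def resolve_index(record_id: str, bulk_id: str, data_param: DataParam):
        # lazy index loading: only fetch the index when the caller paginates
        future_index = None
        if data_param.offset or data_param.limit:
            future_index = await load_index_stub(record_id, bulk_id)
        return future_index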
@@ -262,7 +262,7 @@ class DataFrameRender:
         else:
             df = df[natsorted(df.columns)]  # columns are ordered by natural sort
-        if filters:
+        if filters and (params.offset or params.limit):
             f_index = dask_blob_storage.client.compute(df.index)
             df = await DataFrameRender.select_range(df, params.offset, params.limit, dask_blob_storage, f_index)
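Follow-up note: this guard appears to mirror the caller-side change above, so the Dask index is only computed (dask_blob_storage.client.compute(df.index)) when select_range will actually use params.offset or params.limit; plain reads without pagination skip that extra computation.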
import io
import math
import platform
from tempfile import TemporaryDirectory
import numpy as np
@@ -12,7 +11,6 @@ import pytest
from app.auth.auth import require_opendes_authorized_user
from app.bulk_persistence.dask.dask_bulk_storage import DaskBulkStorage
from app.bulk_persistence.dask.dask_bulk_storage_local import make_local_dask_bulk_storage
from app.bulk_persistence.dask.errors import BulkNotProcessable
from app.clients import StorageRecordServiceClient
from app.clients.storage_service_blob_storage import StorageRecordServiceBlobStorage
from app.helper import traces