Commit 6c4fca24 authored by Diego Molteni's avatar Diego Molteni
Browse files

Merge branch 'slb/vm/paginationdatasetlist' into 'master'

feat: update dataset list endpoint to return paginated results if limit and cursor query parameters are present

See merge request !173
parents fae8296d 7267d74b
Pipeline #58933 passed with stages
in 9 minutes and 47 seconds
......@@ -468,7 +468,7 @@ The following software have components provided under the terms of this license:
- cross-spawn (from https://www.npmjs.com/package/cross-spawn)
- crypto-random-string (from https://www.npmjs.com/package/crypto-random-string)
- d64 (from https://www.npmjs.com/package/d64)
- dashdash (from https://github.com/trentm/node-dashdash)
- dashdash (from https://www.npmjs.com/package/dashdash)
- date-and-time (from https://www.npmjs.com/package/date-and-time)
- date-format (from https://www.npmjs.com/package/date-format)
- date-format (from https://www.npmjs.com/package/date-format)
......
......@@ -662,7 +662,14 @@ paths:
items:
type: string
collectionFormat: multi
- description: 'Limit the number of datasets in the response'
in: query
name: limit
type: string
- description: 'Cursor for pagination on the datasets list'
in: query
name: cursor
type: string
responses:
200:
description: "The list of all datasets in the subproject if no gtags are in the request parameters. If gtags exist in the request parameters, then list all datasets that have the same list of gtags."
......@@ -670,6 +677,10 @@ paths:
type: array
items:
$ref: "#/definitions/Dataset"
201:
      description: "Paginated dataset list with nextPageCursor. This 201 entry exists for documentation purposes only; when the limit or cursor query parameter is supplied, the response is actually returned with status code 200."
schema:
$ref: "#/definitions/PaginatedDatasets"
400:
description: "Bad request."
401:
......@@ -2196,6 +2207,16 @@ definitions:
type: string
description: Next cursor for pagination.
example: { datasets: ["folderA/", "folderB/", "dataset01"], nextPageCursor: "abc1234" }
# PaginatedDatasets
PaginatedDatasets:
properties:
datasets:
type: array
items:
$ref: '#/definitions/Dataset'
nextPageCursor:
type: string
# ===========================================================================
# Endpoints Security Section
......
......@@ -672,6 +672,14 @@ paths:
items:
type: string
collectionFormat: multi
- description: 'Limit the number of datasets in the response'
in: query
name: limit
type: string
- description: 'Cursor for pagination on the datasets list'
in: query
name: cursor
type: string
responses:
200:
......@@ -680,6 +688,11 @@ paths:
type: array
items:
$ref: "#/definitions/Dataset"
201:
      description: "Paginated dataset list with nextPageCursor. This 201 entry exists for documentation purposes only; when the limit or cursor query parameter is supplied, the response is actually returned with status code 200."
schema:
$ref: "#/definitions/PaginatedDatasets"
400:
description: "Bad request"
401:
......@@ -2205,6 +2218,16 @@ definitions:
type: string
description: Next cursor for pagination
example: { datasets: ["folderA/", "folderB/", "dataset01"], nextPageCursor: "abc1234" }
# PaginatedDatasets
PaginatedDatasets:
properties:
datasets:
type: array
items:
$ref: '#/definitions/Dataset'
nextPageCursor:
type: string
# ===========================================================================
# Endpoints Security Section
......
......@@ -15,10 +15,10 @@
// ============================================================================
import { DatasetModel, PaginationModel } from '.';
import { IJournal, IJournalTransaction } from '../../cloud';
import { Config } from '../../cloud';
import { Config, IJournal, IJournalTransaction } from '../../cloud';
import { Utils } from '../../shared';
import { Locker } from './locker';
import { PaginatedDatasetList } from './model';
export class DatasetDAO {
......@@ -69,7 +69,9 @@ export class DatasetDAO {
public static async list(
journalClient: IJournal | IJournalTransaction,
dataset: DatasetModel): Promise<DatasetModel[]> {
dataset: DatasetModel, pagination: PaginationModel):
Promise<any> {
let query: any;
if (dataset.gtags === undefined || dataset.gtags.length === 0) {
query = journalClient.createQuery(
......@@ -83,16 +85,27 @@ export class DatasetDAO {
}
}
let [entities] = await journalClient.runQuery(query);
if (pagination && pagination.cursor) { query = query.start(pagination.cursor); }
if (pagination && pagination.limit) { query = query.limit(pagination.limit); }
entities = (entities) as DatasetModel[];
const [entities, info] = await journalClient.runQuery(query);
// Fix model for old entity
for (let entity of entities) {
entity = await this.fixOldModel(entity, dataset.tenant, dataset.subproject);
}
return entities;
const output: PaginatedDatasetList = {
datasets: entities,
nextPageCursor: null
};
if (pagination) {
output.nextPageCursor = info.endCursor;
}
return output;
}
......
......@@ -318,7 +318,10 @@ export class DatasetHandler {
private static async list(req: expRequest, tenant: TenantModel, subproject: SubProjectModel) {
// Retrieve the dataset path information
const dataset = DatasetParser.list(req);
const userInput = DatasetParser.list(req);
const dataset = userInput.dataset;
const pagination = userInput.pagination;
// init journalClient client
const journalClient = JournalFactoryTenantClient.get(tenant);
......@@ -331,18 +334,21 @@ export class DatasetHandler {
req.headers['impersonation-token-context'] as string);
}
// Retrieve the list of datasets metadata
const datasets = await DatasetDAO.list(journalClient, dataset);
const output = await DatasetDAO.list(journalClient, dataset, pagination) as any;
// attach the gcpid for fast check
for (const item of datasets) {
for (const item of output.datasets) {
item.ctag = item.ctag + tenant.gcpid + ';' + DESUtils.getDataPartitionID(tenant.esd);
}
// Retrieve the list of datasets metadata
return datasets;
if (output.nextPageCursor) {
return output;
}
return output.datasets;
}
// delete a dataset
......@@ -834,7 +840,7 @@ export class DatasetHandler {
// check if the dataset does not exist
const lockKey = datasetIN.tenant + '/' + datasetIN.subproject + datasetIN.path + datasetIN.name;
if (!dataset) {
if(await Locker.getLock(lockKey)) {
if (await Locker.getLock(lockKey)) {
// if a previous call fails, the dataset is not created but the lock is acquired and not released
await Locker.unlock(lockKey);
return;
......
......@@ -14,6 +14,7 @@
// limitations under the License.
// ============================================================================
export interface IDatasetModel {
name: string;
tenant: string;
......@@ -46,4 +47,9 @@ export interface IPaginationModel {
export interface IDatasetAcl {
admins: string[],
viewers: string[];
}
export interface PaginatedDatasetList {
datasets: IDatasetModel[];
nextPageCursor: string;
}
\ No newline at end of file
......@@ -109,10 +109,28 @@ export class DatasetParser {
req.query.seismicmeta === 'true'];
}
public static list(req: expRequest): DatasetModel {
public static list(req: expRequest): any {
const dataset = this.createDatasetModelFromRequest(req);
dataset.gtags = req.query.gtag;
return dataset;
const limit = parseInt(req.query.limit, 10);
if (limit < 0) {
throw (Error.make(Error.Status.BAD_REQUEST,
'The \'limit\' query parameter can not be less than zero.'));
}
const cursor = req.query.cursor as string;
if (cursor === '') {
throw (Error.make(Error.Status.BAD_REQUEST,
'The \'cursor\' query parameter can not be empty if supplied'));
}
let pagination = null;
if (limit || cursor) {
pagination = { limit, cursor };
}
// Retrieve the list of datasets metadata
return { dataset, pagination };
}
public static delete(req: expRequest): DatasetModel {
......
......@@ -14,13 +14,12 @@
// limitations under the License.
// ============================================================================
import sinon from 'sinon';
import { Datastore } from '@google-cloud/datastore';
import { Entity } from '@google-cloud/datastore/build/src/entity';
import { RunQueryResponse } from '@google-cloud/datastore/build/src/query';
import { google } from '../../../src/cloud/providers';
import sinon from 'sinon';
import { Config } from '../../../src/cloud';
import { google } from '../../../src/cloud/providers';
import { RecordLatency } from '../../../src/metrics';
import { DatasetModel } from '../../../src/services/dataset';
import { DatasetDAO } from '../../../src/services/dataset/dao';
......@@ -28,6 +27,7 @@ import { Locker } from '../../../src/services/dataset/locker';
import { IPaginationModel } from '../../../src/services/dataset/model';
import { Tx } from '../utils';
export class TestDataset {
private static dataset: DatasetModel;
......@@ -82,13 +82,13 @@ export class TestDataset {
Tx.test(async (done: any) => {
this.journal.save.resolves({} as never);
await DatasetDAO.register(this.journal, { key: {'key': 'dataset_key'}, data: TestDataset.dataset });
await DatasetDAO.register(this.journal, { key: { 'key': 'dataset_key' }, data: TestDataset.dataset });
done();
});
Tx.test(async (done: any) => {
this.journal.save.resolves();
await DatasetDAO.register(this.journal, { key: {'key': 'dataset_key'}, data: TestDataset.dataset });
await DatasetDAO.register(this.journal, { key: { 'key': 'dataset_key' }, data: TestDataset.dataset });
done();
});
}
......@@ -186,7 +186,7 @@ export class TestDataset {
this.journal.runQuery.resolves([expectedResult, undefined]);
this.sandbox.stub(DatasetDAO, 'fixOldModel').resolves(expectedResult[0]);
const result = await DatasetDAO.list(this.journal, this.dataset);
const result = await DatasetDAO.list(this.journal, this.dataset, null);
Tx.checkTrue(
this.journal.runQuery.calledWith(query) && result[0] === expectedResult[0],
......@@ -231,7 +231,7 @@ export class TestDataset {
this.journal.runQuery.resolves([expectedResult, undefined]);
this.sandbox.stub(DatasetDAO, 'fixOldModel').resolves(expectedResult[0]);
const result = await DatasetDAO.list(this.journal, this.dataset);
const result = await DatasetDAO.list(this.journal, this.dataset, null);
Tx.checkTrue(
this.journal.runQuery.calledWith(query) && result[0] === expectedResult[0],
......@@ -274,7 +274,7 @@ export class TestDataset {
this.journal.getQueryFilterSymbolContains.returns('=');
this.sandbox.stub(DatasetDAO, 'fixOldModel').resolves(expectedResult[0]);
const result = await DatasetDAO.list(this.journal, this.dataset);
const result = await DatasetDAO.list(this.journal, this.dataset, null);
Tx.checkTrue(
this.journal.runQuery.calledWith(query) && result[0] === expectedResult[0],
......@@ -320,7 +320,7 @@ export class TestDataset {
this.journal.getQueryFilterSymbolContains.returns('=');
this.sandbox.stub(DatasetDAO, 'fixOldModel').resolves(expectedResult[0]);
const result = await DatasetDAO.list(this.journal, this.dataset);
const result = await DatasetDAO.list(this.journal, this.dataset, null);
Tx.checkTrue(
this.journal.runQuery.calledWith(query) && result[0] === expectedResult[0],
......@@ -329,7 +329,7 @@ export class TestDataset {
});
Tx.test(async (done: any) => {
Config.CLOUDPROVIDER='azure'
Config.CLOUDPROVIDER = 'azure';
this.dataset.gtags = ['tagA', 'tagB'];
const expectedResult = [
{
......@@ -367,7 +367,7 @@ export class TestDataset {
this.journal.getQueryFilterSymbolContains.returns('CONTAINS');
this.sandbox.stub(DatasetDAO, 'fixOldModel').resolves(expectedResult[0]);
const result = await DatasetDAO.list(this.journal, this.dataset);
const result = await DatasetDAO.list(this.journal, this.dataset, null);
Tx.checkTrue(
this.journal.runQuery.calledWith(query) && result[0] === expectedResult[0],
......
......@@ -444,7 +444,7 @@ export class TestDatasetSVC {
Tx.testExp(async (done: any, expReq: expRequest, expRes: expResponse) => {
this.sandbox.stub(TenantDAO, 'get').resolves({} as any);
this.sandbox.stub(Auth, 'isReadAuthorized').resolves(undefined);
this.sandbox.stub(DatasetDAO, 'list').resolves([{}] as DatasetModel[]);
this.sandbox.stub(DatasetDAO, 'list').resolves({ datasets: [{} as DatasetModel], nextPageCursor: null });
this.sandbox.stub(Auth, 'isLegalTagValid').resolves(true);
this.sandbox.stub(SubProjectDAO, 'get').resolves(this.testSubProject);
this.sandbox.stub(DESUtils, 'getDataPartitionID').returns('datapartition');
......@@ -1039,7 +1039,7 @@ export class TestDatasetSVC {
this.journal.runQuery.resolves([[{}], {}] as never);
this.sandbox.stub(DatasetDAO, 'fixOldModel').resolves();
this.sandbox.stub(SubProjectDAO, 'get').resolves(this.testSubProject);
await DatasetDAO.list(this.journal, this.dataset);
await DatasetDAO.list(this.journal, this.dataset, null);
done();
});
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment