Commit 8316e70a authored by Diego Molteni's avatar Diego Molteni
Browse files

Merge branch 'master' into slb/dm3/fix-correlation-id

parents 3e5bd55d f52f2a3e
Pipeline #98537 failed with stages
in 17 minutes and 10 seconds
apiVersion: v2
name: seismic-store-service
name: osdu-ddms
appVersion: "latest"
description: Helm Chart for installing sdms service.
version: 0.1.0
......
......@@ -986,6 +986,157 @@ paths:
404:
description: "Not found."
/utility/upload-connection-string:
get:
summary: "Generate the upload connection credentials string"
description: "<ul>
<li>Generate the upload connection credential string for a subproject collection or a dataset, depending on the applied access policy (uniform/dataset).</li>
<li>These credentials can be used via CSP SDK, on client side, to perform bulk upload.</li>
<li>
The endpoint response is CSP (Cloud Solution Provider) dependent:
<ul>
<br/><li><b>Azure</b>: shared access signature (SaS) Url token
<br/>
<br/>{
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;access_token: 'https://{accountName}.blob.core.windows.net/{containerName}?{SASQueryParameters}`'
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;expires_in: 3599
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;token_type: 'SasUrl'
<br/>}
<br/><br/></li>
<li><b>Google</b>: standard access token credential signed and down-scoped by google
<br/>
<br/>{
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;access_token: 'google_signed_access_token'
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;expires_in: 3600
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;token_type: 'Bearer'
<br/>}
<br/><br/></li>
<li><b>AWS</b>: colon separated string containing the access key id, the access key secret and the session token
<br/>
<br/>{
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;access_token: 'accessKeyId:secretAccessKey:sessionToken'
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;expires_in: 3599
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;token_type: 'Bearer'
<br/>}
<br/><br/></li>
<li><b>IBM</b>: colon separated string containing the access key id, the access key secret and the session token
<br/>
<br/>{
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;access_token: 'accessKeyId:secretAccessKey:sessionToken'
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;expires_in: 7200
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;token_type: 'Bearer'
<br/>}
<br/><br/></li>
</ul>
</li>
<li>Required roles: subproject.admin, subproject.editor, subproject.viewer</li>
</ul>"
operationId: upload-connection-string
tags:
- Utility
parameters:
- description: "The impersonation token context (required only with impersonation token credentials)"
in: header
name: impersonation-token-context
type: string
required: false
default: ""
- description: "Seismic store path in the format sd://tenant/subproject (for uniform applied policies) or sd://tenant/subproject/dataset (for dataset applied policies)"
in: query
name: sdpath
type: string
required: true
default: "sd://opendes/sandbox"
responses:
200:
description: "The upload connection credential string."
schema:
$ref: "#/definitions/AccessToken"
400:
description: "Bad request."
401:
description: "Unauthorized."
403:
description: "Forbidden."
404:
description: "Not found."
/utility/download-connection-string:
get:
summary: "Generate the download connection credentials string"
description: "<ul>
<li>Generate the download connection credential string for a subproject collection or a dataset, depending on the applied access policy (uniform/dataset).</li>
<li>These credentials can be used via CSP SDK, on client side, to perform bulk download.</li>
<li>
The endpoint response is CSP (Cloud Solution Provider) dependent:
<ul>
<br/><li><b>Azure</b>: shared access signature (SaS) Url token
<br/>
<br/>{
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;access_token: 'https://{accountName}.blob.core.windows.net/{containerName}?{SASQueryParameters}`'
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;expires_in: 3599
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;token_type: 'SasUrl'
<br/>}
<br/><br/></li>
<li><b>Google</b>: standard access token credential signed and down-scoped by google
<br/>
<br/>{
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;access_token: 'google_signed_access_token'
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;expires_in: 3600
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;token_type: 'Bearer'
<br/>}
<br/><br/></li>
<li><b>AWS</b>: colon separated string containing the access key id, the access key secret and the session token
<br/>
<br/>{
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;access_token: 'accessKeyId:secretAccessKey:sessionToken'
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;expires_in: 3599
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;token_type: 'Bearer'
<br/>}
<br/><br/></li>
<li><b>IBM</b>: colon separated string containing the access key id, the access key secret and the session token
<br/>
<br/>{
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;access_token: 'accessKeyId:secretAccessKey:sessionToken'
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;expires_in: 7200
<br/>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;token_type: 'Bearer'
<br/>}
<br/><br/></li>
</ul>
</li>
<li>Required roles: subproject.admin, subproject.editor, subproject.viewer</li>
</ul>"
operationId: download-connection-string
tags:
- Utility
parameters:
- description: "The impersonation token context (required only with impersonation token credentials)"
in: header
name: impersonation-token-context
type: string
required: false
default: ""
- description: "Seismic store path in the format sd://tenant/subproject (for uniform applied policies) or sd://tenant/subproject/dataset (for dataset applied policies)"
in: query
name: sdpath
type: string
required: true
default: "sd://opendes/sandbox"
responses:
200:
description: "The download connection credential string."
schema:
$ref: "#/definitions/AccessToken"
400:
description: "Bad request."
401:
description: "Unauthorized."
403:
description: "Forbidden."
404:
description: "Not found."
/imptoken:
post:
summary: "Generate impersonation credentials token."
......@@ -1985,15 +2136,15 @@ definitions:
properties:
access_token:
type: string
description: Access token.
description: connection string credentials or standard access token (CSP dependent)
token_type:
type: string
description: Type of the token.
description: token type (Bearer, SasUrl, ....).
expires_in:
type: number
description: Time in seconds for expiration of the access token.
description: expiration time (in seconds) of the connection string credentials
example:
access_token: "ya29.fgdgsdngevrjbinb0exdnberoibnerbnerber-fdsfwefwe_cece.rfd43f3"
access_token: "header.payload.signature"
token_type: "Bearer"
expires_in: 3600
......
......@@ -18,6 +18,7 @@ import Bull from 'bull';
import { Request as expRequest, Response as expResponse } from 'express';
import { Auth, AuthRoles } from '../../auth';
import { Config, CredentialsFactory, JournalFactoryTenantClient, StorageFactory } from '../../cloud';
import { IAccessTokenModel } from '../../cloud/credentials';
import { IDESEntitlementGroupModel } from '../../cloud/dataecosystem';
import { SeistoreFactory } from '../../cloud/seistore';
import { StorageJobManager } from '../../cloud/shared/queue';
......@@ -44,14 +45,85 @@ export class UtilityHandler {
} else if (op === UtilityOP.CP) {
const response = await this.cp(req);
Response.writeOK(res, { 'status': response.status }, response.code);
} else if (op === UtilityOP.UPLOAD_CONNECTION_STRING) {
Response.writeOK(res, await this.getConnectionString(req, false));
} else if (op === UtilityOP.DOWNLOAD_CONNECTION_STRING) {
Response.writeOK(res, await this.getConnectionString(req, true));
} else if (op === UtilityOP.STORAGE_TIERS) {
Response.writeOK(res, await this.listStorageTiers(req));
} else {
throw (Error.make(Error.Status.UNKNOWN, 'Internal Server Error'));
}
else { throw (Error.make(Error.Status.UNKNOWN, 'Internal Server Error')); }
} catch (error) { Response.writeError(res, error); }
}
// ------------------------------------------------------------------
// get the connection credentials string token
//
// Required role:
//
// - for connection string with subproject access:
// - read write access request: subproject.admin
// - read only access request: subproject.viewer
//
// - for connection string with dataset access:
// - read write access request:
// - subproject.admin if the subproject access policy = uniform
// - dataset.admin if the subproject access policy = dataset
// - read only access request:
// - subproject.viewer if the subproject access policy = uniform
// - dataset.viewer if the subproject access policy = dataset
// ------------------------------------------------------------------
// ------------------------------------------------------------------
// Build the CSP-specific connection credentials string for either a
// whole subproject collection or a single dataset (decided by whether
// the parsed sdpath carries a dataset name).
//
// Required role:
//
// - for connection string with subproject access:
//   - read write access request: subproject.admin
//   - read only access request: subproject.viewer
//
// - for connection string with dataset access:
//   - read write access request:
//     - subproject.admin if the subproject access policy = uniform
//     - dataset.admin if the subproject access policy = dataset
//   - read only access request:
//     - subproject.viewer if the subproject access policy = uniform
//     - dataset.viewer if the subproject access policy = dataset
// ------------------------------------------------------------------
private static async getConnectionString(req: expRequest, readOnly: boolean): Promise<IAccessTokenModel> {
// feature-gated: resolves to undefined when storage credentials are disabled
if (!FeatureFlags.isEnabled(Feature.STORAGE_CREDENTIALS)) return;
const target = UtilityParser.connectionString(req);
const tenant = await TenantDAO.get(target.tenant);
const journal = JournalFactoryTenantClient.get(tenant);
const subproject = await SubProjectDAO.get(journal, target.tenant, target.subproject);
const dataPartitionId = DESUtils.getDataPartitionID(tenant.esd);
// the role to check: writers need admin, readers only viewer
const requiredRole = readOnly ? AuthRoles.viewer : AuthRoles.admin;
let bucket: string;
let virtualFolder: string;
let authGroups: string[];
if (target.name) {
// dataset-scoped connection string: resolve the dataset record and
// derive bucket + virtual folder from its resource uri
const dataset = subproject.enforce_key ?
await DatasetDAO.getByKey(journal, target) :
(await DatasetDAO.get(journal, target))[0];
authGroups = DatasetAuth.getAuthGroups(subproject, dataset, requiredRole);
bucket = DatasetUtils.getBucketFromDatasetResourceUri(dataset.gcsurl);
virtualFolder = DatasetUtils.getVirtualFolderFromDatasetResourceUri(dataset.gcsurl);
} else {
// subproject-scoped connection string (virtualFolder stays undefined)
authGroups = SubprojectAuth.getAuthGroups(subproject, requiredRole);
bucket = subproject.gcs_bucket;
}
// authorize the caller against the computed entitlement groups
if (readOnly) {
await Auth.isReadAuthorized(req.headers.authorization,
authGroups,
tenant, subproject.name, req[Config.DE_FORWARD_APPKEY],
req.headers['impersonation-token-context'] as string);
} else {
await Auth.isWriteAuthorized(req.headers.authorization,
authGroups,
tenant, subproject.name, req[Config.DE_FORWARD_APPKEY],
req.headers['impersonation-token-context'] as string);
}
// delegate credential generation to the CSP-specific provider
return await CredentialsFactory.build(Config.CLOUDPROVIDER).getStorageCredentials(
subproject.tenant, subproject.name, bucket, readOnly, dataPartitionId, virtualFolder);
}
// Generate the storage access token
// Required role:
// - for subproject path request:
......
......@@ -14,4 +14,4 @@
// limitations under the License.
// ============================================================================
export enum UtilityOP { LS, CP, GCSTOKEN, STORAGE_TIERS }
// Operations exposed by the /utility routes and dispatched by UtilityHandler.handler()
export enum UtilityOP { LS, CP, GCSTOKEN, STORAGE_TIERS, UPLOAD_CONNECTION_STRING, DOWNLOAD_CONNECTION_STRING }
......@@ -96,6 +96,31 @@ export class UtilityParser {
return { sdPath, wmode, pagination };
}
// Parse and validate the 'sdpath' query parameter for the
// upload/download connection-string endpoints.
// Accepts either a subproject path (sd://tenant/subproject) or a
// dataset path (sd://tenant/subproject/dataset); throws BAD_REQUEST
// on anything else. Returns a DatasetModel whose 'name' is undefined
// for subproject-level requests.
public static connectionString(req: expRequest): DatasetModel {
Params.checkString(req.query.sdpath, 'sdpath');
const resourcePath = SDPath.getFromString(req.query.sdpath as string);
if (!resourcePath) {
throw (Error.make(Error.Status.BAD_REQUEST,
'The \'sdpath\' query parameter is not a valid seismic store resource path.'));
}
if (!resourcePath.subproject) {
throw (Error.make(Error.Status.BAD_REQUEST,
'The \'sdpath\' query parameter must be a subproject or a dataset resource path.'));
}
return {
name: resourcePath.dataset,
tenant: resourcePath.tenant,
subproject: resourcePath.subproject,
path: resourcePath.path,
} as DatasetModel;
}
public static gcsToken(req: expRequest): { sdPath: SDPathModel, readOnly: boolean; dataset: DatasetModel } {
Params.checkString(req.query.sdpath, 'sdpath');
......@@ -114,7 +139,7 @@ export class UtilityParser {
const dataset: DatasetModel = {} as DatasetModel;
if (sdPath.dataset) {
this.constructDatasetModel(dataset, sdPath, req);
this.constructDatasetModel(dataset, sdPath);
}
Params.checkString(req.query.readonly, 'readonly', false);
......@@ -132,7 +157,7 @@ export class UtilityParser {
}
private static constructDatasetModel(dataset: DatasetModel, sdPath: SDPathModel, req: expRequest) {
private static constructDatasetModel(dataset: DatasetModel, sdPath: SDPathModel) {
dataset.name = sdPath.dataset;
dataset.tenant = sdPath.tenant;
dataset.subproject = sdPath.subproject;
......
......@@ -35,6 +35,17 @@ router.get('/gcs-access-token', async (req: expRequest, res: expResponse) => {
await UtilityHandler.handler(req, res, UtilityOP.GCSTOKEN);
});
// GET /utility/upload-connection-string
// CSP-dependent credentials string for bulk upload (see UtilityOP.UPLOAD_CONNECTION_STRING)
router.get('/upload-connection-string', async (req: expRequest, res: expResponse) => {
await UtilityHandler.handler(req, res, UtilityOP.UPLOAD_CONNECTION_STRING);
});
// GET /utility/download-connection-string
// CSP-dependent credentials string for bulk download (see UtilityOP.DOWNLOAD_CONNECTION_STRING)
router.get('/download-connection-string', async (req: expRequest, res: expResponse) => {
await UtilityHandler.handler(req, res, UtilityOP.DOWNLOAD_CONNECTION_STRING);
});
// GET /utility/storage-tiers
// list of storage tiers supported by the service
router.get('/storage-tiers', async (req: expRequest, res: expResponse) => {
await UtilityHandler.handler(req, res, UtilityOP.STORAGE_TIERS);
});
......
This diff is collapsed.
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment