Commit e6ab54aa authored by Varunkumar Manohar, committed by Sacha Brants

feat: enable dataset-level ACL-based authorization - Part 3

parent cc64b65b
@@ -38,15 +38,15 @@ variables:
OSDU_GCP_DES_SERVICE_HOST_STORAGE: https://os-storage-attcrcktoa-uc.a.run.app/api
OSDU_GCP_ENV_VARS: CLOUDPROVIDER=${OSDU_GCP_CLOUD_PROVIDER},DES_SERVICE_HOST_PARTITION=${OSDU_GCP_PARTITION_API},ENTITLEMENT_BASE_URL_PATH=${OSDU_GCP_ENTITLEMENT_BASE_URL_PATH},DATA_PARTITION_REST_HEADER_KEY=${OSDU_GCP_DATA_PARTITION_REST_HEADER_KEY},DES_SERVICE_HOST_STORAGE=${OSDU_GCP_DES_SERVICE_HOST_STORAGE},DES_SERVICE_HOST_COMPLIANCE=${OSDU_GCP_DES_SERVICE_HOST_COMPLIANCE},SEISTORE_DES_TARGET_AUDIENCE=${GOOGLE_AUDIENCE},SERVICE_CLOUD_PROJECT=${OSDU_GCP_PROJECT},APP_ENVIRONMENT_IDENTIFIER=${TENANT},IMP_SERVICE_ACCOUNT_SIGNER=${OSDU_GCP_IMP_SERVICE_ACCOUNT_SIGNER},DES_SERVICE_HOST_ENTITLEMENT=${OSDU_GCP_ENTITLEMENTS_V2_BASE_URL},SEISTORE_DES_APPKEY=${OSDU_GCP_SEISTORE_DES_APPKEY},DES_REDIS_INSTANCE_ADDRESS=${OSDU_GCP_DES_REDIS_INSTANCE_ADDRESS},DES_REDIS_INSTANCE_PORT=${OSDU_GCP_DES_REDIS_INSTANCE_PORT},LOCKSMAP_REDIS_INSTANCE_ADDRESS=${OSDU_GCP_LOCKSMAP_REDIS_INSTANCE_ADDRESS} --vpc-connector=$OSDU_GCP_VPC_CONNECTOR
include:
# pipeline logic
- project: "osdu/platform/ci-cd-pipelines"
file: "standard-setup.yml"
# build
- project: "osdu/platform/ci-cd-pipelines"
file: "build/seismic-store-service.yml"
# scan
#fossa
- local: "devops/osdu/scanners/fossa-node.yml"
@@ -56,7 +56,7 @@ include:
# containerize
- project: "osdu/platform/ci-cd-pipelines"
file: "containerize/seismic-store-service.yml"
# aws
- project: "osdu/platform/ci-cd-pipelines"
file: "cloud-providers/aws-global.yml"
@@ -64,11 +64,11 @@ include:
- local: "/devops/aws/awstest.yml"
# deploy
#azure
- project: "osdu/platform/ci-cd-pipelines"
file: "cloud-providers/azure-seismic-store-service.yml"
#ibm
- project: "osdu/platform/ci-cd-pipelines"
file: "cloud-providers/ibm-seismic-store-service.yml"
@@ -92,7 +92,3 @@ osdu-gcp-test-python:
only:
variables:
- $OSDU_GCP == 'true' && $OSDU_GCP_INT_TEST_TYPE == 'python'
@@ -166,6 +166,11 @@ export abstract class Config implements IConfig {
// The C++ SDK mainly requires a fix on how it behaves on mutable calls.
public static SKIP_WRITE_LOCK_CHECK_ON_MUTABLE_OPERATIONS = true;
// The access policy of a subproject can be either uniform or dataset
public static UNIFORM_ACCESS_POLICY = 'uniform';
public static DATASET_ACCESS_POLICY = 'dataset';
public static setCloudProvider(cloudProvider: string) {
Config.CLOUDPROVIDER = cloudProvider;
if (Config.CLOUDPROVIDER === undefined) {
......
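The two constants above replace bare 'uniform'/'dataset' string literals in the handlers below. A minimal sketch of the guard they enable, assuming a subproject metadata record with an access_policy field (illustrative, not an excerpt from this commit):

    // Reject any access policy other than the two supported values.
    const accessPolicy = subprojectMetadata.access_policy;
    if (accessPolicy !== Config.UNIFORM_ACCESS_POLICY &&
        accessPolicy !== Config.DATASET_ACCESS_POLICY) {
        throw Error.make(Error.Status.PERMISSION_DENIED,
            'Access policy for the subproject is neither uniform nor dataset');
    }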
@@ -25,7 +25,7 @@ export interface IAccessTokenModel {
export interface ICredentials {
getStorageCredentials(
tenant: string, subproject: string,
bucket: string, readonly: boolean, partitionID: string): Promise<IAccessTokenModel>;
bucket: string, readonly: boolean, partitionID: string, objectPrefix?: string): Promise<IAccessTokenModel>;
getServiceAccountAccessToken(): Promise<IAccessTokenModel>;
getIAMResourceUrl(serviceSigner: string): string;
getAudienceForImpCredentials(): string;
@@ -35,7 +35,7 @@ export interface ICredentials {
export abstract class AbstractCredentials implements ICredentials {
public abstract getStorageCredentials(
tenant: string, subproject: string,
bucket: string, readonly: boolean, partitionID: string): Promise<IAccessTokenModel>;
bucket: string, readonly: boolean, partitionID: string, objectPrefix?: string): Promise<IAccessTokenModel>;
public abstract getServiceAccountAccessToken(): Promise<IAccessTokenModel>;
public abstract getIAMResourceUrl(serviceSigner: string): string;
public abstract getAudienceForImpCredentials(): string;
@@ -43,7 +43,7 @@ export abstract class AbstractCredentials implements ICredentials {
}
export class CredentialsFactory extends CloudFactory {
public static build(providerLabel: string, args: { [key: string]: any } = {}): ICredentials {
public static build(providerLabel: string, args: { [key: string]: any; } = {}): ICredentials {
return CloudFactory.build(providerLabel, AbstractCredentials, args) as ICredentials;
}
}
......
@@ -16,12 +16,12 @@
import jwttoken from 'jsonwebtoken';
import request from 'request-promise';
import { Config } from '../../../cloud';
import { Error, Utils } from '../../../shared';
import { AbstractCredentials, CredentialsFactory, IAccessTokenModel } from '../../credentials';
import { ConfigGoogle } from './config';
interface IDTokenModel {
id_token: string;
}
@@ -40,11 +40,11 @@ export class Credentials extends AbstractCredentials {
public async getStorageCredentials(
tenant: string, subproject: string,
bucket: string, readonly: boolean, _partition: string): Promise<IAccessTokenModel> {
bucket: string, readonly: boolean, _partition: string, objectPrefix?: string): Promise<IAccessTokenModel> {
const serviceAccessToken = await this.getServiceAccountAccessToken(false);
const serviceAccessTokenDownscoped = await this.exchangeJwtWithDownScopedAccessToken(
serviceAccessToken.access_token, bucket, readonly);
serviceAccessToken.access_token, bucket, readonly, objectPrefix);
return {
access_token: serviceAccessTokenDownscoped.access_token,
@@ -54,27 +54,46 @@ export class Credentials extends AbstractCredentials {
}
private async exchangeJwtWithDownScopedAccessToken(accessToken: string,
bucket: string, readonly: boolean): Promise<IDownScopedToken> {
bucket: string, readonly: boolean, objectPrefix?: string): Promise<IDownScopedToken> {
try {
return JSON.parse(await request.post({
const accessBoundary = {
'accessBoundaryRules': [
{
'availableResource': '//storage.googleapis.com/projects/_/buckets/' + bucket,
'availablePermissions': [
'inRole:roles/' + (readonly ? 'storage.objectViewer' : 'storage.objectAdmin')
],
}
]
};
if (objectPrefix) {
accessBoundary.accessBoundaryRules[0]['availabilityCondition'] = {
'title': 'obj-prefixes',
'expression': 'resource.name.startsWith(\"projects/_/buckets/' +
bucket + '/objects/' + objectPrefix + '\")'
};
}
const requestOptions = {
form: {
access_boundary: JSON.stringify({
accessBoundaryRules: [{
availablePermissions: [
'inRole:roles/' + (readonly ? 'storage.objectViewer' : 'storage.objectAdmin')],
availableResource: '//storage.googleapis.com/projects/_/buckets/' + bucket,
}],
}),
grant_type: 'urn:ietf:params:oauth:grant-type:token-exchange',
requested_token_type: 'urn:ietf:params:oauth:token-type:access_token',
subject_token: accessToken,
subject_token_type: 'urn:ietf:params:oauth:token-type:access_token',
'grant_type': 'urn:ietf:params:oauth:grant-type:token-exchange',
'subject_token_type': 'urn:ietf:params:oauth:token-type:access_token',
'requested_token_type': 'urn:ietf:params:oauth:token-type:access_token',
'subject_token': accessToken,
'options': JSON.stringify({
'accessBoundary': accessBoundary
})
},
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
},
url: 'https://securetoken.googleapis.com/v2beta1/token',
}));
url: 'https://sts.googleapis.com/v1beta/token',
};
return JSON.parse(await request.post(requestOptions));
} catch (error) {
throw (Error.makeForHTTPRequest(error));
}
......
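The exchange above swaps the service account token for a downscoped token at the STS endpoint (sts.googleapis.com/v1beta/token, replacing the older securetoken.googleapis.com URL), attaching a Credential Access Boundary; when objectPrefix is set, an availabilityCondition further restricts the token to objects under that prefix. A usage sketch, with made-up tenant, bucket, and prefix values:

    // Illustrative only: all literal values below are placeholders.
    async function issueDatasetReadToken(): Promise<string> {
        const credentials = CredentialsFactory.build(Config.CLOUDPROVIDER);
        // Read-only token valid solely for objects whose names start with
        // projects/_/buckets/ss-demo-bucket/objects/dataset-01/
        const downscoped = await credentials.getStorageCredentials(
            'demo-tenant', 'demo-subproject', 'ss-demo-bucket',
            true, 'demo-partition', 'dataset-01/');
        return downscoped.access_token;
    }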
@@ -114,9 +114,9 @@ export class DatasetHandler {
const subprojectMetadata = await SubProjectDAO.get(journalClient, tenant.name, subproject.name);
const subprojectAccessPolicy = subprojectMetadata.access_policy;
if (subprojectAccessPolicy === 'uniform') {
if (subprojectAccessPolicy === Config.UNIFORM_ACCESS_POLICY) {
throw Error.make(Error.Status.BAD_REQUEST,
'Subproject access policy is set to uniform and so the dataset acls cannot be applied. Patch the subproject access policy to dataset and attempt this operation again.');
'Subproject access policy is set to uniform and so acls cannot be applied. Patch the subproject access policy to dataset and attempt this operation again.');
}
}
@@ -271,17 +271,30 @@ export class DatasetHandler {
// Check whether to retrieve the seismic metadata storage record
const getSeismicMeta = datasetOUT.seismicmeta_guid !== undefined && userInput[1];
// Check if user has read access and legal tag is valid
await Promise.all([
FeatureFlags.isEnabled(Feature.AUTHORIZATION) ?
Auth.isReadAuthorized(req.headers.authorization,
subproject.acls.viewers.concat(subproject.acls.admins),
datasetIN.tenant, datasetIN.subproject, tenant.esd, req[Config.DE_FORWARD_APPKEY]) : undefined,
FeatureFlags.isEnabled(Feature.LEGALTAG) ?
datasetOUT.ltag ? Auth.isLegalTagValid(
req.headers.authorization, datasetOUT.ltag,
tenant.esd, req[Config.DE_FORWARD_APPKEY]) : undefined : undefined
]);
// Check if legal tag is valid
if (FeatureFlags.isEnabled(Feature.LEGALTAG) && datasetOUT.ltag) {
await Auth.isLegalTagValid(req.headers.authorization, datasetOUT.ltag,
tenant.esd, req[Config.DE_FORWARD_APPKEY]);
}
// Use the access policy to determine which groups to fetch for read authorization
if (FeatureFlags.isEnabled(Feature.AUTHORIZATION)) {
let authGroups = [];
if (subproject.access_policy === Config.UNIFORM_ACCESS_POLICY) {
authGroups = subproject.acls.viewers.concat(subproject.acls.admins);
} else if (subproject.access_policy === Config.DATASET_ACCESS_POLICY) {
authGroups = datasetOUT.acls ? datasetOUT.acls.viewers.concat(datasetOUT.acls.admins)
: subproject.acls.viewers.concat(subproject.acls.admins);
} else {
throw (Error.make(Error.Status.PERMISSION_DENIED, 'Access policy for the subproject is neither uniform nor dataset'
));
}
await Auth.isReadAuthorized(req.headers.authorization, authGroups,
datasetIN.tenant, datasetIN.subproject, tenant.esd,
req[Config.DE_FORWARD_APPKEY]);
}
// return the seismicmeta record (if it exists)
if (getSeismicMeta) {
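The policy-to-groups selection introduced here recurs, with minor variations, in the delete, patch, lock-check, unlock, and permission handlers below. A hypothetical helper (not part of this commit; the model type names are assumed) capturing the shared pattern:

    // Resolve the ACL groups to authorize against. Dataset-level acls
    // apply only under the dataset access policy and fall back to the
    // subproject acls when the dataset carries none of its own.
    function selectAuthGroups(
        subproject: SubProjectModel,
        datasetAcls: { admins: string[]; viewers: string[]; } | undefined,
        forWrite: boolean): string[] {
        let acls = subproject.acls;
        if (subproject.access_policy === Config.DATASET_ACCESS_POLICY) {
            acls = datasetAcls || subproject.acls;
        } else if (subproject.access_policy !== Config.UNIFORM_ACCESS_POLICY) {
            throw Error.make(Error.Status.PERMISSION_DENIED,
                'Access policy for the subproject is neither uniform nor dataset');
        }
        return forWrite ? acls.admins : acls.viewers.concat(acls.admins);
    }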
@@ -349,13 +362,6 @@ export class DatasetHandler {
// init datastore client
const journalClient = JournalFactoryTenantClient.get(tenant);
// check authorization (write)
if (FeatureFlags.isEnabled(Feature.AUTHORIZATION)) {
await Auth.isWriteAuthorized(req.headers.authorization,
subproject.acls.admins,
tenant.name, subproject.name, tenant.esd, req[Config.DE_FORWARD_APPKEY]);
}
// Retrieve the dataset metadata
const dataset = subproject.enforce_key ?
await DatasetDAO.getByKey(journalClient, datasetIn) :
@@ -364,6 +370,25 @@ export class DatasetHandler {
// if the dataset does not exist return ok
if (!dataset) { return; }
// check authorization (write)
if (FeatureFlags.isEnabled(Feature.AUTHORIZATION)) {
let authGroups = [];
const accessPolicy = subproject.access_policy;
if (accessPolicy === Config.UNIFORM_ACCESS_POLICY) {
authGroups = subproject.acls.admins;
} else if (accessPolicy === Config.DATASET_ACCESS_POLICY) {
authGroups = dataset.acls ? dataset.acls.admins : subproject.acls.admins;
} else {
throw (Error.make(Error.Status.PERMISSION_DENIED, 'Access policy for the subproject is neither uniform nor dataset'
));
}
await Auth.isWriteAuthorized(req.headers.authorization,
authGroups,
tenant.name, subproject.name, tenant.esd, req[Config.DE_FORWARD_APPKEY]);
}
// check if valid url
if (!dataset.gcsurl || dataset.gcsurl.indexOf('/') === -1) {
throw (Error.make(Error.Status.UNKNOWN,
@@ -448,7 +473,7 @@ export class DatasetHandler {
const subprojectMetadata = await SubProjectDAO.get(journalClient, tenant.name, subproject.name);
const subprojectAccessPolicy = subprojectMetadata.access_policy;
if (subprojectAccessPolicy === 'uniform') {
if (subprojectAccessPolicy === Config.UNIFORM_ACCESS_POLICY) {
throw Error.make(Error.Status.BAD_REQUEST,
'Subproject access policy is set to uniform and so the dataset acls cannot be applied. Patch the subproject access policy to dataset and attempt this operation again.');
}
@@ -490,10 +515,22 @@ export class DatasetHandler {
datasetIN.subproject + datasetIN.path + datasetIN.name + ' does not exist'));
}
// If the input request has dataset acls, then the subproject access policy is always dataset
if (FeatureFlags.isEnabled(Feature.AUTHORIZATION)) {
// Check authorizations
let authGroups = [];
const accessPolicy = subproject.access_policy;
if (accessPolicy === Config.UNIFORM_ACCESS_POLICY) {
authGroups = subproject.acls.admins;
} else if (accessPolicy === Config.DATASET_ACCESS_POLICY) {
authGroups = datasetOUT.acls ? datasetOUT.acls.admins : subproject.acls.admins;
} else {
throw (Error.make(Error.Status.PERMISSION_DENIED, 'Access policy is neither uniform nor dataset.'
));
}
await Auth.isWriteAuthorized(req.headers.authorization,
subproject.acls.admins,
authGroups,
datasetIN.tenant, subproject.name, tenant.esd, req[Config.DE_FORWARD_APPKEY]);
}
@@ -627,6 +664,11 @@ export class DatasetHandler {
}
}
// Update the acls if the input request has them
if (datasetIN.acls) {
datasetOUT.acls = datasetIN.acls;
}
if (newName) {
await Promise.all([
DatasetDAO.delete(journalClient, datasetOUT),
@@ -693,23 +735,50 @@ export class DatasetHandler {
datasetIN.subproject + datasetIN.path + datasetIN.name + ' does not exist'));
}
await Promise.all([
FeatureFlags.isEnabled(Feature.AUTHORIZATION) ?
open4write ?
Auth.isWriteAuthorized(req.headers.authorization,
subproject.acls.admins,
datasetIN.tenant, datasetIN.subproject, tenant.esd, req[Config.DE_FORWARD_APPKEY]) :
Auth.isReadAuthorized(req.headers.authorization,
subproject.acls.viewers.concat(subproject.acls.admins),
datasetIN.tenant, datasetIN.subproject, tenant.esd, req[Config.DE_FORWARD_APPKEY]) :
undefined,
FeatureFlags.isEnabled(Feature.LEGALTAG) ?
datasetOUT.ltag ?
Auth.isLegalTagValid(req.headers.authorization, datasetOUT.ltag,
tenant.esd, req[Config.DE_FORWARD_APPKEY]) :
undefined :
undefined
]);
// Check if the legal tag is valid
if (FeatureFlags.isEnabled(Feature.LEGALTAG) && datasetOUT.ltag) {
await Auth.isLegalTagValid(req.headers.authorization, datasetOUT.ltag,
tenant.esd, req[Config.DE_FORWARD_APPKEY]);
}
// Use the access policy to determine which groups to fetch for read or write authorization
if (FeatureFlags.isEnabled(Feature.AUTHORIZATION)) {
let authGroups = [];
const accessPolicy = subproject.access_policy;
if (open4write) {
if (accessPolicy === Config.UNIFORM_ACCESS_POLICY) {
authGroups = subproject.acls.admins;
} else if (accessPolicy === Config.DATASET_ACCESS_POLICY) {
authGroups = datasetOUT.acls ? datasetOUT.acls.admins : subproject.acls.admins;
} else {
throw (Error.make(Error.Status.PERMISSION_DENIED, 'Access policy is neither uniform nor dataset'
));
}
await Auth.isWriteAuthorized(req.headers.authorization,
authGroups,
datasetIN.tenant, datasetIN.subproject, tenant.esd, req[Config.DE_FORWARD_APPKEY]);
} else {
if (accessPolicy === Config.UNIFORM_ACCESS_POLICY) {
authGroups = subproject.acls.viewers.concat(subproject.acls.admins);
} else if (accessPolicy === Config.DATASET_ACCESS_POLICY) {
authGroups = datasetOUT.acls ? datasetOUT.acls.viewers.concat(datasetOUT.acls.admins)
: subproject.acls.viewers.concat(subproject.acls.admins);
} else {
throw (Error.make(Error.Status.PERMISSION_DENIED, 'Access policy is neither uniform nor dataset'
));
}
await Auth.isReadAuthorized(req.headers.authorization, authGroups,
datasetIN.tenant, datasetIN.subproject, tenant.esd,
req[Config.DE_FORWARD_APPKEY]);
}
}
// managing read-only datasets
if (datasetOUT.readonly) {
@@ -765,8 +834,20 @@ export class DatasetHandler {
}
// check if user is write authorized
let authGroups = [];
const accessPolicy = subproject.access_policy;
if (accessPolicy === Config.UNIFORM_ACCESS_POLICY) {
authGroups = subproject.acls.admins;
} else if (accessPolicy === Config.DATASET_ACCESS_POLICY) {
authGroups = dataset.acls ? dataset.acls.admins : subproject.acls.admins;
} else {
throw (Error.make(Error.Status.PERMISSION_DENIED, 'Access policy is neither uniform nor dataset'
));
}
await Auth.isWriteAuthorized(req.headers.authorization,
subproject.acls.admins,
authGroups,
tenant.name, dataset.subproject, tenant.esd, req[Config.DE_FORWARD_APPKEY]);
// unlock
@@ -785,6 +866,7 @@ export class DatasetHandler {
const journalClient = JournalFactoryTenantClient.get(tenant);
if (FeatureFlags.isEnabled(Feature.AUTHORIZATION)) {
await Auth.isReadAuthorized(req.headers.authorization,
subproject.acls.viewers.concat(subproject.acls.admins),
datasets[0].tenant, datasets[0].subproject, tenant.esd, req[Config.DE_FORWARD_APPKEY]);
@@ -924,8 +1006,21 @@ export class DatasetHandler {
}
if (FeatureFlags.isEnabled(Feature.AUTHORIZATION)) {
let authGroups = [];
const accessPolicy = subproject.access_policy;
if (accessPolicy === Config.UNIFORM_ACCESS_POLICY) {
authGroups = subproject.acls.admins;
} else if (accessPolicy === Config.DATASET_ACCESS_POLICY) {
authGroups = datasetOUT.acls ? datasetOUT.acls.admins : subproject.acls.admins;
} else {
throw (Error.make(Error.Status.PERMISSION_DENIED, 'Access policy is neither uniform nor dataset'
));
}
await Auth.isWriteAuthorized(req.headers.authorization,
subproject.acls.admins,
authGroups,
datasetIN.tenant, datasetIN.subproject, tenant.esd, req[Config.DE_FORWARD_APPKEY]);
}
@@ -955,14 +1050,38 @@ export class DatasetHandler {
const res = { read: false, write: false, delete: false };
if (FeatureFlags.isEnabled(Feature.AUTHORIZATION)) {
// Check if the user has write and read access
let authGroups = [];
const accessPolicy = subproject.access_policy;
if (accessPolicy === Config.UNIFORM_ACCESS_POLICY) {
authGroups = subproject.acls.admins;
} else if (accessPolicy === Config.DATASET_ACCESS_POLICY) {
authGroups = dataset.acls ? dataset.acls.admins : subproject.acls.admins;
} else {
throw (Error.make(Error.Status.PERMISSION_DENIED, 'Access policy is neither uniform nor dataset'
));
}
res.write = await Auth.isWriteAuthorized(req.headers.authorization,
subproject.acls.admins,
dataset.tenant, dataset.subproject, tenant.esd, req[Config.DE_FORWARD_APPKEY], false);
res.read = await Auth.isReadAuthorized(
req.headers.authorization,
subproject.acls.viewers.concat(subproject.acls.admins),
authGroups,
dataset.tenant, dataset.subproject, tenant.esd, req[Config.DE_FORWARD_APPKEY], false);
// Check read authorization
if (accessPolicy === Config.UNIFORM_ACCESS_POLICY) {
authGroups = subproject.acls.viewers.concat(subproject.acls.admins);
} else if (accessPolicy === Config.DATASET_ACCESS_POLICY) {
authGroups = dataset.acls ? dataset.acls.viewers.concat(dataset.acls.admins)
: subproject.acls.viewers.concat(subproject.acls.admins);
} else {
throw (Error.make(Error.Status.PERMISSION_DENIED, 'Access policy is neither uniform nor dataset'
));
}
res.read = await Auth.isReadAuthorized(req.headers.authorization, authGroups,
dataset.tenant, dataset.subproject, tenant.esd,
req[Config.DE_FORWARD_APPKEY], false);
} else {
res.write = true;
res.read = true;
......
@@ -72,14 +72,15 @@ export class DatasetParser {
seismicmeta.recordType = ':' + (seismicmeta.kind as string).split(':')[2] + ':';
}
dataset.acls = req.body && 'acls' in req.body ? req.body.acls : undefined;
DatasetParser.validateAcls(dataset);
DatasetParser.validateAcls(dataset, req);
return [dataset, seismicmeta];
}
private static validateAcls(dataset: DatasetModel, req) {
dataset.acls = req.body && 'acls' in req.body ? req.body.acls : undefined;
private static validateAcls(dataset: DatasetModel) {
if (dataset.acls) {
if (!('admins' in dataset.acls) || !('viewers' in dataset.acls)) {
@@ -153,7 +154,7 @@ export class DatasetParser {
const seismicmeta = req.body.seismicmeta;
Params.checkObject(seismicmeta, 'seismicmeta', false);
DatasetParser.validateAcls(dataset, req);
DatasetParser.validateAcls(dataset);
return [dataset, seismicmeta, newName, closeid];
}
......
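For reference, the acls shape the parser accepts on register and patch requests: both admins and viewers must be present whenever acls is supplied. An illustrative body (the entitlement group e-mails are placeholders, not values from this commit):

    const requestBody = {
        acls: {
            admins: ['data.sdms.demo.dataset01.admin@demo-partition.example.com'],
            viewers: ['data.sdms.demo.dataset01.viewer@demo-partition.example.com']
        }
    };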
@@ -15,13 +15,12 @@
// ============================================================================
import { decode as jwtDecode, verify as jwtVerify } from 'jsonwebtoken';
import request from 'request-promise';
import { ImpTokenModel } from '.';
import { CredentialsFactory } from '../../cloud';
import { Config } from '../../cloud';
import { Config, CredentialsFactory } from '../../cloud';
import { Error } from '../../shared';
import { IImpTokenBodyModel as ImpTokenBodyModel, IRefreshUrl } from './model';
import request from 'request-promise';
export class ImpTokenDAO {
@@ -73,19 +72,20 @@ export class ImpTokenDAO {
const options: request.Options = { method: 'GET', url: '' };
if(refreshUrl.startsWith('https://') || refreshUrl.startsWith('http://')) {
if (refreshUrl.startsWith('https://') || refreshUrl.startsWith('http://')) {
options.method = 'GET';
options.headers = {
'Accept': 'application/json',
'Content-Type': 'application/json'};
'Content-Type': 'application/json'
};
options.url = refreshUrl;
}
else {
const refreshUrlOptions = JSON.parse(refreshUrl) as IRefreshUrl;
options.method = refreshUrlOptions.method;
options.url = refreshUrlOptions.url;
if(refreshUrlOptions.headers) options.headers = refreshUrlOptions.headers;
if(refreshUrlOptions.body) options.json = refreshUrlOptions.body;
if (refreshUrlOptions.headers) options.headers = refreshUrlOptions.headers;
if (refreshUrlOptions.body) options.json = refreshUrlOptions.body;
}
try {
@@ -182,4 +182,4 @@
}
}
}
\ No newline at end of file
@@ -42,4 +42,4 @@ export interface IRefreshUrl {
url: string;
headers: object;
body: object;
}
\ No newline at end of file
}
@@ -24,17 +24,17 @@ export class ImpTokenParser {
private static checkRefreshUrl(refreshUrl: any) {
if(refreshUrl.startsWith('https://') || refreshUrl.startsWith('http://')) return;
if (refreshUrl.startsWith('https://') || refreshUrl.startsWith('http://')) return;
try {
refreshUrl = JSON.parse(refreshUrl);
// method/url are mandatory strings; headers/body are optional but, if present, must be objects
if(!(refreshUrl.method && typeof(refreshUrl.method) === 'string'))
if (!(refreshUrl.method && typeof (refreshUrl.method) === 'string'))
throw undefined;
if(!(refreshUrl.url && typeof(refreshUrl.url) === 'string'))
if (!(refreshUrl.url && typeof (refreshUrl.url) === 'string'))
throw undefined;
if(!(!refreshUrl.headers || (refreshUrl.headers && typeof(refreshUrl.headers) === 'object')))
if (!(!refreshUrl.headers || (refreshUrl.headers && typeof (refreshUrl.headers) === 'object')))
throw undefined;
if(!(!refreshUrl.body || (refreshUrl.body && typeof(refreshUrl.body) === 'object')))
if (!(!refreshUrl.body || (refreshUrl.body && typeof (refreshUrl.body) === 'object')))
throw undefined;
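checkRefreshUrl therefore accepts either a bare http(s) URL or a JSON-encoded IRefreshUrl object. Both forms below would pass validation (values illustrative):

    const plainForm = 'https://idp.example.com/refresh';
    const structuredForm = JSON.stringify({
        method: 'POST',                                  // mandatory string
        url: 'https://idp.example.com/refresh',          // mandatory string
        headers: { 'Content-Type': 'application/json' }, // optional object
        body: { grant_type: 'refresh_token' }            // optional object
    });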