From fd7c77d2515c18b86bd0a77610ac55505664fe15 Mon Sep 17 00:00:00 2001
From: Shane Hutchins <shane.hutchins@hitachivantara.com>
Date: Mon, 30 Oct 2023 13:27:56 +0000
Subject: [PATCH] Bugfix: API updates for additional responses, correlation-id
 and data-partition-id validation, the /bootstrap API, and other fixes

---
 .gitlab-ci.yml                                |  9 +++-
 NOTICE                                        |  1 +
 VERSION                                       |  1 +
 app/Makefile                                  |  3 +-
 app/_buildinfo.py                             | 11 ----
 app/_version.py                               |  8 +++
 app/api/backup_api.py                         | 14 +++--
 app/api/bootstrap_api.py                      | 27 +++++++---
 app/api/compile_api.py                        | 21 ++++----
 app/api/config_api.py                         | 52 ++++++++++--------
 app/api/diag_api.py                           | 10 +---
 app/api/health_api.py                         | 20 +++++--
 app/api/info_api.py                           | 36 +++++++++----
 app/api/policy_eval_api.py                    | 34 +++++++-----
 app/api/policy_read_api.py                    | 42 +++++++++++----
 app/api/policy_update_api.py                  | 33 +++++++++---
 app/api/tenant_api.py                         | 48 +++++++++++++----
 app/api/translate_api.py                      | 27 +++++++---
 app/api/validate_api.py                       | 26 +++++----
 app/auth/auth.py                              | 18 +++----
 app/bundles/bundle.py                         | 16 +-----
 app/bundles/providers/aws/storage.py          |  8 ++-
 app/bundles/providers/azure/storage.py        |  7 +--
 app/bundles/providers/ibm/storage.py          |  2 +-
 app/bundles/storage.py                        |  2 -
 app/conf.py                                   |  7 +++
 app/correlation.py                            | 13 ++++-
 app/entitlement.py                            |  6 +--
 app/k8s.py                                    |  8 +--
 app/main.py                                   | 30 +++++------
 app/opa.py                                    | 15 +++---
 app/opa_response.py                           |  1 -
 app/tests/integration/test_integration_010.py |  4 +-
 app/tests/testlib.py                          |  7 ++-
 app/tests/unit/test_api_unit.py               |  5 --
 app/timer.py                                  |  4 +-
 app/translate/convert.py                      | 42 +++++++--------
 app/views/home.py                             |  4 +-
 devops/aws/override-stages.yaml               |  3 ++
 devops/azure/override-stages.yml              |  8 ++-
 requirements.txt                              |  2 +-
 requirements_dev.txt                          |  3 +-
 requirements_setversion.txt                   |  1 +
 setversion.py                                 | 54 +++++++++++++++++++
 44 files changed, 452 insertions(+), 241 deletions(-)
 create mode 100644 VERSION
 delete mode 100644 app/_buildinfo.py
 create mode 100644 app/_version.py
 create mode 100644 requirements_setversion.txt
 create mode 100755 setversion.py

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 2a2e94f8..0800a358 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -92,6 +92,11 @@ build-image:
     - echo ---- ---- ---- SYSTEM DEPENDENCIES ---- ---- ----
     - apk update
     - apk add git
+    - echo ---- ---- ---- PYTHON DEPENDENCIES ---- ---- ----
+    - apk add --no-cache python3 py3-pip
+    - pip install -r requirements_setversion.txt
+    - echo ---- ---- ---- UPDATE BUILD INFO ---- ---- ----
+    - version=$(python3 setversion.py app)
     - echo ---- ---- ---- BUILD IMAGE ---- ---- ----
     - echo IMAGE_TAG $IMAGE_TAG
     - commit_id=$(git log -n 1 --pretty="%h")
@@ -106,12 +111,14 @@ build-image:
     - echo commit_branch $commit_branch
     - COMMIT_IMAGE_NAMETAG=${CI_REGISTRY_IMAGE}:${commit_id}
     - BRANCH_IMAGE_NAMETAG=${CI_REGISTRY_IMAGE}:${CI_COMMIT_REF_SLUG}
+    - VERSION_IMAGE_NAMETAG=${CI_REGISTRY_IMAGE}:${version}
     - echo ---- ---- BUILD IMAGE
-    - docker build -t $IMAGE_TAG -t=$CI_REGISTRY_IMAGE:latest -t $COMMIT_IMAGE_NAMETAG -t $BRANCH_IMAGE_NAMETAG --rm . -f ./build/Dockerfile --build-arg PIP_WHEEL_DIR=python-packages --build-arg build_date="$current_utc_date" --build-arg build_number=$commit_id --build-arg commit_id=$commit_id --build-arg build_origin="Gitlab" --build-arg commit_branch=$commit_branch
+    - docker build -t $IMAGE_TAG -t=$CI_REGISTRY_IMAGE:latest -t $COMMIT_IMAGE_NAMETAG -t $BRANCH_IMAGE_NAMETAG -t $VERSION_IMAGE_NAMETAG --rm . -f ./build/Dockerfile --build-arg PIP_WHEEL_DIR=python-packages --build-arg build_date="$current_utc_date" --build-arg build_number=$commit_id --build-arg commit_id=$commit_id --build-arg build_origin="Gitlab" --build-arg commit_branch=$commit_branch
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
     - echo ---- ---- PUSH IMAGE $IMAGE_TAG
     - docker push $IMAGE_TAG
     - echo ---- ---- PUSH IMAGE $CI_REGISTRY_IMAGE:latest
     - docker push $CI_REGISTRY_IMAGE:latest
+    - docker push $VERSION_IMAGE_NAMETAG
     - docker push $COMMIT_IMAGE_NAMETAG
     - docker push $BRANCH_IMAGE_NAMETAG
diff --git a/NOTICE b/NOTICE
index 7e4ef493..03178a9d 100644
--- a/NOTICE
+++ b/NOTICE
@@ -133,6 +133,7 @@ The following software have components provided under the terms of this license:
 - sniffio (from https://github.com/python-trio/sniffio)
 - starlette-context (from https://github.com/tomwojcik/starlette-context)
 - tomli (from https://pypi.org/project/tomli/1.2.2/, https://pypi.org/project/tomli/2.0.0/, https://pypi.org/project/tomli/2.0.1/)
+- typer (from https://github.com/tiangolo/typer)
 - urllib3 (from https://pypi.org/project/urllib3/2.0.2/, https://pypi.org/project/urllib3/2.0.4/, https://pypi.org/project/urllib3/2.0.7/)
 - uuid7 (from https://github.com/stevesimmons/uuid7)
 
diff --git a/VERSION b/VERSION
new file mode 100644
index 00000000..d21d277b
--- /dev/null
+++ b/VERSION
@@ -0,0 +1 @@
+0.25.0
diff --git a/app/Makefile b/app/Makefile
index 93e56999..6e6a3ef9 100644
--- a/app/Makefile
+++ b/app/Makefile
@@ -18,7 +18,7 @@ OPA_PORT=8181
 .EXPORT_ALL_VARIABLES:
 POLICY_BUCKET=r3m16-515517605230-us-east-1-policy
 GOOGLE_CLOUD_PROJECT=osdu-dev-policy
-DATA_PARTITION=osdu
+DATA_PARTITION := ${DATA_PARTITION}
 OPA_URL = http://$(OPA_NAME):$(OPA_PORT)
 mkfile_path := $(abspath $(lastword $(MAKEFILE_LIST)))
 current_dir := $(notdir $(patsubst %/,%,$(dir $(mkfile_path))))
@@ -39,6 +39,7 @@ aws_set_token_green:
 build: build_adminui build_docker
 
 build_docker:
+	cd .. && python3 setversion.py app
 	cd .. && docker build --network host -t $(IMAGE_NAME):$(TAG) -f app/Dockerfile .
 
 build_adminui:
diff --git a/app/_buildinfo.py b/app/_buildinfo.py
deleted file mode 100644
index 9f5f1c21..00000000
--- a/app/_buildinfo.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# IN THE FUTURE THIS WILL BE GENERATED BY CI
-version = "0.24.0"
-milestone = "M21"
-artifactId = None
-name = "policy"
-groupId  = "org.opengroup.osdu"
-buildTime = None
-branch = "master"
-commitId = None
-commitMessage = "M21"
-buildTime = None
\ No newline at end of file
diff --git a/app/_version.py b/app/_version.py
new file mode 100644
index 00000000..6e2c568c
--- /dev/null
+++ b/app/_version.py
@@ -0,0 +1,8 @@
+__version__ = "0.24.a0.dev0"
+__milestone__ = "M21"
+__branch__ = "dev"
+__buildtime__ = 1698466844.468756
+__commitid__ = ""
+__commitmessage__ = ""
+__committimestamp__ = "2023-10-28T00:20:44.468643"
+__commitrefslug__ = ""
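
The build-image job above captures the output of `python3 setversion.py app` and uses it both as an image tag and to regenerate this file, replacing the hand-maintained app/_buildinfo.py. The real setversion.py is added later in this patch; purely as an illustration of the contract the CI job assumes (rewrite app/_version.py, print the version on stdout), a minimal stand-in could look like the following sketch. All names in it are hypothetical.

    # Hypothetical sketch only -- the actual setversion.py ships later in this patch.
    # Assumed contract from .gitlab-ci.yml: regenerate <package>/_version.py, print the version.
    import subprocess
    import sys
    import time
    from datetime import datetime, timezone
    from pathlib import Path

    def git(*args: str) -> str:
        try:
            return subprocess.check_output(["git", *args], text=True).strip()
        except Exception:
            return ""  # fall back to empty metadata outside a git checkout

    def main(package: str) -> None:
        version = Path("VERSION").read_text().strip()
        lines = [
            f'__version__ = "{version}"',
            '__milestone__ = "M21"',
            f'__branch__ = "{git("rev-parse", "--abbrev-ref", "HEAD")}"',
            f'__buildtime__ = {time.time()}',
            f'__commitid__ = "{git("rev-parse", "--short", "HEAD")}"',
            # naive quoting of git output is fine for a sketch, not for production
            f'__commitmessage__ = "{git("log", "-1", "--pretty=%s")}"',
            f'__committimestamp__ = "{datetime.now(timezone.utc).isoformat()}"',
            '__commitrefslug__ = ""',
        ]
        Path(package, "_version.py").write_text("\n".join(lines) + "\n")
        print(version)  # consumed by CI as: version=$(python3 setversion.py app)

    if __name__ == "__main__":
        main(sys.argv[1] if len(sys.argv) > 1 else "app")
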
diff --git a/app/api/backup_api.py b/app/api/backup_api.py
index ea95e75c..763356f1 100644
--- a/app/api/backup_api.py
+++ b/app/api/backup_api.py
@@ -1,24 +1,30 @@
 import fastapi
 from fastapi import Depends, HTTPException, Response
-import requests
 from auth import auth
 import logging
 import conf
 from starlette_context import context
 from fastapi.responses import StreamingResponse
 from bundles import bundle
-router = fastapi.APIRouter()
 import correlation
 from datetime import date
+from pydantic import BaseModel
 
 from starlette.status import (
-    HTTP_405_METHOD_NOT_ALLOWED,
+    HTTP_400_BAD_REQUEST,
     HTTP_424_FAILED_DEPENDENCY
 )
+router = fastapi.APIRouter()
+
+class Detail(BaseModel):
+    detail: str
 
 _logger = logging.getLogger(__name__)
 
-@router.get("/backup")
+@router.get("/backup", responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_424_FAILED_DEPENDENCY: {"model": Detail},
+        })
 def backup(response: Response, auth_data: auth.Auth = Depends(auth.require_authorized_admin)):
     """
     Experimental Backup API.
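
The pattern introduced here recurs across the API modules in this patch: a per-router pydantic `Detail` model plus a `responses={...}` mapping on the route decorator. The mapping only affects the generated OpenAPI document by declaring which error payloads the route can return; the errors themselves are still raised through HTTPException. A minimal, standalone sketch of the same idea (the route and values below are illustrative, not part of this service):

    # Standalone illustration of the responses= / Detail pattern used throughout this patch.
    from fastapi import FastAPI, HTTPException
    from pydantic import BaseModel
    from starlette.status import HTTP_400_BAD_REQUEST, HTTP_424_FAILED_DEPENDENCY

    app = FastAPI()

    class Detail(BaseModel):
        detail: str

    @app.get("/backup-demo", responses={
        HTTP_400_BAD_REQUEST: {"model": Detail},        # documents the 400 payload shape
        HTTP_424_FAILED_DEPENDENCY: {"model": Detail},  # documents the 424 payload shape
        })
    def backup_demo(fail: bool = False):
        if fail:
            # HTTPException already produces {"detail": "..."}; the mapping above
            # merely advertises that shape for 424 in /openapi.json.
            raise HTTPException(status_code=HTTP_424_FAILED_DEPENDENCY,
                                detail="bundle service unavailable")
        return {"detail": "ok"}

With this in place, the 400 and 424 schemas show up alongside the 200 response in the generated /openapi.json, which is what the additions to each router below achieve for the real endpoints.
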
diff --git a/app/api/bootstrap_api.py b/app/api/bootstrap_api.py
index 5ebdaf8f..d04baa62 100644
--- a/app/api/bootstrap_api.py
+++ b/app/api/bootstrap_api.py
@@ -6,19 +6,30 @@ import conf
 import json
 from starlette_context import context
 from bundles import bundle
-router = fastapi.APIRouter()
 import correlation
+from pydantic import BaseModel
 
 from starlette.status import (
     HTTP_201_CREATED,
     HTTP_202_ACCEPTED,
+    HTTP_400_BAD_REQUEST,
     HTTP_405_METHOD_NOT_ALLOWED,
     HTTP_424_FAILED_DEPENDENCY
 )
 
+class Detail(BaseModel):
+    detail: str
+
+router = fastapi.APIRouter()
+
 logger = logging.getLogger(__name__)
 
-@router.post("/bootstrap")
+@router.post("/bootstrap", responses={
+        HTTP_201_CREATED: {"model": Detail},
+        HTTP_202_ACCEPTED: {"model": Detail},
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_405_METHOD_NOT_ALLOWED: {"model": Detail}
+        })
 def bootstrap(response: Response,
               auth_data: auth.Auth = Depends(auth.require_authorized_admin),
               force: bool = False
@@ -46,15 +57,16 @@ def bootstrap(response: Response,
     response.headers["X-Correlation-ID"] = context["correlation_id"]
 
     if bundle.get_bundle(data_partition=auth_data.data_partition_id):
-        logger.debug(f"bootstrap bundle already exists")
+        logger.debug("bootstrap bundle already exists")
         if force:
             if not conf.ALLOW_FORCE_BOOTSTRAP:
                 raise HTTPException(status_code=HTTP_405_METHOD_NOT_ALLOWED, detail=f"'{auth_data.data_partition_id}' already bootstrapped and forced updates not allowed.")
             status = bootstrap_bundle(data_partition=auth_data.data_partition_id)
             if status:
                 response.status_code = HTTP_202_ACCEPTED
-                detail = f"AUDIT bootstrap bundle for partition '{auth_data.data_partition_id}' updated to default, user: {auth_data.user_id}"
-                logger.info(detail)
+                detail = f"bootstrap bundle for partition '{auth_data.data_partition_id}' updated to default"
+                audit = f"AUDIT {detail}, user: {auth_data.user_id}"
+                logger.info(audit)
                 return {"detail": detail}
             else:
                 raise HTTPException(status_code=HTTP_424_FAILED_DEPENDENCY, detail=f"'{auth_data.data_partition_id}' update failed {status}!")
@@ -64,8 +76,9 @@ def bootstrap(response: Response,
         status = bootstrap_bundle(data_partition=auth_data.data_partition_id)
         if status:
             response.status_code = HTTP_201_CREATED
-            detail = f"AUDIT bootstrap bundle for partition '{auth_data.data_partition_id}' created, user: {auth_data.user_id}."
-            logger.info(detail)
+            detail = f"bootstrap bundle for partition '{auth_data.data_partition_id}' created"
+            audit = f"AUDIT {detail}, user: {auth_data.user_id}"
+            logger.info(audit)
             return {"detail": detail}
         else:
             detail = f"bootstrap bundle for partition '{auth_data.data_partition_id}' create failed {status}!"
diff --git a/app/api/compile_api.py b/app/api/compile_api.py
index 2bfff8ca..b5d95dab 100644
--- a/app/api/compile_api.py
+++ b/app/api/compile_api.py
@@ -1,23 +1,26 @@
-from fastapi import APIRouter, Depends, File, UploadFile, HTTPException, Query, Response
+from fastapi import APIRouter, Depends, UploadFile, HTTPException, Query, Response
 from starlette_context import context
-import os
 import logging
-from requests.structures import CaseInsensitiveDict
 from auth import auth
-import json
-import requests
-router = APIRouter()
-import conf
-import _buildinfo as b
 import opa
 import correlation
+from pydantic import BaseModel
 
 from starlette.status import (
+    HTTP_400_BAD_REQUEST,
     HTTP_422_UNPROCESSABLE_ENTITY,
 )
+
+class Detail(BaseModel):
+    detail: str
+
+router = APIRouter()
 logger = logging.getLogger(__name__)
 
-@router.post("/compile")
+@router.post("/compile", responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_422_UNPROCESSABLE_ENTITY: {"model": Detail},
+        })
 def compile_partially_evaluate_a_query(
     response: Response,
     file: UploadFile,
diff --git a/app/api/config_api.py b/app/api/config_api.py
index 54c04ae4..66024e49 100644
--- a/app/api/config_api.py
+++ b/app/api/config_api.py
@@ -1,16 +1,13 @@
-from typing import Annotated, List, Union
-from fastapi import APIRouter, Depends, Response, Request, Header
+from fastapi import APIRouter, Depends, Response, Request
 from starlette_context import context
 import os
 import logging
 from requests.structures import CaseInsensitiveDict
 from auth import auth
-import json
 import requests
-router = APIRouter()
 import conf
-import _buildinfo as b
-import opa
+#import _buildinfo as b
+import _version as b
 import correlation
 import socket
 import k8s
@@ -18,14 +15,26 @@ import time
 import psutil
 #import pdb
 
+from pydantic import BaseModel
+
+from starlette.status import (
+    HTTP_400_BAD_REQUEST,
+)
+router = APIRouter()
+
+class Detail(BaseModel):
+    detail: str
+
 def seconds_elapsed():
     return time.time() - psutil.boot_time()
 
-@router.get('/config')
+@router.get('/config', responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        })
 def show_policy_config_details(
         response: Response,
         request: Request,
-        duplicate: Annotated[Union[List[str], None], Header()] = None, # Python 3.9+
+        #duplicate: Annotated[Union[List[str], None], Header()] = None, # Python 3.9+
         auth_data: auth.Auth = Depends(auth.require_authorized_admin)):
     """
     Return detail configuration details.
@@ -54,7 +63,7 @@ def show_policy_config_details(
         r_status_json = r_status.json()
     except requests.exceptions.HTTPError:
         logger.error(f"Error: endpoint {status_url}: HTTPError")
-    except:
+    except Exception:
         logger.error(f"Error: endpoint {status_url}: Error")
 
     try:
@@ -63,7 +72,7 @@ def show_policy_config_details(
         r_config_headers = r_config.headers
     except requests.exceptions.HTTPError:
         logger.error(f"Error: endpoint {conf.OPA_CONFIG_API}: HTTPError")
-    except:
+    except Exception:
         logger.error(f"Error: endpoint {conf.OPA_CONFIG_API}: Error")
 
     cpu_usage = None
@@ -87,7 +96,7 @@ def show_policy_config_details(
     conf_dict = {}
     for i in dir(conf):
         if i[0].isupper():
-            if not "MOCK_ENTITLEMENT_RESULT" in i and not "TOKEN" in i:
+            if "MOCK_ENTITLEMENT_RESULT" not in i and "TOKEN" not in i:
                 conf_dict[i] = getattr(conf, i)
     
     if conf.MASK_OPA_CONFIG_DETAILS:
@@ -125,21 +134,22 @@ def show_policy_config_details(
         "memory_usage": memory_usage,
         "client_host": request.client.host,
         "request_headers": request.headers,
-        "duplicate": duplicate,
+        #"duplicate": duplicate,
         "auth_data": {
             "data_partition_id": auth_data.data_partition_id,
         #    "groups": auth_data.groups
         },
         "build_data": {
-            "version": b.version,
-            "milestone": b.milestone,
-            "artifactId": b.artifactId,
-            "name": b.name,
-            "groupId": b.groupId,
-            "buildTime": b.buildTime,
-            "branch": b.branch,
-            "commitId": b.commitId,
-            "commitMessage": b.commitMessage
+            "version": b.__version__,
+            "milestone": b.__milestone__,
+            "artifactId": None,
+            "name": "policy",
+            "groupId": "org.opengroup.osdu",
+            "buildTime": b.__committimestamp__,
+            "branch": b.__branch__,
+            "commitId": b.__commitid__,
+            "commitMessage": b.__commitmessage__,
+            "commitrefslug": b.__commitrefslug__
         },
         "opa_status": r_status_json,
         "opa_config": r_config_json
diff --git a/app/api/diag_api.py b/app/api/diag_api.py
index 3b5d753f..4f810430 100644
--- a/app/api/diag_api.py
+++ b/app/api/diag_api.py
@@ -1,17 +1,11 @@
-from fastapi import APIRouter, Depends, File, UploadFile, HTTPException, Query
+from fastapi import APIRouter, Depends, HTTPException
 from starlette_context import context
-import os
 import logging
-from requests.structures import CaseInsensitiveDict
 from auth import auth
-import json
-import requests
-router = APIRouter()
 import conf
-import _buildinfo as b
 import opa
 import correlation
-import inspect
+router = APIRouter()
 
 logger = logging.getLogger(__name__)
 if conf.ENABLE_DEV_DIAGNOSTICS:
diff --git a/app/api/health_api.py b/app/api/health_api.py
index 1cd75a0e..355d5211 100644
--- a/app/api/health_api.py
+++ b/app/api/health_api.py
@@ -1,16 +1,24 @@
 from fastapi import APIRouter, HTTPException
 import requests
 import logging
+from pydantic import BaseModel
 
 from starlette.status import (
+    HTTP_400_BAD_REQUEST,
     HTTP_501_NOT_IMPLEMENTED,
     HTTP_503_SERVICE_UNAVAILABLE
 )
 
 import conf
 
+class Detail(BaseModel):
+    detail: str
+
 router = APIRouter()
-@router.get("/health")
+@router.get("/health", responses={
+    HTTP_400_BAD_REQUEST: {"model": Detail},
+    HTTP_503_SERVICE_UNAVAILABLE: {"model": Detail}
+    })
 async def health():
     """
     ## Health check endpoint, which does not depend on OPA.
@@ -22,7 +30,11 @@ async def health():
     """
     return {'message': 'Healthy'}
 
-@router.get("/ready")
+@router.get("/ready", responses={
+    HTTP_400_BAD_REQUEST: {"model": Detail},
+    HTTP_501_NOT_IMPLEMENTED: {"model": Detail},
+    HTTP_503_SERVICE_UNAVAILABLE: {"model": Detail},
+    })
 def ready():
     """
     ## Health check endpoint, which depends on OPA being available and healthy.
@@ -39,10 +51,10 @@ def ready():
     try:
         url = conf.OPA_HEALTH_API
     except NameError:
-       logging.error(f"conf.OPA_URL undefined")
+       logging.error("conf.OPA_URL undefined")
        raise HTTPException(status_code=HTTP_501_NOT_IMPLEMENTED, detail="NotHealthy - OPA configuration issue")
     except AttributeError:
-       logging.error(f"conf.OPA_URL undefined")
+       logging.error("conf.OPA_URL undefined")
        raise HTTPException(status_code=HTTP_501_NOT_IMPLEMENTED, detail="NotHealthy - OPA configuration issue")
 
     try:
diff --git a/app/api/info_api.py b/app/api/info_api.py
index 03c5cf92..e21bd566 100644
--- a/app/api/info_api.py
+++ b/app/api/info_api.py
@@ -4,14 +4,28 @@ import requests
 import logging
 import conf
 from models.info import InfoOut, ServiceDetail, Services
-import _buildinfo as b
+import _version as b
 from starlette_context import context
-router = fastapi.APIRouter()
+from pydantic import BaseModel
 import correlation
 
+from starlette.status import (
+    HTTP_400_BAD_REQUEST,
+    HTTP_503_SERVICE_UNAVAILABLE
+)
+
+router = fastapi.APIRouter()
+
+class Detail(BaseModel):
+    detail: str
+
 logger = logging.getLogger(__name__)
 
-@router.get("/info", response_model=InfoOut)
+@router.get("/info", response_model=InfoOut,
+            responses={
+                HTTP_400_BAD_REQUEST: {"model": Detail},
+                HTTP_503_SERVICE_UNAVAILABLE: {"model": Detail}
+                })
 def return_version_info(response: Response):
     """
     Return Service version information.
@@ -43,14 +57,14 @@ def return_version_info(response: Response):
     svcdetail = ServiceDetail(version=opa_version)
     opa = Services(opa=svcdetail)
     info = InfoOut(
-        version = b.version,
-        artifactId = b.artifactId,
-        name = b.name,
-        groupId = b.groupId,
-        buildTime = b.buildTime,
-        branch = b.branch,
-        commitId = b.commitId,
-        commitMessage = b.commitMessage,
+        version = b.__version__,
+        artifactId = None,
+        name = "policy",
+        groupId = "org.opengroup.osdu",
+        buildTime = b.__committimestamp__,
+        branch = b.__branch__,
+        commitId = b.__commitid__,
+        commitMessage = b.__commitmessage__,
         connectedOuterServices=opa
     )
     return info
\ No newline at end of file
diff --git a/app/api/policy_eval_api.py b/app/api/policy_eval_api.py
index 3b06f6a4..d428d83f 100644
--- a/app/api/policy_eval_api.py
+++ b/app/api/policy_eval_api.py
@@ -14,7 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from email import header
 import fastapi
 from fastapi import Depends, HTTPException, UploadFile, Query, Response
 import sys
@@ -22,28 +21,37 @@ import os
 import logging
 import json
 from starlette_context import context
+from pydantic import BaseModel
 import conf
 import correlation
 
 from starlette.status import (
-    HTTP_200_OK, 
-    HTTP_403_FORBIDDEN,
+    HTTP_400_BAD_REQUEST,
     HTTP_406_NOT_ACCEPTABLE,
     HTTP_422_UNPROCESSABLE_ENTITY,
+    HTTP_500_INTERNAL_SERVER_ERROR,
+    HTTP_503_SERVICE_UNAVAILABLE
 )
 
-from opa_response import OpaResponse
+#from opa_response import OpaResponse
 sys.path.append(os.path.abspath('..'))
 import opa
 from auth import auth
 
+class Detail(BaseModel):
+    detail: str
+
 logger = logging.getLogger(__name__)
 router = fastapi.APIRouter()
 
 # Header code is compatible with Python 3.6 - 3.9, it will not work with Python 3.10
 # See https://fastapi.tiangolo.com/tutorial/header-params/
 
-@router.post("/evaluations/query")
+@router.post("/evaluations/query", responses={
+    HTTP_400_BAD_REQUEST: {"model": Detail},
+    HTTP_500_INTERNAL_SERVER_ERROR: {"model": Detail},
+    HTTP_503_SERVICE_UNAVAILABLE: {"model": Detail},
+    })
 def evaluate_policy(
         response: Response,
         policy_id: str,
@@ -96,7 +104,7 @@ def evaluate_policy(
     # Try to make it easier for developers
     if 'osdu/instance/' in policy_id:
         logger.debug(f"Instance policy eval {policy_id}")
-    elif not 'osdu/partition/' in policy_id:
+    elif 'osdu/partition/' not in policy_id:
         policy_id = 'osdu/partition/' + auth_data.data_partition_id + '/' + policy_id
     
     #contents = await file.read()
@@ -104,8 +112,8 @@ def evaluate_policy(
     try:
         posted_data = json.loads(contents.decode('utf-8'))
 
-        if not 'input' in posted_data:
-            raise HTTPException(status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail=f"Input not found in file data")
+        if 'input' not in posted_data:
+            raise HTTPException(status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail="Input not found in file data")
     except json.decoder.JSONDecodeError as err:
         raise HTTPException(status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail=f"JSON error {err}")
     except UnicodeDecodeError as err:
@@ -120,10 +128,10 @@ def evaluate_policy(
         posted_data["input"]["datapartitionid"] = auth_data.data_partition_id
 
     # make sure we have them
-    if not 'token' in posted_data["input"]:
-        raise HTTPException(status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail=f"token not found in file data")
-    if not 'datapartitionid' in posted_data["input"]:
-        raise HTTPException(status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail=f"datapartitionid not found in file data")
+    if 'token' not in posted_data["input"]:
+        raise HTTPException(status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail="token not found in file data")
+    if 'datapartitionid' not in posted_data["input"]:
+        raise HTTPException(status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail="datapartitionid not found in file data")
 
     result = opa.data(query=json.dumps(posted_data), path=policy_id)
     logging.error(result)
@@ -133,7 +141,7 @@ def evaluate_policy(
         logging.info(result.message)
         raise HTTPException(status_code=result.status_code, detail=f"Error when talking to OPA: {result.message}")
 
-    if not "result" in result.json:
+    if "result" not in result.json:
         if conf.ENABLE_DEV_DIAGNOSTICS_DUMPS:
             # save lots of debugging info
             file_path = "eval_dump_bad.json"
diff --git a/app/api/policy_read_api.py b/app/api/policy_read_api.py
index 97d53a02..690f7594 100644
--- a/app/api/policy_read_api.py
+++ b/app/api/policy_read_api.py
@@ -21,6 +21,8 @@ import sys
 import os
 import logging
 from starlette_context import context
+import hashlib
+from pydantic import BaseModel
 
 from starlette.status import (
     HTTP_400_BAD_REQUEST, 
@@ -32,10 +34,11 @@ from opa_response import OpaResponse
 sys.path.append(os.path.abspath('..'))
 import conf
 import opa
-from datetime import date
 from auth import auth
 import correlation
-import hashlib
+
+class Detail(BaseModel):
+    detail: str
 
 logger = logging.getLogger(__name__)
 router = fastapi.APIRouter()
@@ -43,7 +46,11 @@ router = fastapi.APIRouter()
 # Header code is compatible with Python 3.6 - 3.9, it will not work with Python 3.10
 # See https://fastapi.tiangolo.com/tutorial/header-params/
 
-@router.get("/policies")
+@router.get("/policies",
+    responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_500_INTERNAL_SERVER_ERROR: {"model": Detail},
+        })
 def fetch_all_policies_that_match_partition(response: Response, data_partition_id:  Union[str, None] = Header(default=None), auth_data: auth.Auth = Depends(auth.require_authorized_user)):
     """
     Return all policies from OPA directly that match partition_bundle_root data-partition-id in header (if bundles are enabled).
@@ -58,11 +65,11 @@ def fetch_all_policies_that_match_partition(response: Response, data_partition_i
     cloud_provider = os.environ.get('CLOUD_PROVIDER')
     if cloud_provider is None:
         opa_response = OpaResponse()
-        logger.critical(f"Error: CLOUD_PROVIDER ENV VAR not set / Mocking results for /policies")
+        logger.critical("Error: CLOUD_PROVIDER ENV VAR not set / Mocking results for /policies")
         opa_response.json = {"result": {}}
     elif cloud_provider == conf.MOCK:
         opa_response = OpaResponse()
-        logger.critical(f"Warning: CLOUD_PROVIDER ENV VAR set to Mock results for /policies")
+        logger.critical("Warning: CLOUD_PROVIDER ENV VAR set to Mock results for /policies")
         opa_response.json = {"result": {}}
     else:
         opa_response = opa.fetch_opa_policies_direct()
@@ -80,7 +87,12 @@ def fetch_all_policies_that_match_partition(response: Response, data_partition_i
                 x['id'].startswith('/' + conf.INSTANCE_BUNDLE_ROOT) ]}
     raise HTTPException(status_code=HTTP_500_INTERNAL_SERVER_ERROR, detail="Unexpected result from OPA")
 
-@router.get("/policies/{policy_id}")
+@router.get("/policies/{policy_id}",
+    responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_404_NOT_FOUND: {"model": Detail},
+        HTTP_500_INTERNAL_SERVER_ERROR: {"model": Detail},
+        })
 def fetch_a_policy(response: Response, policy_id: str,  auth_data: auth.Auth = Depends(auth.require_authorized_user)):
     """
     Return a policy directly from OPA with no filtering
@@ -108,11 +120,15 @@ def fetch_a_policy(response: Response, policy_id: str,  auth_data: auth.Auth = D
     else:
         opa_response = opa.fetch_opa_policy_direct(policy_id)
 
-    if opa_response.ok == True:
+    if opa_response.ok is True:
         return opa_response.json
     raise HTTPException(status_code=opa_response.status_code, detail=f"Unexpected result from OPA {opa_response.message}")
 
-@router.get("/policies/osdu/instance/{policy_id}")
+@router.get("/policies/osdu/instance/{policy_id}",
+    responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_500_INTERNAL_SERVER_ERROR: {"model": Detail},
+        })
 def fetch_instance_policy(response: Response, policy_id: str,  auth_data: auth.Auth = Depends(auth.require_authorized_user)):
     """
     Return an instance policy from OPA directly.
@@ -141,11 +157,15 @@ def fetch_instance_policy(response: Response, policy_id: str,  auth_data: auth.A
         opa_response = opa.fetch_opa_policy_direct(conf.INSTANCE_BUNDLE_ROOT+"/{0}".format(policy_id))
 
     logger.debug(f"OPA return {opa_response.json}")
-    if opa_response.ok == True:
+    if opa_response.ok is True:
         return opa_response.json
     raise HTTPException(status_code=opa_response.status_code, detail=f"Unexpected result from OPA {opa_response.message}")
 
-@router.get("/policies/osdu/partition/{data_partition}/{policy_id}")
+@router.get("/policies/osdu/partition/{data_partition}/{policy_id}",
+    responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_500_INTERNAL_SERVER_ERROR: {"model": Detail},
+        })
 def fetch_partition_policy_directly_from_opa(
     response: Response,
     policy_id: str,
@@ -191,7 +211,7 @@ def fetch_partition_policy_directly_from_opa(
         # include extra debugging information in response headers
         response.headers["X-Debug-Result"] = f"{opa_response.json}"
 
-    if opa_response.ok == True:
+    if opa_response.ok is True:
         if "result" in opa_response.json:
             if "raw" in opa_response.json["result"]:
                 data = opa_response.json["result"]["raw"]
diff --git a/app/api/policy_update_api.py b/app/api/policy_update_api.py
index 63a196ce..bbe1159e 100644
--- a/app/api/policy_update_api.py
+++ b/app/api/policy_update_api.py
@@ -22,20 +22,24 @@ import logging
 import hashlib
 from starlette.requests import Request
 from starlette_context import context
-from requests.structures import CaseInsensitiveDict
+#from requests.structures import CaseInsensitiveDict
 
 from starlette.status import (
     HTTP_200_OK, 
+    HTTP_202_ACCEPTED,
     HTTP_400_BAD_REQUEST, 
+    HTTP_404_NOT_FOUND,
     HTTP_422_UNPROCESSABLE_ENTITY,
-    HTTP_501_NOT_IMPLEMENTED
+    HTTP_500_INTERNAL_SERVER_ERROR,
+    HTTP_501_NOT_IMPLEMENTED,
+    HTTP_503_SERVICE_UNAVAILABLE
 )
 
+from pydantic import BaseModel
 from opa_response import OpaResponse
 sys.path.append(os.path.abspath('..'))
 import conf
 import opa
-from datetime import date
 from auth import auth
 from bundles import bundle
 import correlation
@@ -44,10 +48,21 @@ from .validate_api import verify_policy_with_opa
 logger = logging.getLogger(__name__)
 router = fastapi.APIRouter()
 
+class Detail(BaseModel):
+    detail: str
+
 # Header code is compatible with Python 3.6 - 3.9, it will not work with Python 3.10
 # See https://fastapi.tiangolo.com/tutorial/header-params/
 
-@router.delete("/policies/osdu/partition/{data_partition}/{policy_id}")
+@router.delete("/policies/osdu/partition/{data_partition}/{policy_id}",
+       responses={
+            HTTP_202_ACCEPTED: {"model": Detail},
+            HTTP_400_BAD_REQUEST: {"model": Detail},
+            HTTP_404_NOT_FOUND: {"model": Detail},
+            HTTP_501_NOT_IMPLEMENTED: {"model": Detail},
+            HTTP_503_SERVICE_UNAVAILABLE: {"model": Detail},
+            }
+        )
 def delete_partition_policy(
         response: Response,
         request: Request,
@@ -156,7 +171,13 @@ def delete_partition_policy(
             logger.error(f"Error when talking to bundle service {result.message}")
             raise HTTPException(status_code=result.status_code, detail=f"Error when talking to bundle service {result.message}")
 
-@router.put("/policies/osdu/partition/{data_partition}/{policy_id}")
+@router.put("/policies/osdu/partition/{data_partition}/{policy_id}", responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_404_NOT_FOUND: {"model": Detail},
+        HTTP_500_INTERNAL_SERVER_ERROR: {"model": Detail},
+        HTTP_501_NOT_IMPLEMENTED: {"model": Detail},
+        HTTP_503_SERVICE_UNAVAILABLE: {"model": Detail},
+    })
 def create_or_update_partition_policy(
         response: Response,
         request: Request,
@@ -246,7 +267,7 @@ def create_or_update_partition_policy(
     
     validated = False
     # Verify if ENABLE_VERIFY_POLICY is set and not Mocking OPA
-    if conf.ENABLE_VERIFY_POLICY and not cloud_provider in (None, conf.MOCK):
+    if conf.ENABLE_VERIFY_POLICY and cloud_provider not in (None, conf.MOCK):
         validated = verify_policy_with_opa(policy_id=policy_id, auth_data=auth_data, correlation_id=context["correlation_id"], data=contents.decode('utf-8'))
 
     if cloud_provider is None:
diff --git a/app/api/tenant_api.py b/app/api/tenant_api.py
index 7248317b..f22676db 100644
--- a/app/api/tenant_api.py
+++ b/app/api/tenant_api.py
@@ -3,27 +3,33 @@ from fastapi import Depends, HTTPException, Response
 from auth import auth
 import logging
 import conf
-import json
 from starlette_context import context
 import k8s
-router = fastapi.APIRouter()
 import correlation
 import ruamel.yaml
 from kubernetes.client.models import V1ConfigMap, V1ObjectMeta
+from pydantic import BaseModel
 
 from starlette.status import (
-    HTTP_201_CREATED,
     HTTP_202_ACCEPTED,
+    HTTP_400_BAD_REQUEST,
     HTTP_405_METHOD_NOT_ALLOWED,
     HTTP_404_NOT_FOUND,
-    HTTP_424_FAILED_DEPENDENCY,
     HTTP_501_NOT_IMPLEMENTED,
     HTTP_502_BAD_GATEWAY
 )
+router = fastapi.APIRouter()
+
+class Detail(BaseModel):
+    detail: str
 
-logger = logging.getLogger(__name__)
+#logger = logging.getLogger(__name__)
 
-@router.get("/tenant")
+@router.get("/tenant",        responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_405_METHOD_NOT_ALLOWED: {"model": Detail},
+        HTTP_501_NOT_IMPLEMENTED: {"model": Detail},
+        })
 def get_tenant(response: Response,
             all_data: bool = False,
             auth_data: auth.Auth = Depends(auth.require_authorized_admin)
@@ -49,6 +55,7 @@ def get_tenant(response: Response,
         else:
             raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f"{path} not found in OPA configmap.")
     else:
+        logger.info("GET /tenant HTTP_405_METHOD_NOT_ALLOWED")
         raise HTTPException(status_code=HTTP_405_METHOD_NOT_ALLOWED, detail=f"Unable to read configmap '{conf.OPA_CONFIG_MAP}' details")
 
 def _mlput(self, path, value, index=None, sep=':'):
@@ -59,7 +66,12 @@ def _mlput(self, path, value, index=None, sep=':'):
     else:
         parent.insert(index, spath[-1], value)
 
-@router.put("/tenant")
+@router.put("/tenant", responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_405_METHOD_NOT_ALLOWED: {"model": Detail},
+        HTTP_501_NOT_IMPLEMENTED: {"model": Detail},
+        HTTP_502_BAD_GATEWAY: {"model": Detail},
+        })
 def update_tenant(response: Response,
               service: str,
               polling_min_delay_seconds: int = 6,
@@ -69,6 +81,10 @@ def update_tenant(response: Response,
     """
     Experimental tenant API for updating OPA bundle config for a data partition.
     Adding new partitions is not supported in M20.
+
+    Service must be one of the supported types (s3, gcs, gcp, blob, ...); see documentation for the full list.
+    Minimum polling delay: 1 second.
+    Maximum polling delay: 3600 seconds (1 hour).
     """
     logging.setLogRecordFactory(correlation.set_correlation_id(context["correlation_id"]))
     logger = logging.getLogger(__name__)
@@ -76,6 +92,12 @@ def update_tenant(response: Response,
 
     logger.info(f"service: {service}, polling_min_delay_seconds: {polling_min_delay_seconds}, polling_max_delay_seconds: {polling_max_delay_seconds}")
 
+    if service not in conf.OPA_SUPPORTED_SERVICES:
+        raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Unsupported service provided")
+
+    if polling_min_delay_seconds >= polling_max_delay_seconds or polling_min_delay_seconds < 1 or polling_max_delay_seconds > conf.OPA_MAX_POLLING:
+        raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Unsupported polling_min_delay_seconds polling_max_delay_seconds")
+
     config_map = k8s.get_opa_config(namespace=conf.NAMESPACE, name=conf.OPA_CONFIG_MAP, config_only=False)
     if config_map:
         path = f"osdu/partition/{auth_data.data_partition_id}"
@@ -126,9 +148,14 @@ def update_tenant(response: Response,
                 raise HTTPException(status_code=HTTP_501_NOT_IMPLEMENTED, detail=f"{path} not found in {data}")
 
     else:
-        raise HTTPException(status_code=HTTP_405_METHOD_NOT_ALLOWED, detail=f"Unable to read configmap details for {path}")
+        logger.info("PUT /tenant HTTP_405_METHOD_NOT_ALLOWED")
+        raise HTTPException(status_code=HTTP_405_METHOD_NOT_ALLOWED, detail="Unable to read configmap details")
 
-@router.delete("/tenant")
+@router.delete("/tenant", responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_405_METHOD_NOT_ALLOWED: {"model": Detail},
+        HTTP_501_NOT_IMPLEMENTED: {"model": Detail},
+        })
 def delete_tenant(response: Response,
               auth_data: auth.Auth = Depends(auth.require_authorized_admin),
               ):
@@ -148,4 +175,5 @@ def delete_tenant(response: Response,
         del data["bundles"][path]
         raise HTTPException(status_code=HTTP_501_NOT_IMPLEMENTED, detail=f"{path} {data}")
     else:
-        raise HTTPException(status_code=HTTP_405_METHOD_NOT_ALLOWED, detail=f"Unable to read configmap details for {path}")
\ No newline at end of file
+        logger.info("DELETE /tenant HTTP_405_METHOD_NOT_ALLOWED")
+        raise HTTPException(status_code=HTTP_405_METHOD_NOT_ALLOWED, detail="Unable to read configmap details")
\ No newline at end of file
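
The new PUT /tenant validation accepts a bundle service name only if it is listed in conf.OPA_SUPPORTED_SERVICES and requires 1 <= polling_min_delay_seconds < polling_max_delay_seconds <= conf.OPA_MAX_POLLING. A small self-contained sketch of that boundary logic, with the conf values inlined purely for illustration:

    # Boundary check equivalent to the new PUT /tenant validation (conf values inlined).
    OPA_SUPPORTED_SERVICES = ['s3', 'gcs', 'gcp', 'blob', 'nginx']  # OCI registry not supported
    OPA_MAX_POLLING = 3600  # seconds (1 hour)

    def polling_ok(min_s: int, max_s: int) -> bool:
        return 1 <= min_s < max_s <= OPA_MAX_POLLING

    assert polling_ok(6, 120)        # a valid window
    assert not polling_ok(0, 120)    # min below 1 second
    assert not polling_ok(120, 120)  # min must be strictly less than max
    assert not polling_ok(6, 7200)   # max above OPA_MAX_POLLING
    assert 's3' in OPA_SUPPORTED_SERVICES and 'oci' not in OPA_SUPPORTED_SERVICES
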
diff --git a/app/api/translate_api.py b/app/api/translate_api.py
index 962b2ba7..592ef0af 100644
--- a/app/api/translate_api.py
+++ b/app/api/translate_api.py
@@ -1,15 +1,14 @@
-from email import policy
+#from email import policy
 import fastapi
 from fastapi import HTTPException, Depends, Response
-import requests
 import json
 import logging
 import re
-router = fastapi.APIRouter()
 import opa
 import conf
 from starlette_context import context
 import correlation
+from pydantic import BaseModel
 
 from starlette.status import (
     HTTP_500_INTERNAL_SERVER_ERROR,
@@ -24,9 +23,21 @@ from auth import auth
 import translate.convert
 from translate.translate_exceptions import PolicyDenyError, PolicyNotFoundError
 
+class Detail(BaseModel):
+    detail: str
+
+router = fastapi.APIRouter()
 logger = logging.getLogger(__name__)
 
-@router.post("/translate", tags=["translate"])
+@router.post("/translate",
+    tags=["translate"],
+    responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_404_NOT_FOUND: {"model": Detail},
+        HTTP_406_NOT_ACCEPTABLE: {"model": Detail},
+        HTTP_422_UNPROCESSABLE_ENTITY: {"model": Detail},
+        HTTP_500_INTERNAL_SERVER_ERROR: {"model": Detail},
+        })
 def translate_policy_api(response: Response, trans:  TranslateItem,  auth_data: auth.Auth = Depends(auth.require_authorized_user)):
     """
     ## Translate policy
@@ -42,8 +53,8 @@ def translate_policy_api(response: Response, trans:  TranslateItem,  auth_data:
     posted_data = json.loads(trans.tojson()) # convert to dict for easy manipulation
     logger.debug(f"data: {posted_data}")
 
-    if not 'input' in posted_data:
-        raise HTTPException(status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail=f"Input not found in data")
+    if 'input' not in posted_data:
+        raise HTTPException(status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail="Input not found in data")
 
     posted_data["input"]["token"] = auth_data.access_token
     posted_data["input"]["xuserid"] = auth_data.x_user_id
@@ -162,10 +173,10 @@ def translate_preprocess(posted_data, data_partition):
         elif original_query.startswith('data.osdu.instance.'):
             policy_id = original_query.split('.')[3]
         else:
-            raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=f"Translate API Error: expected query in format data.osdu.partition...")
+            raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Translate API Error: expected query in format data.osdu.partition...")
         path = conf.PARTITION_BUNDLE_ROOT.format(data_partition) + "/{0}".format(policy_id)
     else:
-        raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=f"Translate API Error: expected query in request")
+        raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Translate API Error: expected query in request")
     logger.debug(f"PolicyID: {policy_id} DATA_PARTITION: {data_partition} PATH: {path}")
 
     opa_response = opa.data(query=json.dumps(posted_data), path=path, name="data api for preprocess translate")
diff --git a/app/api/validate_api.py b/app/api/validate_api.py
index fa9f6413..183e21b9 100644
--- a/app/api/validate_api.py
+++ b/app/api/validate_api.py
@@ -1,26 +1,30 @@
-from fastapi import APIRouter, Depends, File, UploadFile, HTTPException, Query, Response, Request
+from fastapi import APIRouter, Depends, UploadFile, HTTPException, Response, Request
 from starlette_context import context
-import os
 import logging
 from requests.structures import CaseInsensitiveDict
 from auth import auth
-import json
-import requests
-router = APIRouter()
-import conf
-import _buildinfo as b
 import opa
 import correlation
 from bundles import bundle
 from string import Template
 import hashlib
 
+from pydantic import BaseModel
+
 from starlette.status import (
+    HTTP_400_BAD_REQUEST,
     HTTP_422_UNPROCESSABLE_ENTITY,
 )
+router = APIRouter()
 logger = logging.getLogger(__name__)
 
-@router.put("/validate/{policy_id}")
+class Detail(BaseModel):
+    detail: str
+
+@router.put("/validate/{policy_id}", responses={
+        HTTP_400_BAD_REQUEST: {"model": Detail},
+        HTTP_422_UNPROCESSABLE_ENTITY: {"model": Detail},
+        })
 def validate_policy(
     response: Response,
     request: Request,
@@ -68,6 +72,10 @@ def validate_policy(
         sha1=hashlib.sha1(data.encode()).hexdigest()
         response.headers["X-SHA-1"] = sha1
         return f"{policy_id} valid as osdu.partition[\"{auth_data.data_partition_id}\"].{short_name} {sha1}"
+    else:
+        # Not expected to reach this branch
+        logger.error(f"Invalid {policy_id}, unexpected return from OPA")
+        raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Invalid request")
 
 
 def verify_policy_with_opa(policy_id: str, auth_data: auth.Auth, data: str, correlation_id: str):
@@ -84,7 +92,7 @@ def verify_policy_with_opa(policy_id: str, auth_data: auth.Auth, data: str, corr
     headers["data-partition-id"] = data_partition
     headers["Content-Type"] = "application/octet-stream"
 
-    data = data.replace(f"package osdu.partition[", f"package tmp.osdu.partition[")
+    data = data.replace("package osdu.partition[", "package tmp.osdu.partition[")
 
     temp_id = f'tmp/{data_partition}/{policy_id}'
     logger.info(f"TMP-AUDIT-OPA put id: {temp_id} user: {auth_data.user_id}")
diff --git a/app/auth/auth.py b/app/auth/auth.py
index e3a490c3..f1697776 100644
--- a/app/auth/auth.py
+++ b/app/auth/auth.py
@@ -1,25 +1,18 @@
-from os import access
 from fastapi import Depends, Request, Header, HTTPException
 from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
-from requests import head
 from starlette.authentication import AuthCredentials
 from requests.structures import CaseInsensitiveDict
-import requests
 import logging
-import json
 import conf
 from starlette_context import context
 from entitlement import EntitlementService
+import re
 import correlation
 
 from starlette.status import (
-    HTTP_200_OK, 
-    HTTP_201_CREATED, 
     HTTP_400_BAD_REQUEST, 
     HTTP_401_UNAUTHORIZED, 
     HTTP_403_FORBIDDEN,
-    HTTP_404_NOT_FOUND, 
-    HTTP_422_UNPROCESSABLE_ENTITY,
 )
 
 _logger = logging.getLogger(__name__)
@@ -41,7 +34,7 @@ async def require_data_partition_id(
     if data_partition_id is None:
         raise HTTPException(
             status_code=HTTP_400_BAD_REQUEST,
-            detail=f"Bad Request. Missing data_partition_id in header",
+            detail="Bad Request. Missing data_partition_id in header",
             headers={"WWW-Authenticate": "Bearer"},
         )
     return data_partition_id
@@ -72,6 +65,11 @@ async def require_authorized_user(
 
     token = credentials.credentials.strip()
 
+    # Reject malformed data-partition-id values before using them
+    data_partition_id = str(data_partition_id)
+    if not re.match(pattern=conf.ALLOW_DATA_PARTITION_ID_PATTERN, string=data_partition_id):
+        raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Unsupported data-partition-id")
+
     auth_data = Auth(access_token=token, data_partition_id=data_partition_id, x_user_id=x_user_id)
     headers = CaseInsensitiveDict()
     headers["Authorization"] = "Bearer " + token
@@ -113,7 +111,7 @@ async def require_authorized_admin(request: Request, auth_data: Auth = Depends(r
     if conf.ADMIN_PERMISSION not in auth_data.groups:
         raise HTTPException(
             status_code=HTTP_403_FORBIDDEN,
-            detail=f"User does not have the permission to call the Admin API",
+            detail="User does not have the permission to call the Admin API",
             headers={"WWW-Authenticate": "Bearer"},
         )
     return auth_data
diff --git a/app/bundles/bundle.py b/app/bundles/bundle.py
index 8294ad72..15af1752 100644
--- a/app/bundles/bundle.py
+++ b/app/bundles/bundle.py
@@ -1,31 +1,19 @@
 import logging
-from os import environ
 import tarfile
 import io
 import time
-from fastapi import HTTPException
 import hashlib
 
 from starlette.status import (
     HTTP_200_OK, 
-    HTTP_201_CREATED, 
     HTTP_202_ACCEPTED,
-    HTTP_400_BAD_REQUEST, 
-    HTTP_401_UNAUTHORIZED, 
-    HTTP_403_FORBIDDEN,
     HTTP_404_NOT_FOUND, 
-    HTTP_422_UNPROCESSABLE_ENTITY,
-    HTTP_500_INTERNAL_SERVER_ERROR,
-    HTTP_502_BAD_GATEWAY,
     HTTP_503_SERVICE_UNAVAILABLE
 )
 
-import conf
 from bundles import storage
-from osdu_api.providers.types import FileLikeObject
 
 from opa_response import OpaResponse
-import correlation
 
 logger = logging.getLogger(__name__)
 #logger = correlation.DefaultExtrasAdapter(_logger, {"correlation_id": "None"})
@@ -93,7 +81,7 @@ def get_bundle(data_partition):
         #    logger.critical(f"Error Downloading {bundle} from CSP Storage")
     except Exception as err:
         logger.critical(f"Error Downloading {bundle} from CSP Storage: {err}")
-        status_code = HTTP_503_SERVICE_UNAVAILABLE
+        #status_code = HTTP_503_SERVICE_UNAVAILABLE
         message = f"Unable to get bundle from bundle server: download bundle {bundle} error: {err}"
         #template = "An exception of type {0} occurred. Arguments:\n{1!r}"
         #message = template.format(type(err).__name__, err.args)
@@ -125,7 +113,7 @@ def put_default_bundle(data_partition, policy_list):
                     logger.critical(f"Error uploading {bundle} to CSP Storage")
     except Exception as err:
         logger.critical(f"Error uploading {bundle} from CSP Storage: {err}")
-        status_code = HTTP_503_SERVICE_UNAVAILABLE
+        #status_code = HTTP_503_SERVICE_UNAVAILABLE
         message = f"Unable to upload bundle {bundle} to bundle server error: {err}"
         #template = "An exception of type {0} occurred. Arguments:\n{1!r}"
         #message = template.format(type(err).__name__, err.args)
diff --git a/app/bundles/providers/aws/storage.py b/app/bundles/providers/aws/storage.py
index 6ce15613..9ab115f2 100644
--- a/app/bundles/providers/aws/storage.py
+++ b/app/bundles/providers/aws/storage.py
@@ -20,11 +20,9 @@ from osdu_api.providers.types import FileLikeObject
 import boto3
 
 from bundles.storage import BundleStorageClient
-import correlation
 import conf
 
 logger = logging.getLogger(__name__)
-#logger = correlation.DefaultExtrasAdapter(_logger, {"correlation_id": "None"})
 logger.setLevel(conf.LOG_LEVEL)
 
 class FileNotFound(Exception):
@@ -57,7 +55,7 @@ class AWSBundleStorageClient(BundleStorageClient):
                 raise FileNotFound(filename)
             
         except Exception as e:
-            logger.error(f"Failed to download file from {uri} {e}")
+            logger.error(f"Failed to download file from {uri}: {e}")
         return None, None
 
     def upload_file(self, name: str, file: FileLikeObject) -> str:
@@ -66,7 +64,7 @@ class AWSBundleStorageClient(BundleStorageClient):
             self.client.upload_file(uri, file, self.content_type)
             return uri
         except Exception as e:
-            logger.error(f"Failed to upload file to {uri} {e}")
+            logger.error(f"Failed to upload file to {uri}: {e}")
 
     # TODO: Fix bug in AWS python SDK and replace
     def _does_file_exist(self, uri: str) -> bool:
@@ -86,7 +84,7 @@ class AWSBundleStorageClient(BundleStorageClient):
                 # fast operation no matter the size of the data object
                 self.s3_client.head_object(Bucket=bucket_name, Key=object_name)
             except Exception as e:
-                logger.info(f"{uri} does not exist")
+                logger.info(f"{uri} does not exist {e}")
                 return False
             return True
 
diff --git a/app/bundles/providers/azure/storage.py b/app/bundles/providers/azure/storage.py
index f2c2cd0c..9283cfec 100644
--- a/app/bundles/providers/azure/storage.py
+++ b/app/bundles/providers/azure/storage.py
@@ -4,7 +4,8 @@ import os
 from typing import Tuple
 from osdu_api.providers.types import FileLikeObject
 from bundles.storage import BundleStorageClient
-from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient
+#from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient
+from azure.storage.blob import BlobServiceClient
 from azure.identity import DefaultAzureCredential
 from azure.keyvault.secrets import SecretClient
 
@@ -30,7 +31,7 @@ class AzureBundleStorageClient(BundleStorageClient):
             downloader.readinto(file)
             return file, uri
         except Exception as e:
-            logger.error(f"Failed to download file from {uri}")
+            logger.error(f"Failed to download file from {uri}: {e}")
 
     def upload_file(self, name: str, file: FileLikeObject) -> str:
         try:
@@ -40,7 +41,7 @@ class AzureBundleStorageClient(BundleStorageClient):
             blob_client.upload_blob(file.read(), overwrite=True, blob_type="BlockBlob")
             return uri
         except Exception as e:
-            logger.error(f"Failed to upload file to {uri}")
+            logger.error(f"Failed to upload file to {uri}: {e}")
 
 
 class Helper(object):
diff --git a/app/bundles/providers/ibm/storage.py b/app/bundles/providers/ibm/storage.py
index b1d2b44a..9ed473b6 100644
--- a/app/bundles/providers/ibm/storage.py
+++ b/app/bundles/providers/ibm/storage.py
@@ -3,7 +3,7 @@ import os
 from typing import Tuple
 import boto3
 from botocore.client import Config
-from osdu_api.providers.blob_storage import get_client
+#from osdu_api.providers.blob_storage import get_client
 from osdu_api.providers.types import FileLikeObject
 
 from bundles.storage import BundleStorageClient
diff --git a/app/bundles/storage.py b/app/bundles/storage.py
index 61d2b5e6..65dd1e81 100644
--- a/app/bundles/storage.py
+++ b/app/bundles/storage.py
@@ -21,8 +21,6 @@ import sys
 from typing import Tuple
 
 from osdu_api.providers.types import FileLikeObject
-import correlation
-import conf
 
 logger = logging.getLogger(__name__)
 #logger = correlation.DefaultExtrasAdapter(_logger, {"correlation_id": "None"})
diff --git a/app/conf.py b/app/conf.py
index 06a164f7..c4e1a591 100644
--- a/app/conf.py
+++ b/app/conf.py
@@ -139,5 +139,12 @@ if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/token"):
 	    TOKEN = f.read()
 
 OPA_CONFIG_MAP = os.getenv("OPA_CONFIG_MAP", 'opa-config')
+OPA_MAX_POLLING = 3600  # seconds (1 hour); upper bound for /tenant polling_max_delay_seconds
+
+ALLOW_DATA_PARTITION_ID_PATTERN = "^[A-Za-z0-9_-]*$"
+ALLOW_CORRELATION_ID_PATTERN = "^[A-Za-z0-9_-]*$"
+
+# OCI Registry not supported
+OPA_SUPPORTED_SERVICES = ['s3', 'gcs', 'gcp', 'blob', 'nginx']
 
 CLOUD_PROVIDER = os.getenv("CLOUD_PROVIDER", 'LOCAL')
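
Both new allow-patterns accept only ASCII letters, digits, underscore and hyphen (and, because of the trailing `*`, an empty value); auth.py and correlation.py now apply them with re.match before trusting the data-partition-id and correlation-id headers. A quick check of the pattern behaviour in isolation:

    # Behaviour of the new allow-patterns, exercised outside the service.
    import re

    ALLOW_DATA_PARTITION_ID_PATTERN = "^[A-Za-z0-9_-]*$"

    def allowed(value: str) -> bool:
        return re.match(ALLOW_DATA_PARTITION_ID_PATTERN, value) is not None

    assert allowed("osdu")
    assert allowed("partition-01_a")
    assert allowed("")                       # '*' also permits an empty value
    assert not allowed("osdu;drop table")    # punctuation and spaces are rejected
    assert not allowed("os du")
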
diff --git a/app/correlation.py b/app/correlation.py
index 2f019bfb..e5b20e98 100644
--- a/app/correlation.py
+++ b/app/correlation.py
@@ -1,8 +1,14 @@
 import logging
 from fastapi import Header
-from starlette_context import context, request_cycle_context
+from fastapi import HTTPException
+from starlette_context import request_cycle_context
 from uuid_extensions import uuid7str
-from conf import GENERATE_CORRELATION_ID, LOG_LEVEL
+from conf import GENERATE_CORRELATION_ID, LOG_LEVEL, ALLOW_CORRELATION_ID_PATTERN
+import re
+
+from starlette.status import (
+    HTTP_400_BAD_REQUEST,
+)
 #import coloredlogs
 
 #class CorrelationIDLoggerAdapter(logging.LoggerAdapter):
@@ -40,6 +46,9 @@ async def policy_context_dependency(correlation_id: str = Header(None), user_age
 
     if not correlation_id and GENERATE_CORRELATION_ID:
         correlation_id = uuid7str()
+    else:
+        if correlation_id and not re.match(pattern=ALLOW_CORRELATION_ID_PATTERN, string=correlation_id):
+            raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Unsupported correlation-id")
 
     data = {"correlation_id": correlation_id,
             "user_agent": user_agent
diff --git a/app/entitlement.py b/app/entitlement.py
index 9603b83b..3b7b474e 100644
--- a/app/entitlement.py
+++ b/app/entitlement.py
@@ -1,10 +1,10 @@
 # entitlement.py
 #from asyncio.log import logger
 import logging
-from email import header
+#from email import header
 from fastapi import HTTPException
 import requests
-from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN
+from starlette.status import HTTP_401_UNAUTHORIZED
 from starlette_context import context
 import json
 import conf
@@ -51,6 +51,6 @@ class EntitlementService:
         # this should never happen
         raise HTTPException(
             status_code=HTTP_401_UNAUTHORIZED,
-            detail=f"Authentication was unsuccessful. Groups undefined",
+            detail="Authentication was unsuccessful. Groups undefined",
             headers={"WWW-Authenticate": "Bearer"},
         )
\ No newline at end of file
diff --git a/app/k8s.py b/app/k8s.py
index a3d2db22..732be7b1 100755
--- a/app/k8s.py
+++ b/app/k8s.py
@@ -27,8 +27,8 @@ def get_opa_config(namespace="osdu-services", name="opa-config", config_only=Tru
     except config.config_exception.ConfigException as err:
         logger.error(f"Exception when calling kubernetes load config {conf.CLOUD_PROVIDER}: {err}")
         return None
-    except:
-        logger.error(f"Unknown exception when calling kubernetes load config {conf.CLOUD_PROVIDER}")
+    except Exception as e:
+        logger.error(f"Unknown exception {e} when calling kubernetes load config {conf.CLOUD_PROVIDER}")
         return None
 
     v1 = client.CoreV1Api()
@@ -50,8 +50,8 @@ def get_opa_config(namespace="osdu-services", name="opa-config", config_only=Tru
     except ApiException as err:
         # If you get 403 errors from the API server you will have to configure K8s RBAC to add permission 
         logger.error(f"Exception when calling CoreV1Api->list_namespaced_config_map: {err}")
-    except:
-        logger.error(f"Unknown exception when calling CoreV1Api->list_namespaced_config_map")
+    except Exception as e:
+        logger.error(f"Unknown exception {e} when calling CoreV1Api->list_namespaced_config_map")
     return None
 
 def patch_opa_config(namespace="osdu-services", name="opa-config", pretty=True, body=None):
diff --git a/app/main.py b/app/main.py
index 490cd6c7..d6b2cfb3 100644
--- a/app/main.py
+++ b/app/main.py
@@ -1,12 +1,12 @@
 #!/usr/bin/env python3
 # OSDU Policy Service
-from fastapi import FastAPI, Request, Depends, BackgroundTasks, Header, Depends, Response
-from fastapi.responses import RedirectResponse, HTMLResponse
-from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm, HTTPBearer
-from fastapi.openapi.docs import get_swagger_ui_html
-from fastapi.openapi.utils import get_openapi
+from fastapi import FastAPI, Request, Depends
+#from fastapi.responses import RedirectResponse, HTMLResponse
+#from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm, HTTPBearer
+#from fastapi.openapi.docs import get_swagger_ui_html
+#from fastapi.openapi.utils import get_openapi
 from fastapi.staticfiles import StaticFiles
-from starlette_context import context, request_cycle_context
+#from starlette_context import context, request_cycle_context
 import uvicorn
 import os
 import logging
@@ -14,7 +14,7 @@ import conf
 from api import policy_read_api, policy_update_api, policy_eval_api, health_api, compile_api, info_api, translate_api, diag_api, login_api, backup_api, bootstrap_api, config_api, tenant_api, validate_api
 #from models.translate import TranslateItem
 from views import home
-import _buildinfo as b
+import _version
 from auth import auth
 #from fastapi.middleware.cors import CORSMiddleware
 from starlette.middleware import Middleware
@@ -61,7 +61,7 @@ tags_metadata = [
 ]
 
 description = f"""
-    OSDU Policy Service v{b.version} API for Milestone {b.milestone}
+    OSDU Policy Service v{_version.__version__} API for {_version.__milestone__}
 
 Policy service is used for management and evaluation of dynamic policies in OSDU.
 
@@ -141,7 +141,7 @@ app = FastAPI(
     middleware=middleware,
     title="OSDU Policy Service",
     description=description,
-    version=f"v{b.version} {b.milestone}",
+    version=f"v{_version.__version__} {_version.__milestone__}",
     docs_url=conf.SERVICE_BASE_PATH + "/docs",
     redoc_url=conf.SERVICE_BASE_PATH + "/redoc",
     openapi_url=conf.SERVICE_BASE_PATH + "/openapi.json",
@@ -256,12 +256,12 @@ def configure_routing():
 
 def configure_middleware():
     # This will need some work before Admin UI is moved out of POC
-    origins = [
-        "http://localhost",
-        "http://localhost:8080",
-        "http://0.0.0.0:8080",
-        "http://localhost:4200"
-    ]
+    # origins = [
+    #     "http://localhost",
+    #     "http://localhost:8080",
+    #     "http://0.0.0.0:8080",
+    #     "http://localhost:4200"
+    # ]
     app.add_middleware(
         CORSMiddleware,
         allow_origins=['*'],
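
If the wildcard origin is tightened once the Admin UI work mentioned above lands, the commented-out development origins could be passed back to CORSMiddleware along these lines; this is only an illustrative sketch using the standard FastAPI/Starlette middleware arguments, not the service's current configuration:

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI()

# Development origins taken from the list commented out above.
dev_origins = [
    "http://localhost",
    "http://localhost:8080",
    "http://0.0.0.0:8080",
    "http://localhost:4200",
]

app.add_middleware(
    CORSMiddleware,
    allow_origins=dev_origins,  # instead of the current allow-all "*"
    allow_methods=["*"],
    allow_headers=["*"],
)
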
diff --git a/app/opa.py b/app/opa.py
index 93cf4dda..9b604115 100644
--- a/app/opa.py
+++ b/app/opa.py
@@ -1,4 +1,3 @@
-from urllib import response
 import conf
 import requests
 import logging
@@ -56,6 +55,8 @@ def fetch_opa_policy_direct(policy_id, timeout=20, name="fetch_opa_policy"):
 def fetch_opa_policy_direct_with_caching(policy_id, timeout=20):
     """
     fetch a specific policy in OPA
+    200 - no error
+    500 - server error
     """
     logging.setLogRecordFactory(correlation.set_correlation_id(context["correlation_id"]))
     logger = logging.getLogger(__name__)
@@ -144,10 +145,10 @@ def put_opa_policy_direct(policy_id, data, timeout=20, headers=None, validate=Fa
             result.message = f"{rsp.text} {rsp.status_code}"
     except requests.ConnectionError as err:
         logger.error(f"Unexpected ConnectionError when attempting to connect to OPA: {err}")
-        result.message = f"Unexpected ConnectionError when connecting to OPA"
+        result.message = "Unexpected ConnectionError when connecting to OPA"
     except Exception as err:
         logger.error(f"Unexpected error when attempting to connect to OPA: {err}")
-        result.message = f"Unexpected error when connecting to OPA"
+        result.message = "Unexpected error when connecting to OPA"
     return result
 
 def compile(query, metrics=False, instrument=False, timeout=20, name="compile api"):
@@ -200,7 +201,7 @@ def compile_with_caching(query, metrics=False, instrument=False, timeout=20):
 
         if rsp.ok:
             logger.debug(f"OK Response from OPA({url}): {json.dumps(result.json, indent=2)}")
-            result.message = f"Policy OPA compiled"
+            result.message = "Policy OPA compiled"
         else:
             logger.error("Error compiling policy, got status %s and message: %s" % (rsp.status_code, rsp.text))
             result.message = f"Error compiling policy: {rsp.text}"
@@ -254,7 +255,7 @@ def data_with_caching(query, path, timeout=20):
         #if rsp.status_code == httpx.codes.OK:
         if rsp.ok:
             logger.debug(f"OK Response from OPA({url}): {json.dumps(result.json, indent=2)}")
-            result.message = f"Policy OPA query"
+            result.message = "Policy OPA query"
         else:
             logger.error("Error compiling policy, got status %s and message: %s" % (rsp.status_code, rsp.text))
             result.message = f"Error query policy: {rsp.text}"
@@ -288,11 +289,11 @@ def get_document_with_input_cached(rego_package_name, rego_rule_name, rego_input
     try:
         #async with httpx.AsyncClient() as client:
             #response = await client.post(url,
-        response = requests.post(url,
+        resp = requests.post(url,
             data=json.dumps({"input": rego_input}), timeout=timeout)
 
     except Exception as err:
         logger.error(err)
 
-    result = response.json()["result"]
+    result = resp.json()["result"]
     return result
\ No newline at end of file
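
One note on the hunk above: in get_document_with_input_cached the except block only logs, so if requests.post raises, resp is never bound and the following resp.json()["result"] fails with a NameError. A defensive variant of that call pattern could look like the sketch below; the helper name and the None fallback are illustrative, not the service's current behaviour:

import json
import logging

import requests

logger = logging.getLogger(__name__)

def post_for_result(url, rego_input, timeout=20):
    """POST an OPA input document and return its result, or None if the request fails."""
    try:
        resp = requests.post(url, data=json.dumps({"input": rego_input}), timeout=timeout)
        resp.raise_for_status()            # surface HTTP errors explicitly
        return resp.json().get("result")   # tolerate a missing "result" key
    except requests.RequestException as err:
        logger.error(f"OPA request to {url} failed: {err}")
        return None
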
diff --git a/app/opa_response.py b/app/opa_response.py
index 8a7f9d34..69af7896 100644
--- a/app/opa_response.py
+++ b/app/opa_response.py
@@ -1,5 +1,4 @@
 # opa_helper.py
-from unittest import result
 
 class OpaResponse:
     ok: bool
diff --git a/app/tests/integration/test_integration_010.py b/app/tests/integration/test_integration_010.py
index 8b3b96b5..9587bd00 100644
--- a/app/tests/integration/test_integration_010.py
+++ b/app/tests/integration/test_integration_010.py
@@ -45,7 +45,7 @@ def test_mock_health_api_endpoint_no_auth_opa_available(opa_access, cloud_provid
     r = client.get(conf.SERVICE_BASE_PATH+"/health")
     # If OPA is available should get a 200
     # Some CSPs (like AWS) don't expose /health without auth so let's skip it in that case
-    if r.status_code == 401:
+    if r.status_code in (401, 403):
         pytest.skip(f"Skipping mock /health unauth test for {cloud_provider}")
     assert r.status_code == 200, f"Expect /health endpoint to be available {r.text}"
     assert "Healthy" in r.text, "Expected Healthy in response text from /health"
@@ -56,7 +56,7 @@ def test_serviceurl_health_api_endpoint_no_auth_opa_available(service_url, cloud
     r = requests.get(url)
     # If OPA is available should get a 200
     # Some CSPs (like AWS) don't expose /health without auth so let's skip it in that case
-    if r.status_code == 401:
+    if r.status_code in (401, 403):
         pytest.skip(f"Skipping service_url /health unauth test for {cloud_provider}")
     assert r.status_code == 200, f"Expect /health endpoint ({url}) to be available {r.text}"
     assert "Healthy" in r.text, "Expected Healthy in response text from /health"
diff --git a/app/tests/testlib.py b/app/tests/testlib.py
index 6cc82963..280dda03 100644
--- a/app/tests/testlib.py
+++ b/app/tests/testlib.py
@@ -1,7 +1,6 @@
 # testlib.py
 import os
 import requests
-import json
 import re
 from string import Template
 import pytest
@@ -80,7 +79,7 @@ def put_policy_test_data(
                     'DOMAIN': domain
                 })
 
-            if not (f"package osdu.partition[\"{data_partition}\"].{policy_id_short}" in data):
+            if f"package osdu.partition[\"{data_partition}\"].{policy_id_short}" not in data:
                 print(f"expected matching package name '{policy_id_short}' for {file_path} skipping")
                 pytest.xfail(f"expected matching package name '{policy_id_short}' for {file_path}")
 
@@ -116,10 +115,10 @@ def put_policy_test_data(
                 assert policy_id in r.text, f"Expect {policy_id} message in result {r.text}"
                 result = r.json()
                 #print(f"returned json: {json.dumps(result, indent=4)}")
-                assert result["status"] == True, "Expected status true (success)"
+                assert result["status"] is True, "Expected status true (success)"
             num_tests = num_tests + 1
         else:
-            pytest.skip(f"Only .rego templates")
+            pytest.skip("Only .rego templates")
     #assert num_tests >= 7, "At least 7 polices were tested"
 
 def strip_comments(code):
diff --git a/app/tests/unit/test_api_unit.py b/app/tests/unit/test_api_unit.py
index aa4698bb..8658f548 100644
--- a/app/tests/unit/test_api_unit.py
+++ b/app/tests/unit/test_api_unit.py
@@ -1,10 +1,6 @@
-from textwrap import indent
 import pytest
 import unittest
-import responses
 from fastapi.testclient import TestClient
-import requests
-import re
 import logging
 import sys
 import os
@@ -16,7 +12,6 @@ sys.path.append('tests')
 
 # local
 from main import app
-from auth import auth
 import conf
 import testlib
 
diff --git a/app/timer.py b/app/timer.py
index 7c59f87c..ed503cc8 100644
--- a/app/timer.py
+++ b/app/timer.py
@@ -26,14 +26,14 @@ class Timer:
     def start(self):
         """Start a new timer"""
         if self._start_time is not None:
-            raise TimerError(f"Timer is running. Use .stop() to stop it")
+            raise TimerError("Timer is running. Use .stop() to stop it")
 
         self._start_time = time.perf_counter()
 
     def stop(self):
         """Stop the timer, and report the elapsed time"""
         if self._start_time is None:
-            raise TimerError(f"Timer is not running. Use .start() to start it")
+            raise TimerError("Timer is not running. Use .start() to start it")
 
         elapsed_time = time.perf_counter() - self._start_time
         self._start_time = None
diff --git a/app/translate/convert.py b/app/translate/convert.py
index f66ff2de..47aaa855 100644
--- a/app/translate/convert.py
+++ b/app/translate/convert.py
@@ -15,12 +15,12 @@ from elasticsearch_dsl.query import (
     Terms,
 )
 
-from cachetools import cached, TTLCache
+#from cachetools import cached, TTLCache
 import correlation
 from . import ast
 import opa
 from .translate_exceptions import PolicyDenyError
-import conf
+#import conf
 
 logger = logging.getLogger(__name__)
 
@@ -62,23 +62,23 @@ def _convert_opa_query_to_es_query(opa_query: ast.Query, opa_request_input):
                 logger.debug(f"performing side effect for {expr}")
                 if expr.op() == "http.send":
                     logger.debug(f"isistance {expr.terms[2].value}, {ast.Var}")
-                    assert isinstance(expr.terms[2].value, ast.Var), f"assert02 isinstance"
+                    assert isinstance(expr.terms[2].value, ast.Var), "assert02 isinstance"
                     logger.debug("after assert")
                     http_result = _resolve_http(expr)
                     side_effect_context[expr.terms[2].value.value] = http_result
                     logger.debug(f"side effect context updated: {side_effect_context}")
                 elif expr.op() == "eq":
                     logger.debug(f"eq {expr.terms[1].value}, {str(ast.Ref)}")
-                    assert isinstance(expr.terms[1].value, ast.Ref), f"assert03 isinstance"
+                    assert isinstance(expr.terms[1].value, ast.Ref), "assert03 isinstance"
                     logger.debug("after assert eq 1")
                     the_ref = expr.terms[1]
                     logger.debug(f"the_ref: {the_ref}")
                     logger.debug(f"assert: {the_ref.value.terms[0].value} {ast.Var}")
-                    assert isinstance(the_ref.value.terms[0].value, ast.Var), f"assert04 isinstance"
+                    assert isinstance(the_ref.value.terms[0].value, ast.Var), "assert04 isinstance"
                     logger.debug("after assert eq 2")
                     variable_key = the_ref.value.terms[0].value.value
                     logger.debug(f"variable_key: {variable_key}")
-                    assert isinstance(the_ref.value.terms[1].value, ast.Scalar), f"assert05 isinstance"
+                    assert isinstance(the_ref.value.terms[1].value, ast.Scalar), "assert05 isinstance"
                     logger.debug("after assert eq 3")
                     variable_path = the_ref.value.terms[1].value.value
                     logger.debug(f"variable_path: {variable_path}")
@@ -112,10 +112,10 @@ def _convert_opa_query_to_es_query(opa_query: ast.Query, opa_request_input):
                     scalar = operand.value
                     field_value = scalar.value
                 elif isinstance(operand.value, ast.Object):
-                    assert False, f"assert06 False"
+                    assert False, "assert06 False"
                     logger.debug(operand.value)
                 elif isinstance(operand.value, ast.Var):
-                    assert False, f"assert07 False"
+                    assert False, "assert07 False"
                     logger.debug(operand.value)
                 else:
                     not_scalar = operand.value
@@ -124,8 +124,8 @@ def _convert_opa_query_to_es_query(opa_query: ast.Query, opa_request_input):
                         for term in not_scalar.terms[2:]
                         if not isinstance(term.value, ast.Var)
                     )
-            assert field_name is not None, f"assert08 field_name"
-            assert field_value is not None, f"assert09 field_value"
+            assert field_name is not None, "assert08 field_name"
+            assert field_value is not None, "assert09 field_value"
 
             if expr.op() == "eq":
                 es_filter = Term(**{field_name: field_value})
@@ -154,8 +154,8 @@ def _convert_opa_query_to_es_query(opa_query: ast.Query, opa_request_input):
             )(expr.terms.value)
 
             # Validate domain is input.record.xxx
-            assert domain.value.operand(0).value.value == "input", f"assert10 input"
-            assert domain.value.operand(1).value.value == "record", f"assert11 record"
+            assert domain.value.operand(0).value.value == "input", "assert10 input"
+            assert domain.value.operand(1).value.value == "record", "assert11 record"
 
             # Support only simple body
             assert len(body) == 1, f"assert12 body len {body}"
@@ -163,10 +163,10 @@ def _convert_opa_query_to_es_query(opa_query: ast.Query, opa_request_input):
 
             # Ensure body is negated
             # ElasticSearch, being an inverted index, cannot support universal quantification
-            assert body.negated, f"assert13 body negated"
+            assert body.negated, "assert13 body negated"
 
             # TODO: Assume body is always a Ref
-            assert isinstance( body.terms.value, ast.Ref), f"assert14 isinstance"
+            assert isinstance(body.terms.value, ast.Ref), "assert14 isinstance"
             values = _resolve_ref(
                 body.terms.value, var_in_body.value.value, opa_request_input
             )
@@ -184,9 +184,9 @@ def _resolve_ref(ref, expected_variable_name, opa_request_input):
     logging.setLogRecordFactory(correlation.set_correlation_id(context["correlation_id"]))
     logger = logging.getLogger(__name__)
     logger.debug(f"resolve ref: {ref}")
-    assert isinstance(ref.operand(0), ast.Term), f"assert15 isinstance"
-    assert isinstance(ref.operand(0).value, ast.Var), f"assert16 isinstance"
-    assert ref.operand(0).value.value == "data", f"assert17 data"
+    assert isinstance(ref.operand(0), ast.Term), "assert15 isinstance"
+    assert isinstance(ref.operand(0).value, ast.Var), "assert16 isinstance"
+    assert ref.operand(0).value.value == "data", "assert17 data"
     rego_package_name = ref.operand(1).value.value
     rego_rule_name = ref.operand(2).value.value
     variable_name = ref.operand(3).value.value
@@ -200,7 +200,7 @@ def _resolve_http(expr):
     logging.setLogRecordFactory(correlation.set_correlation_id(context["correlation_id"]))
     logger = logging.getLogger(__name__)
     logger.debug(f"resolve http: {expr}")
-    assert isinstance(expr.terms[1].value, ast.Object), f"assert19 isinstance"
+    assert isinstance(expr.terms[1].value, ast.Object), "assert19 isinstance"
     method = None
     url = None
     headers = {}
@@ -220,8 +220,8 @@ def _resolve_http(expr):
             pass  # no-op
         else:
             raise NotImplementedError("Unsupported http.send option", str(key))
-    assert method is not None, f"assert20 method is None"
-    assert url is not None, f"assert21 url is None"
+    assert method is not None, "assert20 method is None"
+    assert url is not None, "assert21 url is None"
     logger.debug(f"http request method: {method}, url: {url}, headers: {headers}")
     resp = requests.request(method, url=url, headers=headers)
     response = {
@@ -294,4 +294,4 @@ def _postprocess_es_queries(es_queries):
         return new_es_queries
 
     new_es_queries = combine_term_queries(es_queries)
-    return new_es_queries
\ No newline at end of file
+    return new_es_queries
diff --git a/app/views/home.py b/app/views/home.py
index b1ec5c0c..a5985d47 100644
--- a/app/views/home.py
+++ b/app/views/home.py
@@ -2,7 +2,7 @@ import fastapi
 import sys
 import os
 sys.path.append(os.path.abspath('..'))
-from _buildinfo import version
+import _version
 router = fastapi.APIRouter()
 
 def generate_html_response():
@@ -12,7 +12,7 @@ def generate_html_response():
             <title>Policy Service</title>
         </head>
         <body>
-            <h1>Policy Service v{version}</h1>
+            <h1>Policy Service v{_version.__version__} {_version.__milestone__}</h1>
               <ul>
               <li><a href='/api/policy/v1/docs'>Swagger UI</a>
               <li><a href='/api/policy/v1/redoc'>ReDoc</a>
diff --git a/devops/aws/override-stages.yaml b/devops/aws/override-stages.yaml
index 2ae8a82c..8a564d17 100644
--- a/devops/aws/override-stages.yaml
+++ b/devops/aws/override-stages.yaml
@@ -1,5 +1,8 @@
 aws-containerize:
   before_script:
+    - apt-get update
+    - apt-get install -y software-properties-common
+    - apt-get install -y python3 python3-pip
+    - python3 -m pip install -r requirements_setversion.txt
     - cd app
     - make build_docker
     - cd ..
diff --git a/devops/azure/override-stages.yml b/devops/azure/override-stages.yml
index 56ca8d14..42ee7ba4 100644
--- a/devops/azure/override-stages.yml
+++ b/devops/azure/override-stages.yml
@@ -15,12 +15,16 @@ azure_containerize:
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
     - az --version
     - az login --service-principal -u $AZURE_PRINCIPAL_ID -p $AZURE_PRINCIPAL_SECRET --tenant $AZURE_TENANT_ID
+    # Set Version
+    - apk add --no-cache python3 py3-pip
+    - pip install -r requirements_setversion.txt
   script:
+    - python3 setversion.py app
     # Gitlab Container Registry
     - docker build -f $AZURE_BUILD_SUBDIR/Dockerfile -t $CI_REGISTRY_IMAGE/$SHA_IMAGE .
     - docker push ${CI_REGISTRY_IMAGE}/$SHA_IMAGE
     - echo $CI_COMMIT_TAG
-    # Azure Container Registryb
+    # Azure Container Registry
     - az acr login -n $AZURE_REGISTRY
     - docker tag $CI_REGISTRY_IMAGE/$SHA_IMAGE ${AZURE_REGISTRY}.azurecr.io/$SHA_IMAGE
     - docker push ${AZURE_REGISTRY}.azurecr.io/$SHA_IMAGE
@@ -103,7 +107,7 @@ azure_test_py:
     - echo "DATA_PARTITION $DATA_PARTITION_ID"
     - echo "BUNDLE_PAUSE $BUNDLE_PAUSE"
     - echo "CLOUD_PROVIDER $CLOUD_PROVIDER"
-    - python3 -m pytest --token=$BEARER_TOKEN --service_url=https://${AZURE_DNS_NAME} --data_partition=$DATA_PARTITION_ID
+    - python3 -m pytest -v --color=yes --token=$BEARER_TOKEN --service_url=https://${AZURE_DNS_NAME} --data_partition=$DATA_PARTITION_ID
   only:
     variables:
       - $AZURE == '1'
diff --git a/requirements.txt b/requirements.txt
index 8022796d..3f96d2ff 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -18,4 +18,4 @@ google-cloud-storage
 
 # osdu dependences
 --extra-index-url https://community.opengroup.org/api/v4/projects/148/packages/pypi/simple 
-osdu-api[all]~=0.22.0rc1, ==0.22.* # it will install a rc-version if there is no release one.
+osdu-api[all]~=0.24.0rc1, ==0.24.* # it will install an rc version if there is no final release.
diff --git a/requirements_dev.txt b/requirements_dev.txt
index 548cdb22..f66bcc6b 100644
--- a/requirements_dev.txt
+++ b/requirements_dev.txt
@@ -4,6 +4,7 @@ pytest-cov
 pytest-mock
 httpx
 parameterized
+typer
 
 # the following are used in functional integration tests
 boto3==1.26.133
@@ -16,4 +17,4 @@ msal
 
 # osdu dependences
 --extra-index-url https://community.opengroup.org/api/v4/projects/148/packages/pypi/simple 
-osdu-api[all]~=0.22.0rc1, ==0.22.*
+osdu-api[all]~=0.24.0rc1, ==0.24.*
diff --git a/requirements_setversion.txt b/requirements_setversion.txt
new file mode 100644
index 00000000..ec184f68
--- /dev/null
+++ b/requirements_setversion.txt
@@ -0,0 +1 @@
+typer
diff --git a/setversion.py b/setversion.py
new file mode 100755
index 00000000..9a893d0c
--- /dev/null
+++ b/setversion.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+import typer
+import time
+from datetime import datetime
+import re
+
+def main(
+    path: str = typer.Argument(envvar="CI_PROJECT_NAME"),
+    branch: str = typer.Option("dev", "--branch", help="Branch name", envvar="CI_COMMIT_REF_NAME"),
+    build: str = typer.Option("0", "--build", help="Build ID", envvar="CI_PIPELINE_IID"),
+    commit_message: str = typer.Option("", envvar="CI_COMMIT_MESSAGE"),
+    commit_timestamp: str = typer.Option(None, envvar="CI_COMMIT_TIMESTAMP"),
+    commit_ref_slug: str = typer.Option("", envvar="CI_COMMIT_REF_SLUG"),
+    commit_id: str = typer.Option("", envvar="CI_COMMIT_SHA"),
+    versionfile: str = typer.Option("VERSION", "--version-file", help="Version file"),
+    pyfile: str = typer.Option("_version.py", "--pyfile", help="python file"),
+    ):
+
+    with open(versionfile, "r") as f:
+        (major, minor, patch) = f.read().strip().split('.')
+
+    # Choose a PEP 440 style suffix based on the branch: rc for master/main, final for release/*,
+    # beta for trusted/*, alpha otherwise.
+    if branch in ("master", "main"):
+        release = f"rc{patch}.dev{build}"
+    elif branch.startswith("release"):
+        release = f"{patch}"
+    elif branch.startswith("trusted"):
+        release = f"b{patch}.dev{build}"
+    else:
+        release = f"a{patch}.dev{build}"
+
+    __version__ = f"{major}.{minor}.{release}"
+    print(__version__)
+    milestone = int(minor) - 3  # OSDU milestone number derived from the minor version
+
+    if not commit_timestamp:
+        today = datetime.now()
+        commit_timestamp = today.isoformat()
+
+    regex = re.compile('[^a-zA-Z0-9_]')
+    commit_message = regex.sub('', commit_message[0:64])
+
+    with open(path + "/" + pyfile, "w") as f:
+        f.write(f"__version__ = \"{__version__}\"\n")
+        f.write(f"__milestone__ = \"M{milestone}\"\n")
+        f.write(f"__branch__ = \"{branch}\"\n")
+        f.write(f"__buildtime__ = {time.time()}\n")
+        f.write(f"__commitid__ = \"{commit_id}\"\n")
+        #f.write(f"__commitmessage__ = \"{commit_message}\"\n")
+        f.write("__commitmessage__ = \"\"\n")
+        f.write(f"__committimestamp__ = \"{commit_timestamp}\"\n")
+        f.write(f"__commitrefslug__ = \"{commit_ref_slug}\"\n")
+
+if __name__ == "__main__":
+    typer.run(main)
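
For illustration (all values below are placeholders, not from a real pipeline): with a VERSION file containing 0.27.0, running python3 setversion.py app --branch master --build 42 prints 0.27.rc0.dev42 and writes an app/_version.py roughly like this:

# app/_version.py as generated by setversion.py (placeholder values)
__version__ = "0.27.rc0.dev42"
__milestone__ = "M24"
__branch__ = "master"
__buildtime__ = 1698671276.0
__commitid__ = ""
__commitmessage__ = ""
__committimestamp__ = "2023-10-30T13:27:56"
__commitrefslug__ = ""

main.py and views/home.py read __version__ and __milestone__ from this module in place of the removed _buildinfo import.
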
-- 
GitLab