diff --git a/app/api/compile_api.py b/app/api/compile_api.py
index 44b22b7d2c7dd304c2522fbc1477654f2aa5cd03..1c55280e38bf8aa8c974336a418e4b6c5d77e353 100644
--- a/app/api/compile_api.py
+++ b/app/api/compile_api.py
@@ -71,7 +71,6 @@ def compile_partially_evaluate_a_query(
     logger = logging.getLogger(__name__)
     response.headers["X-Correlation-ID"] = context["correlation_id"]
 
-    # contents = await file.read()
     contents = file.file.read()
 
     try:
diff --git a/app/api/config_api.py b/app/api/config_api.py
index 23f8940fc95a895ebff5572fb92462e2934708e1..773a8caf1e2cad87fac49575806b4f98dcd2a567 100644
--- a/app/api/config_api.py
+++ b/app/api/config_api.py
@@ -13,7 +13,7 @@ import socket
 import k8s
 import time
 import psutil
-# import pdb
+# For interactive debugging/tracing, import pdb here
 
 from pydantic import BaseModel
 
@@ -41,7 +41,6 @@ def seconds_elapsed():
 def show_policy_config_details(
     response: Response,
     request: Request,
-    # duplicate: Annotated[Union[List[str], None], Header()] = None, # Python 3.9+
     auth_data: auth.Auth = Depends(auth.require_authorized_admin),
 ):
     """
@@ -58,7 +57,7 @@ def show_policy_config_details(
     logger = logging.getLogger(__name__)
     response.headers["X-Correlation-ID"] = context["correlation_id"]
 
-    # pdb.set_trace()
+    # For interactive debugging, insert pdb.set_trace() here
     r_status_json = {}
     r_config_json = {}
     r_config_headers = {}
@@ -89,23 +88,8 @@ def show_policy_config_details(
     except Exception:
         logger.error(f"Error: endpoint {conf.OPA_CONFIG_API}: Error")
 
-    cpu_usage = None
-    memory_usage = None
-    try:
-        with open("/sys/fs/cgroup/cpu/cpuacct.usage") as file:
-            cpu_usage = file.read().strip()
-    except FileNotFoundError as err:
-        logger.error(f"file not found cpu_usage error: {err}")
-    except OSError as err:
-        logger.error(f"cpu_usage error: {err}")
-
-    try:
-        with open("/sys/fs/cgroup/memory/memory.usage_in_bytes") as file:
-            memory_usage = file.read().strip()
-    except FileNotFoundError as err:
-        logger.error(f"file not found memory_usage error: {err}")
-    except OSError as err:
-        logger.error(f"memory_usage error: {err}")
+    cpu_usage = get_cpu_usage()
+    memory_usage = get_memory_usage()
 
     conf_dict = {}
     for i in dir(conf):
@@ -122,9 +106,6 @@ def show_policy_config_details(
             "HOSTNAME": os.environ.get("HOSTNAME"),
             "PYTHON_VERSION": os.environ.get("PYTHON_VERSION"),
             "KUBERNETES_SERVICE_HOST": os.environ.get("KUBERNETES_SERVICE_HOST"),
-            #    "POLICY_BUCKET": os.environ.get("POLICY_BUCKET"),
-            #    "CONTAINER_NAME": os.environ.get("CONTAINER_NAME"),
-            #    "STORAGE_ACCOUNT": os.environ.get("STORAGE_ACCOUNT")
         }
     else:
         env = os.environ.items()
@@ -145,10 +126,8 @@ def show_policy_config_details(
         "memory_usage": memory_usage,
         "client_host": request.client.host,
         "request_headers": request.headers,
-        # "duplicate": duplicate,
         "auth_data": {
             "data_partition_id": auth_data.data_partition_id,
-            #    "groups": auth_data.groups
         },
         "build_data": {
             "version": b.__version__,
@@ -165,3 +144,33 @@ def show_policy_config_details(
         "opa_status": r_status_json,
         "opa_config": r_config_json,
     }
+
+
+def get_cpu_usage():
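+    """Return cumulative CPU usage (cgroup v1 cpuacct, nanoseconds) as a string, or None if unavailable."""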
+    logging.setLogRecordFactory(
+        correlation.set_correlation_id(context["correlation_id"])
+    )
+    logger = logging.getLogger(__name__)
+    try:
+        with open("/sys/fs/cgroup/cpu/cpuacct.usage") as file:
+            return file.read().strip()
+    except FileNotFoundError as err:
+        logger.error(f"file not found cpu_usage error: {err}")
+    except OSError as err:
+        logger.error(f"cpu_usage error: {err}")
+
+    return None
+
+
+def get_memory_usage():
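+    """Return current memory usage in bytes (cgroup v1) as a string, or None if unavailable."""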
+    logging.setLogRecordFactory(
+        correlation.set_correlation_id(context["correlation_id"])
+    )
+    logger = logging.getLogger(__name__)
+    try:
+        with open("/sys/fs/cgroup/memory/memory.usage_in_bytes") as file:
+            return file.read().strip()
+    except FileNotFoundError as err:
+        logger.error(f"file not found memory_usage error: {err}")
+    except OSError as err:
+        logger.error(f"memory_usage error: {err}")
diff --git a/app/api/policy_eval_api.py b/app/api/policy_eval_api.py
index 9093bfe9497bc6db70abfe6e387bca9429fbb5ed..f0232a43aaa98e2c4321505519df2e1f6b9ecd67 100644
--- a/app/api/policy_eval_api.py
+++ b/app/api/policy_eval_api.py
@@ -33,7 +33,6 @@ from starlette.status import (
     HTTP_503_SERVICE_UNAVAILABLE,
 )
 
-# from opa_response import OpaResponse
 sys.path.append(os.path.abspath(".."))
 import opa
 from auth import auth
@@ -123,7 +122,50 @@ def evaluate_policy(
     elif "osdu/partition/" not in policy_id:
         policy_id = "osdu/partition/" + auth_data.data_partition_id + "/" + policy_id
 
-    # contents = await file.read()
+    posted_data = process_posted_data(
+        file=file, include_auth=include_auth, auth_data=auth_data
+    )
+    result = opa.data(query=json.dumps(posted_data), path=policy_id)
+    logging.error(result)
+    if result.ok:
+        logging.debug(result.message)
+    else:
+        logging.info(result.message)
+        raise HTTPException(
+            status_code=result.status_code,
+            detail=f"Error when talking to OPA: {result.message}",
+        )
+
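+    # An empty OPA response document (no "result" key) usually means the policy path is undefined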
+    if "result" not in result.json:
+        if conf.ENABLE_DEV_DIAGNOSTICS_DUMPS:
+            # save lots of debugging info
+            file_path = "eval_dump_bad.json"
+            with open(file_path, "w") as outfile:
+                outfile.write(json.dumps(posted_data, indent=4))
+            print(posted_data)
+
+        raise HTTPException(
+            status_code=HTTP_406_NOT_ACCEPTABLE,
+            detail=f"Evaluate failed for policy '{policy_id}'. result: {result.json}",
+            headers={"X-Error-data": json.dumps(posted_data)},
+        )
+    else:
+        if conf.ENABLE_DEV_DIAGNOSTICS_DUMPS:
+            # save lots of debugging info
+            file_path = "eval_dump_input_ok.json"
+            with open(file_path, "w") as outfile:
+                outfile.write(json.dumps(posted_data, indent=4))
+
+            print(posted_data)
+
+            file_path = "eval_dump_output_ok.json"
+            with open(file_path, "w") as outfile:
+                outfile.write(json.dumps(result.json, indent=4))
+
+        return result.json
+
+
+def process_posted_data(file: UploadFile, include_auth: bool, auth_data: auth.Auth):
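+    """Parse the uploaded JSON file and validate required input fields, raising HTTPException on bad data."""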
     contents = file.file.read()
     try:
         posted_data = json.loads(contents.decode("utf-8"))
@@ -158,82 +200,11 @@ def evaluate_policy(
             status_code=HTTP_422_UNPROCESSABLE_ENTITY,
             detail="token not found in file data",
         )
+
     if "datapartitionid" not in posted_data["input"]:
         raise HTTPException(
             status_code=HTTP_422_UNPROCESSABLE_ENTITY,
             detail="datapartitionid not found in file data",
         )
 
-    result = opa.data(query=json.dumps(posted_data), path=policy_id)
-    logging.error(result)
-    if result.ok:
-        logging.debug(result.message)
-    else:
-        logging.info(result.message)
-        raise HTTPException(
-            status_code=result.status_code,
-            detail=f"Error when talking to OPA: {result.message}",
-        )
-
-    if "result" not in result.json:
-        if conf.ENABLE_DEV_DIAGNOSTICS_DUMPS:
-            # save lots of debugging info
-            file_path = "eval_dump_bad.json"
-            with open(file_path, "w") as outfile:
-                outfile.write(json.dumps(posted_data, indent=4))
-            print(posted_data)
-
-        raise HTTPException(
-            status_code=HTTP_406_NOT_ACCEPTABLE,
-            detail=f"Evaluate failed for policy '{policy_id}'. result: {result.json}",
-            headers={"X-Error-data": json.dumps(posted_data)},
-        )
-    else:
-        if conf.ENABLE_DEV_DIAGNOSTICS_DUMPS:
-            # save lots of debugging info
-            file_path = "eval_dump_input_ok.json"
-            with open(file_path, "w") as outfile:
-                outfile.write(json.dumps(posted_data, indent=4))
-
-            print(posted_data)
-
-            file_path = "eval_dump_output_ok.json"
-            with open(file_path, "w") as outfile:
-                outfile.write(json.dumps(result.json, indent=4))
-
-        return result.json
-
-    # try:
-    # get userinfo
-    # user_info = get_user_info(auth_data.access_token)
-    # logger.debug(f"user_info: {user_info}")
-    # input_json['user']=user_info
-
-    # consider getting user groups - current expectation is that they are provided as part of input
-
-    #     # get legalTags
-    #     legaltagslist = []
-    #     if 'record' in input_json:
-    #         legaltagslist = get_legal_tag_list(input_json['record'])
-    #         logger.debug(f"Found record. Legaltagslist: {legaltagslist}")
-
-    #     if 'records' in input_json and isinstance(input_json['records'], list):
-    #         logger.debug(f"inside if")
-    #         for record in input_json['records']:
-    #             logger.debug(f"extending legaltagslist by {record}")
-    #             legaltagslist.extend(get_legal_tag_list(record))
-
-    #     if not 'legaltags' in input_json or not isinstance(input_json['legaltags'], list):
-    #         logger.debug(f"if not legaltags or not instance - clearing input_json")
-    #         input_json['legaltags'] = []
-
-    #     # Added for issue#17
-    #     legalTagListLen = len(legaltagslist)
-    #     logger.debug(f"len {legalTagListLen}")
-    #     for x in range(0, legalTagListLen+1, 25):
-    #         input_json['legaltags'].extend(get_legal_tag_info(legaltagslist[x:x+25], auth_data.data_partition_id, auth_data.access_token, user_id=auth_data.x_user_id))
-
-    # except Exception as err:
-    #     tb = traceback.format_exc()
-    #     logging.error(f"Exception: {err}")
-    #     logging.error(f"traceback: {tb}")
+    return posted_data
diff --git a/app/api/policy_update_api.py b/app/api/policy_update_api.py
index 5dfcd4c10fb907f64cab51cb6312803df95b0d32..31b189c186572a666ceb927c1274e81d84951042 100644
--- a/app/api/policy_update_api.py
+++ b/app/api/policy_update_api.py
@@ -22,7 +22,6 @@ import logging
 import hashlib
 from starlette.requests import Request
 from starlette_context import context
-# from requests.structures import CaseInsensitiveDict
 
 from starlette.status import (
     HTTP_200_OK,
@@ -159,38 +158,38 @@ def delete_partition_policy(
         )
 
     cloud_provider = os.environ.get("CLOUD_PROVIDER")
-    id = f"osdu/partition/{data_partition}/{policy_id}"
+    pol_id = f"osdu/partition/{data_partition}/{policy_id}"
     if cloud_provider is None:
         # Help support MOCK testing
         result = OpaResponse()
         logger.critical(
-            f"Error: CLOUD_PROVIDER ENV VAR not set / Mocking results for delete: /policies/osdu/partition/{data_partition}/{id}"
+            f"Error: CLOUD_PROVIDER ENV VAR not set / Mocking results for delete: /policies/osdu/partition/{data_partition}/{pol_id}"
         )
-        result.message = f"MOCK OK policy_id: {id}"
+        result.message = f"MOCK OK policy_id: {pol_id}"
         result.ok = True
     elif cloud_provider == conf.MOCK:
         result = OpaResponse()
         logger.critical(
-            f"Warning: CLOUD_PROVIDER ENV VAR set to Mock results {cloud_provider} for delete: /policies/osdu/partition/{data_partition}/{id}"
+            f"Warning: CLOUD_PROVIDER ENV VAR set to Mock results {cloud_provider} for delete: /policies/osdu/partition/{data_partition}/{pol_id}"
         )
-        result.message = f"MOCK OK policy_id: {id}"
+        result.message = f"MOCK OK policy_id: {pol_id}"
         result.ok = True
     elif cloud_provider == conf.LOCAL:
         # LOCAL Development ENV
         logger.critical(
-            f"Error: CLOUD_PROVIDER ENV VAR LOCAL, Using local / direct OPA {conf.OPA_URL} delete /policies/osdu/partition/{data_partition}/{id}"
+            f"Error: CLOUD_PROVIDER ENV VAR LOCAL, Using local / direct OPA {conf.OPA_URL} delete /policies/osdu/partition/{data_partition}/{pol_id}"
         )
-        result = opa.delete_opa_policy_direct(id)
+        result = opa.delete_opa_policy_direct(pol_id)
     else:
         logger.info(
-            f"bundle delete policy_id: {policy_id} id: {id} {auth_data.user_id}"
+            f"bundle delete policy_id: {policy_id} id: {pol_id} {auth_data.user_id}"
         )
         result = bundle.delete_policy(
             data_partition=data_partition, policy_id=policy_id
         )
 
     if result.ok:
-        audit_log_msg = f"delete policy_id: {policy_id} id: {id} {audit_msg}"
+        audit_log_msg = f"delete policy_id: {policy_id} id: {pol_id} {audit_msg}"
         logger.info(f"AUDIT {audit_log_msg}")
         audit_log.logger.info(audit_log_msg)
         write_audit_log(data_partition=data_partition, log_message=str(audit_log))
@@ -211,10 +210,14 @@ def delete_partition_policy(
         elif cloud_provider == conf.MOCK:
             logger.error("Unexpected Mock error")
         else:
-            audit_log_msg = f"failed-delete policy_id: {policy_id} id: {id} {audit_msg}"
+            audit_log_msg = (
+                f"failed-delete policy_id: {policy_id} id: {pol_id} {audit_msg}"
+            )
             audit_log.logger.error(audit_log_msg)
             write_audit_log(data_partition=data_partition, log_message=str(audit_log))
-            logger.error(f"Error from bundle service {result.status_code} {result.message} for failed-delete. AUDIT {audit_log_msg}")
+            logger.error(
+                f"Error from bundle service {result.status_code} {result.message} for failed-delete. AUDIT {audit_log_msg}"
+            )
             raise HTTPException(
                 status_code=result.status_code,
                 detail=f"Error from bundle service {result.message}",
@@ -320,7 +323,6 @@ def create_or_update_partition_policy(
             detail="Policy ID should end with a .rego",
         )
 
-    # contents = await file.read()
     contents = file.file.read()
 
     if not bundle.package_name_ok(data_partition, policy_id, contents):
@@ -332,7 +334,7 @@ def create_or_update_partition_policy(
         )
 
     cloud_provider = os.environ.get("CLOUD_PROVIDER")
-    id = f"osdu/partition/{data_partition}/{policy_id}"
+    pol_id = f"osdu/partition/{data_partition}/{policy_id}"
 
     sha256 = hashlib.sha256(contents.decode("utf-8").encode()).hexdigest()
     response.headers["X-SHA-256"] = sha256
@@ -350,39 +352,14 @@ def create_or_update_partition_policy(
             data=contents.decode("utf-8"),
         )
 
-    if cloud_provider is None:
-        # Help support MOCK testing
-        logger.critical(
-            f"Error: CLOUD_PROVIDER ENV VAR not set / Mocking results for put /policies/osdu/partition/{data_partition}/{id}"
-        )
-        result = OpaResponse()
-        result.ok = True
-        result.status_code = HTTP_200_OK
-        result.message = f"MOCK Policy {id} added to OPA"
-    elif cloud_provider == conf.MOCK:
-        logger.critical(
-            f"Warning: CLOUD_PROVIDER ENV VAR set to Mock results {cloud_provider} for put /policies/osdu/partition/{data_partition}/{id}"
-        )
-        result = OpaResponse()
-        result.ok = True
-        result.status_code = HTTP_200_OK
-        result.message = f"MOCK Policy {id} added to OPA"
-    elif cloud_provider == conf.LOCAL:
-        # LOCAL Development ENV
-        logger.critical(
-            f"Error: CLOUD_PROVIDER ENV VAR LOCAL, Using local / direct OPA {conf.OPA_URL} put /policies/osdu/partition/{data_partition}/{id} sha256 {sha256}"
-        )
-        result = opa.put_opa_policy_direct(policy_id=id, data=contents)
-        # result = opa.put_opa_policy_direct(policy_id=id, data=contents.decode('utf-8'))
-    else:
-        logger.info(f"bundle put policy_id: {policy_id} id: {id} {auth_data.user_id}")
-        result = bundle.put_policy(
-            data_partition=data_partition, policy_id=policy_id, policy=contents
-        )
-
+    result = process_cloud_provider(
+        cloud_provider=cloud_provider,
+        data_partition=data_partition,
+        policy_id=policy_id,
+        pol_id=pol_id,
+        contents=contents,
+        sha256=sha256,
+        auth_data=auth_data,
+    )
     if result.ok:
-        # TODO dev testing - fix return
-        audit_log_msg = f"put policy_id: {policy_id} id: {id} {audit_msg}"
+        audit_log_msg = f"put policy_id: {policy_id} id: {pol_id} {audit_msg}"
         logger.info(f"AUDIT {audit_log_msg}")
         audit_log.logger.info(audit_log_msg)
         write_audit_log(data_partition=data_partition, log_message=str(audit_log))
@@ -406,11 +383,49 @@ def create_or_update_partition_policy(
         elif cloud_provider == conf.MOCK:
             logger.error("Unexpected Mock error")
         else:
-            audit_log_msg = f"failed-put policy_id: {policy_id} id: {id} {audit_msg}"
+            audit_log_msg = (
+                f"failed-put policy_id: {policy_id} id: {pol_id} {audit_msg}"
+            )
             audit_log.logger.error(audit_log_msg)
             write_audit_log(data_partition=data_partition, log_message=str(audit_log))
-            logger.error(f"Error from bundle service {result.status_code} {result.message} for failed-put. AUDIT {audit_log_msg}")
+            logger.error(
+                f"Error from bundle service {result.status_code} {result.message} for failed-put. AUDIT {audit_log_msg}"
+            )
             raise HTTPException(
                 status_code=result.status_code,
                 detail=f"Error from bundle service {result.message}",
             )
+
+
+def process_cloud_provider(
+    cloud_provider, data_partition, policy_id, pol_id, contents, sha256, auth_data
+):
+    """Route the policy put to MOCK, LOCAL (direct OPA) or the bundle service, based on CLOUD_PROVIDER."""
+    logger = logging.getLogger(__name__)
+    if cloud_provider is None:
+        # Help support MOCK testing
+        logger.critical(
+            f"Error: CLOUD_PROVIDER ENV VAR not set / Mocking results for put /policies/osdu/partition/{data_partition}/{pol_id}"
+        )
+        result = OpaResponse()
+        result.ok = True
+        result.status_code = HTTP_200_OK
+        result.message = f"MOCK Policy {pol_id} added to OPA"
+    elif cloud_provider == conf.MOCK:
+        logger.critical(
+            f"Warning: CLOUD_PROVIDER ENV VAR set to Mock results {cloud_provider} for put /policies/osdu/partition/{data_partition}/{pol_id}"
+        )
+        result = OpaResponse()
+        result.ok = True
+        result.status_code = HTTP_200_OK
+        result.message = f"MOCK Policy {pol_id} added to OPA"
+    elif cloud_provider == conf.LOCAL:
+        # LOCAL Development ENV
+        logger.critical(
+            f"Error: CLOUD_PROVIDER ENV VAR LOCAL, Using local / direct OPA {conf.OPA_URL} put /policies/osdu/partition/{data_partition}/{pol_id} sha256 {sha256}"
+        )
+        result = opa.put_opa_policy_direct(policy_id=pol_id, data=contents)
+    else:
+        logger.info(
+            f"bundle put policy_id: {policy_id} id: {pol_id} {auth_data.user_id}"
+        )
+        result = bundle.put_policy(
+            data_partition=data_partition, policy_id=policy_id, policy=contents
+        )
+    return result
diff --git a/app/api/tenant_api.py b/app/api/tenant_api.py
index adf146b8ea3ae8c3c3328744b47ad720d53b8892..30263f8e90a929aed1013f33095c5a2ba0672863 100644
--- a/app/api/tenant_api.py
+++ b/app/api/tenant_api.py
@@ -26,9 +26,6 @@ class Detail(BaseModel):
     detail: str
 
 
-# logger = logging.getLogger(__name__)
-
-
 @router.get(
     "/tenant",
     responses={
@@ -168,8 +165,6 @@ def update_tenant(
 
             data = yaml.load(config_map.data[key])
             if path in config_map.data[key]:
-                # data_config = config_map.data[key]
-                # logger.info(f"data_config: {type(data_config)}")
                 logger.info(f"path found: {data['bundles'][path]}")
                 data["bundles"][path]["service"] = service
                 data["bundles"][path][
@@ -184,7 +179,6 @@ def update_tenant(
                     ] = polling_max_delay_seconds
                 else:
                     logger.warning("polling not found in bundle configuration")
-                    # data.mlput(f"bundles:{path}:polling:min_delay_seconds", polling_min_delay_seconds, 1)
 
                 new_data_str = yaml.dump_to_string(data)
                 logger.debug(f"new_data_str: {new_data_str}")
diff --git a/app/api/translate_api.py b/app/api/translate_api.py
index 6f55e3311d52cf1a5fdfe26a0cb183218c98675b..0d1a66b3811232dd523f30508d94f31459978e33 100644
--- a/app/api/translate_api.py
+++ b/app/api/translate_api.py
@@ -1,4 +1,3 @@
-# from email import policy
 import fastapi
 from fastapi import HTTPException, Depends, Response
 import json
@@ -136,11 +135,6 @@ def translate_policy(posted_data, data_partition):
                 status_code=HTTP_406_NOT_ACCEPTABLE,
                 detail=f"An error occurred when talking translate service. {err}",
             )
-            # return {
-            #    "query": {
-            #        "must_not": {}
-            #        }
-            #    }
         except NotImplementedError as err:
             logger.error(err)
             raise HTTPException(
@@ -251,7 +245,6 @@ def translate_preprocess(posted_data, data_partition):
     if opa_response.ok:
         logger.debug(f"Opa Response: {opa_response.json}")
         logger.debug(f"Opa Response text: {opa_response.text}")
-        # posted_data = posted_data + opa_response.json["result"]["queries"]
         logger.debug(f"New input: {posted_data}")
 
         if "input_from_preprocessor" in opa_response.text:
diff --git a/app/api/validate_api.py b/app/api/validate_api.py
index a64677e572b121636858b41fe35500cc6281f921..f747c7b670b8a2a7e80d379a2d710e45b22d148d 100644
--- a/app/api/validate_api.py
+++ b/app/api/validate_api.py
@@ -54,7 +54,8 @@ def validate_policy(
     logger = logging.getLogger(__name__)
     response.headers["X-Correlation-ID"] = context["correlation_id"]
 
-    if not policy_id.endswith(".rego"):
+    dot_rego =".rego"
+    if not policy_id.endswith(dot_rego):
         raise HTTPException(
             status_code=HTTP_422_UNPROCESSABLE_ENTITY,
             detail="Policy ID should end with a .rego",
@@ -68,7 +69,7 @@ def validate_policy(
             {
                 "data_partition": auth_data.data_partition_id,
                 "DATA_PARTITION": auth_data.data_partition_id,
-                "name": policy_id.removesuffix(".rego"),
+                "name": policy_id.removesuffix(dot_rego)
             }
         )
         contents = data.encode("utf-8")
@@ -85,7 +86,7 @@ def validate_policy(
 
     data = contents.decode("utf-8")
 
-    short_name = policy_id.removesuffix(".rego")
+    short_name = policy_id.removesuffix(dot_rego)
 
     if verify_policy_with_opa(
         policy_id=policy_id,
@@ -105,7 +106,8 @@ def validate_policy(
 def verify_policy_with_opa(
     policy_id: str, auth_data: auth.Auth, data: str, correlation_id: str
 ):
-    if not policy_id.endswith(".rego"):
+    dot_rego =".rego"
+    if not policy_id.endswith(dot_rego):
         raise HTTPException(
             status_code=HTTP_422_UNPROCESSABLE_ENTITY,
             detail="Policy ID should end with a .rego",
@@ -142,9 +144,6 @@ def verify_policy_with_opa(
             logger.warning(
                 f"Error when talking to OPA to delete temp policy {temp_id} {temp_result.message}"
             )
-            # raise HTTPException(status_code=temp_result.status_code,
-            #    detail=f"OPA Error while deleting temp policy: {temp_result.message}."
-            #    )
         else:
             logger.info(f"TMP-AUDIT-OPA delete id: {temp_id} user: {auth_data.user_id}")
         return True
diff --git a/app/auth/auth.py b/app/auth/auth.py
index bf138c1f363587da94b2c6480f994b3fc86b9ac7..900ca970ef6e11b84adac892e6505cdd9bdec403 100644
--- a/app/auth/auth.py
+++ b/app/auth/auth.py
@@ -70,7 +70,6 @@ class Auth:
 
 
 async def require_authorized_user(
-    request: Request,
     credentials: HTTPAuthorizationCredentials = Depends(security),
     data_partition_id: str = Header(
         default=None,
diff --git a/app/bundles/__init__.py b/app/bundles/__init__.py
index 8f57cfe1b9f8cc8fa16934c8b4567a523a78549d..d0cbca43577a5f7deb7a5bf45f27537064c9f0bb 100644
--- a/app/bundles/__init__.py
+++ b/app/bundles/__init__.py
@@ -11,4 +11,4 @@
 #  distributed under the License is distributed on an "AS IS" BASIS,
 #  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 #  See the License for the specific language governing permissions and
-#  limitations under the License.
\ No newline at end of file
+#  limitations under the License.
diff --git a/app/bundles/bundle.py b/app/bundles/bundle.py
index 791e1979cf3688ca76d6e5772d12b2a38350adf4..803ec39b152d97c37d7440e65cc49a56f74d646c 100644
--- a/app/bundles/bundle.py
+++ b/app/bundles/bundle.py
@@ -5,10 +5,10 @@ import time
 import hashlib
 
 from starlette.status import (
-    HTTP_200_OK, 
+    HTTP_200_OK,
     HTTP_202_ACCEPTED,
-    HTTP_404_NOT_FOUND, 
-    HTTP_503_SERVICE_UNAVAILABLE
+    HTTP_404_NOT_FOUND,
+    HTTP_503_SERVICE_UNAVAILABLE,
 )
 
 from bundles import storage
@@ -16,7 +16,7 @@ from bundles import storage
 from opa_response import OpaResponse
 
 logger = logging.getLogger(__name__)
-#logger = correlation.DefaultExtrasAdapter(_logger, {"correlation_id": "None"})
+
 
 def delete_policy(data_partition, policy_id):
     """
@@ -35,14 +35,14 @@ def delete_policy(data_partition, policy_id):
     with io.BytesIO() as outbuffer:
         with io.BytesIO() as inbuffer:
             try:
-                inbuffer, uri = storage.get_storage().download_file(bundle, inbuffer)
+                inbuffer, _ = storage.get_storage().download_file(bundle, inbuffer)
             except Exception as err:
                 logger.error(f"Error Downloading {bundle} from CSP Storage: {err}")
                 response.status_code = HTTP_503_SERVICE_UNAVAILABLE
                 return response
             inbuffer.seek(0)
             with tarfile.open(mode="w:gz", fileobj=outbuffer) as tarout:
-                with tarfile.open(mode='r:gz',fileobj=inbuffer) as tarin:
+                with tarfile.open(mode="r:gz", fileobj=inbuffer) as tarin:
                     for member in tarin.getmembers():
                         # Skip policy to be deleted
                         if member.name == policy_id:
@@ -53,9 +53,11 @@ def delete_policy(data_partition, policy_id):
                 outbuffer.seek(0)
                 if found_policy_in_bundle:
                     if storage.get_storage().upload_file(bundle, outbuffer):
-                        response.message = f"Policy {policy_id} removed from bundle server"
+                        response.message = (
+                            f"Policy {policy_id} removed from bundle server"
+                        )
                         # Could be 200, but 202 is proper since there could be OPA servers that may still have the content
-                        response.status_code = HTTP_202_ACCEPTED # could be 200
+                        response.status_code = HTTP_202_ACCEPTED  # could be 200
                         response.ok = True
                         return response
                 else:
@@ -65,6 +67,7 @@ def delete_policy(data_partition, policy_id):
                     response.status_code = HTTP_404_NOT_FOUND
     return response
 
+
 def get_bundle(data_partition):
     """
     Get bundle (for backup)
@@ -74,20 +77,16 @@ def get_bundle(data_partition):
 
     try:
         inbuffer = io.BytesIO()
-        inbuffer, uri = storage.get_storage().download_file(bundle, inbuffer)
+        inbuffer, _ = storage.get_storage().download_file(bundle, inbuffer)
         inbuffer.seek(0)
         return inbuffer
-        #else:
-        #    logger.critical(f"Error Downloading {bundle} from CSP Storage")
     except Exception as err:
         logger.critical(f"Error Downloading {bundle} from CSP Storage: {err}")
-        #status_code = HTTP_503_SERVICE_UNAVAILABLE
         message = f"Unable to get bundle from bundle server: download bundle {bundle} error: {err}"
-        #template = "An exception of type {0} occurred. Arguments:\n{1!r}"
-        #message = template.format(type(err).__name__, err.args)
         logger.critical(message)
     return None
 
+
 def put_default_bundle(data_partition, policy_list):
     """
     Get bundle (for backup)
@@ -103,7 +102,7 @@ def put_default_bundle(data_partition, policy_list):
                     info.size = len(policy_list[key])
                     info.mtime = int(time.time())
                     tarout.addfile(info, io.BytesIO(policy_list[key]))
-                #tarout.addfile(info, io.BytesIO(policy_list[key]))
+                # tarout.addfile(info, io.BytesIO(policy_list[key]))
                 tarout.close()
                 outbuffer.seek(0)
                 if storage.get_storage().upload_file(bundle, outbuffer):
@@ -113,13 +112,11 @@ def put_default_bundle(data_partition, policy_list):
                     logger.critical(f"Error uploading {bundle} to CSP Storage")
     except Exception as err:
         logger.critical(f"Error uploading {bundle} from CSP Storage: {err}")
-        #status_code = HTTP_503_SERVICE_UNAVAILABLE
         message = f"Unable to upload bundle {bundle} to bundle server error: {err}"
-        #template = "An exception of type {0} occurred. Arguments:\n{1!r}"
-        #message = template.format(type(err).__name__, err.args)
         logger.critical(message)
     return None
 
+
 def put_policy(data_partition, policy_id, policy):
     """
     Put policy as bundle to CSP providers
@@ -134,7 +131,7 @@ def put_policy(data_partition, policy_id, policy):
     with io.BytesIO() as outbuffer:
         with io.BytesIO() as inbuffer:
             try:
-                inbuffer, uri = storage.get_storage().download_file(bundle, inbuffer)
+                inbuffer, _ = storage.get_storage().download_file(bundle, inbuffer)
             except Exception as err:
                 logger.critical(f"Error Downloading {bundle} from CSP Storage: {err}")
                 response.status_code = HTTP_503_SERVICE_UNAVAILABLE
@@ -145,30 +142,38 @@ def put_policy(data_partition, policy_id, policy):
                 return response
             inbuffer.seek(0)
             with tarfile.open(mode="w:gz", fileobj=outbuffer) as tarout:
-                with tarfile.open(mode='r:gz',fileobj=inbuffer) as tarin:
+                with tarfile.open(mode="r:gz", fileobj=inbuffer) as tarin:
                     for member in tarin.getmembers():
                         # Do not add previous version of existing policy (in case it existed)
-                        if member.name!= policy_id:
+                        if member.name != policy_id:
                             tarout.addfile(member, tarin.extractfile(member.name))
                         elif member.name == policy_id:
                             f = tarin.extractfile(member.name)
                             contents = f.read()
                             existing_sha256 = hashlib.sha256(contents).hexdigest()
                             updated_existing = True
-                            logger.info(f"Request to update existing {policy_id} {existing_sha256} in bundle")
-                
+                            logger.info(
+                                f"Request to update existing {policy_id} {existing_sha256} in bundle"
+                            )
+
                 if updated_existing:
                     updated_sha256 = hashlib.sha256(policy).hexdigest()
                     if existing_sha256 == updated_sha256:
                         tarout.close()
                         response.ok = True
-                        response.message = f"Policy {policy_id} already in bundle server"
+                        response.message = (
+                            f"Policy {policy_id} already in bundle server"
+                        )
                         response.status_code = HTTP_200_OK
-                        logger.info(f"Ignoring update to {policy_id}. No change detected. {existing_sha256} with {updated_sha256}")
+                        logger.info(
+                            f"Ignoring update to {policy_id}. No change detected. {existing_sha256} with {updated_sha256}"
+                        )
                         return response
                     else:
-                        logger.info(f"Replacing {policy_id} {existing_sha256} with {updated_sha256}")
-                info = tarfile.TarInfo(policy_id)    
+                        logger.info(
+                            f"Replacing {policy_id} {existing_sha256} with {updated_sha256}"
+                        )
+                info = tarfile.TarInfo(policy_id)
                 info.size = len(policy)
                 tarout.addfile(info, io.BytesIO(policy))
                 tarout.close()
@@ -176,11 +181,13 @@ def put_policy(data_partition, policy_id, policy):
                 if storage.get_storage().upload_file(bundle, outbuffer):
                     response.ok = True
                     if updated_existing:
-                        response.message = f"Policy {policy_id} updated in bundle server"
+                        response.message = (
+                            f"Policy {policy_id} updated in bundle server"
+                        )
                         response.status_code = HTTP_202_ACCEPTED
                     else:
                         response.message = f"Policy {policy_id} added to bundle server"
-                    # could be 200, but 202 is proper since there could be OPA servers that may not have the content yet
+                        # could be 200, but 202 is proper since there could be OPA servers that may not have the content yet
                         response.status_code = HTTP_202_ACCEPTED
                     return response
                 else:
@@ -190,8 +197,10 @@ def put_policy(data_partition, policy_id, policy):
 
     return response
 
+
 def policy_filename(policy_id):
-    return '{0}.rego'.format(policy_id)
+    return "{0}.rego".format(policy_id)
+
 
 def bundle_filename(data_partition):
     if data_partition:
@@ -199,11 +208,15 @@ def bundle_filename(data_partition):
         return f"bundle-{data_partition}.tar.gz"
     else:
         logger.debug("Instance/default bundle")
-        return 'bundle.tar.gz'
+        return "bundle.tar.gz"
+
 
 def correct_package_declaration(data_partition, policy_id):
     return 'package osdu.partition["{0}"].{1}'.format(data_partition, policy_id[:-5])
 
+
 def package_name_ok(data_partition, policy_id, policy):
     logger.debug(f"data_partition: {data_partition} policy_id: {policy_id}")
-    return correct_package_declaration(data_partition, policy_id) in policy.decode("utf-8")
+    return correct_package_declaration(data_partition, policy_id) in policy.decode(
+        "utf-8"
+    )
diff --git a/app/bundles/providers/__init__.py b/app/bundles/providers/__init__.py
index 2704c571e6ad695fcac0614c6a02819d734328fb..39172646946138be6d1847d3755157867fd7d70c 100644
--- a/app/bundles/providers/__init__.py
+++ b/app/bundles/providers/__init__.py
@@ -12,4 +12,4 @@
 #  distributed under the License is distributed on an "AS IS" BASIS,
 #  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 #  See the License for the specific language governing permissions and
-#  limitations under the License.
\ No newline at end of file
+#  limitations under the License.
diff --git a/app/bundles/providers/aws/__init__.py b/app/bundles/providers/aws/__init__.py
index b52c0ec8a8e28cc96f08a33e2553a2d052c47a7b..f9652a2b7b48b5b7d198582addb1d22608f07496 100644
--- a/app/bundles/providers/aws/__init__.py
+++ b/app/bundles/providers/aws/__init__.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from bundles.providers.aws.storage import AWSBundleStorageClient as storage_client
\ No newline at end of file
+from bundles.providers.aws.storage import AWSBundleStorageClient as storage_client
diff --git a/app/bundles/providers/aws/storage.py b/app/bundles/providers/aws/storage.py
index 9ab115f29601b9174be56f750319a156aacc0b3d..99b95ff004476e39c567b9506027fc183a6cceef 100644
--- a/app/bundles/providers/aws/storage.py
+++ b/app/bundles/providers/aws/storage.py
@@ -25,25 +25,28 @@ import conf
 logger = logging.getLogger(__name__)
 logger.setLevel(conf.LOG_LEVEL)
 
+
 class FileNotFound(Exception):
     def __init__(self, filename: str):
-        self.message = f'ERROR: File {filename} was not found.'
+        self.message = f"ERROR: File {filename} was not found."
         super().__init__(self.message)
 
+
 class AWSBundleStorageClient(BundleStorageClient):
     def __init__(self) -> None:
         self.client = get_client()
         self.bucket_name = os.environ["POLICY_BUCKET"]
         self.content_type = "application/x-gtar"
         session = boto3.session.Session()
-        self.s3_client =session.client('s3', region_name="us-east-1")
+        self.s3_client = session.client("s3", region_name="us-east-1")
         logger.debug(f"bucket: {self.bucket_name}")
 
-
     def _get_bucket_uri(self, filename: str) -> str:
         return f"s3://{self.bucket_name}/{filename}"
 
-    def download_file(self, filename: str, file: FileLikeObject) -> Tuple[FileLikeObject, str]:
+    def download_file(
+        self, filename: str, file: FileLikeObject
+    ) -> Tuple[FileLikeObject, str]:
         try:
             uri = self._get_bucket_uri(filename)
 
@@ -53,7 +56,7 @@ class AWSBundleStorageClient(BundleStorageClient):
             else:
                 logger.info(f"{filename} does not exist")
                 raise FileNotFound(filename)
-            
+
         except Exception as e:
             logger.error(f"Failed to download file from {uri}: {e}")
         return None, None
@@ -68,27 +71,27 @@ class AWSBundleStorageClient(BundleStorageClient):
 
     # TODO: Fix bug in AWS python SDK and replace
     def _does_file_exist(self, uri: str) -> bool:
-            """Verify if a file exists in the given URI.
-
-            :param uri: The AWS URI of the file.
-            :type uri: str
-            :return: A boolean indicating if the file exists
-            :rtype: bool
-            """
-
-            # assuming the URI here is an s3:// URI
-            # get the bucket name and path to object
-            bucket_name, object_name = self._split_s3_path(uri)
-            try:
-                # try to get the s3 metadata for the object, which is a 
-                # fast operation no matter the size of the data object
-                self.s3_client.head_object(Bucket=bucket_name, Key=object_name)
-            except Exception as e:
-                logger.info(f"{uri} does not exist {e}")
-                return False
-            return True
-
-    def _split_s3_path(self, s3_path:str):
+        """Verify if a file exists in the given URI.
+
+        :param uri: The AWS URI of the file.
+        :type uri: str
+        :return: A boolean indicating if the file exists
+        :rtype: bool
+        """
+
+        # assuming the URI here is an s3:// URI
+        # get the bucket name and path to object
+        bucket_name, object_name = self._split_s3_path(uri)
+        try:
+            # try to get the s3 metadata for the object, which is a
+            # fast operation no matter the size of the data object
+            self.s3_client.head_object(Bucket=bucket_name, Key=object_name)
+        except Exception as e:
+            logger.info(f"{uri} does not exist {e}")
+            return False
+        return True
+
+    def _split_s3_path(self, s3_path: str):
         """split a s3:// path into bucket and key parts
 
         Args:
@@ -97,9 +100,9 @@ class AWSBundleStorageClient(BundleStorageClient):
         Returns:
             tuple: bucket name, key name ( with path )
         """
-        path_parts=s3_path.replace("s3://","").split("/")
-        bucket=path_parts.pop(0)
-        key="/".join(path_parts)
+        path_parts = s3_path.replace("s3://", "").split("/")
+        bucket = path_parts.pop(0)
+        key = "/".join(path_parts)
         return bucket, key
 
     # TODO: Fix bug in AWS python SDK and replace
diff --git a/app/bundles/providers/azure/storage.py b/app/bundles/providers/azure/storage.py
index 9283cfec1dfabe85019aaa4bfc94309bb16c5d7e..80a19af74c10083036a74ecb4a0f056bbdaf1313 100644
--- a/app/bundles/providers/azure/storage.py
+++ b/app/bundles/providers/azure/storage.py
@@ -4,7 +4,8 @@ import os
 from typing import Tuple
 from osdu_api.providers.types import FileLikeObject
 from bundles.storage import BundleStorageClient
-#from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient
+
+# from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient
 from azure.storage.blob import BlobServiceClient
 from azure.identity import DefaultAzureCredential
 from azure.keyvault.secrets import SecretClient
@@ -15,18 +16,26 @@ logger = logging.getLogger(__name__)
 class AzureBundleStorageClient(BundleStorageClient):
     def __init__(self) -> None:
         helper = Helper()
-        self.client = BlobServiceClient.from_connection_string(helper.get_storage_connection_string())
+        self.client = BlobServiceClient.from_connection_string(
+            helper.get_storage_connection_string()
+        )
         self.account_name = os.environ["STORAGE_ACCOUNT"]
         self.container_name = os.environ["CONTAINER_NAME"]
         self.content_type = "application/x-gtar"
 
     def _get_uri(self, filename: str) -> str:
-        return "https://{0}/{1}/{2}".format(self.account_name,self.container_name,filename)
+        return "https://{0}/{1}/{2}".format(
+            self.account_name, self.container_name, filename
+        )
 
-    def download_file(self, filename: str, file: FileLikeObject) -> Tuple[FileLikeObject, str]:
+    def download_file(
+        self, filename: str, file: FileLikeObject
+    ) -> Tuple[FileLikeObject, str]:
         try:
             uri = self._get_uri(filename)
-            blob_client = self.client.get_blob_client(container=self.container_name, blob=filename)
+            blob_client = self.client.get_blob_client(
+                container=self.container_name, blob=filename
+            )
             downloader = blob_client.download_blob()
             downloader.readinto(file)
             return file, uri
@@ -36,8 +45,10 @@ class AzureBundleStorageClient(BundleStorageClient):
     def upload_file(self, name: str, file: FileLikeObject) -> str:
         try:
             uri = self._get_uri(name)
-            blob_client = self.client.get_blob_client(container=self.container_name, blob=name)
-            #not sure how to set content type
+            blob_client = self.client.get_blob_client(
+                container=self.container_name, blob=name
+            )
+            # TODO: figure out how to set the content type on upload
             blob_client.upload_blob(file.read(), overwrite=True, blob_type="BlockBlob")
             return uri
         except Exception as e:
@@ -45,14 +56,12 @@ class AzureBundleStorageClient(BundleStorageClient):
 
 
 class Helper(object):
-
     KEY_VAULT_URI = os.getenv("KEYVAULT_URI")
 
     def get_key_vault_secret(self, key: str):
         credential = DefaultAzureCredential()
         secret_client = SecretClient(
-            vault_url=Helper.KEY_VAULT_URI,
-            credential=credential
+            vault_url=Helper.KEY_VAULT_URI, credential=credential
         )
         return secret_client.get_secret(key).value
 
diff --git a/app/bundles/providers/baremetal/__init__.py b/app/bundles/providers/baremetal/__init__.py
index b9e067d9f589d80215a490ff7f53f3d77e887682..a1aa036defdde659adceca60eb04cb648fde877f 100644
--- a/app/bundles/providers/baremetal/__init__.py
+++ b/app/bundles/providers/baremetal/__init__.py
@@ -13,4 +13,6 @@
 #  See the License for the specific language governing permissions and
 #  limitations under the License.
 
-from bundles.providers.baremetal.storage import MinIOBundleStorageClient as storage_client
+from bundles.providers.baremetal.storage import (
+    MinIOBundleStorageClient as storage_client,
+)
diff --git a/app/bundles/providers/baremetal/storage.py b/app/bundles/providers/baremetal/storage.py
index 8ec1d152749bb345fa74685414da6a650e6f15bb..078222aad67fe074182cf3b823f67c170be0474f 100644
--- a/app/bundles/providers/baremetal/storage.py
+++ b/app/bundles/providers/baremetal/storage.py
@@ -23,11 +23,13 @@ from bundles.storage import BundleStorageClient
 
 logger = logging.getLogger(__name__)
 
+
 class FileNotFound(Exception):
     def __init__(self, filename: str):
-        self.message = f'ERROR: File {filename} was not found.'
+        self.message = f"ERROR: File {filename} was not found."
         super().__init__(self.message)
 
+
 class MinIOBundleStorageClient(BundleStorageClient):
     def __init__(self) -> None:
         self._client = get_client()
@@ -37,7 +39,9 @@ class MinIOBundleStorageClient(BundleStorageClient):
     def _get_bucket_uri(self, filename: str) -> str:
         return f"s3://{self._bucket_name}/{filename}"
 
-    def download_file(self, filename: str, file: FileLikeObject) -> Tuple[FileLikeObject, str]:
+    def download_file(
+        self, filename: str, file: FileLikeObject
+    ) -> Tuple[FileLikeObject, str]:
         try:
             uri = self._get_bucket_uri(filename)
 
@@ -46,7 +50,7 @@ class MinIOBundleStorageClient(BundleStorageClient):
                 return file, uri
             else:
                 raise FileNotFound(filename)
-            
+
         except Exception as e:
             logger.error(f"Failed to download file from {uri} {e}")
 
@@ -59,11 +63,11 @@ class MinIOBundleStorageClient(BundleStorageClient):
             logger.error(f"Failed to upload file to {uri} {e}")
 
     def _does_file_exist(self, uri: str) -> bool:
-            """Verify if a file exists in the given URI.
+        """Verify if a file exists in the given URI.
 
-            :param uri: The AWS URI of the file.
-            :type uri: str
-            :return: A boolean indicating if the file exists
-            :rtype: bool
-            """
-            return self._client.does_file_exist(uri)
+        :param uri: The AWS URI of the file.
+        :type uri: str
+        :return: A boolean indicating if the file exists
+        :rtype: bool
+        """
+        return self._client.does_file_exist(uri)
diff --git a/app/bundles/providers/gc/__init__.py b/app/bundles/providers/gc/__init__.py
index 6a5313e4b49524e5b85fcc7db92f62301609a755..5839f6a29a356fb3f616d213fd714eafe8fd2d3a 100644
--- a/app/bundles/providers/gc/__init__.py
+++ b/app/bundles/providers/gc/__init__.py
@@ -13,4 +13,4 @@
 #  See the License for the specific language governing permissions and
 #  limitations under the License.
 
-from bundles.providers.gc.storage import GCBundleStorageClient as storage_client
\ No newline at end of file
+from bundles.providers.gc.storage import GCBundleStorageClient as storage_client
diff --git a/app/bundles/providers/gc/storage.py b/app/bundles/providers/gc/storage.py
index 333673e44094320df41b318e198fb6d4997a61da..a81eeb484846e2b53c12d600260894c43413e15b 100644
--- a/app/bundles/providers/gc/storage.py
+++ b/app/bundles/providers/gc/storage.py
@@ -23,6 +23,7 @@ from bundles.storage import BundleStorageClient
 
 logger = logging.getLogger(__name__)
 
+
 class GCBundleStorageClient(BundleStorageClient):
     def __init__(self) -> None:
         self.client = get_client()
@@ -32,7 +33,9 @@ class GCBundleStorageClient(BundleStorageClient):
     def _get_gs_uri(self, filename: str) -> str:
         return f"gs://{self.bucket_name}/{filename}"
 
-    def download_file(self, filename: str, file: FileLikeObject) -> Tuple[FileLikeObject, str]:
+    def download_file(
+        self, filename: str, file: FileLikeObject
+    ) -> Tuple[FileLikeObject, str]:
         try:
             uri = self._get_gs_uri(filename)
             logger.debug(f"download_file {uri}")
diff --git a/app/bundles/providers/ibm/__init__.py b/app/bundles/providers/ibm/__init__.py
index 5cc328e77d03d2f69cbea80def8877a6b37e9606..13302fef94384e50cedcb3bdbf8726245ea661a8 100644
--- a/app/bundles/providers/ibm/__init__.py
+++ b/app/bundles/providers/ibm/__init__.py
@@ -1 +1 @@
-from bundles.providers.ibm.storage import IBMBundleStorageClient as storage_client
\ No newline at end of file
+from bundles.providers.ibm.storage import IBMBundleStorageClient as storage_client
diff --git a/app/bundles/providers/ibm/storage.py b/app/bundles/providers/ibm/storage.py
index 9ed473b6e64378e4eeec247a82771e0a0e86afd3..97cb0f9f8d58e858da807436f384d9c9dc0ce5d6 100644
--- a/app/bundles/providers/ibm/storage.py
+++ b/app/bundles/providers/ibm/storage.py
@@ -3,7 +3,8 @@ import os
 from typing import Tuple
 import boto3
 from botocore.client import Config
-#from osdu_api.providers.blob_storage import get_client
+
+# from osdu_api.providers.blob_storage import get_client
 from osdu_api.providers.types import FileLikeObject
 
 from bundles.storage import BundleStorageClient
@@ -13,26 +14,29 @@ logger = logging.getLogger(__name__)
 
 class FileNotFound(Exception):
     def __init__(self, filename: str):
-        self.message = f'ERROR: File {filename} was not found.'
+        self.message = f"ERROR: File {filename} was not found."
         super().__init__(self.message)
 
 
 class IBMBundleStorageClient(BundleStorageClient):
     def __init__(self) -> None:
-        #self.client = get_client()
+        # self.client = get_client()
         self.bucket_name = os.environ["POLICY_BUCKET"]
         self.content_type = "application/x-gtar"
-        self.s3_client = boto3.client('s3',
-                                      endpoint_url=os.environ["ENDPOINT_URL"],
-                                      aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
-                                      aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
-                                      config=Config(signature_version='s3v4'),
-                                      region_name='us-east-1',
-                                      verify=os.environ["ssl_verify"]
-                                    )
+        self.s3_client = boto3.client(
+            "s3",
+            endpoint_url=os.environ["ENDPOINT_URL"],
+            aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
+            aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
+            config=Config(signature_version="s3v4"),
+            region_name="us-east-1",
+            verify=os.environ["ssl_verify"],
+        )
         logger.debug(f"IBM storage init {self.bucket_name}")
 
-    def download_file(self, key: str, file: FileLikeObject) -> Tuple[FileLikeObject, str]:
+    def download_file(
+        self, key: str, file: FileLikeObject
+    ) -> Tuple[FileLikeObject, str]:
         logger.debug(f"IBM storage download bucket: {self.bucket_name} file:{file}")
         try:
             self.s3_client.download_fileobj(self.bucket_name, key, file)
diff --git a/app/bundles/storage.py b/app/bundles/storage.py
index 65dd1e817a5bd7fecb00702dcd6bb0a619fa2a14..4dd0616a93cc11a003fd97c7432bca41aa48a699 100644
--- a/app/bundles/storage.py
+++ b/app/bundles/storage.py
@@ -23,22 +23,23 @@ from typing import Tuple
 from osdu_api.providers.types import FileLikeObject
 
 logger = logging.getLogger(__name__)
-#logger = correlation.DefaultExtrasAdapter(_logger, {"correlation_id": "None"})
+
 
 class BundleStorageClient(abc.ABC):
     """Base interface for bundle storage clients"""
 
     @abc.abstractmethod
-    def download_file(self, filename: str, file: FileLikeObject) -> Tuple[FileLikeObject, str]:
+    def download_file(
+        self, filename: str, file: FileLikeObject
+    ) -> Tuple[FileLikeObject, str]:
         """Download file by name"""
         logger.debug(f"download_file {filename}")
-        pass
 
     @abc.abstractmethod
     def upload_file(self, name: str, file: FileLikeObject) -> bool:
         """Upload file by name"""
         logger.debug(f"upload_file {name}")
-        pass
+
 
 def _import_provider_specific_module(provider: str) -> str:
     """Import provider specific module"""
@@ -46,6 +47,7 @@ def _import_provider_specific_module(provider: str) -> str:
     module = importlib.import_module(module_name)
     return module
 
+
 def get_storage() -> BundleStorageClient:
     try:
         os.environ["CLOUD_PROVIDER"]
diff --git a/app/common.py b/app/common.py
index 7bac010e2cdbffe5496a54cfeaf8e6d467976fd0..cab134a3f3b3910b9455f2767896f13e3873231b 100644
--- a/app/common.py
+++ b/app/common.py
@@ -1,19 +1,9 @@
-# import jwt
 import conf
 import requests
 import logging
 
 logger = logging.getLogger(__name__)
 
-# def get_user_info(access_token):
-#    """Get user info from access_token"""
-#    logger.info('Get user info...')
-#    tk=jwt.decode(access_token, options={"verify_signature": False}) # decode JWT token
-#    res = {}
-#    for k, v in tk.items():
-#        res[k]=v
-#    return res
-
 
 def get_legal_tag_list(record):
     if "legal" in record and "legaltags" in record["legal"]:
@@ -21,7 +11,7 @@ def get_legal_tag_list(record):
     return []
 
 
-def get_legal_tag_info(legal_tags, data_partition_id, access_token, user_id=None):
+def get_legal_tag_info(legal_tags, data_partition_id, access_token):
     """Get legal tag details"""
     if conf.LEGAL_BATCH_API is None or not legal_tags:
         logger.debug("unexpected legal tag or API")
diff --git a/app/conf.py b/app/conf.py
index e3617da3493d27edde92e0aa5fd4841ebee16b22..9e8933cae966f7933f768e300da1d0e540aa1036 100644
--- a/app/conf.py
+++ b/app/conf.py
@@ -48,7 +48,7 @@ USE_BUNDLES = os.getenv("USE_BUNDLES", True)
 # Require users and admin to be in these groups
 USER_PERMISSION = "service.policy.user"
 ADMIN_PERMISSION = "service.policy.admin"
-OPS_PERMISSION = os.getenv("OPS_PERMISSION", "service.policy.admin").lower()
+OPS_PERMISSION = os.getenv("OPS_PERMISSION", ADMIN_PERMISSION).lower()
 
 # Search Translate PreProcessor
 # It is highly recommended to always use the translate preprocessor.
@@ -210,7 +210,7 @@ MOCK_ENTITLEMENT_RESULT = {
             "email": "users@osdu.example.com",
         },
         {
-            "name": "service.policy.admin",
+            "name": ADMIN_PERMISSION,
             "description": "Datalake policy admins",
             "email": "service.policy.admin@osdu.example.com",
         },
@@ -325,7 +325,7 @@ MOCK_ENTITLEMENT_RESULT = {
             "email": "service.edsdms.user@osdu.example.com",
         },
         {
-            "name": "service.policy.user",
+            "name": USER_PERMISSION,
             "description": "The viewer of the datalake policy service",
             "email": "service.policy.user@osdu.example.com",
         },
@@ -493,14 +493,15 @@ MASK_OPA_CONFIG_DETAILS = os.getenv("MASK_OPA_CONFIG_DETAILS", "False").lower()
 MASK_CONFIG_ENV = os.getenv("MASK_CONFIG_ENV", "True").lower() in ("true", "1", "t")
 
 ENVOY = os.getenv("ZIPKIN", "True").lower() in ("true", "1", "t")
+NAMESPACE_PATH = "/var/run/secrets/kubernetes.io/serviceaccount/namespace"
 
 # Kubernetes details
 if "NAMESPACE" in os.environ:
     NAMESPACE = os.getenv("NAMESPACE")
 elif "AWS_SERVICE_NAMESPACE" in os.environ:
     NAMESPACE = os.getenv("AWS_SERVICE_NAMESPACE") + "-core"
-elif os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"):
-    with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace") as f:
+elif os.path.isfile(NAMESPACE_PATH):
+    with open(NAMESPACE_PATH) as f:
         NAMESPACE = f.read()
 else:
     NAMESPACE = "osdu-services"
@@ -508,7 +509,7 @@ else:
 # Kubernetes ServiceAccount bearer token
 TOKEN = None
 if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/token"):
-    with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace") as f:
+    with open("/var/run/secrets/kubernetes.io/serviceaccount/token") as f:
         TOKEN = f.read()
 
 OPA_CONFIG_MAP = os.getenv("OPA_CONFIG_MAP", "opa-config")
diff --git a/app/entitlement.py b/app/entitlement.py
index 2c717a34489125f78860fb34caab0d87e07f1ab5..7e922b2467aa1e493349af1059565c429f616efb 100644
--- a/app/entitlement.py
+++ b/app/entitlement.py
@@ -1,8 +1,6 @@
-# entitlement.py
 # from asyncio.log import logger
 import logging
 
-# from email import header
 from fastapi import HTTPException
 import requests
 from starlette.status import HTTP_401_UNAUTHORIZED
@@ -15,7 +13,7 @@ logger = logging.getLogger(__name__)
 
 
 class EntitlementService:
-    def request_groups(headers, timeout=10):
+    def request_groups(self, headers, timeout=10):
         # Just return canned result of Entitlement Groups API
         if conf.MOCK_ENTITLEMENT:
             return conf.MOCK_ENTITLEMENT_RESULT
@@ -53,7 +51,6 @@ class EntitlementService:
 
         jdata = response.json()
         if "groups" in jdata:
-            # return jdata['groups']
             return jdata
 
         # this should never happen
diff --git a/app/opa.py b/app/opa.py
index 37920aa43fb77f690ffc085e6317d2fb2a7d9b54..aa72923d1b6dade81c588735919261e10729f8a2 100644
--- a/app/opa.py
+++ b/app/opa.py
@@ -373,7 +373,7 @@ def get_document_with_input_cached(
     )
     logger = logging.getLogger(__name__)
     url = conf.OPA_DATA_API + "/" + rego_package_name + "/" + rego_rule_name
-    logger(f"get_document_with_input url: {url}")
+    logger.debug(f"get_document_with_input url: {url}")
     try:
         # async with httpx.AsyncClient() as client:
         # response = await client.post(url,
diff --git a/app/opa_response.py b/app/opa_response.py
index ec4145c508043060fbab206828432d9c3e97a721..fa65af1fd326388f7f7ffc534bc7516ff94f8015 100644
--- a/app/opa_response.py
+++ b/app/opa_response.py
@@ -11,10 +11,12 @@ class OpaResponse:
         self,
         ok=False,
         status_code=500,
-        json={},
+        json=None,
         message="An ERROR Occurred",
         text="N/A",
     ):
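+        # default json to None and create a fresh dict per call; a mutable
+        # default argument ({}) would be shared across OpaResponse instances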
+        if json is None:
+            json = {}
         self.ok = ok
         self.status_code = status_code
         self.json = json
diff --git a/app/tests/aws/aws_jwt_client.py b/app/tests/aws/aws_jwt_client.py
index f7c32258dda6d474d3670d17d6e3c2462a2919cc..45fb46f2a4a73b18305152a84da975e6289c7b54 100644
--- a/app/tests/aws/aws_jwt_client.py
+++ b/app/tests/aws/aws_jwt_client.py
@@ -12,9 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os;
-import boto3;
-import jwt;
+import os
+import boto3
+import jwt
 
 def get_id_token():
     region = os.getenv("AWS_COGNITO_REGION")
@@ -22,7 +22,7 @@ def get_id_token():
         client = boto3.client('cognito-idp', region_name=region)
     else:
         client = boto3.client('cognito-idp', region_name=os.environ["AWS_REGION"])
-    userAuth = client.initiate_auth(
+    user_auth = client.initiate_auth(
         ClientId= os.environ['AWS_COGNITO_CLIENT_ID'],
         AuthFlow= os.environ['AWS_COGNITO_AUTH_FLOW'],
         AuthParameters= {
@@ -30,7 +30,7 @@ def get_id_token():
             "PASSWORD": os.environ['AWS_COGNITO_AUTH_PARAMS_PASSWORD']
         })
 
-    print(userAuth['AuthenticationResult']['AccessToken'])
+    print(user_auth['AuthenticationResult']['AccessToken'])
 
 
 def get_invalid_token():
diff --git a/app/tests/integration/test_integration_010.py b/app/tests/integration/test_integration_010.py
index 9587bd00f83498001ccee040bfe0c31cbbf9c4eb..6b19489125cf6e4dc1303fae85b508186095ae33 100644
--- a/app/tests/integration/test_integration_010.py
+++ b/app/tests/integration/test_integration_010.py
@@ -1,5 +1,6 @@
 # test_integration_010.py
 import pytest
+
 # requires pytest_dependency to be installed
 import unittest
 import responses
@@ -14,8 +15,8 @@ import json
 from pathlib import Path
 
 # so the directory in which you run pytest or unittest doesn't matter as much
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 from main import app
@@ -23,26 +24,38 @@ from auth import auth
 import conf
 import testlib
 
+BEARER = "Bearer "
+
 # override dependency injection for authentication to entitlement service
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 
-TEST_DATA_DIR = Path(__file__).resolve().parent / 'data'
+TEST_DATA_DIR = Path(__file__).resolve().parent / "data"
 
 client = TestClient(app)
 
+
 @pytest.mark.dependency(name="require_token")
 def test_require_token(token):
     assert token is not None, "No token provided on command line"
 
+
 def test_cloud_provider(cloud_provider):
-    """Check CLOUD_PROVIDER """
+    """Check CLOUD_PROVIDER"""
     assert cloud_provider is not None, "No CLOUD_PROVIDER env set"
 
+
 @pytest.mark.opa_access
 @pytest.mark.mock
 def test_mock_health_api_endpoint_no_auth_opa_available(opa_access, cloud_provider):
     """Check Health API"""
-    r = client.get(conf.SERVICE_BASE_PATH+"/health")
+    r = client.get(conf.SERVICE_BASE_PATH + "/health")
     # If OPA is available should get a 200
     # Some CSPs (like AWS) don't expose /health without auth so let's skip it in that case
     if r.status_code == 401 or r.status_code == 403:
@@ -50,7 +63,10 @@ def test_mock_health_api_endpoint_no_auth_opa_available(opa_access, cloud_provid
     assert r.status_code == 200, f"Expect /health endpoint to be available {r.text}"
     assert "Healthy" in r.text, "Expected Healthy in response text from /health"
 
-def test_serviceurl_health_api_endpoint_no_auth_opa_available(service_url, cloud_provider):
+
+def test_serviceurl_health_api_endpoint_no_auth_opa_available(
+    service_url, cloud_provider
+):
     """Check Health API"""
     url = service_url + conf.SERVICE_BASE_PATH + "/health"
     r = requests.get(url)
@@ -58,20 +74,28 @@ def test_serviceurl_health_api_endpoint_no_auth_opa_available(service_url, cloud
     # Some CSPs (like AWS) don't expose /health without auth so let's skip it in that case
     if r.status_code == 401 or r.status_code == 403:
         pytest.skip(f"Skipping service_url /health unauth test for {cloud_provider}")
-    assert r.status_code == 200, f"Expect /health endpoint ({url}) to be available {r.text}"
+    assert (
+        r.status_code == 200
+    ), f"Expect /health endpoint ({url}) to be available {r.text}"
     assert "Healthy" in r.text, "Expected Healthy in response text from /health"
 
+
 @pytest.mark.dependency(name="require_token")
-def test_serviceurl_health_api_endpoint_opa_available_auth_headers(token, data_partition, service_url):
+def test_serviceurl_health_api_endpoint_opa_available_auth_headers(
+    token, data_partition, service_url
+):
     """Check Health API with Auth headers"""
     url = service_url + conf.SERVICE_BASE_PATH + "/health"
-    headers={'Authorization': 'Bearer ' + token, 'data-partition-id': data_partition}
+    headers = {"Authorization": BEARER + token, "data-partition-id": data_partition}
     r = requests.get(url, headers=headers)
     # If OPA is available should get a 200
     # This should work in all cloud providers
-    assert r.status_code == 200, f"Expect /health endpoint ({url}) to be available {r.status_code} {r.text}"
+    assert (
+        r.status_code == 200
+    ), f"Expect /health endpoint ({url}) to be available {r.status_code} {r.text}"
     assert "Healthy" in r.text, "Expected Healthy in response text from /health"
 
+
 @pytest.mark.dependency(name="require_token")
 def test_login(token, data_partition):
     """
@@ -80,11 +104,12 @@ def test_login(token, data_partition):
     """
     if conf.ENABLE_ADMIN_UI == False:
         pytest.skip("Skipping adminui login test - ENABLE_ADMIN_UI")
-    headers={'Authorization': 'Bearer ' + token, 'data-partition-id': data_partition}
+    headers = {"Authorization": BEARER + token, "data-partition-id": data_partition}
     r = client.get(conf.SERVICE_BASE_PATH + "/login", headers=headers)
     if r.status_code == 401:
         print(f"problem with headers: {headers}")
-    assert r.status_code == 200, f"Expected /login is available {r.text}"    
+    assert r.status_code == 200, f"Expected /login is available {r.text}"
+
 
 @pytest.mark.dependency(name="require_token")
 @pytest.mark.mock
@@ -92,12 +117,16 @@ def test_info_mock(token, data_partition):
     """
     Check info API endpoint
     """
-    r = client.get(conf.SERVICE_BASE_PATH + "/info",
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
+    r = client.get(
+        conf.SERVICE_BASE_PATH + "/info",
+        headers={"Authorization": BEARER + token, "data-partition-id": data_partition},
+    )
     js = json.dumps(r.json(), indent=4)
     print(f"{js}")
-    assert r.status_code == 200, f"Expected /info is available text: {r.text}, status_code: {r.status_code} ENTITLEMENTS_GROUPS_API:{conf.ENTITLEMENTS_GROUPS_API}, OPA_URL: {conf.OPA_URL}, Environment variables: {os.environ}"    
+    assert (
+        r.status_code == 200
+    ), f"Expected /info is available text: {r.text}, status_code: {r.status_code} ENTITLEMENTS_GROUPS_API:{conf.ENTITLEMENTS_GROUPS_API}, OPA_URL: {conf.OPA_URL}, Environment variables: {os.environ}"
+
 
 @pytest.mark.dependency(name="require_token")
 def test_info_service_url(token, data_partition, service_url):
@@ -105,13 +134,15 @@ def test_info_service_url(token, data_partition, service_url):
     Check info API endpoint
     """
     url = service_url + conf.SERVICE_BASE_PATH + "/info"
-    r = requests.get(url,
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
+    r = requests.get(
+        url,
+        headers={"Authorization": BEARER + token, "data-partition-id": data_partition},
+    )
     js = json.dumps(r.json(), indent=4)
     print(f"{js}")
     assert r.status_code == 200, f"Expected {url} is available {r.text}"
 
+
 @pytest.mark.opa_access
 @pytest.mark.dependency(depends=["require_token"])
 def test_local_config(token, data_partition):
@@ -122,13 +153,15 @@ def test_local_config(token, data_partition):
     """
     if conf.ENABLE_DEV_DIAGNOSTICS == False:
         pytest.skip("Skipping diagnostic test - ENABLE_DEV_DIAGNOSTICS")
-        
-    r = client.get(conf.SERVICE_BASE_PATH + "/config",
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
-    print(f"text: {r.text}",file=sys.stderr)
+
+    r = client.get(
+        conf.SERVICE_BASE_PATH + "/config",
+        headers={"Authorization": BEARER + token, "data-partition-id": data_partition},
+    )
+    print(f"text: {r.text}", file=sys.stderr)
     assert r.status_code == 200, f"Config Page is available {r.text}"
 
+
 @pytest.mark.dependency(depends=["require_token"])
 def test_config_service_url(token, data_partition, service_url):
     """
@@ -138,30 +171,37 @@ def test_config_service_url(token, data_partition, service_url):
     """
     if conf.ENABLE_DEV_DIAGNOSTICS == False:
         pytest.skip("Skipping diagnostic test - ENABLE_DEV_DIAGNOSTICS")
-        
-    r = requests.get(service_url + conf.SERVICE_BASE_PATH + "/config",
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
-    print(f"text: {r.text}",file=sys.stderr)
+
+    r = requests.get(
+        service_url + conf.SERVICE_BASE_PATH + "/config",
+        headers={"Authorization": BEARER + token, "data-partition-id": data_partition},
+    )
+    print(f"text: {r.text}", file=sys.stderr)
     assert r.status_code == 200, f"Config Page is available {r.text}"
 
+
 def test_docs_api_endpoint(token, data_partition, service_url):
     """Check Docs endpoint"""
-    response = requests.get(service_url + conf.SERVICE_BASE_PATH + "/docs",
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
+    response = requests.get(
+        service_url + conf.SERVICE_BASE_PATH + "/docs",
+        headers={"Authorization": BEARER + token, "data-partition-id": data_partition},
+    )
     assert response.status_code == 200, "Expected /docs to be available"
 
+
 def test_redocs_api_endpoint(token, data_partition, service_url):
     """Check ReDocs endpoint"""
-    response = requests.get(service_url + conf.SERVICE_BASE_PATH + "/redoc",
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
+    response = requests.get(
+        service_url + conf.SERVICE_BASE_PATH + "/redoc",
+        headers={"Authorization": BEARER + token, "data-partition-id": data_partition},
+    )
     assert response.status_code == 200, "Expected Redoc to be available"
 
+
 def test_openapi_json_endpoint(token, data_partition, service_url):
     """Check openapi json is available"""
-    response = requests.get(service_url + conf.SERVICE_BASE_PATH + "/openapi.json",
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
+    response = requests.get(
+        service_url + conf.SERVICE_BASE_PATH + "/openapi.json",
+        headers={"Authorization": BEARER + token, "data-partition-id": data_partition},
+    )
     assert response.status_code == 200, "Expected OpenAPI.json to be available"
diff --git a/app/tests/integration/test_integration_011_utility.py b/app/tests/integration/test_integration_011_utility.py
index a90b4af172e49ac9dc836c4e01b98aac152faa4e..a13b33ad065b2892ea349bfa990b5668be1df4aa 100644
--- a/app/tests/integration/test_integration_011_utility.py
+++ b/app/tests/integration/test_integration_011_utility.py
@@ -1,5 +1,6 @@
 # test_integration_011_utility.py
 import pytest
+
 # requires pytest_dependency to be installed
 import unittest
 import responses
@@ -14,8 +15,8 @@ import json
 from pathlib import Path
 
 # so the directory in which you run pytest or unittest doesn't matter as much
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 from main import app
@@ -24,30 +25,43 @@ import conf
 import testlib
 
 # override dependency injection for authentication to entitlement service
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 
-TEST_DATA_DIR = Path(__file__).resolve().parent / 'data'
+TEST_DATA_DIR = Path(__file__).resolve().parent / "data"
 
 client = TestClient(app)
 
+
 @pytest.mark.dependency(name="require_token")
 def test_require_token(token):
     assert token is not None, "No token provided on command line"
 
+
 def test_cloud_provider(cloud_provider):
-    """Check CLOUD_PROVIDER """
+    """Check CLOUD_PROVIDER"""
     assert cloud_provider is not None, "No CLOUD_PROVIDER env set"
 
+
 @pytest.mark.dependency(name="require_token")
 def test_backup(token, data_partition, service_url, cloud_provider):
     """
     Test Backup API
     """
     url = service_url + conf.SERVICE_BASE_PATH + "/backup"
-    headers={'Authorization': 'Bearer ' + token, 'data-partition-id': data_partition}
+    headers = {"Authorization": "Bearer " + token, "data-partition-id": data_partition}
     r = requests.get(url, headers=headers)
     if r.status_code != 200:
-        pytest.xfail(f"service_url /backup fail for {cloud_provider} {r.status_code} {r.text}")
+        pytest.xfail(
+            f"service_url /backup fail for {cloud_provider} {r.status_code} {r.text}"
+        )
+
 
 @pytest.mark.dependency(name="require_token")
 def test_bootstrap(token, data_partition, service_url, cloud_provider):
@@ -55,26 +69,30 @@ def test_bootstrap(token, data_partition, service_url, cloud_provider):
     Test Bootstrap API
     """
     url = service_url + conf.SERVICE_BASE_PATH + "/bootstrap"
-    headers={'Authorization': 'Bearer ' + token, 'data-partition-id': data_partition}
+    headers = {"Authorization": "Bearer " + token, "data-partition-id": data_partition}
     r = requests.post(url, headers=headers)
 
     if r.status_code == 424 or r.status_code == 405:
         pytest.xfail(f"/bootstrap {cloud_provider} {r.status_code} {r.text}")
 
-    assert r.status_code == 201 or r.status_code == 202, f"/bootstrap {cloud_provider} {r.status_code} {r.text}"
-    
+    assert (
+        r.status_code == 201 or r.status_code == 202
+    ), f"/bootstrap {cloud_provider} {r.status_code} {r.text}"
+
+
 @pytest.mark.dependency(name="require_token")
 def test_bootstrap_force(token, data_partition, service_url, cloud_provider):
     """
     Test Bootstrap API Force
     """
     url = service_url + conf.SERVICE_BASE_PATH + "/bootstrap"
-    params = {'force': True}
-    headers={'Authorization': 'Bearer ' + token, 'data-partition-id': data_partition}
+    params = {"force": True}
+    headers = {"Authorization": "Bearer " + token, "data-partition-id": data_partition}
     r = requests.post(url, headers=headers, params=params)
 
     if r.status_code == 424 or r.status_code == 405:
         pytest.xfail(f"/bootstrap {cloud_provider} {r.status_code} {r.text}")
 
-    assert r.status_code == 201 or r.status_code == 202, f"/bootstrap {cloud_provider} {r.status_code} {r.text}"
-    
\ No newline at end of file
+    assert (
+        r.status_code == 201 or r.status_code == 202
+    ), f"/bootstrap {cloud_provider} {r.status_code} {r.text}"
diff --git a/app/tests/integration/test_integration_012_validate.py b/app/tests/integration/test_integration_012_validate.py
index 14614a3ceec04425f2c49deb12a2e79cb066adf4..a00f585e256552a79071e937f69eefe2120210ff 100644
--- a/app/tests/integration/test_integration_012_validate.py
+++ b/app/tests/integration/test_integration_012_validate.py
@@ -1,5 +1,6 @@
 # test_integration_011_utility.py
 import pytest
+
 # requires pytest_dependency to be installed
 import unittest
 import responses
@@ -14,8 +15,8 @@ import json
 from pathlib import Path
 
 # so the directory in which you run pytest or unittest doesn't matter as much
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 from main import app
@@ -24,20 +25,30 @@ import conf
 import testlib
 
 # override dependency injection for authentication to entitlement service
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 
-TEST_DATA_DIR = Path(__file__).resolve().parent / 'data'
+TEST_DATA_DIR = Path(__file__).resolve().parent / "data"
 
 client = TestClient(app)
 
+
 @pytest.mark.dependency(name="require_token")
 def test_require_token(token):
     assert token is not None, "No token provided on command line"
 
+
 def test_cloud_provider(cloud_provider):
-    """Check CLOUD_PROVIDER """
+    """Check CLOUD_PROVIDER"""
     assert cloud_provider is not None, "No CLOUD_PROVIDER env set"
 
+
 @pytest.mark.dependency(name="require_token")
 def test_validate_with_template(token, data_partition, service_url, cloud_provider):
     """
@@ -50,26 +61,28 @@ import data.osdu.instance.dataauthz as centralauthz
 
 records := centralauthz.records"""
 
-    bdata = data.encode('utf-8')
-    files = {'file': (policy_id, bdata)}
+    bdata = data.encode("utf-8")
+    files = {"file": (policy_id, bdata)}
     url = service_url + conf.SERVICE_BASE_PATH + f"/validate/{policy_id}"
-    params = {'template': True}
-    headers={'Authorization': 'Bearer ' + token, 'data-partition-id': data_partition}
-    r = requests.put(url,
-                    headers=headers,
-                    params=params,
-                    files=files)
+    params = {"template": True}
+    headers = {"Authorization": "Bearer " + token, "data-partition-id": data_partition}
+    r = requests.put(url, headers=headers, params=params, files=files)
     if r.status_code != 200:
-        pytest.xfail(f"service_url /validate fail for {cloud_provider}: {r.status_code} {r.text} {url}")
+        pytest.xfail(
+            f"service_url /validate fail for {cloud_provider}: {r.status_code} {r.text} {url}"
+        )
+
 
 @pytest.mark.parametrize("regofile", testlib.get_rego_templates())
 @pytest.mark.dependency(depends=["require_token"])
 def test_validate_policies(token, data_partition, service_url, domain, regofile):
     assert token is not None, "No token provided on command line"
-    testlib.put_policy_test_data(client=requests,
-         token=token,
-         data_partition=data_partition,
-         service_url=service_url,
-         domain=domain,
-         filename=regofile,
-         validate=True)
\ No newline at end of file
+    testlib.put_policy_test_data(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        service_url=service_url,
+        domain=domain,
+        filename=regofile,
+        validate=True,
+    )
diff --git a/app/tests/integration/test_integration_015_legal.py b/app/tests/integration/test_integration_015_legal.py
index 0ac02aee4b4e7c78bbc30acacb58f63e398dbd23..f95488a1cb0c7f4f2ddfbc4327c3160244d35632 100644
--- a/app/tests/integration/test_integration_015_legal.py
+++ b/app/tests/integration/test_integration_015_legal.py
@@ -1,5 +1,6 @@
 # test_integration_015_legal.py
 import pytest
+
 # requires pytest_dependency to be installed
 import unittest
 import responses
@@ -14,8 +15,8 @@ import json
 from pathlib import Path
 
 # so the directory in which you run pytest or unittest doesn't matter as much
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 from main import app
@@ -24,16 +25,25 @@ import conf
 import testlib
 
 # override dependency injection for authentication to entitlement service
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 
-TEST_DATA_DIR = Path(__file__).resolve().parent / 'data'
+TEST_DATA_DIR = Path(__file__).resolve().parent / "data"
 
 client = TestClient(app)
 
+
 @pytest.mark.dependency(name="require_token")
 def test_require_token(token):
     assert token is not None, "No token provided on command line"
 
+
 def test_create_legal_tag(token, data_partition):
     """Test Create a legal tag"""
 
@@ -52,14 +62,15 @@ def test_create_legal_tag(token, data_partition):
             "dataType": "Public Domain Data",
             "securityClassification": "Public",
             "personalData": "No Personal Data",
-            "exportClassification": "EAR99"
+            "exportClassification": "EAR99",
         },
-        "description": "A default legal tag"
+        "description": "A default legal tag",
     }
 
-    headers = {'Authorization': 'Bearer ' + token,
-               'data-partition-id': data_partition}
+    headers = {"Authorization": "Bearer " + token, "data-partition-id": data_partition}
 
     # create default legal tag to avoid empty legal tag response
-    r = requests.post(url, headers=headers, json=legal_tag, verify=False)
-    assert r.status_code == 201 or r.status_code == 409, f"Expect can add tag {r.text} {r.status_code}"
+    r = requests.post(url, headers=headers, json=legal_tag)
+    assert (
+        r.status_code == 201 or r.status_code == 409
+    ), f"Expect can add tag {r.text} {r.status_code}"
diff --git a/app/tests/integration/test_integration_020_put.py b/app/tests/integration/test_integration_020_put.py
index 40f26b8b86f36818935f7fcd955559f4586c31fc..99eff20680e0c392620fe1f4fc71dc4e26c734c7 100644
--- a/app/tests/integration/test_integration_020_put.py
+++ b/app/tests/integration/test_integration_020_put.py
@@ -1,5 +1,6 @@
 # test_integration_020_put.py
 import pytest
+
 # requires pytest_dependency to be installed
 import unittest
 import responses
@@ -15,8 +16,8 @@ import time
 from pathlib import Path
 
 # so the directory in which you run pytest or unittest doesn't matter as much
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 from main import app
@@ -25,23 +26,35 @@ import conf
 import testlib
 
 # override dependency injection for authentication to entitlement service
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 
-TEST_DATA_DIR = Path(__file__).resolve().parent / '../templates'
+TEST_DATA_DIR = Path(__file__).resolve().parent / "../templates"
 
 client = TestClient(app)
 
+
 @pytest.mark.dependency(name="require_token")
 def test_require_token(token):
     assert token is not None, "No token provided on command line"
 
+
 @pytest.mark.mock
 @pytest.mark.bundle
 @pytest.mark.opa_access
 @pytest.mark.dependency(depends=["require_token"])
 def test_put_policies_test_data_mock(token, data_partition, domain):
     assert token is not None, "No token provided on command line"
-    testlib.put_policies_test_data(client=client, token=token, data_partition=data_partition, domain=domain)
+    testlib.put_policies_test_data(
+        client=client, token=token, data_partition=data_partition, domain=domain
+    )
+
 
 # @pytest.mark.dependency(depends=["require_token"])
 # def test_put_policies_test_data_service_url(token, data_partition, service_url, domain):
@@ -52,20 +65,24 @@ def test_put_policies_test_data_mock(token, data_partition, domain):
 #          service_url=service_url,
 #          domain=domain)
 
+
 def get_rego_templatesx():
     p = os.path.dirname(os.path.abspath(__file__))
-    template_datadir = os.path.join(p, '../templates')
+    template_datadir = os.path.join(p, "../templates")
     return sorted(os.listdir(template_datadir))
 
+
 @pytest.mark.parametrize("regofile", testlib.get_rego_templates())
 @pytest.mark.dependency(depends=["require_token"])
 def test_put_policy(token, data_partition, service_url, domain, regofile):
     assert token is not None, "No token provided on command line"
     # Let's not burden the bundle service backend - rate limit issues
     time.sleep(1)
-    testlib.put_policy_test_data(client=requests,
-         token=token,
-         data_partition=data_partition,
-         service_url=service_url,
-         domain=domain,
-         filename=regofile)
\ No newline at end of file
+    testlib.put_policy_test_data(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        service_url=service_url,
+        domain=domain,
+        filename=regofile,
+    )
diff --git a/app/tests/integration/test_integration_025_sleep.py b/app/tests/integration/test_integration_025_sleep.py
index 72261d2e6eb9c4da3ad262ac28ca0821294c7be4..916680a828d33c3e46db285898699067d26e054a 100644
--- a/app/tests/integration/test_integration_025_sleep.py
+++ b/app/tests/integration/test_integration_025_sleep.py
@@ -3,6 +3,7 @@ import pytest
 import time
 import testlib
 
+
 def test_wait_for_bundle(bundle_pause):
     """
     Wait for bundle to catch-up.
diff --git a/app/tests/integration/test_integration_030_get.py b/app/tests/integration/test_integration_030_get.py
index 926c1008d52573ef8e72ccdd7b5307eb1e32a243..76f8c034b55256130c6c39521039ff8115c4ff30 100644
--- a/app/tests/integration/test_integration_030_get.py
+++ b/app/tests/integration/test_integration_030_get.py
@@ -1,6 +1,7 @@
 # test_integration_030_get.py
 from http import server
 import pytest
+
 # requires pytest_dependency to be installed
 import unittest
 import responses
@@ -14,8 +15,8 @@ import json
 from pathlib import Path
 
 # so the directory in which you run pytest or unittest doesn't matter as much
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 from main import app
@@ -24,41 +25,60 @@ import conf
 import testlib
 
 # override dependency injection for authentication to entitlement service
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 
-TEST_DATA_DIR = Path(__file__).resolve().parent / 'templates'
+TEST_DATA_DIR = Path(__file__).resolve().parent / "templates"
 
 client = TestClient(app)
 
+
 @pytest.mark.dependency(name="require_token")
 def test_require_token(token):
     assert token is not None, "No token provided on command line"
 
+
 @pytest.mark.opa_access
 @pytest.mark.dependency(depends=["require_token"])
 def test_local_fetch_policies(token, data_partition):
     """
     Check /policies API endpoint is available
     """
-        
-    r = client.get(conf.SERVICE_BASE_PATH + "/policies",
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
+
+    r = client.get(
+        conf.SERVICE_BASE_PATH + "/policies",
+        headers={
+            "Authorization": "Bearer " + token,
+            "data-partition-id": data_partition,
+        },
+    )
     print(r.text)
     assert r.status_code == 200, f"Expected to get 200 from /policies {r.text}"
 
+
 @pytest.mark.dependency(depends=["require_token"])
 def test_serviceurl_fetch_policies(token, data_partition, service_url):
     """
     Check /policies API endpoint is available
     """
 
-    r = requests.get(service_url + conf.SERVICE_BASE_PATH + "/policies",
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
+    r = requests.get(
+        service_url + conf.SERVICE_BASE_PATH + "/policies",
+        headers={
+            "Authorization": "Bearer " + token,
+            "data-partition-id": data_partition,
+        },
+    )
     print(r.text)
     assert r.status_code == 200, f"Expected to get 200 from /policies {r.text}"
 
+
 @pytest.mark.dependency(depends=["require_token"])
 def test_fetch_policies_unknown_data_partition_id(token):
     """
@@ -69,41 +89,75 @@ def test_fetch_policies_unknown_data_partition_id(token):
     if conf.MOCK_ENTITLEMENT:
         pytest.skip("MOCK ENTITLEMENT ENABLED")
 
-    r = client.get(conf.SERVICE_BASE_PATH + "/policies",
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': "unknownpartitionid"})
+    r = client.get(
+        conf.SERVICE_BASE_PATH + "/policies",
+        headers={
+            "Authorization": "Bearer " + token,
+            "data-partition-id": "unknownpartitionid",
+        },
+    )
     print(token)
-    assert r.status_code == 401, f"expect unauthenticated response {r.status_code} {r.text}"
+    assert (
+        r.status_code == 401
+    ), f"expect unauthenticated response {r.status_code} {r.text}"
+
 
 @pytest.mark.opa_access
 @pytest.mark.mock
 @pytest.mark.dependency(depends=["require_token"])
 def test_local_instance_policy_dataauthz_mock(token, data_partition):
-    get_instance_policy(token=token, data_partition=data_partition, policy_id="dataauthz.rego")
+    get_instance_policy(
+        token=token, data_partition=data_partition, policy_id="dataauthz.rego"
+    )
+
 
 @pytest.mark.dependency(depends=["require_token"])
 def test_instance_policy_dataauthz_service_url(token, data_partition, service_url):
-    get_instance_policy(token=token, data_partition=data_partition, policy_id="dataauthz.rego", service_url=service_url)
+    get_instance_policy(
+        token=token,
+        data_partition=data_partition,
+        policy_id="dataauthz.rego",
+        service_url=service_url,
+    )
+
 
 @pytest.mark.opa_access
 @pytest.mark.mock
 @pytest.mark.dependency(depends=["require_token"])
 def test_local_instance_policy_entitlements_mock(token, data_partition):
-    get_instance_policy(token=token, data_partition=data_partition, policy_id="entitlements.rego")
+    get_instance_policy(
+        token=token, data_partition=data_partition, policy_id="entitlements.rego"
+    )
+
 
 @pytest.mark.dependency(depends=["require_token"])
 def test_instance_policy_entitlements_sevice_url(token, data_partition, service_url):
-    get_instance_policy(token=token, data_partition=data_partition, policy_id="entitlements.rego", service_url=service_url)
+    get_instance_policy(
+        token=token,
+        data_partition=data_partition,
+        policy_id="entitlements.rego",
+        service_url=service_url,
+    )
+
 
 @pytest.mark.opa_access
 @pytest.mark.mock
 @pytest.mark.dependency(depends=["require_token"])
 def test_local_instance_policy_legal_mock(token, data_partition, service_url):
-    get_instance_policy(token=token, data_partition=data_partition, policy_id="legal.rego")
+    get_instance_policy(
+        token=token, data_partition=data_partition, policy_id="legal.rego"
+    )
+
 
 @pytest.mark.dependency(depends=["require_token"])
 def test_instance_policy_legal_service_url(token, data_partition, service_url):
-    get_instance_policy(token=token, data_partition=data_partition, policy_id="legal.rego", service_url=service_url)
+    get_instance_policy(
+        token=token,
+        data_partition=data_partition,
+        policy_id="legal.rego",
+        service_url=service_url,
+    )
+
 
 def get_instance_policy(token, data_partition, policy_id, service_url=None):
     """
@@ -111,24 +165,37 @@ def get_instance_policy(token, data_partition, policy_id, service_url=None):
     requires dataauthz.rego to be in instance
     """
     url = conf.SERVICE_BASE_PATH + "/policies/osdu/instance/" + policy_id
-    policy_id_short = policy_id.removesuffix('.rego')
+    policy_id_short = policy_id.removesuffix(".rego")
     if service_url:
-        url = service_url + url 
+        url = service_url + url
         print(url)
-        r = requests.get(url,
-            headers={'Authorization': 'Bearer ' + token,
-            'data-partition-id': data_partition})
-    else: # mock aka no CLOUD_PROVIDER
+        r = requests.get(
+            url,
+            headers={
+                "Authorization": "Bearer " + token,
+                "data-partition-id": data_partition,
+            },
+        )
+    else:  # mock aka no CLOUD_PROVIDER
         print(url)
-        r = client.get(url,
-            headers={'Authorization': 'Bearer ' + token,
-            'data-partition-id': data_partition})
-    assert r.status_code == 200, f"Get instance policy {policy_id} failed. Load policies before running test. AdminCLI 'make load_data'"
+        r = client.get(
+            url,
+            headers={
+                "Authorization": "Bearer " + token,
+                "data-partition-id": data_partition,
+            },
+        )
+    assert (
+        r.status_code == 200
+    ), f"Get instance policy {policy_id} failed. Load policies before running test. AdminCLI 'make load_data'"
     assert "result" in r.text, f"expect result for {policy_id}"
     if service_url:
         assert f"osdu/instance/{policy_id}" in r.text, f"expect result for {policy_id}"
-        assert f"package osdu.instance.{policy_id_short}" in r.text, f"expect result for {policy_id}"
-    
+        assert (
+            f"package osdu.instance.{policy_id_short}" in r.text
+        ), f"expect result for {policy_id}"
+
+
 @pytest.mark.opa_access
 @pytest.mark.mock
 @pytest.mark.dependency(depends=["require_token"])
@@ -138,27 +205,34 @@ def test_get_diag_policies_mock(token, data_partition):
     """
     if conf.ENABLE_DEV_DIAGNOSTICS == False:
         pytest.skip("Skipping diagnostic test")
-        
+
     num_tests = 0
-    r = client.get("/diag/policies",
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
+    r = client.get(
+        "/diag/policies",
+        headers={
+            "Authorization": "Bearer " + token,
+            "data-partition-id": data_partition,
+        },
+    )
     assert "result" in r.text, f"result not expected {r.text}"
-    #print(f"text: {r.text}",file=sys.stderr)
+    # print(f"text: {r.text}",file=sys.stderr)
     assert r.status_code == 200, "policies Page is available"
     p = os.path.dirname(os.path.abspath(__file__))
-    template_datadir = os.path.join(p, '..', 'templates')
+    template_datadir = os.path.join(p, "..", "templates")
     for filename in sorted(os.listdir(template_datadir)):
-        filetype='.rego'
+        filetype = ".rego"
         if filename.endswith(filetype):
             if filename not in r.text:
                 json_str = json.dumps(r.json(), indent=4, sort_keys=True)
                 with open("policy_dump_debug.json", "w") as outfile:
                     outfile.write(json_str)
-            assert filename in r.text, f"result not expected data from filename {filename} not in OPA. See policy_dump_debug.json" 
+            assert (
+                filename in r.text
+            ), f"result not expected data from filename {filename} not in OPA. See policy_dump_debug.json"
             num_tests = num_tests + 1
     assert num_tests >= 7, "At least 7 policies were tested"
 
+
 @pytest.mark.dependency(depends=["require_token"])
 def test_get_diag_policies_service_url(token, data_partition, service_url):
     """
@@ -167,24 +241,31 @@ def test_get_diag_policies_service_url(token, data_partition, service_url):
     """
     if conf.ENABLE_DEV_DIAGNOSTICS == False:
         pytest.skip("Skipping diagnostic test")
-        
+
     num_tests = 0
-    r = requests.get(service_url + "/diag/policies",
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
+    r = requests.get(
+        service_url + "/diag/policies",
+        headers={
+            "Authorization": "Bearer " + token,
+            "data-partition-id": data_partition,
+        },
+    )
 
     assert "result" in r.text, f"result not expected {r.text}"
-    #print(f"text: {r.text}",file=sys.stderr)
+    # print(f"text: {r.text}",file=sys.stderr)
     assert r.status_code == 200, "diag policies API is not available"
     p = os.path.dirname(os.path.abspath(__file__))
-    template_datadir = os.path.join(p, '..', 'templates')
+    template_datadir = os.path.join(p, "..", "templates")
     for filename in sorted(os.listdir(template_datadir)):
-        filetype='.rego'
+        filetype = ".rego"
         if filename.endswith(filetype):
-            assert filename in r.text, f"result not expected data from filename {filename} not in OPA: {r.text}" 
+            assert (
+                filename in r.text
+            ), f"result not expected data from filename {filename} not in OPA: {r.text}"
             num_tests = num_tests + 1
     assert num_tests >= 7, "At least 7 policies were tested"
 
+
 # @pytest.mark.dependency(depends=["require_token"])
 # def test_fetch_policies_advanced(token, data_partition, service_url):
 #     """
@@ -207,60 +288,80 @@ def test_get_diag_policies_service_url(token, data_partition, service_url):
 #             num_tests = num_tests + 1
 #     assert num_tests >= 7, "At least 7 polices were tested"
 
+
 @pytest.mark.dependency(depends=["require_token"])
 @pytest.mark.parametrize("regofile", testlib.get_rego_templates())
 def test_get_policy(token, data_partition, service_url, domain, regofile):
     assert token is not None, "No token provided on command line"
-    filetype='.rego'
+    filetype = ".rego"
     if regofile.endswith(filetype):
         id = f"osdu/partition/{data_partition}/{regofile}"
-        r = requests.get(service_url + conf.SERVICE_BASE_PATH + "/policies/" + id,
-            headers={'Authorization': 'Bearer ' + token,
-            'data-partition-id': data_partition})
+        r = requests.get(
+            service_url + conf.SERVICE_BASE_PATH + "/policies/" + id,
+            headers={
+                "Authorization": "Bearer " + token,
+                "data-partition-id": data_partition,
+            },
+        )
         assert r.status_code == 200, f"Fetch of {id}: {r.text}"
         assert "result" in r.text, f"result not expected {r.text}"
 
+
 @pytest.mark.dependency(depends=["require_token"])
 def test_get_osdu_partition_policies_serviceurl(token, data_partition, service_url):
     """
     Test get /policies/osdu/partition/<data_partition>/<policy>
     """
-    get_osdu_partition_policies(token=token,
-        data_partition=data_partition,
-        service_url=service_url)
+    get_osdu_partition_policies(
+        token=token, data_partition=data_partition, service_url=service_url
+    )
+
 
 @pytest.mark.opa_access
 @pytest.mark.dependency(depends=["require_token"])
 def test_get_osdu_partition_policies_local(token, data_partition):
-    get_osdu_partition_policies(token=token,
-        data_partition=data_partition)
+    get_osdu_partition_policies(token=token, data_partition=data_partition)
+
 
 def get_osdu_partition_policies(token, data_partition, service_url=False):
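+    """Fetch each template policy under osdu/partition/<data_partition> and verify it."""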
     num_tests = 0
     p = os.path.dirname(os.path.abspath(__file__))
-    template_datadir = os.path.join(p, '..', 'templates')
+    template_datadir = os.path.join(p, "..", "templates")
     for filename in sorted(os.listdir(template_datadir)):
-        filetype='.rego'
+        filetype = ".rego"
         if filename.endswith(filetype):
             id = f"osdu/partition/{data_partition}/{filename}"
-            policy_id_short = filename.removesuffix('.rego')
+            policy_id_short = filename.removesuffix(".rego")
             if service_url:
                 url = service_url + conf.SERVICE_BASE_PATH + "/policies/" + id
 
-                r = requests.get(url,
-                    headers={'Authorization': 'Bearer ' + token,
-                    'data-partition-id': data_partition})
-            else: # mock
+                r = requests.get(
+                    url,
+                    headers={
+                        "Authorization": "Bearer " + token,
+                        "data-partition-id": data_partition,
+                    },
+                )
+            else:  # mock
                 url = conf.SERVICE_BASE_PATH + "/policies/" + id
-                r = client.get(url,
-                    headers={'Authorization': 'Bearer ' + token,
-                    'data-partition-id': data_partition})
+                r = client.get(
+                    url,
+                    headers={
+                        "Authorization": "Bearer " + token,
+                        "data-partition-id": data_partition,
+                    },
+                )
             assert r.status_code == 200, f"Fetch of {filename} {url}: {r.text}"
             assert "result" in r.text, f"result not expected {r.text} {url}"
-            
+
             if service_url:
-                assert f"osdu/partition/{data_partition}/{filename}" in r.text, f"expect result for {filename} osdu/partition/<dp>/<policy.rego>"
-                assert f"package osdu.partition[\\\"{data_partition}\\\"].{policy_id_short}" in r.text, f"expect result for {filename}"
+                assert (
+                    f"osdu/partition/{data_partition}/{filename}" in r.text
+                ), f"expect result for {filename} osdu/partition/<dp>/<policy.rego>"
+                assert (
+                    f'package osdu.partition[\\"{data_partition}\\"].{policy_id_short}'
+                    in r.text
+                ), f"expect result for {filename}"
             print(r.text)
             num_tests = num_tests + 1
     assert num_tests >= 7, "At least 7 polices were tested"
diff --git a/app/tests/integration/test_integration_034_compile.py b/app/tests/integration/test_integration_034_compile.py
index 3336ad7917059815f5125968b51f4e101fe6b3aa..6e955971b077d16277abc010bfb30908b8c7339b 100644
--- a/app/tests/integration/test_integration_034_compile.py
+++ b/app/tests/integration/test_integration_034_compile.py
@@ -1,5 +1,6 @@
 # test_integration_034_compile.py
 import pytest
+
 # requires pytest_dependency to be installed
 import unittest
 import responses
@@ -16,8 +17,8 @@ from pathlib import Path
 from string import Template
 
 # so the directory in which you run pytest or unittest doesn't matter as much
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 from main import app
@@ -26,37 +27,57 @@ import conf
 import testlib
 
 # override dependency injection for authentication to entitlement service
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 
-TEST_DATA_DIR = Path(__file__).resolve().parent / 'templates'
+TEST_DATA_DIR = Path(__file__).resolve().parent / "templates"
 
 client = TestClient(app)
 
+
 @pytest.mark.dependency(name="require_token")
 def test_require_token(token):
     assert token is not None, "No token provided on command line"
 
-def post_request(token, data_partition, files, metrics=False, instrument=False, service_url = False):
+
+def post_request(
+    token, data_partition, files, metrics=False, instrument=False, service_url=False
+):
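+    """POST a compile input to /compile and check the response for result and queries."""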
     url = conf.SERVICE_BASE_PATH + "/compile"
-    params = {'metrics': metrics, 'instrument': instrument}
+    params = {"metrics": metrics, "instrument": instrument}
     if service_url:
-        r = requests.post(service_url + url,
-            files = files,
-            params = params,
-            headers={'Authorization': 'Bearer ' + token,
-            'data-partition-id': data_partition})
+        r = requests.post(
+            service_url + url,
+            files=files,
+            params=params,
+            headers={
+                "Authorization": "Bearer " + token,
+                "data-partition-id": data_partition,
+            },
+        )
     else:
-        r = client.post(url,
-            files = files,
-            params = params,
-            headers={'Authorization': 'Bearer ' + token,
-            'data-partition-id': data_partition})
+        r = client.post(
+            url,
+            files=files,
+            params=params,
+            headers={
+                "Authorization": "Bearer " + token,
+                "data-partition-id": data_partition,
+            },
+        )
 
     assert r.status_code == 200, f"Expected to get 200 from {url} {r.text}"
     assert "result" in r.text, f"Expected to get 'result' from {url} {r.text}"
     assert "queries" in r.text, f"Expected to get 'queries' from {url} {r.text}"
     result = r.json()
 
+
 @pytest.mark.opa_access
 @pytest.mark.dependency(depends=["require_token"])
 def test_compile(token, data_partition):
@@ -65,29 +86,29 @@ def test_compile(token, data_partition):
 
     num_tests = 0
     p = os.path.dirname(os.path.abspath(__file__))
-    template_datadir = os.path.join(p, '..', 'templates')
+    template_datadir = os.path.join(p, "..", "templates")
     for filename in sorted(os.listdir(template_datadir)):
-        filetype='.json'
+        filetype = ".json"
         file_path = os.path.join(template_datadir, filename)
         # checking if it is a file
         if os.path.isfile(file_path):
-            if file_path.endswith(filetype) and filename.startswith("compile"): # compile*.json
+            if file_path.endswith(filetype) and filename.startswith(
+                "compile"
+            ):  # compile*.json
                 input_json = os.path.basename(file_path)
                 print(input_json)
                 print(f"file: {file_path}, input: {input_json}")
-                f = open(file_path, 'r')
+                f = open(file_path, "r")
                 data = f.read()
                 f.close()
                 template_data = Template(data)
-                data = template_data.substitute(
-                    {
-                        'data_partition': data_partition
-                    })
-                bdata = data.encode('utf-8')
-                files = {'file': (input_json, bdata)}
+                data = template_data.substitute({"data_partition": data_partition})
+                bdata = data.encode("utf-8")
+                files = {"file": (input_json, bdata)}
 
                 post_request(token=token, data_partition=data_partition, files=files)
 
+
 @pytest.mark.dependency(depends=["require_token"])
 def test_compile_diag_service_url(token, data_partition, service_url):
     assert token is not None, "No token provided on command line"
@@ -95,25 +116,31 @@ def test_compile_diag_service_url(token, data_partition, service_url):
 
     num_tests = 0
     p = os.path.dirname(os.path.abspath(__file__))
-    template_datadir = os.path.join(p, '..', 'templates')
+    template_datadir = os.path.join(p, "..", "templates")
     for filename in sorted(os.listdir(template_datadir)):
-        filetype='.json'
+        filetype = ".json"
         file_path = os.path.join(template_datadir, filename)
         # checking if it is a file
         if os.path.isfile(file_path):
-            if file_path.endswith(filetype) and filename.startswith("compile"): # compile*.json
+            if file_path.endswith(filetype) and filename.startswith(
+                "compile"
+            ):  # compile*.json
                 input_json = os.path.basename(file_path)
                 print(input_json)
                 print(f"file: {file_path}, input: {input_json}")
-                f = open(file_path, 'r')
+                f = open(file_path, "r")
                 data = f.read()
                 f.close()
                 template_data = Template(data)
-                data = template_data.substitute(
-                    {
-                        'data_partition': data_partition
-                    })
-                bdata = data.encode('utf-8')
-                files = {'file': (input_json, bdata)}
-
-                post_request(token=token, data_partition=data_partition, files=files, metrics=True, instrument=True, service_url=service_url)
+                data = template_data.substitute({"data_partition": data_partition})
+                bdata = data.encode("utf-8")
+                files = {"file": (input_json, bdata)}
+
+                post_request(
+                    token=token,
+                    data_partition=data_partition,
+                    files=files,
+                    metrics=True,
+                    instrument=True,
+                    service_url=service_url,
+                )
diff --git a/app/tests/integration/test_integration_035_eval.py b/app/tests/integration/test_integration_035_eval.py
index 55b730cf4b5704ef92c6613c900a5f2de027d7dc..05822374f5f55ca988c35553109a1b5dae82e1c4 100644
--- a/app/tests/integration/test_integration_035_eval.py
+++ b/app/tests/integration/test_integration_035_eval.py
@@ -1,23 +1,19 @@
 # test_integration_035_eval.py
 import pytest
+
 # requires pytest_dependency to be installed
-import unittest
-import responses
 from fastapi.testclient import TestClient
 import requests
-import re
-import logging
 import sys
 import os
-import base64
 import json
 import random
 from pathlib import Path
 from string import Template
 
 # so the directory in which you run pytest or unittest doesn't matter as much
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 from main import app
@@ -26,31 +22,48 @@ import conf
 import common
 
 # override dependency injection for authentication to entitlement service
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 
-TEST_DATA_DIR = Path(__file__).resolve().parent / 'templates'
+TEST_DATA_DIR = Path(__file__).resolve().parent / "templates"
 
 client = TestClient(app)
 
+
 @pytest.mark.dependency(name="require_token")
 def test_require_token(token):
     assert token is not None, "No token provided on command line"
 
-def post_query(policy_id, token, data_partition, files, service_url = False, expect=200):
+
+def post_query(policy_id, token, data_partition, files, service_url=False, expect=200):
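+    """POST an evaluation input for policy_id and check the expected status code."""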
     url = conf.SERVICE_BASE_PATH + "/evaluations/query"
-    params = {'include_auth': True, 'policy_id': policy_id}
+    params = {"include_auth": True, "policy_id": policy_id}
     if service_url:
-        r = requests.post(service_url + url,
-            files = files,
+        r = requests.post(
+            service_url + url,
+            files=files,
             params=params,
-            headers={'Authorization': 'Bearer ' + token,
-            'data-partition-id': data_partition})
+            headers={
+                "Authorization": "Bearer " + token,
+                "data-partition-id": data_partition,
+            },
+        )
     else:
-        r = client.post(url,
-            files = files,
+        r = client.post(
+            url,
+            files=files,
             params=params,
-            headers={'Authorization': 'Bearer ' + token,
-            'data-partition-id': data_partition})
+            headers={
+                "Authorization": "Bearer " + token,
+                "data-partition-id": data_partition,
+            },
+        )
 
     print(f"status code returned: {r.status_code} {r.text}")
     if r.status_code == 406 == expect:
@@ -73,24 +86,32 @@ def post_query(policy_id, token, data_partition, files, service_url = False, exp
         print(result["result"]["records"])
         errors = result["result"]["records"][0]["errors"]
         print(f"errors: {errors}")
-        if not errors == []:
-            print("Found Errors: Check to make sure id, legal tag and domain are valid!")
+        if errors != []:
+            print(
+                "Found Errors: Check to make sure id, legal tag and domain are valid!"
+            )
             print(f"Error code: {errors[0]['code']}")
-            if errors[0]['code'] == '403':
-                pytest.skip(f"{policy_id} Skipping eval test - misconfigured id in eval, otherwise working")
-            elif errors[0]["reason"] == 'Invalid legal tags':
+            if errors[0]["code"] == "403":
+                pytest.skip(
+                    f"{policy_id} Skipping eval test - misconfigured id in eval, otherwise working"
+                )
+            elif errors[0]["reason"] == "Invalid legal tags":
                 pytest.xfail(f"Invalid legal tags {errors}")
         assert errors == [], f"Expected no OPA errors from {url} {r.text}"
-        
+
     return result
 
+
 @pytest.mark.dependency(depends=["require_token"])
 def test_eval_dataauthz(token, data_partition, service_url, domain):
-    eval_policy(token=token,
+    eval_policy(
+        token=token,
         data_partition=data_partition,
         service_url=service_url,
         policy_name="dataauthz",
-        domain=domain)
+        domain=domain,
+    )
+
 
 @pytest.mark.dependency(depends=["require_token"])
 def test_eval_doesnotexist(token, data_partition, service_url, opa_access, domain):
@@ -101,10 +122,20 @@ def test_eval_doesnotexist(token, data_partition, service_url, opa_access, domai
         policy_name="foobar",
         expect=406,
         opa_access=opa_access,
-        domain=domain)
+        domain=domain,
+    )
+
 
 @pytest.mark.dependency(depends=["require_token"])
-def eval_policy(token, data_partition, service_url, policy_name, expect=200, opa_access=False, domain="example.com"):
+def eval_policy(
+    token,
+    data_partition,
+    service_url,
+    policy_name,
+    expect=200,
+    opa_access=False,
+    domain="example.com",
+):
     """
     Test Evaluation reading in tests/template/eval*.json
     * This requires dataauthz.rego to be already loaded in the data partition
@@ -114,47 +145,64 @@ def eval_policy(token, data_partition, service_url, policy_name, expect=200, opa
     * Replace ${domain} with domain name
         retrieved from legal service
     """
-    #if conf.ENABLE_DEV_DIAGNOSTICS == False:
-    #    pytest.skip("Skipping diagnostic test")
     assert token is not None, "No token provided on command line"
     assert data_partition is not None, "No data partition provided on command line"
 
     policy_id = "osdu/partition/" + data_partition + "/" + policy_name
 
     # randomly select a legal tag
-    legal_tag = random.choice(get_legal_tags(token=token, data_partition=data_partition))
+    legal_tag = random.choice(
+        get_legal_tags(token=token, data_partition=data_partition)
+    )
     print(f"Using Legal tag: {legal_tag}")
     print(f"Using policy_id: {policy_id}")
 
     p = os.path.dirname(os.path.abspath(__file__))
-    template_datadir = os.path.join(p, '..', 'templates')
+    template_datadir = os.path.join(p, "..", "templates")
     for filename in sorted(os.listdir(template_datadir)):
-        filetype='.json'
+        filetype = ".json"
         file_path = os.path.join(template_datadir, filename)
         # checking if it is a file
         if os.path.isfile(file_path):
-            if file_path.endswith(filetype) and filename.startswith("eval"): # eval*.json
+            if file_path.endswith(filetype) and filename.startswith(
+                "eval"
+            ):  # eval*.json
                 input_json = os.path.basename(file_path)
                 print(input_json)
                 print(f"file: {file_path}, input: {input_json}")
-                f = open(file_path, 'r')
+                f = open(file_path, "r")
                 data = f.read()
                 f.close()
                 template_data = Template(data)
                 data = template_data.substitute(
                     {
-                        'data_partition': data_partition,
-                        'legal_tag': legal_tag,
-                        'name': policy_name,
-                        'domain': domain
-                    })
+                        "data_partition": data_partition,
+                        "legal_tag": legal_tag,
+                        "name": policy_name,
+                        "domain": domain,
+                    }
+                )
                 print(f"data: {data}")
-                bdata = data.encode('utf-8')
-                files = {'file': (input_json, bdata)}
-
-                post_query(policy_id=policy_id, token=token, data_partition=data_partition, files=files, service_url=service_url, expect=expect)
+                bdata = data.encode("utf-8")
+                files = {"file": (input_json, bdata)}
+
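+                # Descriptive comment (grounded in the code below): post the rendered
+                # eval template through the policy service; when opa_access is set,
+                # repeat the call without service_url so post_query uses the local test client.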
+                post_query(
+                    policy_id=policy_id,
+                    token=token,
+                    data_partition=data_partition,
+                    files=files,
+                    service_url=service_url,
+                    expect=expect,
+                )
                 if opa_access:
-                    post_query(policy_id=policy_id, token=token, data_partition=data_partition, files=files, expect=expect)
+                    post_query(
+                        policy_id=policy_id,
+                        token=token,
+                        data_partition=data_partition,
+                        files=files,
+                        expect=expect,
+                    )
+
 
 def get_legal_tags(token, data_partition):
     """
@@ -163,22 +211,29 @@ def get_legal_tags(token, data_partition):
 
     # if no entitlement, then mock legal tags
     if conf.MOCK_ENTITLEMENT:
-        return ['osdu-usa-dataset-osduonaws-testing', 'osdu-public-usa-dataset', 'osdu-crs-catalog-int-test-legaltag', 'osdu-usa-dataset-testm4', 'osdu-csv-parser-test']
+        return [
+            "osdu-usa-dataset-osduonaws-testing",
+            "osdu-public-usa-dataset",
+            "osdu-crs-catalog-int-test-legaltag",
+            "osdu-usa-dataset-testm4",
+            "osdu-csv-parser-test",
+        ]
 
     url = conf.LEGAL_BASE_URL + "/api/legal/v1/legaltags?valid=true"
-    headers = {'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition}
+    headers = {"Authorization": "Bearer " + token, "data-partition-id": data_partition}
     r = requests.get(url, headers=headers)
 
     legal_tags = []
 
     for name in r.json()["legalTags"]:
-        #print(name["name"])
+        # print(name["name"])
         tags = name["name"]
 
-        retval = common.get_legal_tag_info(data_partition_id=data_partition, access_token=token, legal_tags=[tags]) 
+        retval = common.get_legal_tag_info(
+            data_partition_id=data_partition, access_token=token, legal_tags=[tags]
+        )
         if retval:
-            #print(f"retval: {retval}")
+            # print(f"retval: {retval}")
             for retname in retval:
                 x = retname["name"]
                 assert x == tags, f"Expected to get {tags} from legal {retval}"
diff --git a/app/tests/integration/test_integration_035_translate.py b/app/tests/integration/test_integration_035_translate.py
index 2f9ea1981d737293d5a1824134fdb26b7effee28..d09a4b38cb3ac7f39eb5d84da09ed0edcb041117 100644
--- a/app/tests/integration/test_integration_035_translate.py
+++ b/app/tests/integration/test_integration_035_translate.py
@@ -5,7 +5,14 @@ To run the integration tests, start the OPA with sample policies first:
 opa run --server --watch tests/integration/translate/policies/*.rego
 """
 
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 import conf
 from auth import auth
 from main import app
@@ -23,14 +30,14 @@ import json
 from pathlib import Path
 
 # so the directory in which you run pytest or unittest doesn't matter as much
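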
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 
 # override dependency injection for authentication to entitlement service
 
-TEST_DATA_DIR = Path(__file__).resolve().parent / 'templates'
+TEST_DATA_DIR = Path(__file__).resolve().parent / "templates"
 
 client = TestClient(app)
 
@@ -68,21 +75,25 @@ def opa_ast_to_es_request_body_perf(client):
         sys.stdout = sys.__stdout__  # restore stdout
 
 
-def opa_ast_to_es_request_body(client, token, data_partition, data, service_url=None, status_code=200):
+def opa_ast_to_es_request_body(
+    client, token, data_partition, data, service_url=None, status_code=200
+):
     url = conf.SERVICE_BASE_PATH + "/translate"
     if service_url:
         url = service_url + url
     logging.info(f"URL {url}")
     sdata = json.dumps(data)
-    #bdata = sdata.encode('utf-8')
-    #files = {'file': ("query.txt", bdata)}
+    # bdata = sdata.encode('utf-8')
+    # files = {'file': ("query.txt", bdata)}
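+    # POST the partial-evaluation query to the /translate endpoint and check the expected status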
 
     response = client.post(
         url,
-        #files = files,
+        # files = files,
         data=sdata,
-        headers={'Authorization': 'Bearer ' + token,
-                 'data-partition-id': data_partition}
+        headers={
+            "Authorization": "Bearer " + token,
+            "data-partition-id": data_partition,
+        },
     )
 
     assert response.status_code == status_code, f"Expected {status_code} status code"
@@ -90,9 +101,11 @@ def opa_ast_to_es_request_body(client, token, data_partition, data, service_url=
 
 
 @pytest.mark.dependency(depends=["require_token"])
-def test_translate_local_and_service_url_search2(token, data_partition, service_url, opa_access):
-    #pytest.skip("Skipping translate test")
-    """ 
+def test_translate_local_and_service_url_search2(
+    token, data_partition, service_url, opa_access
+):
+    # pytest.skip("Skipping translate test")
+    """
     This test requires search2.rego to be loaded previously in test_integration_020_put.py
 
     operation: view, create, update, delete, purge
@@ -107,7 +120,7 @@ def test_translate_local_and_service_url_search2(token, data_partition, service_
     }
     """
     data = {
-        "query": "data.osdu.partition[\"" + data_partition + "\"].search2.allow == true",
+        "query": 'data.osdu.partition["' + data_partition + '"].search2.allow == true',
         "input": {
             "operation": "view",
             "groups": ["AAA", "BBB"],
@@ -117,45 +130,97 @@ def test_translate_local_and_service_url_search2(token, data_partition, service_
 
     if opa_access:
         result = opa_ast_to_es_request_body(
-            client=client, token=token, data_partition=data_partition, data=data)
+            client=client, token=token, data_partition=data_partition, data=data
+        )
 
         assert result == {
             "query": {
                 "bool": {
                     "should": [
-                        {"bool": {"filter": [
-                            {"terms": {"acl.owners": ["AAA", "BBB"]}}]}},
-                        {"bool": {"filter": [
-                            {"terms": {"acl.viewers": ["AAA", "BBB"]}}]}},
+                        {
+                            "bool": {
+                                "filter": [{"terms": {"acl.owners": ["AAA", "BBB"]}}]
+                            }
+                        },
+                        {
+                            "bool": {
+                                "filter": [{"terms": {"acl.viewers": ["AAA", "BBB"]}}]
+                            }
+                        },
                     ]
                 }
             }
         }, f"Unexpected response from translate: {result}"
 
-    result2 = opa_ast_to_es_request_body(client=requests,
-                                         token=token,
-                                         data_partition=data_partition,
-                                         data=data,
-                                         service_url=service_url)
+    result2 = opa_ast_to_es_request_body(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        data=data,
+        service_url=service_url,
+    )
 
     expected_result = {
         "query": {
             "bool": {
                 "should": [
-                    {"bool": {"filter": [
-                        {"terms": {"acl.owners": ["AAA", "BBB"]}}]}},
-                    {"bool": {"filter": [
-                        {"terms": {"acl.viewers": ["AAA", "BBB"]}}]}},
+                    {"bool": {"filter": [{"terms": {"acl.owners": ["AAA", "BBB"]}}]}},
+                    {"bool": {"filter": [{"terms": {"acl.viewers": ["AAA", "BBB"]}}]}},
                 ]
             }
         }
     }
 
-    duplicates_in_result = {'query': { 'bool': { 'should': [ {'bool': { 'filter': [ {'terms': { 'acl.owners': ['AAA', 'BBB', 'AAA', 'BBB', 'AAA', 'BBB']}}]}}, {'bool': {'filter': [{'terms': {'acl.viewers': ['AAA', 'BBB', 'AAA', 'BBB', 'AAA', 'BBB']}}]}}]}}}
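+    # some deployments return duplicated ACL group entries from translate;
+    # this is tolerated here and reported as an xfail below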
+    duplicates_in_result = {
+        "query": {
+            "bool": {
+                "should": [
+                    {
+                        "bool": {
+                            "filter": [
+                                {
+                                    "terms": {
+                                        "acl.owners": [
+                                            "AAA",
+                                            "BBB",
+                                            "AAA",
+                                            "BBB",
+                                            "AAA",
+                                            "BBB",
+                                        ]
+                                    }
+                                }
+                            ]
+                        }
+                    },
+                    {
+                        "bool": {
+                            "filter": [
+                                {
+                                    "terms": {
+                                        "acl.viewers": [
+                                            "AAA",
+                                            "BBB",
+                                            "AAA",
+                                            "BBB",
+                                            "AAA",
+                                            "BBB",
+                                        ]
+                                    }
+                                }
+                            ]
+                        }
+                    },
+                ]
+            }
+        }
+    }
     print(f"expected result: {expected_result}")
     print(f"result2:         {result2}")
 
-    assert result2 == expected_result or result2 == duplicates_in_result, f"Unexpected response from translate: {result2}"
+    assert (
+        result2 == expected_result or result2 == duplicates_in_result
+    ), f"Unexpected response from translate: {result2}"
     if result2 == duplicates_in_result:
         pytest.xfail("Found duplicates in result")
 
@@ -171,26 +236,34 @@ def test_translate_bad_operation(token, data_partition, service_url):
     bad_operation = "badoperation"
     permitted_operations = "'view', 'create', 'update', 'delete' or 'purge'"
     data = {
-        "query": "data.osdu.partition[\"" + data_partition + "\"].search2.allow == true",
+        "query": 'data.osdu.partition["' + data_partition + '"].search2.allow == true',
         "input": {
             "operation": bad_operation,
             "groups": ["AAA", "BBB"],
         },
         "unknowns": ["input.record"],
     }
-    result = opa_ast_to_es_request_body(client=requests,
-                                        token=token,
-                                        data_partition=data_partition,
-                                        data=data,
-                                        service_url=service_url,
-                                        status_code=422)
+    result = opa_ast_to_es_request_body(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        data=data,
+        service_url=service_url,
+        status_code=422,
+    )
     print(result)
-    given = result['detail'][0]['input']
-    permitted = result['detail'][0]['ctx']['expected']
-    msg = result['detail'][0]['msg']
-    assert "Input should be" in msg, f"expected 'Input should be' in msg of return json {msg}"
-    assert bad_operation == given, f"expected 'bad operation' {bad_operation} in msg of return json"
-    assert permitted == permitted_operations, f"expected list of permitted operations {permitted_operations} but got {permitted}"
+    given = result["detail"][0]["input"]
+    permitted = result["detail"][0]["ctx"]["expected"]
+    msg = result["detail"][0]["msg"]
+    assert (
+        "Input should be" in msg
+    ), f"expected 'Input should be' in msg of returned JSON {msg}"
+    assert (
+        bad_operation == given
+    ), f"expected bad operation {bad_operation} to be echoed in the input field of the returned JSON"
+    assert (
+        permitted == permitted_operations
+    ), f"expected list of permitted operations {permitted_operations} but got {permitted}"
 
 
 @pytest.mark.dependency(depends=["require_token"])
@@ -199,19 +272,21 @@ def test_translate_not_permitted(token, data_partition, service_url):
     This test checks access to a partition that does not exist or that you do not have access to
     """
     data = {
-        "query": "data.osdu.partition[\"doesnotexist\"].search2.allow == true",
+        "query": 'data.osdu.partition["doesnotexist"].search2.allow == true',
         "input": {
             "operation": "create",
             "groups": ["AAA", "BBB"],
         },
         "unknowns": ["input.record"],
     }
-    result = opa_ast_to_es_request_body(client=requests,
-                                        token=token,
-                                        data_partition=data_partition,
-                                        data=data,
-                                        service_url=service_url,
-                                        status_code=200)
+    result = opa_ast_to_es_request_body(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        data=data,
+        service_url=service_url,
+        status_code=200,
+    )
     print(result)
     assert "query" in result, f"Expected query in {result}"
     assert "match_none" in str(result), f"Expected match_none in {result}"
@@ -223,19 +298,23 @@ def test_translate_does_not_exist(token, data_partition, service_url):
     This test checks the response for a policy that does not exist
     """
     data = {
-        "query": "data.osdu.partition[\"" + data_partition + "\"].doesnotexist.allow == true",
+        "query": 'data.osdu.partition["'
+        + data_partition
+        + '"].doesnotexist.allow == true',
         "input": {
             "operation": "view",
             "groups": ["AAA", "BBB"],
         },
         "unknowns": ["input.record"],
     }
-    result = opa_ast_to_es_request_body(client=requests,
-                                        token=token,
-                                        data_partition=data_partition,
-                                        data=data,
-                                        service_url=service_url,
-                                        status_code=404)
+    result = opa_ast_to_es_request_body(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        data=data,
+        service_url=service_url,
+        status_code=404,
+    )
     print(result)
     assert "detail" in result, f"Expected detail in {result}"
 
@@ -246,47 +325,48 @@ def test_translate_no_groups(token, data_partition, service_url):
     This test checks the response when no groups are provided in the input
     """
     data = {
-        "query": "data.osdu.partition[\"" + data_partition + "\"].search2.allow == true",
+        "query": 'data.osdu.partition["' + data_partition + '"].search2.allow == true',
         "input": {
             "operation": "view",
             "groups": [],
         },
         "unknowns": ["input.record"],
     }
-    result = opa_ast_to_es_request_body(client=requests,
-                                        token=token,
-                                        data_partition=data_partition,
-                                        data=data,
-                                        service_url=service_url,
-                                        status_code=200)
+    result = opa_ast_to_es_request_body(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        data=data,
+        service_url=service_url,
+        status_code=200,
+    )
     print(result)
     assert "query" in result, f"Expected query in {result}"
     assert "match_none" in str(result), f"Expected match_none in {result}"
 
+
 @pytest.mark.dependency(depends=["require_token"])
 def test_translate_deny_only_es_query(token, data_partition, service_url):
     """
     This test checks the ES query returned for a deny-only policy. The response should start with {'query': ...}
     """
     data = {
-        "query": "data.osdu.partition[\"" + data_partition + "\"].search_with_preprocessor.allow == true",
-        "input": {
-            "operation": "view"
-        },
-        "unknowns": [
-            "input.record"
-        ]
+        "query": 'data.osdu.partition["'
+        + data_partition
+        + '"].search_with_preprocessor.allow == true',
+        "input": {"operation": "view"},
+        "unknowns": ["input.record"],
     }
 
+    result = opa_ast_to_es_request_body(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        data=data,
+        service_url=service_url,
+        status_code=200,
+    )
 
-    result = opa_ast_to_es_request_body(client=requests,
-                                        token=token,
-                                        data_partition=data_partition,
-                                        data=data,
-                                        service_url=service_url,
-                                        status_code=200)
-    
-    
     print(result)
 
     expected_response = {
@@ -296,20 +376,20 @@ def test_translate_deny_only_es_query(token, data_partition, service_url):
                     {
                         "bool": {
                             "should": [
-                            {
-                                "bool": {
-                                    "filter": [
-                                    {
-                                        "terms": {
-                                            "legal.legaltags": [
-                                                "deny_legaltag_1",
-                                                "deny_legaltag_2"
-                                            ]
-                                        }
+                                {
+                                    "bool": {
+                                        "filter": [
+                                            {
+                                                "terms": {
+                                                    "legal.legaltags": [
+                                                        "deny_legaltag_1",
+                                                        "deny_legaltag_2",
+                                                    ]
+                                                }
+                                            }
+                                        ]
                                     }
-                                    ]
                                 }
-                            }
                             ]
                         }
                     }
@@ -317,7 +397,7 @@ def test_translate_deny_only_es_query(token, data_partition, service_url):
             }
         }
     }
-    
+
     assert result == expected_response, "Error in response"
 
 
@@ -327,27 +407,23 @@ def test_translate_allow_and_deny_es_query(token, data_partition, service_url):
     This test checks the ES query returned for a policy with both allow and deny rules. The response should be ES compatible.
     """
     data = {
-            "query": "data.osdu.partition[\"" + data_partition + "\"].search_with_preprocessor_testcase1.allow == true",
-            "input": {
-                "operation": "view"
-            },
-            "unknowns": [
-                "input.record"
-            ]
-        }
-
+        "query": 'data.osdu.partition["'
+        + data_partition
+        + '"].search_with_preprocessor_testcase1.allow == true',
+        "input": {"operation": "view"},
+        "unknowns": ["input.record"],
+    }
 
+    result = opa_ast_to_es_request_body(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        data=data,
+        service_url=service_url,
+        status_code=200,
+    )
 
-    result = opa_ast_to_es_request_body(client=requests,
-                                        token=token,
-                                        data_partition=data_partition,
-                                        data=data,
-                                        service_url=service_url,
-                                        status_code=200)
-    
-    
     print(result)
-    
 
     expected_response = {
         "query": {
@@ -359,15 +435,15 @@ def test_translate_allow_and_deny_es_query(token, data_partition, service_url):
                                 {
                                     "bool": {
                                         "filter": [
-                                        {
-                                            "terms": {
-                                                "legal.legaltags": [
-                                                    "allow_legaltag_1",
-                                                    "allow_legaltag_2"
-                                                ]
+                                            {
+                                                "terms": {
+                                                    "legal.legaltags": [
+                                                        "allow_legaltag_1",
+                                                        "allow_legaltag_2",
+                                                    ]
+                                                }
                                             }
-                                        }
-                                    ]
+                                        ]
                                     }
                                 }
                             ]
@@ -380,25 +456,25 @@ def test_translate_allow_and_deny_es_query(token, data_partition, service_url):
                                     "bool": {
                                         "should": [
                                             {
-                                            "bool": {
-                                                "filter": [
-                                                    {
-                                                        "terms": {
-                                                            "legal.legaltags": [
-                                                                "deny_legaltag_1",
-                                                                "deny_legaltag_2"
-                                                            ]
+                                                "bool": {
+                                                    "filter": [
+                                                        {
+                                                            "terms": {
+                                                                "legal.legaltags": [
+                                                                    "deny_legaltag_1",
+                                                                    "deny_legaltag_2",
+                                                                ]
+                                                            }
                                                         }
-                                                    }
-                                                ]
-                                            }
+                                                    ]
+                                                }
                                             }
                                         ]
                                     }
                                 }
                             ]
                         }
-                    }
+                    },
                 ]
             }
         }
@@ -407,38 +483,31 @@ def test_translate_allow_and_deny_es_query(token, data_partition, service_url):
     assert result == expected_response, "Incorrect syntax"
 
 
-
 @pytest.mark.dependency(depends=["require_token"])
 def test_translate_es_subquery_match_all(token, data_partition, service_url):
     """
     This test checks es subquery with match_all
     """
     data = {
-            "query": "data.osdu.partition[\"" + data_partition + "\"].search_issue_103_tc1.allow == true",
-                "input": {
-                    "operation": "view"
-                },
-                "unknowns": [
-                    "input.record"
-                ]
-            }
-    
-    result = opa_ast_to_es_request_body(client=requests,
-                                        token=token,
-                                        data_partition=data_partition,
-                                        data=data,
-                                        service_url=service_url,
-                                        status_code=200)
-    
-    
+        "query": 'data.osdu.partition["'
+        + data_partition
+        + '"].search_issue_103_tc1.allow == true',
+        "input": {"operation": "view"},
+        "unknowns": ["input.record"],
+    }
+
+    result = opa_ast_to_es_request_body(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        data=data,
+        service_url=service_url,
+        status_code=200,
+    )
+
     print(result)
-    
 
-    expected_response = {
-        "query": {
-            "match_all": {}
-        }
-    }
+    expected_response = {"query": {"match_all": {}}}
 
     assert result == expected_response, "Wrong output"
 
@@ -449,60 +518,52 @@ def test_translate_es_subquery_match_none(token, data_partition, service_url):
     This test checks es subquery with match_none
     """
     data = {
-            "query": "data.osdu.partition[\"" + data_partition + "\"].search_issue_103_tc2.allow == true",
-                "input": {
-                    "operation": "view"
-                },
-                "unknowns": [
-                    "input.record"
-                ]
-            }
-    
-    result = opa_ast_to_es_request_body(client=requests,
-                                        token=token,
-                                        data_partition=data_partition,
-                                        data=data,
-                                        service_url=service_url,
-                                        status_code=200)
-    
-    
+        "query": 'data.osdu.partition["'
+        + data_partition
+        + '"].search_issue_103_tc2.allow == true',
+        "input": {"operation": "view"},
+        "unknowns": ["input.record"],
+    }
+
+    result = opa_ast_to_es_request_body(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        data=data,
+        service_url=service_url,
+        status_code=200,
+    )
+
     print(result)
-    
 
-    expected_response = {
-        "query": {
-            "match_none": {}
-        }
-    }
-    
+    expected_response = {"query": {"match_none": {}}}
 
     assert result == expected_response, "Wrong output"
 
+
 @pytest.mark.dependency(depends=["require_token"])
 def test_translate_es_subquery_data_group(token, data_partition, service_url):
     """
     This test checks es subquery to search data records for a particular data group
     """
     data = {
-            "query": "data.osdu.partition[\"" + data_partition + "\"].search_issue_103_tc3.allow == true",
-                "input": {
-                    "operation": "view"
-                },
-                "unknowns": [
-                    "input.record"
-                ]
-            }
-    
-    result = opa_ast_to_es_request_body(client=requests,
-                                        token=token,
-                                        data_partition=data_partition,
-                                        data=data,
-                                        service_url=service_url,
-                                        status_code=200)
-    
-    
+        "query": 'data.osdu.partition["'
+        + data_partition
+        + '"].search_issue_103_tc3.allow == true',
+        "input": {"operation": "view"},
+        "unknowns": ["input.record"],
+    }
+
+    result = opa_ast_to_es_request_body(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        data=data,
+        service_url=service_url,
+        status_code=200,
+    )
+
     print(result)
-    
 
     expected_response = {
         "query": {
@@ -511,37 +572,20 @@ def test_translate_es_subquery_data_group(token, data_partition, service_url):
                     {
                         "bool": {
                             "filter": [
-                                {
-                                    "term": {
-                                        "acl.viewers": "data.site.administrators"
-                                    }
-                                }
+                                {"term": {"acl.viewers": "data.site.administrators"}}
                             ]
                         }
                     },
                     {
                         "bool": {
                             "filter": [
-                                {
-                                    "term": {
-                                        "acl.owners": "data.site.administrators"
-                                }
-                            }
-                        ]
+                                {"term": {"acl.owners": "data.site.administrators"}}
+                            ]
                         }
-                    }
+                    },
                 ]
             }
         }
     }
-    
 
     assert result == expected_response, "Wrong output"
-
-
-
-
-
-    
-
-    
diff --git a/app/tests/integration/test_integration_038_legalpart.py b/app/tests/integration/test_integration_038_legalpart.py
index 70ac214b2b1ed5c40b55dd105608ec7b950d6d6f..85a5746592b878605a7c1ed9468eabb774f0c968 100644
--- a/app/tests/integration/test_integration_038_legalpart.py
+++ b/app/tests/integration/test_integration_038_legalpart.py
@@ -1,5 +1,6 @@
 # test_integration_038_legal.py
 import pytest
+
 # requires pytest_dependency to be installed
 import unittest
 import responses
@@ -14,8 +15,8 @@ import json
 from pathlib import Path
 
 # so the directory in which you run pytest or unittest doesn't matter as much
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 from main import app
@@ -25,16 +26,25 @@ import conf
 import testlib
 
 # override dependency injection for authentication to entitlement service
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 
-TEST_DATA_DIR = Path(__file__).resolve().parent / 'templates'
+TEST_DATA_DIR = Path(__file__).resolve().parent / "templates"
 
 client = TestClient(app)
 
+
 @pytest.mark.dependency(name="require_token")
 def test_require_token(token):
     assert token is not None, "No token provided on command line"
 
+
 @pytest.mark.dependency(depends=["require_token"])
 def test_legal_tags(token, data_partition):
     """
@@ -46,16 +56,22 @@ def test_legal_tags(token, data_partition):
         pytest.skip("Skipping diagnostic test on legal tags - ENABLE_DEV_DIAGNOSTICS")
 
     url = conf.LEGAL_BASE_URL + "/api/legal/v1/legaltags?valid=true"
-    r = requests.get(url,
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
-    #print(json.dumps(r.json(), indent=4))
+    r = requests.get(
+        url,
+        headers={
+            "Authorization": "Bearer " + token,
+            "data-partition-id": data_partition,
+        },
+    )
+    # print(json.dumps(r.json(), indent=4))
 
     count = 0
     for name in r.json()["legalTags"]:
         tags = name["name"]
 
-        retval = common.get_legal_tag_info(data_partition_id=data_partition, access_token=token, legal_tags=[tags]) 
+        retval = common.get_legal_tag_info(
+            data_partition_id=data_partition, access_token=token, legal_tags=[tags]
+        )
         if retval:
             for retname in retval:
                 x = retname["name"]
@@ -65,6 +81,7 @@ def test_legal_tags(token, data_partition):
                     break
     assert count > 0, f"Expected some legal tags to exist. Found: {count}"
 
+
 @pytest.mark.dependency(depends=["require_token"])
 def test_partition(token, data_partition):
     """
@@ -73,11 +90,15 @@ def test_partition(token, data_partition):
     """
 
     pytest.skip("Skipping testing of partitions service")
-    url =  conf.PARTITION_API + "/partitions/" + data_partition
+    url = conf.PARTITION_API + "/partitions/" + data_partition
     print(f"URL: {url}")
-    r = requests.get(url,
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
+    r = requests.get(
+        url,
+        headers={
+            "Authorization": "Bearer " + token,
+            "data-partition-id": data_partition,
+        },
+    )
 
     print(r.text)
     print(json.dumps(r.json(), indent=4))
diff --git a/app/tests/integration/test_integration_040_delete.py b/app/tests/integration/test_integration_040_delete.py
index db43aef732a8d7ff06f8a0eb4b2885b8718522ce..2e9223bbe2a930ef22b9b1bdb4ab49e215454a43 100644
--- a/app/tests/integration/test_integration_040_delete.py
+++ b/app/tests/integration/test_integration_040_delete.py
@@ -1,5 +1,6 @@
 # test_integration_040_delete.py
 import pytest
+
 # requires pytest_dependency to be installed
 import unittest
 import responses
@@ -15,8 +16,8 @@ import time
 from pathlib import Path
 
 # so the directory in which you run pytest or unittest doesn't matter as much
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 from main import app
@@ -25,24 +26,35 @@ import conf
 import testlib
 
 # override dependency injection for authentication to entitlement service
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 
-TEST_DATA_DIR = Path(__file__).resolve().parent / 'templates'
+TEST_DATA_DIR = Path(__file__).resolve().parent / "templates"
 
 client = TestClient(app)
 
+
 @pytest.mark.dependency(name="require_token")
 def test_require_token(token):
     assert token is not None, "No token provided on command line"
 
+
 @pytest.mark.delete
 @pytest.mark.dependency(depends=["require_token"])
 @pytest.mark.parametrize("regofile", testlib.get_rego_templates())
-def test_delete_osdu_partition_policies_service_url(token, data_partition, bundle_pause, service_url, regofile):
+def test_delete_osdu_partition_policies_service_url(
+    token, data_partition, bundle_pause, service_url, regofile
+):
     """
     Test delete /policies/osdu/partition/<data_partition>/<policy>
     """
-    delete_skip_list = ['dataauthz.rego', 'search.rego', 'search1.rego', 'search2.rego']
+    delete_skip_list = ["dataauthz.rego", "search.rego", "search1.rego", "search2.rego"]
     if regofile in delete_skip_list:
         pytest.skip(f"skipping delete of {regofile}")
     id = f"osdu/partition/{data_partition}/{regofile}"
@@ -51,8 +63,12 @@ def test_delete_osdu_partition_policies_service_url(token, data_partition, bundl
     # Let's not burden the bundle service backend - rate limit issues
     time.sleep(1)
 
-    r = requests.delete(service_url + url,
-        headers={'Authorization': 'Bearer ' + token,
-        'data-partition-id': data_partition})
+    r = requests.delete(
+        service_url + url,
+        headers={
+            "Authorization": "Bearer " + token,
+            "data-partition-id": data_partition,
+        },
+    )
     assert r.status_code == 200, f"delete of {regofile} {url}"
     assert "result" in r.text, f"result not expected {r.text}"
diff --git a/app/tests/integration/test_integration_043_sleep.py b/app/tests/integration/test_integration_043_sleep.py
index 3e5325cfa04c4b0cf7ac181a94c9f97946c55bea..74da2d32c362b6e3f6d3082706fb4249cc629526 100644
--- a/app/tests/integration/test_integration_043_sleep.py
+++ b/app/tests/integration/test_integration_043_sleep.py
@@ -3,6 +3,7 @@ import pytest
 import time
 import testlib
 
+
 @pytest.mark.delete
 def test_wait_for_bundle(bundle_pause):
     """
diff --git a/app/tests/integration/test_integration_045_delete.py b/app/tests/integration/test_integration_045_delete.py
index 20b34a8b91ad6d0a06240c380d13562ec6aace79..52febcdd74a487e1d7bed44a63c3bc9aaaa04497 100644
--- a/app/tests/integration/test_integration_045_delete.py
+++ b/app/tests/integration/test_integration_045_delete.py
@@ -1,5 +1,6 @@
 # test_integration_045_delete.py
 import pytest
+
 # requires pytest_dependency to be installed
 import unittest
 import responses
@@ -16,8 +17,8 @@ import json
 from pathlib import Path
 
 # so the directory in which you run pytest or unittest doesn't matter as much
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 from main import app
@@ -26,29 +27,40 @@ import conf
 import testlib
 
 # override dependency injection for authentication to entitlement service
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 
-TEST_TEMPLATE_DATA_DIR = Path(__file__).resolve().parent / 'templates'
+TEST_TEMPLATE_DATA_DIR = Path(__file__).resolve().parent / "templates"
 
 client = TestClient(app)
 
+
 @pytest.mark.dependency(name="require_token")
 def test_require_token(token):
     assert token is not None, "No token provided on command line"
 
+
 @pytest.mark.delete
 @pytest.mark.dependency(depends=["require_token"])
-def test_confirm_deletion_of_osdu_partition_policies_expect_not_found_service_url(token, data_partition, service_url):
+def test_confirm_deletion_of_osdu_partition_policies_expect_not_found_service_url(
+    token, data_partition, service_url
+):
     """
     Test delete /policies/osdu/partition/<data_partition>/<policy>
     """
-    #pytest.skip("Skipping delete")
+    # pytest.skip("Skipping delete")
     num_tests = 0
     p = os.path.dirname(os.path.abspath(__file__))
-    delete_skip_list = ['dataauthz.rego', 'search.rego', 'search1.rego', 'search2.rego']
-    template_datadir = os.path.join(p, '..', 'templates')
+    delete_skip_list = ["dataauthz.rego", "search.rego", "search1.rego", "search2.rego"]
+    template_datadir = os.path.join(p, "..", "templates")
     for filename in sorted(os.listdir(template_datadir)):
-        filetype='.rego'
+        filetype = ".rego"
         if filename.endswith(filetype):
             if filename in delete_skip_list:
                 continue
@@ -56,12 +68,18 @@ def test_confirm_deletion_of_osdu_partition_policies_expect_not_found_service_ur
             url = conf.SERVICE_BASE_PATH + "/policies/" + id
             # Let's not burden the bundle service backend - rate limit issues
             time.sleep(1)
-            r = requests.delete(service_url + url,
-                headers={'Authorization': 'Bearer ' + token,
-                'data-partition-id': data_partition})
+            r = requests.delete(
+                service_url + url,
+                headers={
+                    "Authorization": "Bearer " + token,
+                    "data-partition-id": data_partition,
+                },
+            )
             if r.status_code == 503:
                 print(f"Unexpected response {r.status_code} {r.text} rate limit?")
-            assert r.status_code == 404, f"Expect 404_NOT_FOUND. Delete of {filename} {url} should already be deleted. {r.text}"
+            assert (
+                r.status_code == 404
+            ), f"Expect 404_NOT_FOUND. Delete of {filename} {url} should already be deleted. {r.text}"
             assert "not_found" in r.text, f"expect not found message {r.text}"
             num_tests = num_tests + 1
     assert num_tests >= 7, "Expected at least 7 policies to be tested"
diff --git a/app/tests/integration/test_integration_099_load.py b/app/tests/integration/test_integration_099_load.py
index 958b9f9ec4d461a27206b23188ebbd60f6d9d306..66e70e92efc0237af7d1ae3edc8204abd5e7c2a3 100644
--- a/app/tests/integration/test_integration_099_load.py
+++ b/app/tests/integration/test_integration_099_load.py
@@ -1,5 +1,6 @@
 # test_integration_020_put.py
 import pytest
+
 # requires pytest_dependency to be installed
 import unittest
 import responses
@@ -15,8 +16,8 @@ import time
 from pathlib import Path
 
 # so the directory in which you run pytest or unittest doesn't matter as much
-sys.path.append(os.path.abspath('..'))
-sys.path.append('tests')
+sys.path.append(os.path.abspath(".."))
+sys.path.append("tests")
 
 # local
 from main import app
@@ -25,25 +26,36 @@ import conf
 import testlib
 
 # override dependency injection for authentication to entitlement service
-from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
+from override_depends import (
+    override_require_authorized_user,
+    set_authorize_session,
+    ADMIN_ONLY_SVC,
+    USER_AND_ADMIN_SVC,
+    USER_ONLY_SVC,
+    OTHER_ONLY_SVC,
+)
 
-TEST_DATA_DIR = Path(__file__).resolve().parent / '../templates'
+TEST_DATA_DIR = Path(__file__).resolve().parent / "../templates"
 
 client = TestClient(app)
 
+
 @pytest.mark.dependency(name="require_token")
 def test_require_token(token):
     assert token is not None, "No token provided on command line"
 
+
 @pytest.mark.parametrize("regofile", testlib.get_rego_templates())
 @pytest.mark.dependency(depends=["require_token"])
 def test_load_policy_service_url(token, data_partition, service_url, domain, regofile):
     assert token is not None, "No token provided on command line"
     # Let's not burden the bundle service backend - rate limit issues
     time.sleep(1)
-    testlib.put_policy_test_data(client=requests,
-         token=token,
-         data_partition=data_partition,
-         service_url=service_url,
-         domain=domain,
-         filename=regofile)
\ No newline at end of file
+    testlib.put_policy_test_data(
+        client=requests,
+        token=token,
+        data_partition=data_partition,
+        service_url=service_url,
+        domain=domain,
+        filename=regofile,
+    )
diff --git a/app/tests/integration/test_python_version.py b/app/tests/integration/test_python_version.py
index bf038a473f005604bc4a489f1ea4d5700ec19999..8eec2ece24ffd7cd9862837da4bcb80387de3557 100644
--- a/app/tests/integration/test_python_version.py
+++ b/app/tests/integration/test_python_version.py
@@ -1,10 +1,15 @@
 import sys
 import pytest
 
+
 def test_python_version():
     """
     Only 3.9.x is supported
     Python version 3.9.13 or later is recommended
     """
     assert sys.version_info >= (3, 9, 5), f"Python version too low. {sys.version_info}"
-    assert sys.version_info < (3, 9, 9999), f"Python version too new. {sys.version_info}"
+    assert sys.version_info < (
+        3,
+        9,
+        9999,
+    ), f"Python version too new. {sys.version_info}"
diff --git a/app/tests/unit/test_api_unit.py b/app/tests/unit/test_api_unit.py
index 8658f54818738f2a4e1012f6029e6634c2f927f3..2946a6d3036bd1c7325bae343460730e4ee99a64 100644
--- a/app/tests/unit/test_api_unit.py
+++ b/app/tests/unit/test_api_unit.py
@@ -15,28 +15,13 @@ from main import app
 import conf
 import testlib
 
-#logging.basicConfig( stream=sys.stderr )
-#logger = logging.getLogger("unittest" ).setLevel( logging.DEBUG )
-
 # override dependency injection for authentication to entitlement service
 from override_depends import override_require_authorized_user, set_authorize_session, ADMIN_ONLY_SVC, USER_AND_ADMIN_SVC, USER_ONLY_SVC, OTHER_ONLY_SVC
 
 client = TestClient(app)
 
-#@pytest.mark.parametrize("api_path", [("/policies"), ("/policies/xyz")])
 class TryUnitTesting(unittest.TestCase):
 
-    #@responses.activate
-    # def mock_entitlement(self):
-    #     print("mock")
-    #     entsvc = [{'name': 'service.policy.user', 'description': 'Policy user group', 'email': 'service.policy.user@osdu.group'}, {'name': 'service.policy.admin', 'description': 'Policy admin group', 'email': 'service.policy.admin@osdu.group'}]
-    #     responses.add(
-    #         responses.GET,
-    #         conf.ENTITLEMENTS_GROUPS_API,
-    #         json={"groups": entsvc},
-    #         status=200,
-    #     )
-
     def test_read_main_home_page(self):
         """Check Home Page"""
         response = client.get("/")
@@ -65,13 +50,13 @@ class TryUnitTesting(unittest.TestCase):
         response = client.get(conf.SERVICE_BASE_PATH+"/openapi.json")
         self.assertEqual(response.status_code, 200, "Expected OpenAPI.json to be available") 
 
-    def requires_auth(self, method, URL, api_path=None):
+    def requires_auth(self, method, url, api_path=None):
         """ Should always return 403 """
-        with self.subTest(URL=URL):
-            response = method(URL)
+        with self.subTest(URL=url):
+            response = method(url)
             if response.status_code == 405:
                 pytest.xfail("method not allowed")
-            self.assertEqual(response.status_code, 403, f"Not sending token to our API should result in 403 Forbidden {URL} but got {response.status_code}")
+            self.assertEqual(response.status_code, 403, f"Not sending token to our API should result in 403 Forbidden {url} but got {response.status_code}")
 
     @parameterized.expand([
         ["/policies", "get"],
diff --git a/deployment/scripts/azure/BootstrapBundles.py b/deployment/scripts/azure/BootstrapBundles.py
index 26ab53e1dc93bb036fb5bbc76e841a9a1dc1f897..01ca999ced62cc3b0c4bff4c23d3a74ffc86040e 100644
--- a/deployment/scripts/azure/BootstrapBundles.py
+++ b/deployment/scripts/azure/BootstrapBundles.py
@@ -16,7 +16,7 @@ class BootstrapBundles:
         self.blob_service_client = BlobServiceClient.from_connection_string(
             helper.get_storage_connection_string())
 
-    def create_bundles(path, tar_name):
+    def create_bundles(self, path, tar_name):
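+        # walk the given path and add every file found to a gzipped tar bundle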
         with tarfile.open(tar_name, "w:gz") as tar_handle:
             for root, dirs, files in os.walk(path):
                 for file in files:
diff --git a/devops/gc/bootstrap-osdu-module/DataPartitionBundles.py b/devops/gc/bootstrap-osdu-module/DataPartitionBundles.py
index 17a94427461154ffac63445d60fcd0561d7069e0..bf18f0fcb34e1b0bff690807920ced980616cdf1 100644
--- a/devops/gc/bootstrap-osdu-module/DataPartitionBundles.py
+++ b/devops/gc/bootstrap-osdu-module/DataPartitionBundles.py
@@ -5,8 +5,7 @@ import argparse
 
 
 class BootstrapDataPartitionBundles:
-
-    def create_and_upload_dp_bundles(dp_id):
+    def create_and_upload_dp_bundles(self, dp_id):
         tar_name = "bundle-{dp}.tar.gz".format(dp=dp_id)
         dataauthz_template_name = "dataauthz_template.rego"
         manifest_template_name = "manifest_template.manifest"
@@ -17,8 +16,7 @@ class BootstrapDataPartitionBundles:
         template_path = "devops/gc/bootstrap-osdu-module/templates/"
 
         env = Environment(
-            loader=FileSystemLoader(template_path),
-            autoescape=select_autoescape()
+            loader=FileSystemLoader(template_path), autoescape=select_autoescape()
         )
         dataauthz_template = env.get_template(dataauthz_template_name)
         manifest_template = env.get_template(manifest_template_name)
@@ -27,20 +25,25 @@ class BootstrapDataPartitionBundles:
         manifest_render = manifest_template.render(dp_id=dp_id)
         search_render = search_template.render(dp_id=dp_id)
 
-        with open(dataauthz_filename,"w") as f1:
+        with open(dataauthz_filename, "w") as f1:
             f1.write(dataauthz_render)
         with open(manifest_filename, "w") as f2:
             f2.write(manifest_render)
         with open(search_filename, "w") as f2:
             f2.write(search_render)
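+        # package the rendered dataauthz, manifest and search policies into the partition bundle tarball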
         with tarfile.open(tar_name, "w:gz") as tar_handle:
-            tar_handle.add(os.path.abspath(dataauthz_filename), arcname=dataauthz_filename)
-            tar_handle.add(os.path.abspath(manifest_filename), arcname=manifest_filename)
+            tar_handle.add(
+                os.path.abspath(dataauthz_filename), arcname=dataauthz_filename
+            )
+            tar_handle.add(
+                os.path.abspath(manifest_filename), arcname=manifest_filename
+            )
             tar_handle.add(os.path.abspath(search_filename), arcname=search_filename)
 
+
 # Initialize class and upload bundles
-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser()
-    parser.add_argument('--partition', required=True, type=str)
+    parser.add_argument("--partition", required=True, type=str)
     args = parser.parse_args()
-    BootstrapDataPartitionBundles.create_and_upload_dp_bundles(args.partition)
+    BootstrapDataPartitionBundles().create_and_upload_dp_bundles(args.partition)
diff --git a/loadtest/locustfile.py b/loadtest/locustfile.py
index ea2c70793abc59420d8807fc99dd3375fa3a9e4b..1ec190ebf831aa8f50b8af0d19e5b00c723ee4fc 100644
--- a/loadtest/locustfile.py
+++ b/loadtest/locustfile.py
@@ -96,8 +96,6 @@ class PolicyServiceUser(HttpUser):
         """
         Get list of policies
         """
-        data_partition = os.environ.get('DATA_PARTITION')
-        target_data_partition = os.environ.get('TARGET_DATA_PARTITION', data_partition)
         url = SERVICE_BASE_PATH + "/policies"
         with self.client.get(url, catch_response=True) as r:
             if r.status_code == 200:
diff --git a/setversion.py b/setversion.py
index 27a08597a86d073c4f69e94b0120ebd26783428b..d08120efee19c23caccb730510d2b7e33c950674 100755
--- a/setversion.py
+++ b/setversion.py
@@ -44,10 +44,6 @@ def main(
         today = datetime.now()
         commit_timestamp = today.isoformat()
 
-    regex = re.compile("[^a-zA-Z0-9_]")
-    #comment_message = commit_message
-    #commit_message = regex.sub("", commit_message[0:128])
-
     with open(path + "/" + pyfile, "w") as f:
         f.write('__apiversion__ = "1.0.0"\n')
         f.write(f'__version__ = "{__version__}"\n')