diff --git a/frontend/admincli/Dockerfile b/frontend/admincli/Dockerfile deleted file mode 100644 index 8da7b3bf33d15bb283b4d7db4e928a3c22bd8507..0000000000000000000000000000000000000000 --- a/frontend/admincli/Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -FROM python:3.9-slim -RUN useradd --create-home --shell /bin/bash app_user -WORKDIR /home/app_user -COPY requirements.txt ./ -RUN pip install --no-cache-dir -r requirements.txt -USER app_user -COPY . . -CMD ["bash"] diff --git a/frontend/admincli/Makefile b/frontend/admincli/Makefile deleted file mode 100644 index c29088914b3d6029d57b03082fe10ed2d49c127b..0000000000000000000000000000000000000000 --- a/frontend/admincli/Makefile +++ /dev/null @@ -1,66 +0,0 @@ -IMAGE_NAME=policy-service-admincli -TAG=latest -.PHONY: build clean build_docker run test -default: build -POLICY_LOCAL_URL=http://host.docker.internal:8080 -version := $(./pol.py --version) -build: - pyinstaller --onefile pol.py - -clean: - - rm -fr dist build pol.spec *.csv *.xlsx *.rego *.json *.las - - docker rm -f $(IMAGE_NAME) - -build_docker: - docker build --network host -t $(IMAGE_NAME):$(TAG) -f Dockerfile . - -run_local: - docker run -it --rm \ - -e DATA_PARTITION=${DATA_PARTITION} \ - -e TOKEN="${TOKEN}" \ - -e POLICY_URL=$(POLICY_LOCAL_URL) \ - -e ENTITLEMENTS_URL=${ENTITLEMENTS_URL} \ - -e LEGAL_URL=${LEGAL_URL} \ - --name $(IMAGE_NAME) $(IMAGE_NAME):$(TAG) - -run: - docker run -it --rm \ - -e DATA_PARTITION=${DATA_PARTITION} \ - -e TOKEN="${TOKEN}" \ - -e POLICY_URL=${POLICY_URL} \ - -e ENTITLEMENTS_URL=${ENTITLEMENTS_URL} \ - -e LEGAL_URL=${LEGAL_URL} \ - --name $(IMAGE_NAME) $(IMAGE_NAME):$(TAG) - -scan: - docker scan $(IMAGE_NAME):$(TAG) - -# requires PYTHONPATH to be set to admincli root directory -tests: test - -test: - python3.9 -m pytest -v - -test_eval: - python3.9 -m pytest -v -k "eval" - -test_storage: - python3.9 -m pytest -v -k "storage" - -test_other: - python3.9 -m pytest -v -k "not policy" - -demo: END_RECORDING := "Thanks for watching" -demo: - - ./pol.py add -f ../../app/tests/templates/search2.rego search2 -t --force - - ./pol.py add -f ../../app/tests/templates/search_deny.rego search_deny -t --force - - ./pol.py opa-add -f ../../app/tests/instance_data/dataauthz.rego osdu/instance/dataauthz.rego --force - - ./pol.py opa-add -f tests/example.rego --force - - rm docs/admincli.cast - asciinema-rec_script docs/admincli.asc --title="AdminCLI" - -load_data: - python3.9 ./pol.py opa-add -f ../../app/tests/instance_data/dataauthz.rego osdu/instance/dataauthz.rego --force - python3.9 ./pol.py opa-add -f ../../app/tests/instance_data/entitlements.rego osdu/instance/entitlements.rego --force - python3.9 ./pol.py opa-add -f ../../app/tests/instance_data/legal.rego osdu/instance/legal.rego --force - python3.9 ./pol.py opa-add -f ../../app/tests/templates/dataauthz.rego.template osdu/partition/osdu/dataauthz.rego -t --force diff --git a/frontend/admincli/README.md b/frontend/admincli/README.md index 11eb1a8f6ef62f83952b72977ef9086385531c9d..2a6b6a78491c6b0c87a58d216a1959d811095ff2 100644 --- a/frontend/admincli/README.md +++ b/frontend/admincli/README.md @@ -1,100 +1 @@ -# Policy Service - Admin CLI -The Admin CLI is an easy to use full featured CLI. -This AdminCLI was new in OSDU Milestone 14. It was ready for production use then. -Now with M15 it's even better. As always, please report any issues. 
- -##### For help: -* General help: `pol.py --help` -* Individual command help is also available, for example: `pol.py ls --help` - -##### The main policy commands are: -* `add` for adding or updating policies. This is particularly useful for automation and loading policies into OSDU partitions, -* `eval` for evaluating policies, -* `ls` for listing and retrieving policies, -* `rm` for deleting policies and -* `health` - retrieves the health status of the policy service - -##### Additional Policy Developer Utils -* `compile` Use OPA's Compile API to partially evaluate a query. -* `config` Diagnostic config on Policy Service. -* `diff` Compare two policies, show the delta in a context diff format. -* `opa-add` Add or update a policy directly in OPA ✨ for LOCAL testing/development -* `opa-rm` Delete a policy directly from OPA. 🔥 for LOCAL testing/development -* `translate` For testing the translate endpoint, which is used by the search service via os-core-common - -##### Additional Utils/commands include: -* `groups` - Showing groups related to your auth context, -* `info` - Retrieves info from services, -* `legal-tags` - Get legal tags from legal tag service, -* `search` - Search Utility -* `storage` - Storage and Dataset record retrieval utility - -You will need to set the following environment variables or provide details on the command line: -* `TOKEN` or `--token` -* `DATA_PARTITION` or `--data-partition-id` -* `BASE_URL` or `--base-url` or `--url` or `--host` -See [setenv.sh](setenv.sh) for an example of these. -Please note: *command-line options will override environment variables.* - -Command line completion is available: -* `--install-completion` -* `--show-completion` - -##### Development / Testing Notes: -Individual services can be optionally redirected for development, testing or custom environments: -* `POLICY_URL` or `--policy-url` -* `ENTITLEMENTS_URL` or `--ent-url` -* `STORAGE_URL` or `--storage-url` -* `SEARCH_URL` or `--search-url` -* `LEGAL_URL` or `--legal-url` -* `DATASET_URL` or `--dataset-url` -These are not required. See [setenv.sh](setenv.sh) for an example of these. -Please note: *command-line options will override environment variables.* -Errors during the collecting phase are generally caused by issues with TOKEN. - -##### Built-in Template Engine -Add, eval and translate support a templating engine that makes it super easy to automate. Hopefully you find this helpful. - -When using the --template option, the strings in your file input will be replaced, for example: -* `${data_partition}` will be replaced by the data partition id you are using -* `${name}` will be replaced by the policy id you selected in the command -See the individual command's help for template support details. - -In future releases a compiled version of the CLI may be made available. - -##### Output options for commands: -Some commands support multiple output options, for example `pol.py ls --output=<>` or `pol.py search --output=<>`: -* fancy (default with colors and formatting) -* simple -* excel (supported only on search currently) -* tree (tree output of policies) - -If the policy admincli detects there isn't a tty, simple output is automatically selected to make automation easier. - -In addition, many commands support getting the raw JSON returned from the OSDU service using the `--raw` option. - -##### Searching: -Search uses standard Regular Expressions (shortened as regex or regexp), sometimes referred to as rational expressions - a sequence of characters that specifies a search pattern in text.
This is an extremely powerful search facility. - -Most letters and characters will simply match themselves. For example `test` will match the string `test` exactly. There are exceptions to this rule; some characters are special metacharacters, and don’t match themselves. Instead, they signal that some out-of-the-ordinary thing should be matched, or they affect other portions of the regex by repeating them or changing their meaning. -These include: `. ^ $ * + ? { } [ ] \ | ( )` - -The `ls` command supports two kinds of searching for policies: -* `--search` which takes a REGEX search string. This will search in both the policy name and the stored policy rego. -* `--name` which takes a REGEX search string and will only search in policy names. - -##### Force Option: -Some commands (`add`, `eval` and `translate`) will by default ask for confirmation when changing data or using templating. You can bypass this by using the `--force` option, which is great for automation. - -**The policy service team would appreciate any feedback and feature requests on the AdminCLI.** - -##### Building executables: -`make build` should build an executable for your architecture in the dist directory. Be sure to use Python 3.9.x - -##### Building and running the container: -* `make build_docker` -* `make run` - you will still need to set the environment variables -* `make run_local` - you will still need to set the environment variables but this will override POLICY_URL to point to your local machine via the docker host. - -#### -[](https://asciinema.org/a/4RMvvcjmReh1qZmoNu0FIfsra?speed=0.75) +The AdminCLI has been moved to [its own repo](https://community.opengroup.org/osdu/ui/admincli) diff --git a/frontend/admincli/opa.py b/frontend/admincli/opa.py deleted file mode 100644 index 96dd85e3395ff9a29db0c4f1ad8d0a420aada7b2..0000000000000000000000000000000000000000 --- a/frontend/admincli/opa.py +++ /dev/null @@ -1,57 +0,0 @@ -import os -import requests -import subprocess -import tempfile - -class OpaCheck: - fpath = None - def __init__(self): - self.tempdir = tempfile.mkdtemp() - - def createfile(self, rego: str, filename: str): - self.fpath = os.path.join(self.tempdir, filename) - f = open(self.fpath, "w") - f.write(rego) - f.close() - - def opa_check_file(self): - try: - result = subprocess.run(['opa', 'check', self.fpath], capture_output=True, text=True) - except FileNotFoundError as err: - os.remove(self.fpath) - return(-1,"OPA Binary not found") - os.remove(self.fpath) - return(result.returncode, result.stderr) - - def check(self, rego: str, filename: str): - self.createfile(rego, filename) - return self.opa_check_file() - - def delete(self): - os.rmdir(self.tempdir) - -def put_opa_policy_direct(policy_id, data, base_url, timeout=20): - """ - Put a policy in OPA directly - for development/LOCAL use - """ - url = base_url + '/v1/policies/' + policy_id - - try: - rsp = requests.put(url, data=data, headers={'Content-Type': 'application/octet-stream'}, timeout=timeout) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - return rsp - -def delete_opa_policy_direct(policy_id, base_url, timeout=20): - """ - Delete a policy from OPA directly - for development/LOCAL use - """ - url = base_url + '/v1/policies/' + policy_id - - try: - rsp = requests.delete(url, headers={'Content-Type': 'application/octet-stream'}, timeout=timeout) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - return rsp \ No newline at end of file diff --git a/frontend/admincli/pipeline-test.yml
b/frontend/admincli/pipeline-test.yml deleted file mode 100644 index 40b23e58e5edc169aeca4f8c71944fe585771c2b..0000000000000000000000000000000000000000 --- a/frontend/admincli/pipeline-test.yml +++ /dev/null @@ -1,20 +0,0 @@ -aws-python-admincli-test: - extends: - - .aws - - .aws_common_variables - - .aws_variables - image: $CI_REGISTRY/osdu/platform/deployment-and-operations/base-containers-aws/aws-python/aws-python:v1.0-py3.9 - stage: integration - tags: ['osdu-small'] - needs: ['aws-test-python'] - script: - - cd frontend/admincli - - chmod +x ./run-integration-tests.sh - - ./run-integration-tests.sh -c aws - allow_failure: true - rules: - - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH - - if: $CI_COMMIT_BRANCH == 'dev' - when: manual - - if: $CI_PIPELINE_SOURCE == "schedule" - when: never diff --git a/frontend/admincli/pol.py b/frontend/admincli/pol.py deleted file mode 100755 index b7df837aea3792d8f2d124ff886833a03816a124..0000000000000000000000000000000000000000 --- a/frontend/admincli/pol.py +++ /dev/null @@ -1,1781 +0,0 @@ -#!/usr/bin/env python3.9 -# OSDU Admin CLI -from time import time -import typer -from typing import List, Optional -from types import SimpleNamespace -from rich.table import Table -from rich.console import Console -from rich.tree import Tree -from rich.panel import Panel -import rich.progress -from enum import Enum -import requests -from pathlib import Path -import os -import sys -import re -import json -import getpass -import random -from string import Template -import difflib -import datetime -from uuid_extensions import uuid7str -from rego import ast, walk -import opa -import search_cli - -__version__ = '0.0.6' -__app_name__ = 'OSDU AdminCLI' - -cli = typer.Typer(rich_markup_mode="rich", - help=__app_name__, no_args_is_help=True) -console = Console() -error_console = Console(stderr=True, style="bold red") - - -# setup simple output class -class OutputType(str, Enum): - simple = "simple" - fancy = "fancy" - tree = "tree" - excel = "excel" - - -def _version_callback(value: bool) -> None: - """ - version callback for --version - """ - if value: - typer.echo(f"{__app_name__} v{__version__}") - raise typer.Exit() - - -def headers(ctx: typer.Context, content_type_json=False): - """ - build headers for OSDU request - """ - if content_type_json: - headers = {'Authorization': 'Bearer ' + ctx.obj.token, - 'data-partition-id': ctx.obj.data_partition, - 'accept': 'application/json', - 'content-type': 'application/json' - } - else: - headers = {'Authorization': 'Bearer ' + ctx.obj.token, - 'data-partition-id': ctx.obj.data_partition, - 'accept': 'application/json', - } - if ctx.obj.uuid: - headers['Correlation-id'] = ctx.obj.uuid - if ctx.obj.x_user_id: - headers['x-user-id'] = ctx.obj.x_user_id - if ctx.obj.user_id: - headers['x-local-user-id'] = ctx.obj.user_id - if ctx.obj.debug: - console.print(f"headers: {headers}") - return headers - - -def request_groups(ctx: typer.Context): - """ - return list of groups from entitlements service - """ - try: - response = requests.get( - ctx.obj.entitlements_url + "/groups", headers=headers(ctx)) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if not response.ok: - if "message" in response.text: - message = json.loads(response.text)["message"] - error_console.print(f"Error: {message}") - else: - error_console.print( - f"Error: An error occurred when talking to {ctx.obj.entitlements_url} {response.status_code}") - return "" - jdata = response.json() - if "groups" in jdata: - return jdata['groups'] - - 
-def get_policies(ctx: typer.Context): - """ - Handle request to /policies - """ - try: - r = requests.get(ctx.obj.policy_url + "/policies", - timeout=10, - headers=headers(ctx)) - except requests.exceptions.HTTPError: - error_console.print(f"Error: endpoint {ctx.obj.policy_url}: HTTPError") - return - except requests.exceptions.ConnectionError: - error_console.print( - f"Error: endpoint {ctx.obj.policy_url}: ConnectionError") - return - except requests.exceptions.Timeout: - error_console.print(f"Error: endpoint {ctx.obj.policy_url}: Timeout") - return - except requests.exceptions.RequestException: - error_console.print( - f"Error: endpoint {ctx.obj.policy_url}: RequestException") - return - - if "result" in r.text and r.ok: - return (r.json()["result"]) - else: - if "detail" in r.text: - error_console.print(f"Error: {r.json()['detail']}") - else: - error_console.print(f"Error: {r.text}") - raise typer.Exit(1) # non-zero exit status - - -def delete_partition_policy(ctx: typer.Context, policy_id: str): - """ - handle request to delete a policy from a partition - """ - try: - r = requests.delete(ctx.obj.policy_url + "/policies/osdu/partition/" + ctx.obj.data_partition + '/' + policy_id, - headers=headers(ctx)) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if ctx.obj.debug: - console.print(f"debug: {r.json()}") - if r.ok: - if "result" in r.text: - return True - else: - if "detail" in r.text: - error_console.print(f"Error: {r.json()['detail']}") - else: - error_console.print(f"Error: {r.text}") - raise typer.Exit(1) # non-zero exit status - - -def add_partition_policy(ctx: typer.Context, policy_id: str, files: dict): - """ - handle request to add a policy to a partition - """ - try: - r = requests.put(ctx.obj.policy_url + "/policies/osdu/partition/" + ctx.obj.data_partition + '/' + policy_id, - files=files, - headers=headers(ctx)) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - return r.text - else: - if "detail" in r.text: - error_console.print(f"Error: {r.json()['detail']}") - else: - error_console.print(f"Error: {r.text}") - raise typer.Exit(1) # non-zero exit status - - -def get_partition_policy(ctx: typer.Context, policy_id: str, quiet=False): - """ - handle request to get partition policy - """ - try: - r = requests.get(ctx.obj.policy_url + "/policies/osdu/partition/" + ctx.obj.data_partition + '/' + policy_id, - headers=headers(ctx)) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - if "result" in r.text: - return (r.json()["result"]) - else: - if not quiet and "detail" in r.text: - error_console.print(f"Error: {r.json()['detail']}") - else: - error_console.print(f"Error: {r.text}") - # raise typer.Exit(1) # non-zero exit status - - -def get_instance_policy(ctx: typer.Context, policy_id: str, quiet=False): - """ - handle request to get instance policy - """ - try: - r = requests.get(ctx.obj.policy_url + "/policies/osdu/instance/" + policy_id, - headers=headers(ctx)) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - if "result" in r.text: - return (r.json()["result"]) - else: - if not quiet and "detail" in r.text: - error_console.print(f"Error: {r.json()['detail']}") - else: - error_console.print(f"Error: {r.text}") - raise typer.Exit(1) # non-zero exit status - - -def evaluations_query(ctx: typer.Context, policy_id: str, files: dict): - """ - handle request to eval query - """ - params = { - 'policy_id': policy_id, - 'include_auth': True 
- } - - try: - r = requests.post(ctx.obj.policy_url + "/evaluations/query", - params=params, - files=files, - headers=headers(ctx)) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - if "result" in r.text: - return (r.json()["result"]) - else: - if "detail" in r.text: - error_console.print(f"Error: {r.json()['detail']}") - else: - error_console.print(f"Error: {r.text}") - raise typer.Exit(r.status_code) # non-zero exit status - - -@cli.command(rich_help_panel="Utils") -def groups(ctx: typer.Context, - all: bool = False, - domain: bool = False, - domain_suffix: bool = False): - """ - Show groups of current auth context. - """ - retgroups = request_groups(ctx) - if all: - console.print(retgroups) - elif domain: - domain = retgroups[0]["email"].split('@')[1] - domain_ending = domain.removeprefix(ctx.obj.data_partition + ".") - console.print(f"fulldomain: {domain}\ndomain: {domain_ending}") - elif domain_suffix: - domain = retgroups[0]["email"].split('@')[1] - domain_ending = domain.removeprefix(ctx.obj.data_partition + ".") - console.print(domain_ending) - else: - for grp in retgroups: - if "policy" in str(grp): - console.print(grp) - - -def get_domain_suffix(ctx): - """ - get domain - """ - retgroups = request_groups(ctx) - domain = retgroups[0]["email"].split('@')[1] - domain_ending = domain.removeprefix(ctx.obj.data_partition + ".") - return domain_ending - - -def is_json(myjson): - """ - Is this string valid json - detect problems before sending it to policy service - """ - try: - json.loads(myjson) - except ValueError as e: - return False - return True - - -@cli.command(rich_help_panel="Policy Developer Utils") -def translate( - ctx: typer.Context, - policy_id: str = typer.Argument(...), - output: OutputType = typer.Option("fancy", help="Output style"), - file: Path = typer.Option(..., - "-f", "--file", - exists=True, - file_okay=True, - dir_okay=False, - writable=False, - readable=True, - resolve_path=True, - help="json file" - ), - force: bool = typer.Option( - False, "--force", help="No confirmation prompt"), - template: bool = typer.Option( - False, "--template", "-t", help="Input is a template to be rendered") -): - """ - Translate testing utility. - - Requires valid json file. 
- - If using option --template: - - ${data_partition} = data_partition associated with your token - ${name} = short name you give as a policy_id on the command line - - Output fancy (default) draws panels, simple just displays text - """ - with rich.progress.open(file, "r") as f: - data = f.read() - f.close() - short_policy_id = os.path.basename(policy_id) - domain = get_domain_suffix(ctx) - if template: - template_data = Template(data) - data = template_data.substitute( - {'data_partition': ctx.obj.data_partition, - 'name': short_policy_id.removesuffix('.rego'), - 'domain': domain - }) - if output == output.fancy: - console.print(Panel(data, title="rendered data", highlight=True)) - else: - print(data) - if not force: - typer.confirm( - f"translate above data against {short_policy_id} in {ctx.obj.data_partition}", abort=True) - - if not is_json(data): - error_console.print("Error: Not valid json") - raise typer.Exit(1) - - try: - with console.status("Translating..."): - r = requests.post(ctx.obj.policy_url + "/translate", - data=data, - headers=headers(ctx)) - except requests.exceptions.RequestException as e: - error_console.print(f"Error: {e}") - raise SystemExit(e) - - if r.ok: - if output == output.fancy: - console.print(r.json()) - else: - print(r.json()) - else: - if "detail" in r.text: - error_console.print(f"Error: {r.json()['detail']} {r.status_code}") - else: - error_console.print(f"Error: {r.text}") - raise typer.Exit(1) # non-zero exit status - - -@cli.command(rich_help_panel="Policy Commands") -def eval( - ctx: typer.Context, - policy_id: str = typer.Argument(None), - file: Path = typer.Option(..., - "-f", "--file", - exists=True, - file_okay=True, - dir_okay=False, - writable=False, - readable=True, - resolve_path=True, - ), - template: bool = typer.Option( - False, "--template", "-t", help="Input is a template to be rendered"), - output: OutputType = typer.Option("fancy", help="Output style"), - force: bool = typer.Option( - False, "--force", help="No confirmation prompt"), - legal_tag: str = None, - domain: str = None -): - """ - Evaluate Policy. - - If using option --template: - - ${data_partition} = data_partition associated with your token - ${name} = short name you give as a policy_id on the command line - ${legal_tag} = value of --legal_tag on the command line, if none provided admincli will pick a random one - ${domain} = value of --domain the command line, if none provided admincli will determine - - Output fancy (default) draws panels, simple just displays text - - This command will allow the policy service to add/replace any auth details in json using the include_auth capability in API. 
- """ - if policy_id.startswith('osdu/instance/'): - error_console.print("Error: Instance policies not supported") - raise typer.Abort() - - if domain is None: - domain = get_domain_suffix(ctx) - #console.print(f"domain is {domain}") - - short_policy_id = os.path.basename(policy_id) - - if not short_policy_id.endswith(".rego"): - short_policy_id = short_policy_id + ".rego" - - with rich.progress.open(file, "r") as f: - data = f.read() - f.close() - - if legal_tag is None: - legal_tag = get_a_random_legal_tag(ctx=ctx) - - if template: - template_data = Template(data) - data = template_data.substitute( - { - 'data_partition': ctx.obj.data_partition, - 'name': short_policy_id.removesuffix('.rego'), - 'legal_tag': legal_tag, - 'domain': domain - }) - if output == output.fancy: - console.print(Panel(data, title="rendered policy", highlight=True)) - else: - print(data) - - if not is_json(data): - error_console.print("Error: Not valid json") - raise typer.Exit(1) - - if not force: - typer.confirm( - f"evaluate against {short_policy_id} in {ctx.obj.data_partition}", abort=True) - - bdata = data.encode('utf-8') - files = {'file': (short_policy_id, bdata)} - eval_result = evaluations_query(ctx=ctx, policy_id=policy_id, files=files) - console.print(eval_result) - - -def search_policies_full(result: list, search: str): - """ - search helper - """ - retlist = [] - for pol in result: - if re.search(search, str(pol)): - retlist.append({'id': pol['id'], 'raw': pol['raw']}) - return retlist - - -def search_policies_idonly(result: list, search: str): - """ - search helper in policy name only - """ - retlist = [] - for pol in result: - if re.search(search, pol['id']): - retlist.append({'id': pol['id'], 'raw': pol['raw']}) - return retlist - - -def display_all_policies(ctx: typer.Context, - output: OutputType = OutputType.fancy, - raw: bool = False, - search_name=None, - search=None - ): - """ - display policies - """ - - # retrieve all policy ids - with console.status("Getting policies..."): - result = get_policies(ctx) - - if search_name: - with console.status("Searching policies by name..."): - result = search_policies_idonly(result, search_name) - - if search: - with console.status("Searching policies..."): - result = search_policies_full(result, search) - - if result: - if raw: - if output == output.simple: - print(result) - else: - console.print(result) - raise typer.Exit() - - # tree output - if output == output.tree: - tree = Tree("Policy Tree") - partition_tree = tree.add("osdu/partition/") - instance_tree = tree.add("osdu/instance/") - data_partition_tree = partition_tree.add(ctx.obj.data_partition) - for pol in result: - # pol['id'].basename - short_policy_id = os.path.basename(pol['id']) - dir = os.path.dirname(pol['id']) - if pol['id'].startswith("osdu/instance/"): - instance_tree.add(short_policy_id) - else: - data_partition_tree.add(short_policy_id) - console.print(tree) - raise typer.Exit() - - try: - # silly math for figuring out terminal size - width, height = os.get_terminal_size() - except: - # not a tty... 
no worries just simple output - output = output.simple - - # simple output - if output == output.simple: - for pol in result: - print(pol['id']) - raise typer.Exit() - - # else let's make it look pretty - if search or search_name: - table = Table(title="Search results") - else: - table = Table(title="Lookup of all policy IDs") - name_width = 0 - - for pol in result: - id_width = len(pol['id']) - if id_width > name_width: - name_width = id_width - - # silly math for figuring out terminal size - preview_width = width - 6 - name_width - table.add_column("Name", justify="full", style="cyan", - no_wrap=True, min_width=name_width) - table.add_column("Preview", justify="left", style="green", - no_wrap=True, max_width=preview_width) - - for pol in result: - preview = pol['raw'].replace("\n", " ").strip() - preview = " ".join(preview.split()) - if preview.startswith("p"): - table.add_row(pol['id'], preview.strip()) - else: - console.print("Error: unexpected preview data") - console.print(table) - - -@cli.command("ls", rich_help_panel="Policy Commands") -def list( - ctx: typer.Context, - policy_list: List[str] = typer.Argument(None), - raw: bool = typer.Option( - False, "--raw", help="Full output from policy service"), - quiet: bool = typer.Option( - False, "--quiet", help="Don't display policy"), - output: OutputType = typer.Option("fancy", help="Output style"), - search: str = typer.Option( - "", "--search", "-s", help="Regex search string", rich_help_panel="Search Options"), - search_name: str = typer.Option( - "", "--name", "-n", help="Regex search string in policy name only", rich_help_panel="Search Options"), - download: bool = typer.Option( - False, "--download", "-d", help="download policy to file") -): - ''' - Lookup policies or a policy. - - If policy_list is - on command-line, the list will be read from standard-in - ''' - if output == output.excel: - error_console.print("Error: Not supported output type") - raise typer.Exit(1) # non-zero exit status - - if policy_list == ['-']: - policy_list = sys.stdin.read().strip().split() - - if not policy_list: - display_all_policies(ctx=ctx, output=output, raw=raw, - search=search, search_name=search_name) - else: - for policy_id in policy_list: - if '*' in policy_id: - display_all_policies(ctx=ctx, output=output, - raw=raw, search=policy_id) - return - # if includes a name without .rego, let's add it to make it easier to use cli - if not policy_id.endswith(".rego"): - policy_id = policy_id + ".rego" - - short_policy_id = os.path.basename(policy_id) - - result = lookup_policy(ctx=ctx, policy_id=policy_id, quiet=quiet) - if result: - if raw: - if download: - filename = short_policy_id.removesuffix( - 'rego') + 'json' - with console.status(f"Saving {filename}..."): - f = open(filename, "w") - f.write(str(result)) - f.close() - if not quiet: - console.print( - f"{policy_id} raw saved as '{filename}'") - elif quiet: - pass - elif output == output.fancy: - console.print(result) - else: # simple - print(result) - else: - if download: - with console.status(f"Saving {short_policy_id}..."): - f = open(short_policy_id, "w") - f.write(str(result['raw'])) - f.close() - if not quiet: - console.print( - f"{policy_id} saved as '{short_policy_id}'") - elif quiet: - pass - elif output == output.fancy: - console.print(result['raw']) - else: # simple - print(result['raw']) - else: - raise typer.Exit(1) # non-zero exit status - - -def cleanup_url(url): - if url.endswith("/"): - return url.rstrip(url[-1]) - return url - - -@cli.command(hidden=True, 
rich_help_panel="Policy Developer Utils") -def setup(ctx: typer.Context, - raw: bool = typer.Option(False, "--raw", help="Don't Mask details"), - ): - """ - Display setup details. Hidden command - """ - if raw: - console.print(ctx.obj) - for name, value in os.environ.items(): - console.print("{0}: {1}".format(name, value)) - else: - x = dict(sorted(vars(ctx.obj).items())) - for key in x: - if "token" in key: - console.print(f"[green]{key}[/]: ****") - else: - if str(x[key]).endswith("/"): - console.print( - f"[green]{key}[/]: {x[key]} [red]Warning ending /[/]") - else: - console.print(f"[green]{key}[/]: {x[key]}") - - for name, value in os.environ.items(): - if "token" in name.lower() or \ - "key" in name.lower() or \ - "password" in name.lower() or \ - "secret" in name.lower() or \ - "client" in name.lower(): - console.print("{0}: {1}".format(name, "****")) - else: - console.print("{0}: {1}".format(name, value)) - - -@cli.command(rich_help_panel="Policy Commands") -def health(ctx: typer.Context): - """ - Is Policy service healthy - """ - try: - with console.status("Getting health..."): - r = requests.get(ctx.obj.policy_url + "/health", - headers=headers(ctx)) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - console.print(":thumbsup-emoji:", r.text) - else: - error_console.print(r.text) - raise typer.Exit(1) # non-zero exit status - - -@cli.command(rich_help_panel="Utils") -def info(ctx: typer.Context, - service: str = typer.Option( - "policy", "-s", "--service", help="Get search info") - ): - """ - Info on Dataset, Entitlement, Legal, Policy Service, Search, and Storage - """ - if "storage" in service: - get_info(ctx=ctx, base_url=ctx.obj.storage_url) - elif "search" in service: - get_info(ctx=ctx, base_url=ctx.obj.search_url) - elif "entitlement" in service: - get_info(ctx=ctx, base_url=ctx.obj.entitlements_url) - elif "legal" in service: - get_info(ctx=ctx, base_url=ctx.obj.legal_url) - elif "policy" in service: - get_info(ctx=ctx, base_url=ctx.obj.policy_url) - elif "dataset" in service: - get_info(ctx=ctx, base_url=ctx.obj.dataset_url) - else: - error_console.print(f"Error: Unsupported service {service}") - raise typer.Exit(1) # non-zero exit status - - -@cli.command(rich_help_panel="Policy Developer Utils") -def diff(ctx: typer.Context, - policy_id_1: str = typer.Argument(...), - policy_id_2: str = typer.Argument(...), - n: int = typer.Option( - 3, "-n", help="Number of adjacent lines to show"), - ): - """ - Compare two policies, show the delta in a context diff format. - - Context diffs are a compact way of showing just the lines that have changed plus - a few lines of context. The changes are shown in a before/after style. - The number of context lines is set by n which defaults to three. 
- """ - - # if without .rego, let's add it to make it easier to use cli - if not policy_id_1.endswith(".rego"): - policy_id_1 = policy_id_1 + ".rego" - - if not policy_id_2.endswith(".rego"): - policy_id_2 = policy_id_2 + ".rego" - - result1 = lookup_policy(ctx, policy_id_1) - result2 = lookup_policy(ctx, policy_id_2) - - differences = difflib.context_diff( - a=result1['raw'].splitlines(), - b=result2['raw'].splitlines(), - fromfile=policy_id_1, - tofile=policy_id_2, - n=n) - console.print('\n'.join(differences)) - - -def lookup_policy(ctx: typer.Context, policy_id, quiet=False): - short_policy_id = os.path.basename(policy_id) - if "osdu/instance/" in policy_id: # lookup instance policy - with console.status(f"Getting instance policy {short_policy_id}..."): - result = get_instance_policy(ctx, short_policy_id, quiet) - elif "osdu/partition/" in policy_id: # lookup partition policy - with console.status(f"Getting partition policy {short_policy_id}..."): - result = get_partition_policy(ctx, short_policy_id, quiet) - else: # assume it's a partition policy - with console.status(f"Getting partition policy {short_policy_id}..."): - result = get_partition_policy(ctx, short_policy_id, quiet) - if not result: - # nope report the error and let's still check for a instance policy - if not quiet: - error_console.print( - f"[green]Checking for '{short_policy_id}' in instance policies[/]...") - result = get_instance_policy(ctx, short_policy_id, quiet) - if not result: - if not quiet: - error_console.print(f"Error: Unable to find {policy_id}") - raise typer.Exit(1) # non-zero exit status - - return result - - -@cli.command(rich_help_panel="Policy Developer Utils") -def compile( - ctx: typer.Context, - file: Path = typer.Option(..., - "-f", "--file", - exists=True, - file_okay=True, - dir_okay=False, - writable=False, - readable=True, - resolve_path=True, - ), - metrics: bool = False, - opt_walk: bool = typer.Option(False, "--walk"), - instrument: bool = False, - template: bool = typer.Option( - False, "--template", "-t", help="Input is a template to be rendered"), - force: bool = typer.Option( - False, "--force", help="No confirmation prompt") -): - """ - Use OPA's Compile API to partially evaluate a query. - - Diagnostic compile on Policy Service. Requires service have ENABLE_DEV_DIAGNOSTICS enabled. - """ - with rich.progress.open(file, "r") as f: - data = f.read() - f.close() - if template: - template_data = Template(data) - data = template_data.substitute( - {'data_partition': ctx.obj.data_partition} - ) - console.print(Panel(data, title="rendered policy", highlight=True)) - if not force: - typer.confirm(f"compile in {ctx.obj.data_partition}", abort=True) - - bdata = data.encode('utf-8') - files = {'file': ("compile.json", bdata)} - params = {'metrics': metrics, 'instrument': instrument} - - try: - r = requests.post(ctx.obj.policy_url + "/compile", files=files, - params=params, headers=headers(ctx)) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - console.print(r.json()) - else: - error_console.print(r.text, r.status_code) - raise typer.Exit(1) # non-zero exit status - - if opt_walk: - # Load the resulting set of query ASTs out of the JSON response. - if "result" in r.json() and "queries" in r.json()["result"]: - tree = r.json()["result"]["queries"] - console.print( - Panel(str(tree), title="Abstract Syntax Tree", highlight=True)) - qs = ast.QuerySet.from_data(tree) - # Pretty print the ASTs. 
- walk.pretty_print(qs) - else: - error_console.print( - "Error: unexpected response from policy service. Check if policy exists") - raise typer.Exit(1) # non-zero exit status - - -@cli.command(rich_help_panel="Policy Developer Utils") -def config(ctx: typer.Context): - """ - Diagnostic config on Policy Service. - - Requires service have ENABLE_DEV_DIAGNOSTICS enabled. - """ - try: - r = requests.get(ctx.obj.diag_url + "/config", headers=headers(ctx)) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - console.print(r.json()) - else: - error_console.print(r.text) - raise typer.Exit(1) # non-zero exit status - - -def get_legal_tags(ctx: typer.Context, tag_name: str = None): - """ - Get legal tags json - """ - try: - if tag_name: - with console.status(f"Getting legal tag {tag_name}..."): - r = requests.get(ctx.obj.legal_url + - "/legaltags/" + tag_name, headers=headers(ctx)) - else: - with console.status("Getting legaltags..."): - r = requests.get(ctx.obj.legal_url + "/legaltags" + - "?valid=true", headers=headers(ctx)) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - return r.json() - else: - error_console.print("Error:", r.text) - raise typer.Exit(1) # non-zero exit status - - -def create_legal_tag(ctx: typer.Context, json_data, update=False): - """ - Create or update a legal tag - """ - try: - if update: - with console.status(f"Updating legal tag..."): - r = requests.put(ctx.obj.legal_url + "/legaltags", json=json.loads( - json_data), headers=headers(ctx, content_type_json=True)) - else: - with console.status(f"Creating legal tag..."): - r = requests.post(ctx.obj.legal_url + "/legaltags", json=json.loads( - json_data), headers=headers(ctx, content_type_json=True)) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - return r.json() - else: - if r.status_code == 409: - error_console.print("Error: Legal tag already exists") - raise typer.Exit(2) # non-zero exit status - elif r.status_code == 404 and update: - js = r.json() - message = "Cannot update a LegalTag that does not exist" - if "message" in js: - message = js["message"] - error_console.print(f"Error: {message}") - raise typer.Exit(3) # non-zero exit status - else: - error_console.print("Error:", r.text, r.status_code) - raise typer.Exit(1) # non-zero exit status - - -def get_a_random_legal_tag(ctx: typer.Context): - """ - Get a random legal tag - """ - r_json = get_legal_tags(ctx=ctx) - lucky_tag = random.choice(r_json["legalTags"]) - return lucky_tag["name"].strip() - - -@cli.command(rich_help_panel="Utils") -def add_legal_tag( - ctx: typer.Context, - file: Path = typer.Option(..., - "-f", "--file", - exists=True, - file_okay=True, - dir_okay=False, - writable=False, - readable=True, - resolve_path=True, - ), - name: str = typer.Option("osdu-admincli-testing", - help="Tag name to use in template"), - days: int = typer.Option( - 30, help="Tag expiration date offset from now to use in template"), - template: bool = typer.Option( - False, "--template", "-t", help="Input is a template to be rendered"), - force: bool = typer.Option( - False, "--force", help="No confirmation prompt"), - update: bool = typer.Option( - False, "--update", help="Update existing legal tag") -): - """ - Create or Update a legal tag - - Updating legal tags: Currently the legaltags API only allows the updating of the description, contract ID, expiration date and extensionProperties properties - - Template support includes: - * data_partition - * 
expiration_date - * name - * random_name - * uuid - """ - - with rich.progress.open(file, "r") as f: - data = f.read() - f.close() - - if template: - random_name = name + '-' + ctx.obj.uuid - expiration_date = datetime.datetime.now() + datetime.timedelta(days=days) - template_data = Template(data) - data = template_data.substitute( - # legal tags support a max of 100 character names - { - 'data_partition': ctx.obj.data_partition, - 'DATA_PARTITION': ctx.obj.data_partition, - 'name': name[:100], - 'random_name': random_name[:100], - 'uuid': ctx.obj.uuid, - 'expiration_date': expiration_date.strftime("%Y-%m-%d") - } - ) - if update: - console.print( - Panel(data, title=f"rendered update legal tag", highlight=True)) - else: - console.print( - Panel(data, title=f"rendered add legal tag", highlight=True)) - - if not is_json(data): - error_console.print("Error: Not valid json") - raise typer.Exit(1) - - if not force: - typer.confirm(f"Add legal tag", abort=True) - - r_json = create_legal_tag(ctx=ctx, json_data=data, update=update) - console.print(json.dumps(r_json, indent=4)) - - -@cli.command(rich_help_panel="Utils") -def legal_tags( - ctx: typer.Context, - tag_list: List[str] = typer.Argument( - None, help="List of tags to show"), - raw: bool = typer.Option( - False, "--raw", help="Get raw json output"), - random: bool = typer.Option( - False, "--random", help="Get a random legal tag"), - limit: int = typer.Option(None, help="Limit number")): - """ - Show legal tags. - """ - - if tag_list == ['-']: - tag_list = sys.stdin.read().strip().split() - - if tag_list: - for tag in tag_list: - r_json = get_legal_tags(ctx=ctx, tag_name=tag) - console.print(json.dumps(r_json, indent=4)) - else: - if raw: - r_json = get_legal_tags(ctx=ctx) - console.print(json.dumps(r_json, indent=4)) - elif random: - console.print(get_a_random_legal_tag(ctx=ctx)) - else: - r_json = get_legal_tags(ctx=ctx) - count = 0 - for name in r_json["legalTags"]: - console.print(name["name"]) - count = count + 1 - if count == limit: - break - - -@cli.command("opa-add", rich_help_panel="Policy Developer Utils") -def add_to_opa( - ctx: typer.Context, - policy_id: str = typer.Argument(...), - file: Path = typer.Option(..., - "-f", "--file", - exists=True, - file_okay=True, - dir_okay=False, - writable=False, - readable=True, - resolve_path=True, - ), - template: bool = typer.Option( - False, "--template", "-t", help="Input is a template to be rendered"), - force: bool = typer.Option( - False, "--force", help="No confirmation prompt"), - url: str = typer.Option( - "http://localhost:8181", help="Base URL to connect to OPA") -): - """ - [green]Add or update[/green] a policy directly in OPA :sparkles: for LOCAL testing/development - """ - if ctx.obj.debug: - console.print(f"url: {url}") - - if policy_id.startswith('osdu/instance/'): - policy_type = "instance" - elif policy_id.startswith('osdu/partition/'): - policy_type = "partition" - else: - error_console.print( - "Error: policy_id must start with osdu/instance/ or osdu/partition/") - raise typer.Exit(1) # non-zero exit status - - if not policy_id.endswith(".rego"): - policy_id = policy_id + ".rego" - - short_policy_id = os.path.basename(policy_id) - - console.print(f"Reading contents of {file} as '{policy_id}'") - - with rich.progress.open(file, "r") as f: - data = f.read() - f.close() - - if template: - template_data = Template(data) - data = template_data.substitute( - { - 'data_partition': ctx.obj.data_partition, - 'DATA_PARTITION': ctx.obj.data_partition, - 'name': 
short_policy_id.removesuffix('.rego') - } - ) - console.print( - Panel(data, title=f"rendered {policy_type} policy", highlight=True)) - - if not force: - typer.confirm(f"Add {policy_id} to OPA", abort=True) - - with console.status(f"Adding policy {policy_id}"): - r = opa.put_opa_policy_direct( - policy_id=policy_id, data=data, base_url=url) - if r.ok: - console.print(f"Policy {policy_id} added to OPA") - else: - error_console.print( - f"Error adding policy {policy_id} to OPA {r.json()} {r.status_code}") - raise typer.Exit(1) # non-zero exit status - - -@cli.command("check", rich_help_panel="Policy Developer Utils") -def opa_check( - ctx: typer.Context, - file_list: List[str] = typer.Argument(...), - display: bool = typer.Option( - False, "--display", "-d", help="Display policy"), - template: bool = typer.Option( - False, "--template", "-t", help="Input is a template to be rendered"), -): - """ - Check rego file for errors - """ - if file_list == ['-']: - file_list = sys.stdin.read().strip().split() - - chk = opa.OpaCheck() - for file in file_list: - policy_id = os.path.basename(file) - console.print(f"Reading contents of {file} as '{policy_id}'") - - with rich.progress.open(file, "r") as f: - data = f.read() - f.close() - - if template: - try: - template_data = Template(data) - data = template_data.substitute( - { - 'data_partition': ctx.obj.data_partition, - 'DATA_PARTITION': ctx.obj.data_partition, - 'name': policy_id.removesuffix('.rego') - } - ) - except ValueError as err: - error_console.print(f"Error in {file}: {err}") - if display: - console.print(Panel(data, title=f"{policy_id}", highlight=True)) - - with console.status(f"Checking {policy_id}"): - result, error_msg = chk.check(rego=data, filename=policy_id) - if result: - if 'rego_parse_error' in error_msg: - error_console.print( - f"{policy_id}: rego_parse_error:\n{error_msg}") - else: - error_console.print(f"{policy_id}: error {error_msg}") - chk.delete() # cleanup before exit - raise typer.Exit(1) # non-zero exit status - else: - console.print(f"{policy_id}: [green]OK[/]") - chk.delete() - - -@cli.command("opa-rm", rich_help_panel="Policy Developer Utils") -def delete_from_opa( - ctx: typer.Context, - policy_list: List[str] = typer.Argument(...), - force: bool = typer.Option( - False, "--force", help="No confirmation prompt"), - url: str = typer.Option("http://localhost:8181", - help="Base URL to connect to OPA") -): - """ - [red]delete[/red] a policy directly from OPA. 
:fire: for LOCAL testing/development - - If policy_list is - on command-line, the list will be read from standard-in - """ - interactive = True - if policy_list == ['-']: - interactive = False - policy_list = sys.stdin.read().strip().split() - - for policy_id in policy_list: - - if policy_id.startswith('osdu/instance/'): - policy_type = "instance" - elif policy_id.startswith('osdu/partition/'): - policy_type = "partition" - else: - error_console.print( - "Error: policy_id must start with osdu/instance/ or osdu/partition/") - raise typer.Exit(1) # non-zero exit status - - if not policy_id.endswith(".rego"): - policy_id = policy_id + ".rego" - short_policy_id = os.path.basename(policy_id) - - if not force: - typer.confirm(f"Delete {policy_id} from OPA", abort=True) - - with console.status(f"Adding policy {policy_id}"): - r = opa.delete_opa_policy_direct(policy_id=policy_id, base_url=url) - - if r.ok: - console.print(f"Policy {policy_id} deleted from OPA") - else: - error_console.print( - f"Error deleting policy {policy_id} from OPA {r.json()} {r.status_code}") - raise typer.Exit(1) # non-zero exit status - - -@cli.callback() -def main( - ctx: typer.Context, - token: str = typer.Option(None, '-t', '--token', envvar="TOKEN"), - policy_url: str = typer.Option( - None, '-p', '--policy-url', envvar="POLICY_URL"), - base_url: str = typer.Option( - None, '--host', '--url', '--base-url', envvar="BASE_URL"), - entitlements_url: str = typer.Option( - None, '-e', '--ent-url', envvar="ENTITLEMENTS_BASE_URL", hidden=True), - storage_url: str = typer.Option( - None, '-s', '--storage-url', envvar="STORAGE_BASE_URL", hidden=True), - search_url: str = typer.Option( - None, '-x', '--ent-url', envvar="SEARCH_BASE_URL", hidden=True), - legal_url: str = typer.Option( - None, '-l', '--legal-url', envvar="LEGAL_BASE_URL", hidden=True), - dataset_url: str = typer.Option( - None, '--dataset-url', envvar="DATASET_BASE_URL", hidden=True), - path: str = typer.Option( - '/api/policy/v1', envvar="POLICY_PATH", hidden=True), - data_partition: str = typer.Option( - None, '-d', '--data-partition-id', envvar="DATA_PARTITION"), - x_user_id: str = typer.Option( - None, envvar="XUSERID", help="optional user id, added to headers"), - debug: bool = typer.Option(False, hidden=True, help="Debug output"), - correlation_id: bool = typer.Option( - True, hidden=True, help="Enable/disable correlation-id"), - verbose: bool = typer.Option( - False, "-V", "--verbose", hidden=True, help="Verbose"), - version: Optional[bool] = typer.Option( - None, - "--version", - "-v", - help="Show the application's version and exit.", - callback=_version_callback, - is_eager=True - ) -) -> None: - - if not token: - error_console.print("Missing token; pass --token or set env[TOKEN]") - raise typer.Exit(1) # non-zero exit status - - if not data_partition: - error_console.print( - "Missing data_partition; pass --data-partition or set env[DATA_PARTITION]") - raise typer.Exit(1) # non-zero exit status - - if not base_url: - if not policy_url: - error_console.print( - "Missing url and base_url; pass --url or set env[POLICY_URL]") - raise typer.Exit(1) # non-zero exit status - base_url = policy_url - - base_url = cleanup_url(base_url) - if not policy_url: - policy_url = base_url - - if not entitlements_url: - entitlements_url = base_url + '/api/entitlements/v2' - - if not storage_url: - storage_url = base_url + '/api/storage/v2' - - if not search_url: - search_url = base_url + '/api/search/v2' - - if not legal_url: - legal_url = base_url + "/api/legal/v1" - - if 
not dataset_url: - dataset_url = base_url + "/api/dataset/v1" - - uuid = None - if correlation_id: - uuid = uuid7str() - - ctx.obj = SimpleNamespace( - token=token, - data_partition=data_partition, - policy_url=cleanup_url(policy_url) + path, - base_url=base_url, - diag_url=cleanup_url(policy_url) + "/diag", - entitlements_url=cleanup_url(entitlements_url), - storage_url=cleanup_url(storage_url), - search_url=cleanup_url(search_url), - dataset_url=cleanup_url(dataset_url), - debug=debug, - legal_url=cleanup_url(legal_url), - uuid=uuid, - user_id=getpass.getuser(), - x_user_id=x_user_id) - - if debug: # hidden option - console.print(f"token: {ctx.obj.token}") - console.print(f"base_url: {ctx.obj.base_url}") - console.print(f"policy_url: {ctx.obj.policy_url}") - console.print(f"entitlements_url: {ctx.obj.entitlements_url}") - console.print(f"storage_url: {ctx.obj.storage_url}") - console.print(f"search_url: {ctx.obj.search_url}") - console.print(f"legal_url: {ctx.obj.legal_url}") - console.print(f"data_partition: {ctx.obj.data_partition}") - console.print(f"dataset_url: {ctx.obj.dataset_url}") - console.print(f"x-user-id: {ctx.obj.x_user_id}") - console.print(f"uuid: {ctx.obj.uuid}") - if verbose: - console.print(f"Correlation-ID: {ctx.obj.uuid}") - - -@cli.command(rich_help_panel="Policy Commands") -def add( - ctx: typer.Context, - policy_id: str = typer.Argument(...), - file: Path = typer.Option(..., - "-f", "--file", - exists=True, - file_okay=True, - dir_okay=False, - writable=False, - readable=True, - resolve_path=True, - ), - template: bool = typer.Option( - False, "--template", "-t", help="Input is a template to be rendered"), - force: bool = typer.Option( - False, "--force", help="No confirmation prompt") -): - """ - [green]Add or update[/green] a policy. :sparkles: - - If using option --template: - - ${data_partition} = data_partition associated with your token - ${name} = short name you give as a policy_id on the command line - """ - if policy_id.startswith('osdu/instance/'): - error_console.print("Error: Instance policies not supported") - raise typer.Abort() - short_policy_id = os.path.basename(policy_id) - if not short_policy_id.endswith(".rego"): - short_policy_id = short_policy_id + ".rego" - console.print( - f"add contents of {file} as '{short_policy_id}' in {ctx.obj.data_partition}") - with rich.progress.open(file, "r") as f: - data = f.read() - f.close() - - if template: - template_data = Template(data) - data = template_data.substitute( - {'data_partition': ctx.obj.data_partition, - 'DATA_PARTITION': ctx.obj.data_partition, - 'name': short_policy_id.removesuffix('.rego') - }) - console.print(Panel(data, title="rendered policy", highlight=True)) - - if not force: - typer.confirm( - f"add {short_policy_id} in {ctx.obj.data_partition}", abort=True) - bdata = data.encode('utf-8') - files = {'file': (short_policy_id, bdata)} - - with console.status(f"Adding partition policy {short_policy_id}"): - add_partition_policy(ctx=ctx, policy_id=short_policy_id, files=files) - - -@cli.command("rm", rich_help_panel="Policy Commands") -def delete( - ctx: typer.Context, - policy_list: List[str] = typer.Argument(...), - force: bool = typer.Option(False, "--force", help="No confirmation prompt") - #... if sys.stdin.isatty() else sys.stdin.read().strip() -): - """ - [red]delete[/red] a partition policy. 
:fire: - - If policy_list is - on command-line, the list will be read from standard-in - """ - interactive = True - if policy_list == ['-']: - interactive = False - policy_list = sys.stdin.read().strip().split() - - for policy_id in policy_list: - if "osdu/instance/" in policy_id: - error_console.print("Error: Instance policies not supported") - raise typer.Abort() - - short_policy_id = os.path.basename(policy_id) - if not short_policy_id.endswith(".rego"): - short_policy_id = short_policy_id + ".rego" - - with console.status(f"Checking partition policy {short_policy_id}"): - result = get_partition_policy(ctx, short_policy_id) - - delete = False - if result: - long_policy_id = result['id'] - if not force and interactive: - delete = typer.confirm( - f"Delete {long_policy_id} in {ctx.obj.data_partition}", abort=True) - elif not force and not interactive: - console.print( - f"preview: would have deleted '{long_policy_id} in {ctx.obj.data_partition}'. Use --force") - continue - - if delete or force: - console.print(f"deleting {short_policy_id}") - with console.status(f"Deleting partition policy {short_policy_id}"): - result = delete_partition_policy(ctx, short_policy_id) - if result: - console.print(f"Policy [green]{long_policy_id}[/] deleted") - else: - error_console.print(f"Delete of {long_policy_id} failed") - else: - if force: - with console.status(f"Deleting partition policy {short_policy_id}"): - result = delete_partition_policy(ctx, short_policy_id) - if result: - console.print( - f"Policy [green]{short_policy_id}[/] deleted") - else: - error_console.print(f"Delete of {short_policy_id} failed") - else: - error_console.print(f"Skipping delete of {policy_id}") - - -def get_info(ctx: typer.Context, base_url): - """ - Get info - """ - try: - with console.status("Getting info..."): - r = requests.get(base_url + "/info", headers=headers(ctx)) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - console.print(r.json()) - else: - error_console.print(r.text) - raise typer.Exit(1) # non-zero exit status - - -@cli.command(rich_help_panel="Utils") -def storage( - ctx: typer.Context, - id_list: List[str] = typer.Argument(..., help="IDs to retrieve"), - versions: bool = typer.Option( - False, "--versions", "-V", help="Show versions"), - version: str = typer.Option( - None, "-v", "--version", help="Get particular version"), - dataset: bool = typer.Option( - False, "--dataset", "-D", help="dataset"), - get: bool = typer.Option( - False, "--get", hidden=True, help="get dataset"), - download: bool = typer.Option( - False, "--download", "-d", help="download dataset"), - raw: bool = typer.Option(False, "--raw", help="json output"), -): - """ - Storage and Dataset record retrieval utility - """ - if id_list == ['-']: - id_list = sys.stdin.read().strip().split() - - if not len(id_list): - error_console.print("Error: Missing input 'ID_LIST...'") - raise typer.Exit(1) # non-zero exit status - - for id in id_list: - try: - if versions: - url = ctx.obj.storage_url + "/records/versions/" + id - elif version: - url = ctx.obj.storage_url + "/records/" + id + "/" + version - elif dataset: - url = ctx.obj.dataset_url + "/getDatasetRegistry?id=" + id - elif get or download: - url = ctx.obj.dataset_url + "/getRetrievalInstructions?id=" + id - #url = ctx.obj.dataset_url + "/getRetrievalInstructions?kindSubType=" + id - else: - url = ctx.obj.storage_url + "/records/" + id - - with console.status(f"Retrieving record..."): - r = requests.get(url, - headers=headers(ctx) - ) - except 
requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - if raw: - console.print(r.json()) - elif get or download: - get_dataset(ctx=ctx, data=r.json(), download=download) - else: - console.print(r.json()) - else: - error_console.print("Error:", r.text, r.status_code) - raise typer.Exit(1) # non-zero exit status - - -def get_filename_from_storage(ctx: typer.Context, id): - try: - with console.status(f"Retrieving storage record {id}..."): - r = requests.get(ctx.obj.storage_url + "/records/" + id, - headers=headers(ctx) - ) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - r_json = r.json() - if "data" in r_json and \ - "DatasetProperties" in r_json["data"] and \ - "FileSourceInfo" in r_json["data"]["DatasetProperties"] and \ - "Name" in r_json["data"]["DatasetProperties"]["FileSourceInfo"]: - name = r_json["data"]["DatasetProperties"]["FileSourceInfo"]["Name"] - return name - else: - error_console.print( - f"Error: FileSourceInfo Name Path missing ['data']['DatasetProperties']['FileSourceInfo']['Name'] in record {id}") - if ctx.obj.debug: - console.print(r_json) - raise typer.Exit(1) # non-zero exit status - - -def get_dataset(ctx: typer.Context, data, download=False, raw=False): - id = data["delivery"][0]["datasetRegistryId"] - unsigned_url = data["delivery"][0]["retrievalProperties"]["unsignedUrl"] - name = get_filename_from_storage(ctx=ctx, id=id) - filename = os.path.basename(name) - signed_url = data["delivery"][0]["retrievalProperties"]["signedUrl"] - - if download: - try: - with console.status(f"Downloading {id} {name}..."): - r = requests.get(signed_url) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - if r.ok: - with console.status(f"Saving {filename}..."): - f = open(filename, "wb") - f.write(r.content) - f.close() - console.print(f"Downloaded '{name}' from '{id}' as '{filename}'") - else: - console.print(f"unsigned_url: {unsigned_url}") - console.print(f"signed_url: {signed_url}") - - -@cli.command(rich_help_panel="Utils") -def search( - ctx: typer.Context, - id: str = typer.Argument(None), - kind: str = typer.Option( - "*:*:*:*", help="The kind(s) of the record to query"), - query: str = typer.Option( - "", help="Query string based on Lucene query string syntax"), - sort_field: str = typer.Option("id", help="Sort field"), - sort_order: str = typer.Option( - "ASC", help="Sort Order", callback=search_cli.search_order_callback), - limit: int = typer.Option( - 10, help="The maximum number of results to return from the given offset"), - offset: int = typer.Option( - 0, help="The starting offset from which to return results"), - detail: bool = typer.Option( - False, "--detail", help="Show more details"), - id_only: bool = typer.Option( - False, "--id-only", help="Only show IDs"), - as_owner: bool = typer.Option( - False, "--as-owner", help="Query as owner"), - random_id: bool = typer.Option( - False, "--random-id", help="Get a random ID"), - output: OutputType = typer.Option("fancy", help="Output style"), - file: Path = typer.Option("search.xlsx", - "-f", "--file", - dir_okay=False, - resolve_path=True, - help="Excel file for output=excel" - ), - open_file: bool = typer.Option( - False, "--open", help="Open file after saving"), - raw: bool = typer.Option( - False, "--raw", help="Show raw/json search results"), - spatial_filter: Path = typer.Option(None, - exists=True, - file_okay=True, - dir_okay=False, - writable=False, - readable=True, - resolve_path=True, - help="Spatial filter json file" - 
), - with_cursor: bool = typer.Option( - False, "--with-cursor", help="Use cursors"), - cursor: str = typer.Option( - None, "--cursor", help="Pass cursor back") -): - """ - Search utility - - Kind must follow the convention: {Schema-Authority}:{dataset-name}:{record-type}:{version} - - Example Searches: - Wellbore Master Data Instances for Well with ID 1691: - --kind="*:*:master-data--Wellbore:*" --query=data.WellID:\\"osdu:master-data--Well:1691:\\" - - Wellbore Trajectory Work Product Components associated with Wellbore ID 1691: - --kind="*:*:work-product-component--WellboreTrajectory:*" --query=data.WellboreID:\\"osdu:master-data--Wellbore:1691:\\" - - Any record with any field equal "well": - --kind="*:*:*:*" --query=well - - Where source is blended or TNO: - --kind="*:*:*:*" --query="data.Source:(BLENDED TNO)" - - Where source is exactly "TNO": - --kind="*:*:*:*" --query=data.Source:\\"TNO\\" - - All wellbore logs from 2022 year: - --kind="*:*:work-product-component--WellLog:*" --query="createTime:[2022-01-01 TO 2022-12-31]" - - All well logs deeper than 4000m: - --kind="*:*:work-product-component--WellLog:*" query="data.BottomMeasuredDepth:[4000 TO *]" - - All well logs deeper than 2000m or shallower than 4000m: - --kind="*:*:work-product-component--WellLog:*" --query="data.BottomMeasuredDepth:(>=2000 OR <=4000)" - - Note: --with-cursor - To process the next --with-cursor request, the search service keeps the search context alive for 1 minute, - which is the time required to process the next batch of results. Each cursor request sets a new expiry time. - The cursor will expire after 1 min and won't return any more results if the requests are not made in specified time. - """ - if id: - query = f"id:\"{id}\"" - - # Note: above \\ is for help output, so just one \ is needed - - # search_json_data = { - # 'kind': kind, - # 'query': query, - # 'offset': offset, - # 'limit': limit, - # 'queryAsOwner': queryasowner, - # 'aggregateBy': aggregateby, - # 'sort': { - # 'field': [ - # 'id', - # ], - # 'order': [ - # 'ASC', - # ], - # }, - # 'returnedFields': [], - # } - - sort_data = { - 'field': [sort_field], - 'order': [sort_order] - } - search_json_data = { - 'kind': kind, - 'query': query, - 'limit': limit, - 'offset': offset, - 'sort': sort_data, - 'queryAsOwner': as_owner - } - - if spatial_filter: - try: - with rich.progress.open(spatial_filter, "r") as f: - spatial_filter_data = json.load(f) - f.close() - except json.decoder.JSONDecodeError as err: - error_console.print( - f"Error: {spatial_filter} malformed json. {err}") - raise typer.Exit(1) # non-zero exit status - - search_json_data["spatialFilter"] = spatial_filter_data - #search_json_data["returnedFields"] = ["Location"] - - if with_cursor: - url = ctx.obj.search_url + "/query_with_cursor" - if cursor: - search_json_data["cursor"] = cursor - else: - url = ctx.obj.search_url + "/query" - - if ctx.obj.debug: - console.print(search_json_data) - - try: - with console.status(f"Retrieving search results..."): - r = requests.post(url, - json=search_json_data, - headers=headers(ctx) - ) - except requests.exceptions.RequestException as e: - raise SystemExit(e) - - try: - # silly math for figuring out terminal size - width, height = os.get_terminal_size() - except: - # not a tty... 
no worries just simple output - output = output.simple - - if r.ok: - if "results" in r.json(): - count = len(r.json()["results"]) - - if raw: - console.print(r.json()) - elif not count: - error_console.print("No results found") - raise typer.Exit(2) # non-zero exit status - elif id_only: - search_cli.display_search_results_idonly(r.json()) - elif random_id: - search_cli.display_search_results_idonly_random(r.json()) - elif output == output.fancy: - search_cli.display_search_results_fancy( - r.json(), show_kind=detail) - elif output == output.simple: - search_cli.display_search_results_simple(r.json()) - elif output == output.excel: - with console.status(f"Saving search results..."): - search_cli.display_search_results_excel( - r.json(), show_kind=detail, excelfile=file.name) - #os.system(f"open -a'Microsoft Excel.app' {file.name}") - # os.startfile(f"{file.name}") - if open_file: - search_cli.open_file(file.name) - elif output == output.tree: - error_console.print("Error: Not supported output type") - raise typer.Exit(1) # non-zero exit status - - if with_cursor: - console.print( - f":warning-emoji: [bold] Cursor: [blue]{r.json()['cursor']}") - else: - error_console.print("Error:", r.text, r.status_code) - raise typer.Exit(1) # non-zero exit status - - -if __name__ == "__main__": - cli() diff --git a/frontend/admincli/rego/README.md b/frontend/admincli/rego/README.md deleted file mode 100644 index ccd06c0fb6889530a803bb9b1251d66e14a2eb0d..0000000000000000000000000000000000000000 --- a/frontend/admincli/rego/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Rego -This directory contains code from from https://github.com/open-policy-agent/rego-python -Under the Apache License Version 2.0, January 2004 diff --git a/frontend/admincli/rego/ast.py b/frontend/admincli/rego/ast.py deleted file mode 100644 index 6a1c671991d739de81caca3b21b14d7e7866fe54..0000000000000000000000000000000000000000 --- a/frontend/admincli/rego/ast.py +++ /dev/null @@ -1,252 +0,0 @@ -import json - - -class QuerySet(object): - def __init__(self, queries): - self.queries = queries - - @classmethod - def from_data(cls, data): - return cls([Query.from_data(q) for q in data]) - - def __str__(self): - return ( - self.__class__.__name__ - + "(" - + ", ".join(q.__class__.__name__ + "(" + str(q) + ")" for q in self.queries) - + ")" - ) - - -class Query(object): - def __init__(self, exprs): - self.exprs = exprs - - @classmethod - def from_data(cls, data): - return cls([Expr.from_data(e) for e in data]) - - def __str__(self): - return "; ".join(str(e) for e in self.exprs) - - -class Expr(object): - def __init__(self, terms): - self.terms = terms - - @property - def operator(self): - if not self.is_call(): - raise ValueError("not a call expr") - return self.terms[0] - - @property - def operands(self): - if not self.is_call(): - raise ValueError("not a call expr") - return self.terms[1:] - - def is_call(self): - return not isinstance(self.terms, Term) - - def op(self): - return ".".join([str(t.value.value) for t in self.operator.value.terms]) - - @classmethod - def from_data(cls, data): - terms = data["terms"] - if isinstance(terms, dict): - return cls(Term.from_data(terms)) - return cls([Term.from_data(t) for t in terms]) - - def __str__(self): - if self.is_call(): - return ( - str(self.operator) - + "(" - + ", ".join(str(o) for o in self.operands) - + ")" - ) - return str(self.terms) - - -class Term(object): - def __init__(self, value): - self.value = value - - @classmethod - def from_data(cls, data): - if data["type"] == "null": - 
data["value"] = None - return cls(_VALUE_MAP[data["type"]].from_data(data["value"])) - - def __str__(self): - return str(self.value) - - -class Scalar(object): - def __init__(self, value): - self.value = value - - @classmethod - def from_data(cls, data): - return cls(data) - - def __str__(self): - return json.dumps(self.value) - - -class Var(object): - def __init__(self, value): - self.value = value - - @classmethod - def from_data(cls, data): - return cls(data) - - def __str__(self): - return str(self.value) - - -class Ref(object): - def __init__(self, terms): - self.terms = terms - - def operand(self, idx): - return self.terms[idx] - - @classmethod - def from_data(cls, data): - return cls([Term.from_data(x) for x in data]) - - def __str__(self): - return str(self.terms[0]) + "".join("[" + str(t) + "]" for t in self.terms[1:]) - - -class Array(object): - def __init__(self, terms): - self.terms = terms - - @classmethod - def from_data(cls, data): - return cls([Term.from_data(x) for x in data]) - - def __str__(self): - return "[" + ",".join(str(x) for x in self.terms) + "]" - - -class Set(object): - def __init__(self, terms): - self.terms = terms - - @classmethod - def from_data(cls, data): - return cls([Term.from_data(x) for x in data]) - - def __str__(self): - if len(self.terms) == 0: - return "set()" - return "{" + ",".join(str(x) for x in self.terms) + "}" - - -class Object(object): - def __init__(self, *pairs): - self.pairs = pairs - - @classmethod - def from_data(cls, data): - return cls(*[(Term.from_data(p[0]), Term.from_data(p[1])) for p in data]) - - def __str__(self): - return "{" + ",".join({str(x): str(y) for (x, y) in self.pairs}) + "}" - - -class Call(object): - def __init__(self, terms): - self.terms = terms - - @classmethod - def from_data(cls, data): - return cls([Term.from_data(x) for x in data]) - - @property - def operator(self): - return self.terms[0] - - @property - def operands(self): - return self.terms[1:] - - def op(self): - return ".".join([str(t.value.value) for t in self.operator.value.terms]) - - def __str__(self): - return str(self.operator) + "(" + ", ".join(str(o) for o in self.operands) + ")" - - -class ArrayComprehension(object): - def __init__(self, term, body): - self.term = term - self.body = body - - @classmethod - def from_data(cls, data): - return cls(Term.from_data(data["term"]), Query.from_data(data["body"])) - - def __str__(self): - return "[" + str(self.term) + " | " + str(self.body) + "]" - - -class SetComprehension(object): - def __init__(self, term, body): - self.term = term - self.body = body - - @classmethod - def from_data(cls, data): - return cls(Term.from_data(data["term"]), Query.from_data(data["body"])) - - def __str__(self): - return "{" + str(self.term) + " | " + str(self.body) + "}" - - -class ObjectComprehension(object): - def __init__(self, key, value, body): - self.key = key - self.value = value - self.body = body - - @classmethod - def from_data(cls, data): - return cls( - Term.from_data(data["key"]), - Term.from_data(data["value"]), - Query.from_data(data["body"]), - ) - - def __str__(self): - return ( - "{" + str(self.key) + ":" + str(self.value) + " | " + str(self.body) + "}" - ) - - -def is_comprehension(x): - """Returns true if this is a comprehension type.""" - return isinstance(x, (ObjectComprehension, SetComprehension, ArrayComprehension)) - - -_VALUE_MAP = { - "null": Scalar, - "boolean": Scalar, - "number": Scalar, - "string": Scalar, - "var": Var, - "ref": Ref, - "array": Array, - "set": Set, - "object": Object, - 
"call": Call, - "objectcomprehension": ObjectComprehension, - "setcomprehension": SetComprehension, - "arraycomprehension": ArrayComprehension, -} diff --git a/frontend/admincli/rego/walk.py b/frontend/admincli/rego/walk.py deleted file mode 100644 index 94b6b1f91eeda349f651992da0a92a5be96faa7f..0000000000000000000000000000000000000000 --- a/frontend/admincli/rego/walk.py +++ /dev/null @@ -1,59 +0,0 @@ -import json -from rich.console import Console -from rego import ast - -console = Console() - -def walk(node, vis): - next = vis(node) - if next is None: - return - - if isinstance(node, ast.QuerySet): - for q in node.queries: - walk(q, next) - elif isinstance(node, ast.Query): - for e in node.exprs: - walk(e, next) - elif isinstance(node, ast.Expr): - if node.is_call(): - walk(node.operator, next) - for o in node.operands: - walk(o, next) - else: - walk(node.terms, next) - elif isinstance(node, ast.Term): - walk(node.value, next) - elif isinstance(node, (ast.Ref, ast.Array, ast.Set, ast.Call)): - for t in node.terms: - walk(t, next) - elif isinstance(node, ast.Object): - for p in node.pairs: - walk(p[0], next) - walk(p[1], next) - elif isinstance(node, ast.ObjectComprehension): - walk(node.key, next) - walk(node.value, next) - walk(node.body, next) - elif isinstance(node, (ast.SetComprehension, ast.ArrayComprehension)): - walk(node.term, next) - walk(node.body, next) - - -def pretty_print(node): - class printer(object): - def __init__(self, indent): - self.indent = indent - - def __call__(self, node): - if isinstance(node, ast.Scalar): - name = node.__class__.__name__ + "(" + json.dumps(node.value) + ")" - elif isinstance(node, ast.Var): - name = node.__class__.__name__ + "(" + node.value + ")" - else: - name = node.__class__.__name__ - console.print(" " * self.indent + name) - return printer(self.indent + 2) - - vis = printer(0) - walk(node, vis) diff --git a/frontend/admincli/requirements-dev.txt b/frontend/admincli/requirements-dev.txt deleted file mode 100644 index 2f8cc87dfad6fde793e82dfc6d75522d144d208e..0000000000000000000000000000000000000000 --- a/frontend/admincli/requirements-dev.txt +++ /dev/null @@ -1,8 +0,0 @@ -XlsxWriter == 3.0.3 -pytest == 7.1.3 -requests == 2.25.1 -rich == 12.6.0 -typer == 0.6.1 -uuid7 == 0.1.0 -boto3 -jwt diff --git a/frontend/admincli/requirements.txt b/frontend/admincli/requirements.txt deleted file mode 100644 index e27d2dbdf0dcc86a9232269fcef00c66e1055be8..0000000000000000000000000000000000000000 --- a/frontend/admincli/requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -# Automatically generated by https://github.com/damnever/pigar. 
- -XlsxWriter == 3.0.3 -requests == 2.25.1 -rich == 12.6.0 -typer == 0.6.1 -uuid7 == 0.1.0 diff --git a/frontend/admincli/run-integration-tests.sh b/frontend/admincli/run-integration-tests.sh deleted file mode 100755 index faab7f5fcf36a13109ba34a8e9ba7fd7100bc0b3..0000000000000000000000000000000000000000 --- a/frontend/admincli/run-integration-tests.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env bash -# USAGE: -# -c CLOUD_PROVIDER -while getopts c: flag -do - case "${flag}" in - c) CLOUD_PROVIDER=${OPTARG};; - esac -done - -python3 --version -python3 -m venv env -source env/bin/activate -python3 -m pip install --upgrade pip -python3 -m pip install -r requirements.txt -python3 -m pip install -r requirements-dev.txt - -# Set BASE_URL and TOKEN for the selected cloud provider -if [ ${CLOUD_PROVIDER} == "aws" ]; then - export BASE_URL=$AWS_BASE_URL - export TOKEN=$(python3 ../../app/tests/aws/aws_jwt_client.py) -elif [ ${CLOUD_PROVIDER} == "gcp" ]; then - export BASE_URL=$HOST - export TOKEN=$(python3 ../../app/tests/anthos/anthos_jwt_client.py) -elif [ ${CLOUD_PROVIDER} == "azure" ]; then - export BASE_URL=$AZURE_DNS_NAME - export TOKEN=$(curl -ks -XPOST "https://login.microsoftonline.com/${AZURE_TENANT_ID}/oauth2/token" -d "grant_type=client_credentials&client_id=${AZURE_PRINCIPAL_ID}&client_secret=${AZURE_PRINCIPAL_SECRET}&resource=${AZURE_APP_ID}" | jq --raw-output '.access_token') -elif [ ${CLOUD_PROVIDER} == "ibm" ]; then - export BASE_URL=$IBM_POLICY_BASE_URL - export TOKEN=$svctoken -else - echo "Not supported cloud provider ${CLOUD_PROVIDER}" - exit 1 -fi - -if [ -z $TOKEN ]; then - echo "[ERROR] Not able to get token (TOKEN)" - exit 1 -fi - -echo DATA_PARTITION $DATA_PARTITION -echo CLOUD_PROVIDER: $CLOUD_PROVIDER -echo BASE_URL: $BASE_URL -python3 pol.py setup -python3 -m pytest -v diff --git a/frontend/admincli/search_cli.py b/frontend/admincli/search_cli.py deleted file mode 100644 index 4621e02103e8486482dae9ac0021c83bf906b5d9..0000000000000000000000000000000000000000 --- a/frontend/admincli/search_cli.py +++ /dev/null @@ -1,98 +0,0 @@ -from rich.table import Table -from rich.console import Console -import xlsxwriter -import os, sys, subprocess -import typer -import random - -console = Console() -error_console = Console(stderr=True, style="bold red") - -def display_search_results_excel(data, - show_kind = False, - excelfile = "search.xlsx", - worksheetname = "Search Results"): - - # Workbook() takes one, non-optional, argument - # which is the filename that we want to create. - workbook = xlsxwriter.Workbook(excelfile) - - # The workbook object is then used to add a new - # worksheet via the add_worksheet() method. - worksheet = workbook.add_worksheet(worksheetname) - - # Create a new Format object to format cells - # in worksheets using the add_format() method; - # here we create a bold format object. - bold = workbook.add_format({'bold': 1}) - - headings = ['ID', "Kind", "Authority", "Source", "Type", "Create Time", "Create User"] - - # Write a row of data starting from 'A1' with bold format - worksheet.write_row('A1', headings, bold) - - # Start from the first cell below the headers. - row = 1 - col = 0 - for item in data["results"]: - worksheet.write_row(row=row, col=0, data=( - item['id'], item["kind"], item["authority"], item["source"], item["type"], item["createTime"], item["createUser"] - )) - row += 1 - - worksheet.set_column(0, 0, 50) # Column A width set to 50. - worksheet.set_column(1, 1, 30) # Column B width set to 30. - worksheet.set_column(2, 3, 8) # Columns C-D width set to 8. 
- worksheet.set_column(4, 4, 20) # Column E width set to 20. - worksheet.set_column(5, 6, 25) # Columns F-G width set to 25. - - # Finally, close the Excel file - workbook.close() - console.print(f"Search results ({row-1} records) saved as '{excelfile}'") - -def display_search_results_simple(data): - for item in data["results"]: - created = item["createTime"] - - print(item['id'], item["kind"], item["authority"], item["source"], item["type"], created) - -def display_search_results_idonly(data): - for item in data["results"]: - print(item['id']) - -def display_search_results_idonly_random(data): - lucky = random.choice(data["results"]) - print(lucky['id']) - -def display_search_results_fancy(data, show_kind=False): - count=len(data["results"]) - if not count: - return - table = Table(title=f"Search Results ({count})") - table.add_column("ID", justify="full", style="cyan", no_wrap=True, min_width=25) - if show_kind: - table.add_column("Kind", justify="full", style="cyan", no_wrap=True, min_width=25) - table.add_column("Authority", justify="center", style="green", no_wrap=True, max_width=10) - table.add_column("Source", justify="center", style="green", no_wrap=True, max_width=8) - table.add_column("Type", justify="center", style="green", no_wrap=True, min_width=15) - table.add_column("Created", justify="center", style="green", no_wrap=True, max_width=11) - - for item in data["results"]: - created = item["createTime"] - if show_kind: - table.add_row(item['id'], item["kind"], item["authority"], item["source"], item["type"], created) - else: - table.add_row(item['id'], item["authority"], item["source"], item["type"], created) - console.print(table) - -def open_file(filename): - if sys.platform == "win32": - os.startfile(filename) - else: - opener = "open" if sys.platform == "darwin" else "xdg-open" - subprocess.call([opener, filename]) - -def search_order_callback(ctx: typer.Context, value: str): - if value != "ASC" and value != 'DESC': - raise typer.BadParameter("Only ASC or DESC is allowed") - return value \ No newline at end of file diff --git a/frontend/admincli/setenv.sh b/frontend/admincli/setenv.sh deleted file mode 100644 index d9d62cc387bad13f6cb7ae4b9e42f7d570e301fa..0000000000000000000000000000000000000000 --- a/frontend/admincli/setenv.sh +++ /dev/null @@ -1,10 +0,0 @@ -# Example env var settings -# load by . ./setenv -export DATA_PARTITION=osdu -export TOKEN="$(gcloud auth print-access-token)" -export BASE_URL="https://policy-dev.osdu.lol" -export POLICY_URL="http://localhost:8080" -#export ENTITLEMENTS_URL="https://policy-dev.osdu.lol/api/entitlements/v2" -#export LEGAL_URL="https://policy-dev.osdu.lol/api/legal/v1" -#export STORAGE_URL="https://policy-dev.osdu.lol/api/storage/v2" -#export SEARCH_URL="https://policy-dev.osdu.lol/api/search/v2" diff --git a/frontend/admincli/setenvsa.sh b/frontend/admincli/setenvsa.sh deleted file mode 100644 index 5cd2786267e6c6f9e015c761147db0419fb14df9..0000000000000000000000000000000000000000 --- a/frontend/admincli/setenvsa.sh +++ /dev/null @@ -1,7 +0,0 @@ -# Example env var settings using service account in gcp -# load by . 
./setenv -export DATA_PARTITION=osdu -export TOKEN="$(gcloud auth print-access-token --impersonate-service-account=$SERVICE_ACCOUNT)" -export POLICY_URL=$GCP_BASE_URL -export ENTITLEMENTS_URL=$GCP_BASE_URL/api/entitlements/v2/groups -export LEGAL_URL=$GCP_BASE_URL/api/legal/v1/legaltags diff --git a/frontend/admincli/tests/conftest.py b/frontend/admincli/tests/conftest.py deleted file mode 100644 index 9d0a7707978c999d62fad12c2d5b0fe96ad006bb..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/conftest.py +++ /dev/null @@ -1,35 +0,0 @@ -# conftest.py -import pytest -import os - -""" -""" -def pytest_addoption(parser): - parser.addoption("--data_partition", action="store", help="data-partition-id to use") - -@pytest.fixture(scope='session') -def data_partition(request): - data_partition_val = request.config.option.data_partition - if data_partition_val is None: - data_partition_val = os.getenv('DATA_PARTITION') - if data_partition_val is None: - data_partition_val = "osdu" - return data_partition_val - -""" -# NUM_SEARCH_STORAGE_DATASET_TESTS -Number of times to go thru workflow - -Call to search, get a random ID from that -Look up that random ID in search -Look up that random ID in dataset -Attempt to download the file in that random ID -""" -NUM_SEARCH_STORAGE_DATASET_TESTS=25 - -""" -# RANDOM_SEARCH_LMIT -When calling search to get a random ID, -How many records should we pull before -selecting one of them. -""" -RANDOM_SEARCH_LMIT=250 diff --git a/frontend/admincli/tests/create_tag_with_extension.json b/frontend/admincli/tests/create_tag_with_extension.json deleted file mode 100644 index 5133e6830348957f6364817fad4e7b00a6ce53df..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/create_tag_with_extension.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "opendes-dz-test", - "description": "Legal Tag added for Well - updated", - "properties": { - "countryOfOrigin": [ - "US", - "CA" - ], - "contractId": "123457", - "expirationDate": "2025-12-26", - "originator": "Schlumberger", - "dataType": "Third Party Data", - "securityClassification": "Private", - "personalData": "No Personal Data", - "exportClassification": "EAR99", - "extensionProperties": { - "AgreementIdentifier": "dz-test", - "EffectiveDate": "2022-06-01T00:00:00", - "TerminationDate": "2099-12-31T00:00:00", - "AffiliateEnablementIndicator": true, - "AgreementParties": [ - { - "AgreementPartyType": "EnabledAffiliate", - "AgreementParty": "Shell RDS" - } - ] - } - } -} diff --git a/frontend/admincli/tests/evaluate_query.json b/frontend/admincli/tests/evaluate_query.json deleted file mode 100644 index d1cf443d0ada66a42606b6a3a55d29c7b0438daa..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/evaluate_query.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "input": { - "operation": "update", - "records": [ - { - "id":"${data_partition}:test:1.4.1654807204111", - "kind":"${data_partition}:bulkupdate:test:1.1.1654807204111", - "legal":{ - "legaltags":[ - "${legal_tag}" - ], - "otherRelevantDataCountries":["US"], - "status":"compliant" - }, - "acls":{ - "viewers":["data.default.viewers@${data_partition}.${domain}"], - "owners":["data.default.owners@${data_partition}.${domain}"] - } - } - ] - } -} diff --git a/frontend/admincli/tests/example.rego b/frontend/admincli/tests/example.rego deleted file mode 100644 index fb8300d31156c74435881ef749a76c715e2a7749..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/example.rego +++ /dev/null @@ -1,5 +0,0 @@ -package 
osdu.partition["${data_partition}"].${name} - -allow { - input.subject.clearance_level >= data.reports[_].clearance_level -} diff --git a/frontend/admincli/tests/legaltag_create.json b/frontend/admincli/tests/legaltag_create.json deleted file mode 100644 index 88aa24253c2fe4f9916b44c8e7a7a51e9950c0c6..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/legaltag_create.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "name": "${name}", - "description": "A legal tag for testing AdminCLI", - "properties": { - "countryOfOrigin": [ - "US" - ], - "contractId": "A1234", - "expirationDate": "${expiration_date}", - "originator": "Default", - "dataType": "Public Domain Data", - "securityClassification": "Public", - "personalData": "No Personal Data", - "exportClassification": "EAR99" - } -} diff --git a/frontend/admincli/tests/legaltag_create_extension_random.json b/frontend/admincli/tests/legaltag_create_extension_random.json deleted file mode 100644 index 2259e8d1ad291bd4c192aefd6454db0e2a9c5be3..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/legaltag_create_extension_random.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "${random_name}", - "description": "Legal Tag added for Well - updated", - "properties": { - "countryOfOrigin": [ - "US", - "CA" - ], - "contractId": "123457", - "expirationDate": "${expiration_date}", - "originator": "Schlumberger", - "dataType": "Third Party Data", - "securityClassification": "Private", - "personalData": "No Personal Data", - "exportClassification": "EAR99", - "extensionProperties": { - "AgreementIdentifier": "dz-test", - "EffectiveDate": "2022-06-01T00:00:00", - "TerminationDate": "2099-12-31T00:00:00", - "AffiliateEnablementIndicator": true, - "AgreementParties": [ - { - "AgreementPartyType": "EnabledAffiliate", - "AgreementParty": "Shell RDS" - } - ] - } - } -} diff --git a/frontend/admincli/tests/legaltag_create_random.json b/frontend/admincli/tests/legaltag_create_random.json deleted file mode 100644 index 3bef2ce3094c9662661e35ac26bc3138f2d12fc7..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/legaltag_create_random.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "name": "${random_name}", - "description": "A legal tag for testing AdminCLI", - "properties": { - "countryOfOrigin": [ - "US" - ], - "contractId": "A1234", - "expirationDate": "${expiration_date}", - "originator": "Default", - "dataType": "Public Domain Data", - "securityClassification": "Public", - "personalData": "No Personal Data", - "exportClassification": "EAR99" - } -} diff --git a/frontend/admincli/tests/legaltag_update.json b/frontend/admincli/tests/legaltag_update.json deleted file mode 100644 index df2093b37beb7315b66f6614310242b93b77d04d..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/legaltag_update.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name": "${name}", - "description": "A legal tag", - "contractId": "A1234", - "expirationDate": "${expiration_date}" -} diff --git a/frontend/admincli/tests/spatial_filter.json b/frontend/admincli/tests/spatial_filter.json deleted file mode 100644 index 89def1a1cf3c29818ff93eb326491e202435a35c..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/spatial_filter.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "field": "data.Location", - "byDistance": { - "point": { - "latitude": 37.450727, - "longitude": -122.174762 - }, - "distance": 1500 - } -} diff --git a/frontend/admincli/tests/spatial_filter2.json b/frontend/admincli/tests/spatial_filter2.json deleted file 
mode 100644 index b6694ac9eeaad68bfb82efeb0402c6f97b5ac2f0..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/spatial_filter2.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "field": "data.SpatialLocation.Wgs84Coordinates", - "byGeoPolygon": { - "points": [ - {"longitude":-90.65, "latitude":28.56}, - {"longitude":-90.65, "latitude":35.56}, - {"longitude":-85.65, "latitude":35.56}, - {"longitude":-85.65, "latitude":28.56}, - {"longitude":-90.65, "latitude":28.56} - ] - } -} diff --git a/frontend/admincli/tests/spatial_filter3.json b/frontend/admincli/tests/spatial_filter3.json deleted file mode 100644 index eb05d4c7c9c4cdb68b156c203249731d66b2bb6b..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/spatial_filter3.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "field": "SpatialLocation.Wgs84Coordinates", - "geometries": { - "topLeft": { - "longitude": 2.9493408203125, - "latitude": 50.859180945520826 - }, - "bottomRight": { - "longitude": 6.2580810546875, - "latitude": 54.75956761546834 - } - } -} diff --git a/frontend/admincli/tests/spatial_filter_entireworld.json b/frontend/admincli/tests/spatial_filter_entireworld.json deleted file mode 100644 index c1a3ec47b3f0f2f97c7e939b52ac7487f1c963e7..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/spatial_filter_entireworld.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "field": "data.SpatialLocation.Wgs84Coordinates", - "byBoundingBox": { - "topLeft": { - "latitude": 90, - "longitude": -180 - }, - - "bottomRight": { - "latitude": -90, - "longitude": 180 - } - } - } diff --git a/frontend/admincli/tests/spatial_filter_ref.json b/frontend/admincli/tests/spatial_filter_ref.json deleted file mode 100644 index 3b48756bfa71a78bb27d2fb13b4f745673589e2b..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/spatial_filter_ref.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "field": "data.SpatialLocation.Wgs84Coordinates", - "byDistance": { - "point": { - "longitude": 5.98136045, - "latitude": 51.43503877 - }, - "distance": 1609.34 - } -} diff --git a/frontend/admincli/tests/test_cli.py b/frontend/admincli/tests/test_cli.py deleted file mode 100644 index e7d05937f258cf6b2943b7b757c264e1d9c352e0..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/test_cli.py +++ /dev/null @@ -1,233 +0,0 @@ -import sys -import os -import pytest -from typer.testing import CliRunner -sys.path.append(os.path.abspath('..')) -from pol import cli - -runner = CliRunner() - -@pytest.mark.skipif(sys.version_info == (3, 9), reason="requires python3.9") - -@pytest.mark.parametrize("rego_path,policy_name", - [("tests/example", "example"), - ("tests/example", "example2"), - ("../../app/tests/templates/search2","search2")]) -def test_cli_policy_add_policy(rego_path, policy_name): - result = runner.invoke(cli, ["add", "-f", f"{rego_path}.rego", "-t", policy_name, "--force"]) - assert result.exit_code == 0 - -def test_cli_policy_ls(data_partition): - result = runner.invoke(cli, ["ls"]) - assert result.exit_code == 0 - assert "osdu/instance/legal.rego" in result.stdout - assert "osdu/instance/entitlements.rego" in result.stdout - assert "osdu/instance/dataauthz.rego" in result.stdout - assert f"osdu/partition/{data_partition}/dataauthz.rego" in result.stdout - -def test_cli_policy_ls_raw(data_partition): - result = runner.invoke(cli, ["ls", "--raw"]) - assert result.exit_code == 0 - assert "osdu/instance/legal.rego" in result.stdout - assert "osdu/instance/entitlements.rego" in result.stdout - assert 
"osdu/instance/dataauthz.rego" in result.stdout - assert f"osdu/partition/{data_partition}/dataauthz.rego" in result.stdout - -def test_cli_policy_ls_output_tree(): - result = runner.invoke(cli, ["ls", "--output=tree"]) - assert result.exit_code == 0 - -def test_cli_policy_ls_output_fancy(data_partition): - result = runner.invoke(cli, ["ls", "--output=fancy"]) - assert result.exit_code == 0 - assert "osdu/instance/legal.rego" in result.stdout - assert "osdu/instance/entitlements.rego" in result.stdout - assert "osdu/instance/dataauthz.rego" in result.stdout - assert f"osdu/partition/{data_partition}/dataauthz.rego" in result.stdout - -def test_cli_policy_ls_output_simple(data_partition): - result = runner.invoke(cli, ["ls", "--output=simple"]) - assert result.exit_code == 0 - assert "osdu/instance/legal.rego" in result.stdout - assert "osdu/instance/entitlements.rego" in result.stdout - assert "osdu/instance/dataauthz.rego" in result.stdout - assert f"osdu/partition/{data_partition}/dataauthz.rego" in result.stdout - -def test_cli_policy_ls_dataauthz(data_partition): - result = runner.invoke(cli, ["ls", "dataauthz"]) - assert f"package osdu.partition[\"{data_partition}\"].dataauthz" in result.stdout - assert result.exit_code == 0 - -def test_cli_policy_ls_dataauthz_download(data_partition): - result = runner.invoke(cli, ["ls", "dataauthz", "--download"]) - assert f"saved as" in result.stdout - assert result.exit_code == 0 - assert os.path.exists("dataauthz.rego"), f"dataauthz.rego does not exist" - os.remove("dataauthz.rego") - -def test_cli_policy_ls_dataauthz_search_download_raw(data_partition): - result = runner.invoke(cli, ["ls", "dataauthz", "search", "--raw", "--download"]) - assert f"raw saved as" in result.stdout - assert result.exit_code == 0 - assert os.path.exists("dataauthz.json"), f"dataauthz.json does not exist" - assert os.path.exists("search.json"), f"dataauthz.json does not exist" - os.remove("dataauthz.json") - os.remove("search.json") - -def test_cli_policy_ls_stdin_dataauthz(data_partition): - result = runner.invoke(cli, ["ls", "-"], input="dataauthz\n") - assert result.exit_code == 0 - assert f"package osdu.partition[\"{data_partition}\"].dataauthz" in result.stdout - -def test_cli_policy_ls_more_than_1_legal_dataauthz(data_partition): - result = runner.invoke(cli, ["ls", "osdu/instance/legal.rego", "dataauthz"]) - assert 'package osdu.instance.legal' in result.stdout - assert f"package osdu.partition[\"{data_partition}\"].dataauthz" in result.stdout - assert result.exit_code == 0 - -def test_cli_policy_ls_dataauthz_rego(data_partition): - result = runner.invoke(cli, ["ls", "dataauthz.rego"]) - assert f"package osdu.partition[\"{data_partition}\"].dataauthz" in result.stdout - assert result.exit_code == 0 - -def test_cli_policy_ls_osdu_partition_osdu_dataauthz(data_partition): - result = runner.invoke(cli, ["ls", "osdu/partition/" + data_partition +"/dataauthz"]) - assert f"package osdu.partition[\"{data_partition}\"].dataauthz" in result.stdout - assert result.exit_code == 0 - -def test_cli_policy_ls_osdu_partition_osdu_dataauthz_rego(data_partition): - result = runner.invoke(cli, ["ls", "osdu/partition/" + data_partition +"/dataauthz.rego"]) - assert f"package osdu.partition[\"{data_partition}\"].dataauthz" in result.stdout - assert result.exit_code == 0 - -def test_cli_policy_ls_osdu_partition_osdu_search2(data_partition): - result = runner.invoke(cli, ["ls", "osdu/partition/" + data_partition +"/search2.rego"]) - if result.exit_code: - pytest.xfail(f"Policy search2 
not available yet") - assert f"package osdu.partition[\"{data_partition}\"].search2" in result.stdout - assert result.exit_code == 0 - -def test_cli_policy_ls_osdu_partition_osdu_search2_quiet(data_partition): - result = runner.invoke(cli, ["ls", "osdu/partition/" + data_partition +"/search2.rego"]) - if result.exit_code: - pytest.xfail(f"Policy search2 not available yet") - result = runner.invoke(cli, ["ls", "osdu/partition/" + data_partition +"/search2.rego", "--quiet"]) - assert result.exit_code == 0 - -def test_cli_policy_diff(data_partition): - result = runner.invoke(cli, ["ls", "osdu/partition/" + data_partition +"/search2.rego", "--quiet"]) - if result.exit_code: - pytest.xfail(f"Policy search2 not available yet") - result = runner.invoke(cli, ["diff", "search", "search2"]) - assert result.exit_code == 0 - -def test_cli_policy_diff_n(data_partition): - result = runner.invoke(cli, ["ls", "osdu/partition/" + data_partition +"/search2.rego", "--quiet"]) - if result.exit_code: - pytest.xfail(f"Policy search2 not available yet") - result = runner.invoke(cli, ["diff", "search", "search2", "-n", "0"]) - assert result.exit_code == 0 - -def test_cli_policy_health(): - result = runner.invoke(cli, ["health"]) - assert result.exit_code == 0 - assert "Healthy" in result.stdout - -def test_cli_info_default(): - result = runner.invoke(cli, ["info"]) - assert result.exit_code == 0, f"got non-zero exit code, {result.stdout}" - assert "version" in result.stdout - -@pytest.mark.parametrize("service", [("policy"), ("dataset"), ("storage"), ("search"), ("entitlement"), ("legal")]) -def test_cli_info(service): - result = runner.invoke(cli, ["info", "-s", service]) - assert result.exit_code == 0, f"got non-zero exit code when checking {service}, {result.stdout}" - assert "version" in result.stdout - -def test_cli_group(): - result = runner.invoke(cli, ["groups"]) - assert result.exit_code == 0 - assert "service.policy." 
in result.stdout - -def test_cli_group_all(): - result = runner.invoke(cli, ["groups", "--all"]) - assert result.exit_code == 0 - -def test_cli_group_domain(): - result = runner.invoke(cli, ["groups", "--domain"]) - assert result.exit_code == 0 - -def test_cli_group_domain_suffix(): - result = runner.invoke(cli, ["groups", "--domain-suffix"]) - assert result.exit_code == 0 - -def test_cli_legaltags(): - result = runner.invoke(cli, ["legal-tags"]) - assert result.exit_code == 0 - -def test_cli_legaltags_raw(): - result = runner.invoke(cli, ["legal-tags", "--raw"]) - assert result.exit_code == 0 - -def test_cli_legaltags_limit1(): - result = runner.invoke(cli, ["legal-tags", "--limit=1"]) - assert result.exit_code == 0 - -def test_cli_legaltags_random(): - result = runner.invoke(cli, ["legal-tags", "--random"]) - assert result.exit_code == 0 - -def test_cli_legaltags_lookup(): - result = runner.invoke(cli, ["legal-tags", "--random"]) - legal_tag = result.stdout.strip() - result = runner.invoke(cli, ["legal-tags", legal_tag]) - assert result.exit_code == 0 - -def test_cli_legaltags_add(): - result = runner.invoke(cli, ["add-legal-tag", "-f", "tests/legaltag_create.json", "-t", "--force"]) - if result.exit_code == 2: - pytest.xfail("legal tag already exists") - assert result.exit_code == 0, f"got non-zero exit status {result.stdout}" - -def test_cli_legaltags_add_random(): - result = runner.invoke(cli, ["add-legal-tag", "-f", "tests/legaltag_create_random.json", "-t", "--force"]) - assert result.exit_code == 0, f"got non-zero exit status {result.stdout}" - -def test_cli_legaltags_add_extensions_random(): - result = runner.invoke(cli, ["add-legal-tag", "-f", "tests/legaltag_create_extension_random.json", "-t", "--force"]) - assert result.exit_code == 0, f"got non-zero exit status {result.stdout}" - -def test_cli_legaltags_update(): - result = runner.invoke(cli, ["add-legal-tag", "-f", "tests/legaltag_update.json", "--update", "-t", "--days=7", "--force"]) - if result.exit_code == 3: - pytest.xfail("legal tag doesn't exist yet") - assert result.exit_code == 0, f"got non-zero exit status {result.stdout}" - -# eval -def test_cli_policy_eval(): - legal_result = runner.invoke(cli, ["legal-tags", "--random"]) - legal_tag = legal_result.stdout.strip() - result = runner.invoke(cli, ["ls", "search2", "--quiet"]) - if result.exit_code: - pytest.xfail(f"Policy search2 not available yet") - result = runner.invoke(cli, ["eval", "--output=simple", "-f", "tests/evaluate_query.json", "-t", "search2", f"--legal-tag={legal_tag}", "--force"]) - assert result.exit_code == 0, f"expected 0 exit status. 
stdout: {result.stdout}" - assert "{}" in result.stdout - -# translate -def test_cli_policy_translate(): - result = runner.invoke(cli, ["ls", "search2", "--quiet"]) - if result.exit_code: - pytest.xfail(f"Policy search2 not available yet") - result = runner.invoke(cli, ["translate", "-f", "tests/translate_data.json", "-t", "search2", "--force"]) - assert result.exit_code == 0 - assert "query" in result.stdout - assert "bool" in result.stdout - assert "should" in result.stdout - assert "filter" in result.stdout - -def test_cli_show_completion(): - result = runner.invoke(cli, ["--show-completion"]) - if result.exit_code == 2: - pytest.skip("Not supported env") - assert result.exit_code == 0 diff --git a/frontend/admincli/tests/test_help.py b/frontend/admincli/tests/test_help.py deleted file mode 100644 index 2701390f0cd53a7be7ead2a5fc4465e7435e6bc2..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/test_help.py +++ /dev/null @@ -1,33 +0,0 @@ -import sys -import os -import pytest -from typer.testing import CliRunner -sys.path.append(os.path.abspath('..')) -from pol import cli - -runner = CliRunner() - -@pytest.mark.skipif(sys.version_info == (3, 9), reason="requires python3.9") - -# Test all the helps -# Help for Policy Commands -def test_cli_help(): - result = runner.invoke(cli, ["--help"]) - assert result.exit_code == 0 - -@pytest.mark.parametrize("test_input", [("ls"), ("add"), ("rm"), ("eval"), ("health"), ("translate")]) -def test_cli_policy_help(test_input): - result = runner.invoke(cli, [test_input, "--help"]) - assert result.exit_code == 0, f"exit status, stdout: {result.stdout}" - -# Help for Policy Developer Utils/Commands -@pytest.mark.parametrize("test_input", [("diff"), ("compile"), ("opa-add"), ("opa-rm"), ("health"), ("translate")]) -def test_cli_devutil_help(test_input): - result = runner.invoke(cli, [test_input, "--help"]) - assert result.exit_code == 0, f"exit status, stdout: {result.stdout}" - -# Help for Utils/Commands -@pytest.mark.parametrize("test_input", [("info"), ("search"), ("storage")]) -def test_cli_util_help(test_input): - result = runner.invoke(cli, [test_input, "--help"]) - assert result.exit_code == 0, f"exit status, stdout: {result.stdout}" \ No newline at end of file diff --git a/frontend/admincli/tests/test_search_storage.py b/frontend/admincli/tests/test_search_storage.py deleted file mode 100644 index 904aebb87205548722b511e66519b9a60e239679..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/test_search_storage.py +++ /dev/null @@ -1,214 +0,0 @@ -import sys -import os -import pytest -from typer.testing import CliRunner - -from tests.conftest import NUM_SEARCH_STORAGE_DATASET_TESTS, RANDOM_SEARCH_LMIT - -sys.path.append(os.path.abspath('..')) -from pol import cli - -runner = CliRunner() - -""" -Test AdminCLI Search and Storage/Dataset commands and workflows -While this absolutely tests AdminCLI this is really geared towards -preship validation testing. 
-""" -@pytest.mark.skipif(sys.version_info == (3, 9), reason="requires python3.9") - -def test_cli_search(): - result = runner.invoke(cli, ["search"]) - assert result.exit_code == 0 - -def test_cli_search_raw(): - result = runner.invoke(cli, ["search", "--raw"]) - assert "createTime" in result.stdout - assert result.exit_code == 0 - -def test_cli_search_limit1(): - result = runner.invoke(cli, ["search", "--limit=1"]) - assert "dataset" or "osdu" or "opendes" in result.stdout - assert result.exit_code == 0 - -def test_cli_search_limit2(): - result = runner.invoke(cli, ["search", "--limit", "2"]) - assert "dataset" or "osdu" or "opendes" in result.stdout - assert result.exit_code == 0 - -def test_cli_search_output_simple(): - result = runner.invoke(cli, ["search", "--output=simple"]) - assert "dataset" or "osdu" or "opendes" in result.stdout - assert result.exit_code == 0 - -def test_cli_search_output_fancy(): - result = runner.invoke(cli, ["search", "--output=fancy"]) - assert "dataset" or "osdu" or "opendes" in result.stdout - assert result.exit_code == 0 - -def test_cli_search_output_excel(): - result = runner.invoke(cli, ["search", "--output=excel"]) - assert result.exit_code == 0 - -def test_cli_search_id_only(): - result = runner.invoke(cli, ["search", "--id-only"]) - assert "dataset" or "osdu" or "opendes" in result.stdout - assert result.exit_code == 0 - -def test_cli_search_random_id(): - result = runner.invoke(cli, ["search", "--random-id"]) - assert "dataset" or "osdu" or "opendes" in result.stdout - assert result.exit_code == 0 - -def test_cli_search_query_well(): - result = runner.invoke(cli, ["search", "--query", "well"]) - # Will exit 2 if there isn't well data loaded - if result.exit_code == 2: - pytest.xfail(f"No well search data") - assert result.exit_code == 0, f"unexpected return status {result.exit_code} {result.stdout}" - -def test_cli_search_kind(): - result = runner.invoke(cli, ["search", "--kind", '*:*:*:*']) - assert "dataset" or "osdu" or "opendes" in result.stdout - assert result.exit_code == 0 - -def test_cli_search_storage_workflow_thefirst(): - # Get an ID from search - result = runner.invoke(cli, ["search", "--limit=1", "--id-only"]) - assert result.exit_code == 0 - id = result.stdout.strip() - check_storage_record(id) - -def test_cli_search_doesnotexist(): - result = runner.invoke(cli, ["search", "doesnotexist"]) - assert "No results found" in result.stdout - assert result.exit_code == 2 - -def test_cli_storage_doesnotexist(): - result = runner.invoke(cli, ["storage", "doesnotexist"]) - assert "Not a valid record id" in result.stdout - assert result.exit_code == 1 - -def test_cli_search_storage_workflow_a_random_outof_100(): - # select a random record out of 100 - result = runner.invoke(cli, ["search", "--limit=100", "--random-id"]) - assert result.exit_code == 0 - id = result.stdout.strip() - check_storage_record(id) - -def test_cli_search_storage_workflow_a_random_outof_1000(): - # select a random record out of 100 - result = runner.invoke(cli, ["search", "--limit=1000", "--random-id"]) - assert result.exit_code == 0 - id = result.stdout.strip() - check_storage_record(id) - -def test_cli_search_storage_workflow_first25(): - # select a random record out of 100 - result = runner.invoke(cli, ["search", "--limit=25", "--id-only"]) - assert result.exit_code == 0 - for id in result.stdout.splitlines(): - print(id) - check_storage_record(id.strip()) - -def check_storage_record(id): - assert "dataset" or "osdu" or "opendes" in id - result = runner.invoke(cli, 
["storage", id]) - if "Record not found" in result.stdout: - pytest.xfail(f"Record {id} not found in storage") - assert "data" in result.stdout - assert "createTime" in result.stdout - assert "createUser" in result.stdout - if "DatasetProperties" not in result.stdout: - pytest.xfail(f"No dataset info for {id} in storage, likely No DMS handler for kindSubType") - assert "DatasetProperties" in result.stdout - assert "FileSourceInfo" in result.stdout - assert result.exit_code == 0 - -def test_cli_search_storage_dataset_workflow_thefirst(): - # Get an ID from search - result = runner.invoke(cli, ["search", "--limit=1", "--id-only"]) - id = result.stdout.strip() - check_storage_dataset_record(id) - -def test_cli_search_storage_dataset_workflow_a_random_outof_100(): - # Get an ID from search - result = runner.invoke(cli, ["search", "--limit=100", "--random-id"]) - id = result.stdout.strip() - check_storage_dataset_record(id) - -def test_cli_search_storage_dataset_workflow_first25(): - # select a random record out of 100 - result = runner.invoke(cli, ["search", "--limit=25", "--id-only"]) - assert result.exit_code == 0 - for id in result.stdout.splitlines(): - print(id) - check_storage_dataset_record(id.strip()) - -def check_storage_dataset_record(id): - assert "dataset" or "osdu" or "opendes" in id - result = runner.invoke(cli, ["storage", id, "--dataset"]) - assert "datasetRegistries" in result.stdout - assert result.exit_code == 0 - -def test_cli_search_storage_dataset_download_workflow_thefirst(): - # Get an ID from search - result = runner.invoke(cli, ["search", "--limit=1", "--id-only"]) - assert result.exit_code == 0 - id = result.stdout.strip() - check_storage_get_record_download(id) - -def get_random_storage_ids(n, limit=100): - idlist = [] - for x in range(n): - idlist.append(get_random_storage_id(limit=limit)) - print(idlist) - return idlist - -def get_random_storage_id(limit=10): - result = runner.invoke(cli, ["search", f"--limit={limit}", "--random-id"]) - assert result.exit_code == 0 - id = result.stdout.strip() - return id - -@pytest.mark.parametrize("id", get_random_storage_ids(NUM_SEARCH_STORAGE_DATASET_TESTS, limit=RANDOM_SEARCH_LMIT)) -def test_cli_random_search_storage_dataset_download(id): - check_storage_record(id) - check_storage_dataset_record(id) - check_storage_get_record_download(id) - -def test_cli_search_storage_dataset_download_workflow_a_random_outof_100(): - id = get_random_storage_id(limit=100) - check_storage_get_record_download(id) - -def test_cli_search_storage_dataset_download_workflow_first15(): - # Get an ID from search - result = runner.invoke(cli, ["search", "--limit=15", "--id-only"]) - assert result.exit_code == 0 - for id in result.stdout.splitlines(): - print(id) - check_storage_get_record_download(id.strip()) - -def check_storage_get_record_download(id): - assert "dataset" or "osdu" or "opendes" in id - result = runner.invoke(cli, ["storage", id, "--get"]) - if "No DMS handler" in result.stdout: - pytest.xfail(f"Download not supported for id {id}") - if "Collection Path is missing in the record metadata" in result.stdout: - pytest.xfail(f"Bad record metadata {id}") - if "FileSourceInfo Name Path missing" in result.stdout: - pytest.xfail(f"Bad record metadata {id}") - if "Dataset Metadata does not contain dataset" in result.stdout: - pytest.xfail(f"Bad record metadata {id}") - assert result.exit_code == 0, f"Expected storage id {id} --get to have details on dataset stdout:{result.stdout}" - result = runner.invoke(cli, ["storage", id, "--download", 
"--raw"]) - assert result.exit_code == 0 - result = runner.invoke(cli, ["storage", id, "--download"]) - assert result.exit_code == 0 - assert "Downloaded" in result.stdout - filename = result.stdout.split(' as ')[1].replace("'",'') - filename = filename.strip() - print(f"stdout: {result.stdout}") - filename = os.path.basename(filename) - assert os.path.exists(filename), f"downloaded file {filename} does not exist {result.stdout}" - os.remove(filename) \ No newline at end of file diff --git a/frontend/admincli/tests/test_translate_api_3.rego b/frontend/admincli/tests/test_translate_api_3.rego deleted file mode 100644 index 8ae219b2bdc7192514d1a5f677dada21b0a54400..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/test_translate_api_3.rego +++ /dev/null @@ -1,11 +0,0 @@ -package osdu.partition["${data_partition}"].test_translate_api_3 - -import data.osdu.instance.entitlements - -entitlementsApiResponse := entitlements.groups -permissionSet := {e | e := entitlementsApiResponse.body.groups[_].email} - -allow { - input.operation == ["view", "create", "update", "delete", "purge"][_] - input.record.acl.owners[_] == permissionSet[_] -} diff --git a/frontend/admincli/tests/test_xrm.py b/frontend/admincli/tests/test_xrm.py deleted file mode 100644 index 6812c9d0a339511276de477005d8d15b4a56446f..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/test_xrm.py +++ /dev/null @@ -1,18 +0,0 @@ -import sys -import os -import pytest -from typer.testing import CliRunner -sys.path.append(os.path.abspath('..')) -from pol import cli - -runner = CliRunner() - -@pytest.mark.skipif(sys.version_info == (3, 9), reason="requires python3.9") - -@pytest.mark.parametrize("policy_name", [("example"), ("example2")]) -def test_cli_policy_rm(policy_name, data_partition): - result = runner.invoke(cli, ["ls", policy_name, "--quiet"]) - if result.exit_code: - pytest.xfail(f"Policy {policy_name} not found in data_partition {data_partition}") - result = runner.invoke(cli, ["rm", policy_name, "--force"]) - assert result.exit_code == 0, f"stdout: {result.stdout}" \ No newline at end of file diff --git a/frontend/admincli/tests/translate_api_3.json b/frontend/admincli/tests/translate_api_3.json deleted file mode 100644 index bd260ed29bbd2d64da12c5a3baf20afae8dfac84..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/translate_api_3.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "query": "data.osdu.partition[\"${data_partition}\"].test_translate_api_3.allow == true", - "input": { - "operation": "view", - "groups": [ - "data.default.owners@${data_partition}.${domain}", - "data.default.viewers@${data_partition}.${domain}" - ] - }, - "unknowns": [ - "input.record" - ] -} diff --git a/frontend/admincli/tests/translate_data.json b/frontend/admincli/tests/translate_data.json deleted file mode 100644 index 4804141db886b94fc039095fbc96203af5c87c83..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/translate_data.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "query": "data.osdu.partition[\"${data_partition}\"].${name}.allow == true", - "input": { - "operation": "view", - "groups": ["AAA", "BBB"] - }, - "unknowns": ["input.record"] -} diff --git a/frontend/admincli/tests/translate_data2.json b/frontend/admincli/tests/translate_data2.json deleted file mode 100644 index 50874725f0fc364f49c54899e44d1bfd43a1ee6b..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/translate_data2.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "query": 
"data.osdu.partition[\"${data_partition}\"].${name}.allow == true", - "input": { - "operation": "view", - "groups": [ - "data.default.owners@osdu.example.com", - "data.default.viewers@osdu.example.com" - ] - }, - "unknowns": [ - "input.record" - ] -} diff --git a/frontend/admincli/tests/translate_data3.json b/frontend/admincli/tests/translate_data3.json deleted file mode 100644 index 2d0d202b0b4fb767cf01d40d104c6f0566edcc85..0000000000000000000000000000000000000000 --- a/frontend/admincli/tests/translate_data3.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "query": "data.osdu.partition[\"${data_partition}\"].${name}.allow == true", - "input": { - "operation": "view", - "groups": [ - ] - }, - "unknowns": [ - "input.record" - ] -}