Commit 10b58c0a authored by David Diederich's avatar David Diederich
Browse files

Merge remote-tracking branch 'origin/master' into release/0.4

Conflicts:
	cloud-providers/osdu-gcp-cloudrun.yml
	cloud-providers/osdu-gcp.yml
	scanners/fossa.yml
parents 49fe3b3b 3da5ffee
Pipeline #18787 passed with stage
in 8 seconds
# PIP_REQUIREMENTS:
# If there is more than one requirements file, or it is named something other than requirements.txt,
# set the variable PIP_REQUIREMENTS to be a space-delimited list of the requirements files. There is no
# way to escape an embedded space within the filename -- you need to rename the file to have no whitespace.
# PIP_CMD, VENV_CMD:
# Normally, you should not need to configure these. If you override the image (presumably to use a different version of python),
# then you may need to alter these commands as well. In particular, old versions of python did not include the 'venv' module, so
# you may wish to set VENV_CMD to 'virtualenv'.
# Base template for python jobs: pins the image, caches the virtualenv,
# and installs PIP_REQUIREMENTS before the job's own script runs.
.python:
  image: python:3.7-buster
  tags: ['osdu-medium']

  # Cache the virtualenv per job image so reruns skip reinstallation.
  cache:
    key: $CI_JOB_IMAGE
    paths:
      - venv

  before_script:
    # Apply overridable defaults (see the PIP_REQUIREMENTS / PIP_CMD / VENV_CMD notes above).
    - test -z "$PIP_REQUIREMENTS" && export PIP_REQUIREMENTS="requirements.txt"; echo "$PIP_REQUIREMENTS"
    - test -z "$PIP_CMD" && export PIP_CMD="pip"; echo "$PIP_CMD"
    - test -z "$VENV_CMD" && export VENV_CMD="python -m venv"; echo "$VENV_CMD"
    # Establish a virtual environment for the packages, which is cached between runs
    - $VENV_CMD venv
    - source venv/bin/activate
    # In case the requirements files don't mention these, we need them for the compile-and-unit-test step
    - $PIP_CMD install --upgrade pip
    - $PIP_CMD install wheel pytest pytest-cov
    # Install all the requirements; PIP_REQUIREMENTS is a space-delimited list,
    # so embedded whitespace in filenames is not supported (documented above).
    - |
      for req in $PIP_REQUIREMENTS; do
        echo "--------------------------------------------------------------------------------"
        echo -e "Installing $req\n"
        $PIP_CMD install -r $req
      done
# If the unit tests are stored in a directory other than ./tests/unit, define it using PYTEST_DIR
# Also, you can configure the test coverage module using PYTEST_COV, it defaults to 'app'
# Runs the unit tests under pytest and publishes JUnit + Cobertura reports.
# Configure the test directory with PYTEST_DIR and the coverage module with PYTEST_COV.
compile-and-unit-test:
  extends:
    - .python
    - .skipForTriggeringMergeRequests
  stage: build
  script:
    # Overridable defaults (see comments above this job).
    - test -z "$PYTEST_DIR" && export PYTEST_DIR="tests/unit"; echo "$PYTEST_DIR"
    - test -z "$PYTEST_COV" && export PYTEST_COV="app"; echo "$PYTEST_COV"
    # This is used by the fossa-analyze job later (and is theoretically available for any other job that might want it)
    - $PIP_CMD freeze > all-requirements.txt
    # This runs the unit tests
    - pytest --junit-xml=unit_tests_report.xml --cov="$PYTEST_COV" --cov-report=xml:coverage.xml --cov-report=term "$PYTEST_DIR"
  # Extracts the "TOTAL ... NN%" line from pytest-cov's terminal report.
  coverage: '/^(?i)(TOTAL).*\s+(\d+\%)$/'
  artifacts:
    when: always
    paths:
      - all-requirements.txt
    reports:
      junit: unit_tests_report.xml
      cobertura: coverage.xml
# Variables shared by the AWS jobs.
# NOTE(review): the merged file contained duplicate ACCESS_KEY_ID /
# SECRET_ACCESS_KEY keys (merge-conflict residue). YAML parsers take the
# last occurrence, so the $AWS_ACCOUNT_* values below preserve the
# effective behavior; the dead $AWS_ACCESS_KEY_ID/$AWS_SECRET_ACCESS_KEY
# pair has been removed.
.aws_variables:
  variables:
    ACCESS_KEY_ID: $AWS_ACCOUNT_ACCESS_KEY_ID
    SECRET_ACCESS_KEY: $AWS_ACCOUNT_SECRET_ACCESS_KEY
    AWS_ACCESS_KEY_ID: $AWS_ACCOUNT_ACCESS_KEY_ID
    AWS_SECRET_ACCESS_KEY: $AWS_ACCOUNT_SECRET_ACCESS_KEY
    INTEGRATION_TEST_DIR: $AWS_TEST_SUBDIR
    SERVICE_NAME: $AWS_SERVICE
    BUILD_DIR: $AWS_BUILD_SUBDIR
......
......@@ -51,6 +51,8 @@
ELASTIC_PASSWORD: $AZURE_ELASTIC_PASSWORD
VENDOR: azure
HOST: https://${AZURE_DNS_NAME}
ACL_OWNERS: data.test1
ACL_VIEWERS: data.test1
# Entitlement Section
ENTITLEMENT_MEMBER_NAME_VALID: $AZURE_PRINCIPAL_ID
AZURE_AD_OTHER_APP_RESOURCE_ID: $AZURE_APP_ID_OTHER
......@@ -80,6 +82,7 @@
SEARCH_HOST: https://${AZURE_DNS_NAME}/api/search/v2/
aad_client_id: $AZURE_APP_ID
STORAGE_HOST: https://${AZURE_DNS_NAME}/api/storage/v2/
SCHEMA_HOST: https://${AZURE_DNS_NAME}/api/schema-service/v1
aad_client_id: $AZURE_APP_ID
DEFAULT_DATA_PARTITION_ID_TENANT1: opendes
DEFAULT_DATA_PARTITION_ID_TENANT2: $AZURE_DEFAULT_DATA_PARTITION_ID_TENANT2 # legal=common search=othertenant2
......@@ -96,12 +99,31 @@
PRIVATE_TENANT1: $MY_TENANT
PRIVATE_TENANT2: tenant2
#File
FILE_SERVICE_HOST: $FILE_URL
FILE_SERVICE_HOST: https://${AZURE_DNS_NAME}/api/file/v2
USER_ID: "osdu-user"
EXIST_FILE_ID: "8900a83f-18c6-4b1d-8f38-309a208779cc"
DATA_PARTITION_ID: "opendes"
TIME_ZONE: "UTC+0"
#WKS
OS_WKS_SCHEMA_KIND: opendes:wks:wellbore:1.0.0
#Unit
client_id: $AZURE_PRINCIPAL_ID
VIRTUAL_SERVICE_HOST_NAME: $AZURE_DNS_NAME
#Crs_Catalog
AZURE_DEPLOY_TENANT: $AZURE_TENANT_ID
AZURE_DEPLOY_CLIENT_ID: $AZURE_PRINCIPAL_ID
AZURE_DEPLOY_CLIENT_SECRET: $AZURE_PRINCIPAL_SECRET
#Register
REGISTER_BASE_URL: https://${AZURE_DNS_NAME}/
SUBSCRIPTION_ID: $AZURE_EVENT_SUBSCRIPTION_ID
SUBSCRIBER_SECRET: $AZURE_EVENT_SUBSCRIBER_SECRET
TEST_TOPIC_NAME: $AZURE_EVENT_TOPIC_NAME
# Notification Section
NOTIFICATION_BASE_URL: https://${AZURE_DNS_NAME}/api/notification/v1/
NOTIFICATION_REGISTER_BASE_URL: https://${AZURE_DNS_NAME}
REGISTER_CUSTOM_PUSH_URL_HMAC: https://${AZURE_DNS_NAME}/api/register/v1/test/challenge/1
TOPIC_ID: $AZURE_EVENT_TOPIC_NAME
HMAC_SECRET: $AZURE_EVENT_SUBSCRIBER_SECRET
# JOBS
# --------------------------------------------------------------------------------
......@@ -169,14 +191,17 @@ azure_deploy:
# Install Service
- helm upgrade -i osdu-gitlab-$CI_PROJECT_NAME chart --set image.repository=${AZURE_REGISTRY}.azurecr.io --set image.branch=$BRANCH --set image.tag=$TAG
# Increasing to 900s as rolling updates are happening and each service is expected to have minimum 2 containers.
- kubectl rollout status deployment.v1.apps/osdu-gitlab-$CI_PROJECT_NAME -n osdu --timeout=900s
- pod=$(kubectl get pod -n osdu|grep $CI_PROJECT_NAME |tail -1 |awk '{print $1}')
- status=$(kubectl wait -n osdu --for=condition=Ready pod/$pod --timeout=200s)
- status=$(kubectl wait -n osdu --for=condition=Ready pod/$pod --timeout=300s)
- if [[ "$status" != *"met"* ]]; then echo "POD didn't start correctly" ; exit 1 ; fi
only:
variables:
- $AZURE == 'true'
azure_test:
image: $CI_REGISTRY/danielscholl/azure-maven/azure-maven:v1.0
stage: integration
needs: ["azure_deploy"]
extends:
......@@ -184,10 +209,16 @@ azure_test:
- .azure_variables
script:
- |
if [ "$AZURE_SERVICE" == "file" ] || [ "$AZURE_SERVICE" == "delivery" ] || [ "$AZURE_SERVICE" == "ingestion-workflow" ]; then
$MAVEN clean verify -f $AZURE_TEST_SUBDIR/pom.xml # This Variable comes from the individual pipeline
if [ "$AZURE_TEST_TYPE" == "python" ]; then
cd $AZURE_TEST_SUBDIR
chmod +x ./run-integration-tests.sh
./run-integration-tests.sh
else
mvn clean verify -f $AZURE_TEST_SUBDIR/pom.xml
if [ "$AZURE_SERVICE" == "file" ] || [ "$AZURE_SERVICE" == "delivery" ] || [ "$AZURE_SERVICE" == "ingestion-workflow" ]; then
$MAVEN clean verify -f $AZURE_TEST_SUBDIR/pom.xml # This Variable comes from the individual pipeline
else
mvn clean verify -f $AZURE_TEST_SUBDIR/pom.xml
fi
fi
only:
variables:
......@@ -195,3 +226,4 @@ azure_test:
except:
variables:
- $AZURE_SKIP_TEST == 'true'
......@@ -50,13 +50,15 @@
OSDU_GCP_STORAGE_QUERY_RECORD_FOR_CONVERSION_HOST: https://os-storage-attcrcktoa-uc.a.run.app/api/storage/v2/query/records:batch
OSDU_GCP_INDEXER_QUEUE_HOST: https://indexer-queue-attcrcktoa-uc.a.run.app/api/indexer/v1/_dps/task-handlers/enqueue
OSDU_GCP_UNIT_HOSTNAME: os-unit-attcrcktoa-uc.a.run.app
OSDU_GCP_CRS_CONVERTER_HOSTNAME: crs-converter-attcrcktoa-uc.a.run.app
OSDU_GCP_CRS_CATALOG_HOSTNAME: os-crs-catalog-attcrcktoa-uc.a.run.app
OSDU_GCP_SCHEMA_URL: https://os-schema-attcrcktoa-uc.a.run.app
REDIS_GROUP_HOST: 10.234.198.27
REDIS_SEARCH_HOST: 10.99.138.107
REDIS_STORAGE_HOST: 10.16.59.203
REDIS_SYNC_HOST: 10.116.62.35
OSDU_GCP_REDIS_SEARCH_PORT: 6379
OSDU_GCP_SPRING_PROFILES_ACTIVE: dev
OSDU_GCP_SPRING_PROFILES_ACTIVE: dev
# variables for unit deployment
OSDU_GCP_UNIT_CATALOG_BUCKET: nice-etching-277309-unit-catalog-bucket
# common variables
......@@ -99,10 +101,10 @@
CLIENT_TENANT: nonexistenttenant
OSDU_TENANT: osdu
REGISTER_BASE_URL: https://os-register-attcrcktoa-uc.a.run.app/
ENVIRONMENT: dev
# variables for notification tests
HMAC_SECRET: $OSDU_GCP_SUBSCRIBER_SECRET
REGISTER_CUSTOM_PUSH_URL_HMAC: https://os-register-attcrcktoa-uc.a.run.app/api/register/v1/test/challenge/hmac-integration-test
ENVIRONMENT: DEV_GKE
TOPIC_ID: records-changed
NOTIFICATION_BASE_URL: $OSDU_GCP_NOTIFICATION_URL
# variables for legal tests
......@@ -118,12 +120,18 @@
ELASTIC_HOST: $OSDU_GCP_ELASTIC_HOST
ELASTIC_PORT: $OSDU_GCP_ELASTIC_PORT
DATA_PARTITION_ID_TENANT1: osdu
# variables for schema tests
VENDOR: $OSDU_GCP_VENDOR
HOST: $OSDU_GCP_SCHEMA_URL
PRIVATE_TENANT1: osdu
PRIVATE_TENANT2: osdu
SHARED_TENANT: osdu
osdu-gcp-containerize:
stage: containerize
needs: ["compile-and-unit-test"]
extends: .osdu-gcp-variables
image: google/cloud-sdk
image: gcr.io/google.com/cloudsdktool/cloud-sdk
cache: {}
script:
- gcloud auth activate-service-account --key-file $OSDU_GCP_DEPLOY_FILE
......@@ -135,7 +143,7 @@ osdu-gcp-containerize:
- $OSDU_GCP == 'true'
osdu-gcp-deploy:
image: google/cloud-sdk
image: gcr.io/google.com/cloudsdktool/cloud-sdk
needs: ["osdu-gcp-containerize"]
stage: deploy
extends: .osdu-gcp-variables
......@@ -144,14 +152,14 @@ osdu-gcp-deploy:
- gcloud auth activate-service-account --key-file $OSDU_GCP_DEPLOY_FILE
- gcloud config set project $OSDU_GCP_PROJECT
- >
gcloud beta run deploy $OSDU_GCP_APPLICATION_NAME
--image gcr.io/$OSDU_GCP_PROJECT/$OSDU_GCP_APPLICATION_NAME/$OSDU_GCP_SERVICE-$OSDU_GCP_VENDOR:$CI_COMMIT_SHORT_SHA
--platform managed
gcloud beta run deploy $OSDU_GCP_APPLICATION_NAME
--image gcr.io/$OSDU_GCP_PROJECT/$OSDU_GCP_APPLICATION_NAME/$OSDU_GCP_SERVICE-$OSDU_GCP_VENDOR:$CI_COMMIT_SHORT_SHA
--platform managed
--region $OSDU_GCP_CLOUDRUN_REGION
--allow-unauthenticated
--service-account $OSDU_GCP_SERVICE_ACCOUNT
--memory 512M
$OSDU_GCP_CLOUD_RUN_PARAMETERS
--allow-unauthenticated
--service-account $OSDU_GCP_SERVICE_ACCOUNT
--memory 512M
$OSDU_GCP_CLOUD_RUN_PARAMETERS
--set-env-vars=$OSDU_GCP_ENV_VARS
- gcloud run services update-traffic $OSDU_GCP_APPLICATION_NAME --platform managed --region $OSDU_GCP_CLOUDRUN_REGION --to-latest
only:
......@@ -213,6 +221,7 @@ osdu-gcp-test-python:
variables:
INTEGRATION_TESTER: $OSDU_GCP_INTEGRATION_TESTER
MY_TENANT: osdu
GOOGLE_AUDIENCES: $GOOGLE_AUDIENCE
stage: integration
image: python:3.7
needs: ["osdu-gcp-deploy"]
......@@ -225,15 +234,15 @@ osdu-gcp-test-python:
variables:
- $OSDU_GCP == 'true' && $OSDU_GCP_INT_TEST_TYPE == 'python'
tag_gcp_image:
osdu-gcp-tagging:
image: google/cloud-sdk
needs:
- job: osdu-gcp-deploy
stage: Integration
needs: ["osdu-gcp-deploy"]
stage: integration
extends: .osdu-gcp-variables
cache: {}
script:
- gcloud auth activate-service-account --key-file $OSDU_GCP_DEPLOY_FILE
- gcloud config set project $OSDU_GCP_PROJECT
- gcloud container images add-tag gcr.io/$OSDU_GCP_PROJECT/$OSDU_GCP_APPLICATION_NAME/$OSDU_GCP_SERVICE-$OSDU_GCP_VENDOR:$CI_COMMIT_SHORT_SHA gcr.io/$OSDU_GCP_PROJECT/$OSDU_GCP_APPLICATION_NAME/$OSDU_GCP_SERVICE-$OSDU_GCP_VENDOR:$CI_COMMIT_TAG
- gcloud container images add-tag gcr.io/$OSDU_GCP_PROJECT/$OSDU_GCP_APPLICATION_NAME/$OSDU_GCP_SERVICE-$OSDU_GCP_VENDOR:$CI_COMMIT_SHORT_SHA gcr.io/$OSDU_GCP_PROJECT/$OSDU_GCP_APPLICATION_NAME/$OSDU_GCP_SERVICE-$OSDU_GCP_VENDOR:$CI_COMMIT_TAG
only:
- tags
- tags
\ No newline at end of file
# Service URLs and integration-test variables shared by the osdu-gcp jobs.
.OSDU_GCP_variables:
  variables:
    # service's URLs
    OSDU_GCP_ENTITLEMENTS_URL: https://os-entitlements-gcp-attcrcktoa-uc.a.run.app/entitlements/v1/
    OSDU_GCP_STORAGE_URL: https://os-storage-dot-nice-etching-277309.uc.r.appspot.com/api/storage/v2/
    OSDU_GCP_RECORDS_ROOT_URL: https://os-storage-dot-nice-etching-277309.uc.r.appspot.com/api/storage/v2/
    OSDU_GCP_LEGAL_HOST_URL: https://os-legal-dot-nice-etching-277309.uc.r.appspot.com/api/legal/v1/
    OSDU_GCP_AIRFLOW_URL: https://c410714dc037b0e02p-tp.appspot.com/
    OSDU_GCP_DELIVERY_SERVICE_URL: https://os-delivery-attcrcktoa-uc.a.run.app/api/delivery/v2/
    OSDU_GCP_INGEST_HOST: https://os-ingest-attcrcktoa-uc.a.run.app/
    OSDU_GCP_WORKFLOW_SERVICE_URL: https://os-workflow-attcrcktoa-uc.a.run.app/
    OSDU_GCP_SEARCH_HOST: https://os-search-dot-nice-etching-277309.uc.r.appspot.com/api/search/v2/
    # Fixed: this URL previously contained a double slash ("…com//api/…")
    OSDU_GCP_SEARCH_QUERY_URL: https://os-search-dot-nice-etching-277309.uc.r.appspot.com/api/search/v2/query/
    OSDU_GCP_INDEXER_HOST: https://os-indexer-dot-nice-etching-277309.uc.r.appspot.com/api/indexer/v2/
    OSDU_GCP_FILE_URL: https://os-file-attcrcktoa-uc.a.run.app/
    REDIS_GROUP_HOST: 10.234.198.27
    REDIS_SEARCH_HOST: 10.99.138.107
    REDIS_STORAGE_HOST: 10.16.59.203
    REDIS_SYNC_HOST: 10.116.62.35
    # variables for integration tests
    DEPLOY_ENV: empty
    DOMAIN: osdu-gcp.go3-nrg.projects.epam.com
    ENTITLEMENTS_DOMAIN: osdu-gcp.go3-nrg.projects.epam.com
    TENANT_NAME: osdu
    MY_TENANT_PROJECT: osdu
    MY_TENANT: osdu
    DATA_GROUP: osdu
    DEFAULT_DATA_PARTITION_ID_TENANT1: osdu
    DEFAULT_DATA_PARTITION_ID_TENANT2: osdudeploy
    INTEGRATION_TEST_AUDIENCE: 689762842995-pv217jo3k8j803kk6gqf52qb5amos3a9.apps.googleusercontent.com
    INT_TEST_VENDOR: gcp
    PROJECT: nice-etching-277309
    GCLOUD_PROJECT: nice-etching-277309
    LEGAL_TAG: osdu-demo-legaltag
    OTHER_RELEVANT_DATA_COUNTRIES: US
    STORAGE_HOST: https://os-storage-dot-nice-etching-277309.uc.r.appspot.com/api/storage/v2/
# Base template for the osdu-gcp jobs: runs only when all the required
# OSDU_GCP_* configuration variables are defined, and authenticates gcloud
# with the deploy service-account key before each job's script.
.osdu-gcp:
  tags: ["osdu-medium"]
  image: google/cloud-sdk
  only:
    variables:
      - $OSDU_GCP_BUILD_SUBDIR && $OSDU_GCP_INT_TEST_SUBDIR && $OSDU_GCP_APPLICATION_NAME && $OSDU_GCP_ENVIRONMENT && $OSDU_GCP_PROJECT && $OSDU_GCP_DEPLOY_FILE
  variables:
    # Staging directory assembled by osdu-gcp-deploy (app.yaml + jar).
    OSDU_GCP_DEPLOY_DIR: OSDU_GCP_deploy_dir
  before_script:
    - mkdir -p "$OSDU_GCP_DEPLOY_DIR"
    - gcloud auth activate-service-account --key-file="$OSDU_GCP_DEPLOY_FILE"
# Deploys the built spring-boot jar to Google App Engine.
osdu-gcp-deploy:
  extends: .osdu-gcp
  stage: deploy
  needs: ["compile-and-unit-test"]
  script:
    # Stage the app.yaml (OSDU_GCP_APP_YAML is a CI "file" variable path) and the jar.
    - cat $OSDU_GCP_APP_YAML > $OSDU_GCP_DEPLOY_DIR/app.yaml
    - cp -v "${OSDU_GCP_BUILD_SUBDIR}"/target/*-spring-boot.jar "$OSDU_GCP_DEPLOY_DIR"
    - cd "$OSDU_GCP_DEPLOY_DIR"
    # Echo the effective arguments for debuggability before deploying.
    - echo --version="$CI_COMMIT_SHORT_SHA" --project="$OSDU_GCP_PROJECT" app.yaml
    - gcloud auth list
    - gcloud app deploy --quiet --version="$CI_COMMIT_SHORT_SHA" --project="$OSDU_GCP_PROJECT" app.yaml
  only:
    variables:
      - $OSDU_GCP == 'true'
# Runs the maven integration tests against the deployed osdu-gcp services
# and keeps the test log as an artifact for a week.
osdu-gcp-test:
  extends:
    - .osdu-gcp
    - .maven
    - .OSDU_GCP_variables
  stage: integration
  needs: ["osdu-gcp-deploy"]
  variables:
    # integration test variables for search service
    ENTITLEMENTS_HOST: $OSDU_GCP_ENTITLEMENTS_URL
    ELASTIC_PASSWORD: $OSDU_GCP_ELASTIC_PASSWORD
    ELASTIC_USER_NAME: $OSDU_GCP_ELASTIC_USER
    ELASTIC_HOST: $OSDU_GCP_ELASTIC_HOST
    ELASTIC_PORT: $OSDU_GCP_ELASTIC_PORT
    SEARCH_ON_BEHALF_INTEGRATION_TESTER: $OSDU_GCP_INTEGRATION_TESTER
    INDEXER_HOST: $OSDU_GCP_INDEXER_HOST
    SEARCH_INTEGRATION_TESTER: $OSDU_GCP_INTEGRATION_TESTER
    SEARCH_HOST: $OSDU_GCP_SEARCH_HOST
    # integration test variables for storage and legal services
    # (this comment previously duplicated the "search service" header above)
    PUBSUB_TOKEN: $OSDU_GCP_PUBSUB_TOKEN
    STORAGE_URL: $OSDU_GCP_STORAGE_URL
    AUTHORIZE_API: $OSDU_GCP_ENTITLEMENTS_URL
    HOST_URL: $OSDU_GCP_LEGAL_HOST_URL
    LEGAL_URL: $OSDU_GCP_LEGAL_HOST_URL
    ENTITLEMENT_URL: $OSDU_GCP_ENTITLEMENT_URL
    INTEGRATION_TESTER: $OSDU_GCP_INTEGRATION_TESTER
    NO_DATA_ACCESS_TESTER: $OSDU_GCP_NO_DATA_ACCESS_TESTER
  script:
    - export GCP_DEPLOY_FILE=$(echo $OSDU_GCP_DEPLOY_FILE)
    - cd $OSDU_GCP_INT_TEST_SUBDIR
    # start tests; tee so the log is both streamed and kept as an artifact
    - $MAVEN test 2>&1 | tee test-results.log
  only:
    variables:
      - $OSDU_GCP == 'true'
  artifacts:
    when: always
    paths:
      - $OSDU_GCP_INT_TEST_SUBDIR/test-results.log
    expire_in: 1 week
# This checks to see if the NOTICE file generated based on the current code
# matches the one that is committed in the repository. Because FOSSA can
# occasionally order the lines differently between executions, we use a python
# program to parse the file into sections and sort each section. Then we compare
# and expect the same number of sections, names of sections, and contents
# (without considering order).
fossa-check-notice:
  image: $CI_REGISTRY/divido/fossa-cli/fossa-cli:v4.1
  stage: attribution
  needs: ['fossa-analyze']
  tags: ['osdu-small']
  only:
    variables:
      - $FOSSA_API_KEY
  artifacts:
    # Keep the freshly generated NOTICE so a failing job's output can be
    # downloaded and committed directly.
    when: on_failure
    paths:
      - public
  script:
    # fossa-check-for-licensing-issues needs a CI_COMMIT_BRANCH defined to know how to parse the FOSSA API results
    # When building tags, this isn't defined by GitLab. In that case, we use the tag name instead. If that's not defined
    # then things will fail and we'll have to make this smarter
    - test -z "$CI_COMMIT_BRANCH" && export CI_COMMIT_BRANCH="$CI_COMMIT_TAG"
    - mkdir -p public
    - fossa report licenses --project "${CI_PROJECT_TITLE}" --branch "${CI_COMMIT_BRANCH}" > public/NOTICE
    - mkdir committed-NOTICE generated-NOTICE
    # Split both NOTICE files into one file per license section, with the
    # components of each section sorted, so ordering differences between
    # FOSSA runs do not produce spurious diffs.
    - |
      python <<EOF
      import re
      import os

      regexSeparator = re.compile(r"^=+$")
      regexSubtitle = re.compile(r"^The following software have components provided under the terms of this license:$")

      # ----------------------------------------
      def writeComponents(sectionFile, components):
          # Write the section's components in sorted order and close the file.
          for component in sorted(components):
              sectionFile.write(component)
          sectionFile.close()

      # ----------------------------------------
      def matchSectionHeader(lines, idx):
          # A section header is: separator, title, separator, subtitle, blank
          # line, with idx pointing at the blank line. Returns the title or None.
          if (idx <= 3): return None
          sep1 = regexSeparator.match(lines[idx - 4].rstrip())
          sep2 = regexSeparator.match(lines[idx - 2].rstrip())
          sub = regexSubtitle.match(lines[idx - 1].rstrip())
          blank = (lines[idx].rstrip() == "")
          if sep1 and sep2 and sub and blank:
              return lines[idx - 3].rstrip()
          return None

      # ----------------------------------------
      def split(licenseFile, outputDir):
          # Split licenseFile into one file per section under outputDir,
          # with each section's component lines sorted.
          lines = []
          with open(licenseFile, 'r') as f:
              lines = f.readlines()
          # --------------------
          sectionFile = None
          components = []
          for idx in range(0, len(lines)):
              line = lines[idx]
              if (line.startswith("-")):
                  components.append(line)
              newHeader = matchSectionHeader(lines, idx)
              if newHeader:
                  if sectionFile is not None: writeComponents(sectionFile, components)
                  sectionFile = open(os.path.join(outputDir, newHeader), 'w')
                  components = []
                  # Write the header to the new file
                  for writeIdx in range(idx - 4, idx + 1):
                      sectionFile.write(lines[writeIdx])
          # Bug fix: flush the final section. Previously the loop only wrote a
          # section's components when the NEXT header was found, so the last
          # section was emitted with no components and its file was left open.
          if sectionFile is not None: writeComponents(sectionFile, components)

      split("NOTICE", "committed-NOTICE")
      split("public/NOTICE", "generated-NOTICE")
      EOF
    - |
      if ! diff -r committed-NOTICE generated-NOTICE; then
        echo "================================================================================"
        echo "There are differences between the NOTICE file in the repository and the one generated just now by this command"
        echo "The most likely cause of this is changes to the project dependencies -- the NOTICE file is providing attribution"
        echo "to the projects being used, so it must be updated whenever that changes. This includes projects used indirectly as"
        echo "dependencies of dependencies."
        echo "----------------------------------------"
        echo "You can download the NOTICE file from this job's artifacts and directly commit it to the repository to resolve"
        echo "this. Before doing so, review the differences to make sure that they make sense given the changes that you have"
        echo "made. If they do not, reach out to a maintainer to help diagnose the issue."
        exit 1
      fi
# --------------------------------------------------------------------------------
# Downloads the FOSSA "Live Project Report" for this revision and publishes
# it as an HTML artifact under public/.
fossa-report:
  image: $CI_REGISTRY/divido/fossa-cli/fossa-cli:v4.1
  stage: attribution
  needs: ['fossa-analyze']
  tags: ['osdu-small']
  only:
    variables:
      # To get the FOSSA_ACCESS, log into the project's main page and generate a "Live Project Report" URL from the Summary tab
      # The access token will be shown in the URL (the last path component)
      - $FOSSA_API_KEY && $FOSSA_ACCESS
  variables:
    # Quoted: the URL contains '%' and '&' characters.
    FOSSA_REPORT_URL: "https://app.fossa.com/api/revisions/custom%2B12773%2F${CI_PROJECT_NAME}%24${CI_COMMIT_SHA}/attribution?access=${FOSSA_ACCESS}&includeProjectLicense=true&includeLicenseScan=true&includeDirectDependencies=true&includeDeepDependencies=true&includeLicenseHeaders=&includeLicenseList=true&format=HTML"
  artifacts:
    paths:
      - public
  script:
    - mkdir -p public
    - wget $FOSSA_REPORT_URL -O public/fossa-notice.html
include:
- local: 'scanners/fossa-global.yml'
# --------------------------------------------------------------------------------
# Maven variant of the FOSSA scan: builds the project so FOSSA can resolve
# the full dependency graph, then uploads the analysis.
fossa-analyze:
  extends: .maven
  image: $CI_REGISTRY/divido/fossa-cli-utilities/fossa-cli-utilities:v3.1
  stage: scan
  needs: ['compile-and-unit-test']
  tags: ['osdu-medium']
  only:
    variables:
      - $FOSSA_API_KEY
  script:
    # fossa-check-for-licensing-issues needs a CI_COMMIT_BRANCH defined to know how to parse the FOSSA API results
    # When building tags, this isn't defined by GitLab. In that case, we use the tag name instead. If that's not defined
    # then things will fail and we'll have to make this smarter
    - test -z "$CI_COMMIT_BRANCH" && export CI_COMMIT_BRANCH="$CI_COMMIT_TAG"
    - $MAVEN install -DskipTests=true
    - cp "$CI_PROJECT_DIR/.mvn/community-maven.settings.xml" ~/.m2/settings.xml
    - fossa analyze --project "${CI_PROJECT_TITLE}" --project-url "${CI_PROJECT_URL}" --branch "${CI_COMMIT_BRANCH}"
    - fossa-check-for-licensing-issues
include:
- local: 'scanners/fossa-global.yml'
# --------------------------------------------------------------------------------
# Python variant of the FOSSA scan: consumes the all-requirements.txt
# artifact produced by compile-and-unit-test (a 'pip freeze' of the full
# dependency closure) and uploads the analysis.
fossa-analyze:
  image: $CI_REGISTRY/divido/fossa-cli-utilities/fossa-cli-utilities:v4.0
  stage: scan
  needs: ['compile-and-unit-test']
  tags: ['osdu-medium']
  only:
    variables:
      - $FOSSA_API_KEY
  script:
    # fossa-check-for-licensing-issues needs a CI_COMMIT_BRANCH defined to know how to parse the FOSSA API results
    # When building tags, this isn't defined by GitLab. In that case, we use the tag name instead. If that's not defined
    # then things will fail and we'll have to make this smarter
    - test -z "$CI_COMMIT_BRANCH" && export CI_COMMIT_BRANCH="$CI_COMMIT_TAG"
    - |
      if [ -e all-requirements.txt ]; then
        # FOSSA needs all requirements in a single file, which has to be named "requirements.txt"
        mv all-requirements.txt requirements.txt
      else
        echo "I was expecting a file named 'all-requirements.txt' to have been generated by compile-and-unit-test"
        echo "However, that file doesn't seem to exist"
        echo "----------------------------------------"
        echo "That file should have been the output of a 'pip freeze', so that I knew what the full list of deep"
        echo "dependencies were. I can't reasonably generate that in this job, because I don't know what python image"
        echo "is appropriate. If this structure has been changed in the build/python.yml, you may need to update this"
        echo "logic as well (in scanners/fossa-python.yml)"
        exit 1
      fi
    - fossa analyze --project "${CI_PROJECT_TITLE}" --project-url "${CI_PROJECT_URL}" --branch "${CI_COMMIT_BRANCH}"
    - fossa-check-for-licensing-issues
include:
- local: 'scanners/fossa-maven.yml'
# Maven FOSSA scan (this copy originates from a different source file than
# the v3.1 job above -- the scrape concatenated several scanners/*.yml files).
fossa-analyze:
  extends: .maven
  image: $CI_REGISTRY/divido/fossa-cli-utilities/fossa-cli-utilities:v3.1
  stage: scan
  needs: ['compile-and-unit-test']
  tags: ['osdu-medium']
  only:
    variables:
      - $FOSSA_API_KEY
  script:
    # fossa-check-for-licensing-issues needs a CI_COMMIT_BRANCH defined to know how to parse the FOSSA API results
    # When building tags, this isn't defined by GitLab. In that case, we use the tag name instead. If that's not defined
    # then things will fail and we'll have to make this smarter
    - test -z "$CI_COMMIT_BRANCH" && export CI_COMMIT_BRANCH="$CI_COMMIT_TAG"
    - $MAVEN install -DskipTests=true
    - cp "$CI_PROJECT_DIR/.mvn/community-maven.settings.xml" ~/.m2/settings.xml
    - fossa analyze --project "${CI_PROJECT_TITLE}" --project-url "${CI_PROJECT_URL}" --branch "${CI_COMMIT_BRANCH}"
    - fossa-check-for-licensing-issues
# --------------------------------------------------------------------------------
# This checks to see if the NOTICE file generated based on the current code
# matches the one that is committed in the repository. Because FOSSA can
# occasionally order the lines differently between executions, we use a python
# program to parse the file into sections and sort each section. Then we compare
# and expect the same number of sections, names of sections, and contents
# (without considering order).