# PIP_REQUIREMENTS:
#   If there is more than one requirements file, or it is named something other than requirements.txt,
#   set the variable PIP_REQUIREMENTS to a space delimited list of the requirements files. There is no
#   way to escape an embedded space within a filename -- you need to rename the file to have no whitespace.
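#
#   A minimal example of overriding this in a project's .gitlab-ci.yml (the filenames are illustrative):
#     variables:
#       PIP_REQUIREMENTS: "requirements.txt providers/requirements_aws.txt"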

# PIP_REQUIREMENTS_TOOLING_ONLY:
#   These requirements files contain libraries that are used during compilation / testing, but are not
#   linked into the project. They are excluded from the all-requirements.txt output, which omits them
#   from the FOSSA license analysis steps.
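#
#   For example, a project might keep its test tooling pins in a separate file (the filename is illustrative):
#     variables:
#       PIP_REQUIREMENTS_TOOLING_ONLY: "requirements_dev.txt"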

# PIP_EXTRA_INDEX_URLS:
#   If specified, this space delimited list of URLs is used as extra index urls for PIP. This affects
#   the pip install step, and is also echoed to the all-requirements.txt file for future use.
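#
#   Example (the URL is a placeholder, not a real index):
#     variables:
#       PIP_EXTRA_INDEX_URLS: "https://example.com/pypi/simple"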

# PIP_CMD, VENV_CMD:
#   Normally, you should not need to configure these. If you override the image (presumably to use a different version of python),
#   then you may need to alter these commands as well. In particular, old versions of python did not include the 'venv' module, so
#   you may wish to set VENV_CMD to 'virtualenv'.
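#
#   For example, on an image whose python predates the 'venv' module (the values are illustrative):
#     variables:
#       VENV_CMD: "virtualenv"
#       PIP_CMD: "pip3"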

# REQ_COLLECT:
#   This collects requirements from all the specified PIP_REQUIREMENTS files (but not PIP_REQUIREMENTS_TOOLING_ONLY)
#   into a single file, named 'all-requirements.txt'. This is used by later stages that want to review
#   or scan dependencies, but only operate on a single requirements file. By default, this uses the
#   default-requirements-collection function defined below, but it can be overridden to be any other
#   set of commands, including a custom script stored in the project.
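#
#   Example override pointing at a project-local script (the path is hypothetical):
#     variables:
#       REQ_COLLECT: "./ci/collect-requirements.sh"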
.python:
  tags: ['osdu-medium']

  before_script:
    - |
      function default-requirements-collection() {
        # Normalize any configured extra index urls into one "--extra-index-url <url>" pair per line
        echo $PIP_EXTRA_INDEXES | xargs -rn 2 > /tmp/pip-extra-index-urls.txt
        # Also pick up any --extra-index-url lines embedded in the requirements files themselves
        grep -h -- --extra-index-url $PIP_REQUIREMENTS >> /tmp/pip-extra-index-urls.txt || true

        # Emit the de-duplicated index urls first, then the fully pinned package list
        sort -u /tmp/pip-extra-index-urls.txt > all-requirements.txt
        $PIP_CMD freeze >> all-requirements.txt
      }
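    # The resulting all-requirements.txt begins with the de-duplicated --extra-index-url lines,
    # followed by the 'pip freeze' output, e.g. (contents are illustrative):
    #   --extra-index-url https://example.com/pypi/simple
    #   requests==2.31.0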

    - test -z "$PIP_REQUIREMENTS" && export PIP_REQUIREMENTS="requirements.txt"; echo "$PIP_REQUIREMENTS"
    - echo "$PIP_REQUIREMENTS_TOOLING_ONLY"
    - test -z "$REQ_COLLECT" && export REQ_COLLECT="default-requirements-collection"; echo $REQ_COLLECT
    - test -z "$PIP_CMD" && export PIP_CMD="pip"; echo "$PIP_CMD"
    - test -z "$VENV_CMD" && export VENV_CMD="python -m venv"; echo "$VENV_CMD"
    # Add extra index url arguments
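    # e.g. PIP_EXTRA_INDEX_URLS="https://a.example/simple https://b.example/simple" (placeholder URLs)
    # becomes "--extra-index-url https://a.example/simple --extra-index-url https://b.example/simple"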
    - export PIP_EXTRA_INDEXES=$(echo $PIP_EXTRA_INDEX_URLS | xargs -rn 1 echo --extra-index-url); echo $PIP_EXTRA_INDEXES
    # Establish a virtual environment for the packages, which is cached between runs
    - $VENV_CMD venv
    - source venv/bin/activate

    # In case the requirements files don't mention these, we need them for the compile-and-unit-test step
    - $PIP_CMD install $PIP_EXTRA_INDEXES --upgrade pip
    # Install all the normal library dependency requirements
    - |
      for req in $PIP_REQUIREMENTS; do
        echo "--------------------------------------------------------------------------------"
        echo -e "Installing $req\n"

        outFile=$(dirname $req)/python-pip-$(basename $req)-output.txt
        $PIP_CMD install $PIP_EXTRA_INDEXES -r $req | tee $outFile
      done
    # Build the all-requirements.txt file for future pipeline stages
    - $REQ_COLLECT

    # Install extra tooling that is used in the environment, but not linked to
    # This will not be part of the FOSSA analysis scan / attribution list
    - $PIP_CMD install $PIP_EXTRA_INDEXES wheel pytest pytest-cov
    - |
      for req in $PIP_REQUIREMENTS_TOOLING_ONLY; do
        echo "--------------------------------------------------------------------------------"
        echo -e "Installing $req\n"

        outFile=$(dirname $req)/python-pip-$(basename $req)-output.txt
        $PIP_CMD install $PIP_EXTRA_INDEXES -r $req | tee $outFile
      done

# If the unit tests are stored in a directory other than ./tests/unit, define it using PYTEST_DIR.
# Also, you can configure the test coverage module using PYTEST_COV; it defaults to 'app'.
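# For example (the values are illustrative):
#   variables:
#     PYTEST_DIR: "tests"
#     PYTEST_COV: "osdu_api"
#     PYTEST_OPTIONS: "-m 'not slow'"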
compile-and-unit-test:
  extends:
    - .python
    - .skipForTriggeringMergeRequests

  stage: build
  script:
    - test -z "$PYTEST_DIR" && export PYTEST_DIR="tests/unit"; echo "$PYTEST_DIR"
    - test -z "$PYTEST_COV" && export PYTEST_COV="app"; echo "$PYTEST_COV"
    - echo "$PYTEST_OPTIONS"
    - time pytest --junit-xml=unit_tests_report.xml $PYTEST_OPTIONS --cov="$PYTEST_COV" --cov-report=xml:coverage.xml --cov-report=term "$PYTEST_DIR"
    # Finally, collect version information. Only save it if setup.py can be run successfully.

    # This function runs setup.py with the requested flag (--version or --name). If it succeeds, the
    # output is kept in the named file; if it fails, the output is printed to the log and the file is
    # removed, so only one of the two happens.
    - |
      function run-python-setup() {
        if ! python setup.py $1 >& $2; then
          echo -e "\e[31m**** Could not run 'python setup.py $1' in $(pwd). Will not save as artifact, details below *****\e[0m";
          cat $2
          rm $2
        fi
      }

    # This checks every directory for the presence of a setup.py file, ignoring any specified in the
    # IGNORE_PYTHON_SETUP variable and known non-source directories (venv / .git). Once found it
    # requests the version and package name information and stores them as artifacts
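    # e.g. IGNORE_PYTHON_SETUP="./docs ./examples" (paths are illustrative)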
    - echo $IGNORE_PYTHON_SETUP | xargs -r readlink -f > /tmp/ignore-python-setup-dirs.txt; cat /tmp/ignore-python-setup-dirs.txt
    - |
      find $(pwd) \( -name venv -o -name .git \) -prune , -type d -print0 | while read -r -d $'\0' subdir; do
        if [ -e "$subdir/setup.py" ] && ! grep -qxF "$subdir" /tmp/ignore-python-setup-dirs.txt; then
          cd "$subdir"

          run-python-setup --version python-version-output.txt
          run-python-setup --name python-package-name-output.txt
        fi
      done

  coverage: '/^(?i)(TOTAL).*\s+(\d+\%)$/'
  artifacts:
    when: always
    paths:
      - ./**/python-*-output.txt
    reports:
      junit: unit_tests_report.xml
      coverage_report:
        coverage_format: cobertura
        path: coverage.xml

analyze-python-dependencies:
  extends:
    - .skipForTriggeringMergeRequests

  image: $CI_REGISTRY/osdu/platform/deployment-and-operations/release-scripts/tasklemon:v2.4
  tags: ['osdu-small']
  needs: ['compile-and-unit-test']
  stage: scan

  artifacts:
    paths:
      - python-dependencies.json

  script:
    - analyze-python.js
    - format-dependencies.js

sonarqube-scan-python:
  stage: scan
  needs: ["compile-and-unit-test"]
  dependencies:
    - compile-and-unit-test
  rules:
    - if: $SONAR_CLOUD_TOKEN
  variables:
    SONAR_USER_HOME: "${CI_PROJECT_DIR}/.sonar"  # Defines the location of the analysis task cache
    GIT_DEPTH: "0"  # Tells git to fetch all the branches of the project, required by the analysis task
  image:
    name: sonarsource/sonar-scanner-cli:latest
    entrypoint: [""]
  cache:
    key: "${CI_JOB_NAME}"
    paths:
      - .sonar/cache
  script:
    - |
      SONAR_SCANNER_OPTS="
        -Dsonar.host.url=${SONAR_CLOUD_URL}
        -Dsonar.token=${SONAR_CLOUD_TOKEN}
        -Dsonar.projectKey=org.opengroup.osdu:$CI_PROJECT_NAME
        -Dsonar.organization=osdu
        -Dsonar.python.coverage.reportPaths=coverage.xml"

      if [ -n "$CI_MERGE_REQUEST_IID" ]; then
        SONAR_SCANNER_OPTS="$SONAR_SCANNER_OPTS
          -Dsonar.pullrequest.gitlab.api_url=https://community.opengroup.org/api/v4
          -Dsonar.pullrequest.gitlab.project_id=$CI_PROJECT_ID
          -Dsonar.pullrequest.key=$CI_MERGE_REQUEST_IID
          -Dsonar.pullrequest.branch=$CI_COMMIT_REF_NAME
          -Dsonar.pullrequest.base=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME"
      fi
    - sonar-scanner $SONAR_SCANNER_OPTS

  allow_failure: true