# CircleCI configuration (config.yml) for https://github.com/GPflow/GPflow
version: 2.1

parameters:
  min_py_ver:
    type: string
    default: "3.7"
  max_py_ver:
    type: string
    default: "3.10"
  min_tf_ver:
    type: string
    default: "2.4.0"
  max_tf_ver:
    type: string
    default: "2.10.0"
  min_tfp_ver:
    type: string
    default: "0.12.0"
  max_tfp_ver:
    type: string
    default: "0.18.0"
  min_venv_dir:
    type: string
    default: min_venv
  max_venv_dir:
    type: string
    default: max_venv

  tmp_results_dir:
    type: string
    default: "/tmp/results"
  tmp_results_dir_file:
    type: string
    default: "/tmp/results_path"
  checkout_dir:
    type: string
    default: "/tmp/checkout"
  results_dir:
    type: string
    default: "/tmp/checkout/ci_benchmark_results"
  plots_dir:
    type: string
    default: "/tmp/checkout/docs/benchmark_plots"
  gh_pages_branch:
    type: string
    default: "gh-pages"


commands:
  run_tests:
    parameters:
      venv_dir:
        type: string
      pytest_filter:
        type: string
    steps:
      - checkout
      - attach_workspace:
          at: /tmp/
      - run:
          name: Run tests
          command: "
            source /tmp/<<parameters.venv_dir>>/bin/activate \n
            pytest
              -v
              -W ignore::UserWarning
              --durations=10
              -m \"<<parameters.pytest_filter>>\"
              --cov=./gpflow
              --cov-report=xml
              ./tests
          "
      - run:
          name: Upload coverage report
          command: |
            curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --keyring trustedkeys.gpg --import # One-time step
            curl -Os "https://uploader.codecov.io/latest/linux/codecov"
            curl -Os "https://uploader.codecov.io/latest/linux/codecov.SHA256SUM"
            curl -Os "https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig"
            gpgv codecov.SHA256SUM.sig codecov.SHA256SUM
            shasum -a 256 -c codecov.SHA256SUM || sha256sum -c codecov.SHA256SUM
            unset NODE_OPTIONS  # See https://github.com/codecov/uploader/issues/475
            chmod +x codecov
            ./codecov -v -t "${CODECOV_TOKEN}" -n "${CIRCLE_BUILD_NUM}"
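    # For local debugging, the test step above can be reproduced by hand; a rough sketch, assuming
    # a local virtualenv (the venv path and the marker filter are placeholders):
    #   source .venv/bin/activate
    #   pytest -v -W ignore::UserWarning --durations=10 -m "not notebooks and not docs" \
    #     --cov=./gpflow --cov-report=xml ./tests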

jobs:
  install_gpflow:
    parameters:
      py_ver:
        type: string
      tf_ver:
        type: string
      tfp_ver:
        type: string
      venv_dir:
        type: string
    docker:
      - image: cimg/python:<<parameters.py_ver>>
    steps:
      - checkout
      - run:
          name: Setup virtual environment
          command: |
            virtualenv -p python<<parameters.py_ver>> /tmp/<<parameters.venv_dir>>
            source /tmp/<<parameters.venv_dir>>/bin/activate
            pip install --progress-bar=off -U pip
      - run:
          name: Install GPflow
          command: "
            source /tmp/<<parameters.venv_dir>>/bin/activate \n
            # Everything is installed in one pip command, to allow for better dependency version
            # resolution. Explicit tensorflow and tensorflow-probability versions, to ensure
            # consistency between them. \n
            pip install --progress-bar=off
              -e .
              -r tests_requirements.txt
              tensorflow==<<parameters.tf_ver>>
              tensorflow-probability==<<parameters.tfp_ver>> \n
          "
      - run:
          name: Log dependencies
          command: |
            # Log installed dependencies, to make debugging easier.
            source /tmp/<<parameters.venv_dir>>/bin/activate
            python --version
            pip freeze
      - persist_to_workspace:
          root: /tmp/
          paths:
            - <<parameters.venv_dir>>
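    # With the `max` parameters above, the install command expands to (illustrative):
    #   pip install --progress-bar=off -e . -r tests_requirements.txt \
    #     tensorflow==2.10.0 tensorflow-probability==0.18.0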

  verify-install:
    parameters:
      py_ver:
        type: string
      venv_dir:
        type: string
    docker:
      - image: cimg/python:<<parameters.py_ver>>
    steps:
      - checkout
      - attach_workspace:
          at: /tmp/
      - run:
          name: Check installed dependencies are compatible
          command: |
            source /tmp/<<parameters.venv_dir>>/bin/activate
            pip check -vvv
            python -c "import gpflow"

  type-check:
    parameters:
      py_ver:
        type: string
      venv_dir:
        type: string
    docker:
      - image: cimg/python:<<parameters.py_ver>>
    steps:
      - checkout
      - attach_workspace:
          at: /tmp/
      - run:
          name: Run type check
          command: |
            source /tmp/<<parameters.venv_dir>>/bin/activate
            # Test without flags, as that is what our users are likely to do:
            mypy gpflow tests benchmark doc/*.py setup.py
            # Testing with flags improves coverage:
            mypy $(python -m gpflow.mypy_flags) gpflow tests benchmark doc/*.py setup.py

  format-check:
    parameters:
      py_ver:
        type: string
      venv_dir:
        type: string
    docker:
      - image: cimg/python:<<parameters.py_ver>>
    steps:
      - checkout
      - attach_workspace:
          at: /tmp/
      - run:
          name: Run format check
          command: |
            source /tmp/<<parameters.venv_dir>>/bin/activate
            make format-check

  unit-test:
    parameters:
      py_ver:
        type: string
      venv_dir:
        type: string
    docker:
      - image: cimg/python:<<parameters.py_ver>>
    steps:
      - run_tests:
          venv_dir: <<parameters.venv_dir>>
          pytest_filter: not notebooks and not docs

  notebook-test:
    parameters:
      py_ver:
        type: string
      venv_dir:
        type: string
    docker:
      - image: cimg/python:<<parameters.py_ver>>
    steps:
      - run_tests:
          venv_dir: <<parameters.venv_dir>>
          pytest_filter: notebooks

  docs-test:
    docker:
      # build-docs below uses the `max` version, so let's test with the same version we're going to use.
      - image: cimg/python:<<pipeline.parameters.max_py_ver>>
    steps:
      - run_tests:
          venv_dir: <<pipeline.parameters.max_venv_dir>>
          pytest_filter: docs
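  # The three pytest_filter values used by unit-test, notebook-test and docs-test are meant to
  # partition the suite by marker: "not notebooks and not docs", "notebooks" and "docs".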

  build-notebooks:
    # Note that /tmp/gpflow_build_docs is hard-coded a couple of times below; the same path is
    # also hard-coded in doc/build_docs.py.
    docker:
      # At the time of writing we cannot generate documentation for numpy types with older
      # versions, so we need to use the `max` versions here.
      - image: cimg/python:<<pipeline.parameters.max_py_ver>>
        environment:
          DOCS: True  # This is used in gpflow/ci_utils.py
    parallelism: 10
    steps:
      - checkout
      - attach_workspace:
          at: /tmp/
      - run:
          name: Build notebooks
          no_output_timeout: 60m
          command: |
            source /tmp/<<pipeline.parameters.max_venv_dir>>/bin/activate
            mkdir -p /tmp/gpflow_build_docs
            python doc/build_docs.py --shard ${CIRCLE_NODE_INDEX}/${CIRCLE_NODE_TOTAL}
            # Each shard must output unique files, so clean up the output directory:
            find /tmp/gpflow_build_docs -type f -and -not -name \*.ipynb -delete
      - persist_to_workspace:
          root: /tmp/
          paths:
            - gpflow_build_docs
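    # With `parallelism: 10`, CircleCI sets CIRCLE_NODE_INDEX to 0..9 and CIRCLE_NODE_TOTAL to 10,
    # so the shard arguments passed to build_docs.py are "0/10" through "9/10".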

  build-docs:
    docker:
      # At the time of writing we cannot generate documentation for numpy types with older
      # versions, so we need to use the `max` versions here.
      - image: cimg/python:<<pipeline.parameters.max_py_ver>>
        environment:
          DOCS: True  # This is used in gpflow/ci_utils.py
    steps:
      - checkout
      - attach_workspace:
          at: /tmp/
      - run:
          name: Install pandoc
          command: |
            sudo apt update
            sudo apt install pandoc
      - run:
          name: Build documentation
          no_output_timeout: 60m
          command: |
            source /tmp/<<pipeline.parameters.max_venv_dir>>/bin/activate
            mkdir docs_tmp
            python doc/build_docs.py --shard collect <<pipeline.git.branch>> docs_tmp
      - add_ssh_keys:
          fingerprints:  # Add key to give write-access below
            - "b5:9b:f3:74:26:00:b3:c3:de:3a:b3:03:ee:0b:0b:64"
      - run:
          name: Commit documentation
          command: |
            source /tmp/<<pipeline.parameters.max_venv_dir>>/bin/activate

            git clone -b <<pipeline.parameters.gh_pages_branch>> git@github.com:GPflow/GPflow.git gh-pages

            for version in docs_tmp/*; do
                rm -rf gh-pages/docs/${version}
            done
            cp -r docs_tmp/* gh-pages/docs

            python doc/update_versions.py <<pipeline.git.branch>> gh-pages/docs

            cd gh-pages/docs

            git config user.email "docs.bot@gpflow.com"
            git config user.name "Docs Bot"
            git add .
            git commit -m "Build documentation for <<pipeline.git.branch>> [ci skip]"
            git push origin <<pipeline.parameters.gh_pages_branch>>

  prepare_benchmarks:
    docker:
      - image: cimg/python:<<pipeline.parameters.max_py_ver>>
    steps:
      - checkout
      - attach_workspace:
          at: /tmp/
      - run:
          name: Prepare benchmarks
          command: |
            source /tmp/<<pipeline.parameters.max_venv_dir>>/bin/activate
            python -m benchmark.run --no_plot --shard start ci <<pipeline.parameters.tmp_results_dir>> > <<pipeline.parameters.tmp_results_dir_file>>
      - persist_to_workspace:
          root: /tmp/
          paths:
            - results
            - results_path
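    # The `start` shard prints the concrete results directory (presumably under /tmp/results) to
    # stdout, which is captured in /tmp/results_path; the jobs below read that file back, stripping
    # whitespace with `tr`, to find the shared results directory.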

  run_benchmarks:
    docker:
      - image: cimg/python:<<pipeline.parameters.max_py_ver>>
    parallelism: 10
    steps:
      - checkout
      - attach_workspace:
          at: /tmp/
      - run:
          name: Run benchmarks
          command: |
            source /tmp/<<pipeline.parameters.max_venv_dir>>/bin/activate
            results_dir=$(cat <<pipeline.parameters.tmp_results_dir_file>> | tr -d '\t\r\n ')
            python -m benchmark.run --no_plot --shard ${CIRCLE_NODE_INDEX}/${CIRCLE_NODE_TOTAL} --no_subdir ci ${results_dir}
      - persist_to_workspace:
          root: /tmp/
          paths:
            - results

  store_benchmarks:
    docker:
      - image: cimg/python:<<pipeline.parameters.max_py_ver>>
    steps:
      - checkout
      - attach_workspace:
          at: /tmp/
      - run:
          name: Plot benchmarks
          command: |
            source /tmp/<<pipeline.parameters.max_venv_dir>>/bin/activate
            results_dir=$(cat <<pipeline.parameters.tmp_results_dir_file>> | tr -d '\t\r\n ')
            python -m benchmark.run --no_plot --shard collect --no_subdir ci ${results_dir}
      - add_ssh_keys:
          fingerprints:  # Add key to give write-access below
            - "b5:9b:f3:74:26:00:b3:c3:de:3a:b3:03:ee:0b:0b:64"
      - run:
          name: Generate and commit plots
          command: |
            source /tmp/<<pipeline.parameters.max_venv_dir>>/bin/activate

            echo Checking out
            mkdir -p <<pipeline.parameters.checkout_dir>>
            git clone -b <<pipeline.parameters.gh_pages_branch>> git@github.com:GPflow/GPflow.git <<pipeline.parameters.checkout_dir>>

            echo Moving results
            mkdir -p <<pipeline.parameters.results_dir>>
            mv <<pipeline.parameters.tmp_results_dir>>/* <<pipeline.parameters.results_dir>>

            echo Plotting
            rm -rf <<pipeline.parameters.plots_dir>>
            python -m benchmark.plot --no_subdir ci <<pipeline.parameters.results_dir>> <<pipeline.parameters.plots_dir>>

            echo Pushing
            cd <<pipeline.parameters.checkout_dir>>
            git config user.email "benchmark.bot@gpflow.com"
            git config user.name "Benchmarks Bot"
            git add .
            git commit -m "Run benchmarks [ci skip]"
            git push origin <<pipeline.parameters.gh_pages_branch>>

            echo Done

  deploy:
    docker:
      - image: cimg/python:<<pipeline.parameters.min_py_ver>>
    steps:
      - checkout
      - run:
          name: Verify git tag vs. VERSION
          command: |
            VERSION="v$(cat VERSION | tr -d '\t\r\n ')"
            if [ "$VERSION" != "$CIRCLE_TAG" ]; then
              echo "The package version ($VERSION) and the latest tag version ($CIRCLE_TAG) are different"
              exit 1
            fi
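      # Illustrative example: if the VERSION file contains "2.6.0", then VERSION becomes "v2.6.0"
      # and must equal the pushed git tag ($CIRCLE_TAG) exactly, otherwise the deploy fails here.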
      - run:
          name: Install twine
          command: |
            # Run in a fresh virtual environment, to avoid conflicts with preinstalled packages.
            virtualenv -p python<<pipeline.parameters.min_py_ver>> .venv
            source .venv/bin/activate

            pip install --progress-bar=off -U pip
            pip install --progress-bar=off twine
      - run:
          name: Init .pypirc
          command: |
            echo -e "[pypi]" >> ~/.pypirc
            echo -e "username = artemav" >> ~/.pypirc
            echo -e "password = $PYPI_PASSWORD" >> ~/.pypirc
      - run:
          name: Create pip package
          command: |
            source .venv/bin/activate
            python setup.py bdist_wheel sdist
      - run:
          name: Upload to PyPI
          command: |
            source .venv/bin/activate
            twine upload dist/*

  noop:
    docker:
      - image: cimg/python:<<pipeline.parameters.min_py_ver>>
    steps:
      - run: echo ok


workflows:
  version: 2

  build_test_and_deploy:
    when:
      not:
        equal: [scheduled_pipeline, <<pipeline.trigger_source>>]
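    # I.e. this workflow runs for ordinary pushes and tags, while scheduled pipelines instead
    # trigger the run_benchmarks workflow below.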
    jobs:
      - install_gpflow:
          name: install_gpflow_min
          py_ver: <<pipeline.parameters.min_py_ver>>
          tf_ver: <<pipeline.parameters.min_tf_ver>>
          tfp_ver: <<pipeline.parameters.min_tfp_ver>>
          venv_dir: <<pipeline.parameters.min_venv_dir>>
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
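          # This tag filter (repeated on the jobs below) matches release tags such as v2.6.0 or
          # v2.6.0-rc1 (illustrative tag names).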
      - install_gpflow:
          name: install_gpflow_max
          py_ver: <<pipeline.parameters.max_py_ver>>
          tf_ver: <<pipeline.parameters.max_tf_ver>>
          tfp_ver: <<pipeline.parameters.max_tfp_ver>>
          venv_dir: <<pipeline.parameters.max_venv_dir>>
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - verify-install:
          name: verify-install-min
          py_ver: <<pipeline.parameters.min_py_ver>>
          venv_dir: <<pipeline.parameters.min_venv_dir>>
          requires:
            - install_gpflow_min
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - verify-install:
          name: verify-install-max
          py_ver: <<pipeline.parameters.max_py_ver>>
          venv_dir: <<pipeline.parameters.max_venv_dir>>
          requires:
            - install_gpflow_max
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - type-check:
          name: type-check-min
          py_ver: <<pipeline.parameters.min_py_ver>>
          venv_dir: <<pipeline.parameters.min_venv_dir>>
          requires:
            - install_gpflow_min
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - type-check:
          name: type-check-max
          py_ver: <<pipeline.parameters.max_py_ver>>
          venv_dir: <<pipeline.parameters.max_venv_dir>>
          requires:
            - install_gpflow_max
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - format-check:
          name: format-check-min
          py_ver: <<pipeline.parameters.min_py_ver>>
          venv_dir: <<pipeline.parameters.min_venv_dir>>
          requires:
            - install_gpflow_min
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - format-check:
          name: format-check-max
          py_ver: <<pipeline.parameters.max_py_ver>>
          venv_dir: <<pipeline.parameters.max_venv_dir>>
          requires:
            - install_gpflow_max
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - docs-test:
          requires:
            - install_gpflow_max
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - noop:
          name: fast-tests
          requires:
            - verify-install-min
            - verify-install-max
            - type-check-min
            - type-check-max
            - format-check-min
            - format-check-max
            - docs-test
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - unit-test:
          name: unit-test-min
          py_ver: <<pipeline.parameters.min_py_ver>>
          venv_dir: <<pipeline.parameters.min_venv_dir>>
          requires:
            - install_gpflow_min
            - fast-tests
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - unit-test:
          name: unit-test-max
          py_ver: <<pipeline.parameters.max_py_ver>>
          venv_dir: <<pipeline.parameters.max_venv_dir>>
          requires:
            - install_gpflow_max
            - fast-tests
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - notebook-test:
          name: notebook-test-min
          py_ver: <<pipeline.parameters.min_py_ver>>
          venv_dir: <<pipeline.parameters.min_venv_dir>>
          requires:
            - install_gpflow_min
            - fast-tests
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - notebook-test:
          name: notebook-test-max
          py_ver: <<pipeline.parameters.max_py_ver>>
          venv_dir: <<pipeline.parameters.max_venv_dir>>
          requires:
            - install_gpflow_max
            - fast-tests
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - noop:
          name: all-tests
          requires:
            - fast-tests
            - unit-test-min
            - unit-test-max
            - notebook-test-min
            - notebook-test-max
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
      - build-notebooks:
          requires:
            - install_gpflow_max
            - all-tests
          filters:
            branches:
              only:
                - master
                - develop
      - build-docs:
          requires:
            - install_gpflow_max
            - build-notebooks
          filters:
            branches:
              only:
                - master
                - develop
      - deploy:
          requires:
            - all-tests
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
            branches:
              ignore: /.*/
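          # I.e. deploy never runs for branch builds; it only runs for pushed tags matching the
          # release tag pattern above.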

  run_benchmarks:
    when:
      and:
        - equal: [scheduled_pipeline, <<pipeline.trigger_source>>]
        - equal: [run_benchmarks, <<pipeline.schedule.name>>]
    jobs:
      - install_gpflow:
          py_ver: <<pipeline.parameters.max_py_ver>>
          tf_ver: <<pipeline.parameters.max_tf_ver>>
          tfp_ver: <<pipeline.parameters.max_tfp_ver>>
          venv_dir: <<pipeline.parameters.max_venv_dir>>
      - prepare_benchmarks:
          requires:
            - install_gpflow
      - run_benchmarks:
          requires:
            - install_gpflow
            - prepare_benchmarks
      - store_benchmarks:
          requires:
            - install_gpflow
            - run_benchmarks