MAINT HonestForestClassifier now has bootstrap=True as the default. And removing old MIGHT code #1360

Workflow file for this run

name: "Build and unit tests"
concurrency:
group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.type }}
cancel-in-progress: true
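# The concurrency group above keys on the workflow name and pull request number,
# so a new push to the same PR cancels any still-running build for it.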
on:
  pull_request:
    paths:
      - "**"
  push:
    branches: [main]
    paths:
      - "**.py"
    tags:
      - "v*.*.*"
  workflow_dispatch:
env:
  INSTALLDIR: "build-install"
  CCACHE_DIR: "${{ github.workspace }}/.ccache"
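# CCACHE_DIR is read by ccache during the builds below; INSTALLDIR appears to
# correspond to the build-install/ prefix that the test and debug steps inspect.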
jobs:
  build_and_test:
    name: Meson build ${{ matrix.os }} - py${{ matrix.python-version }}
    timeout-minutes: 20
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-22.04, macos-latest]
        python-version: [3.9, "3.10", "3.11"]
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        shell: bash
    env:
      # to make sure coverage/test command builds cleanly
      FORCE_SUBMODULE: True
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Setup Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          architecture: "x64"
          cache: "pip"
          cache-dependency-path: "pyproject.toml"
      - name: Install packages for Ubuntu
        if: ${{ matrix.os == 'ubuntu-22.04' }}
        run: |
          sudo apt-get update
          sudo apt-get install -y libopenblas-dev libatlas-base-dev liblapack-dev gfortran libgmp-dev libmpfr-dev libsuitesparse-dev ccache libmpc-dev
          sudo apt-get install -y gcc
          sudo apt-get update
      - name: Install Ccache for macOS
        if: ${{ matrix.os == 'macos-latest' }}
        run: |
          brew install ccache
          brew install gcc
          brew install gettext
      - name: show-gcc
        run: |
          gcc --version
      - name: Install Python packages
        run: |
          python -m pip install spin
          python -m spin setup-submodule
          python -m pip install .[build]
          python -m pip install .[test]
      - name: Prepare compiler cache
        id: prep-ccache
        shell: bash
        run: |
          mkdir -p "${CCACHE_DIR}"
          echo "dir=$CCACHE_DIR" >> $GITHUB_OUTPUT
          NOW=$(date -u +"%F-%T")
          echo "timestamp=${NOW}" >> $GITHUB_OUTPUT
      - name: Setup compiler cache
        uses: actions/cache@v4
        id: cache-ccachev1
        # Reference: https://docs.github.com/en/actions/guides/caching-dependencies-to-speed-up-workflows#matching-a-cache-key
        # NOTE: The caching strategy is designed so that every workflow run gets a unique cache key
        # (even if the same workflow is run multiple times). The restore keys are not unique; on a partial match
        # they return the most recently created cache entry, per the GitHub Actions docs.
        with:
          path: ${{ steps.prep-ccache.outputs.dir }}
          # Restores ccache from either a previous build on this branch or on main
          key: ${{ github.workflow }}-${{ matrix.python-version }}-ccache-linux-${{ steps.prep-ccache.outputs.timestamp }}
          # This evaluates to e.g. `Build and unit tests-3.9-ccache-linux-`, which is not unique. As the CI matrix is
          # expanded, this will need to be updated to be unique so that the cache is not restored from a different job altogether.
          restore-keys: |
            ${{ github.workflow }}-${{ matrix.python-version }}-ccache-linux-
      - name: Setup build and install scikit-tree
        run: |
          ./spin build -j 2
      - name: Ccache performance
        shell: bash -l {0}
        run: ccache -s
      - name: build-path
        run: |
          echo "$PWD/build-install/"
          export INSTALLED_PATH=$PWD/build-install/usr/lib/python${{ matrix.python-version }}/site-packages
      - name: Run unit tests and coverage
        run: |
          ./spin --help
          ./spin coverage --help
          ./spin test --help
          ./spin test
      - name: debug
        run: |
          ls $PWD/build-install/usr/lib/python${{ matrix.python-version }}/site-packages/
          echo "Okay..."
          ls $PWD/build
          ls ./
      - name: Save build
        uses: actions/upload-artifact@v4
        with:
          name: sktree-build
          path: $PWD/build
  build_and_test_slow:
    name: Slow Meson build ${{ matrix.os }} - py${{ matrix.python-version }}
    timeout-minutes: 20
    needs: [build_and_test]
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-22.04]
        python-version: ["3.11"]
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        shell: bash
    env:
      # to make sure coverage/test command builds cleanly
      FORCE_SUBMODULE: True
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Setup Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          architecture: "x64"
          cache: "pip"
          cache-dependency-path: "pyproject.toml"
      - name: show-gcc
        run: |
          gcc --version
      - name: Install Ccache for macOS
        if: ${{ matrix.os == 'macos-latest' }}
        run: |
          brew install ccache
      - name: Install packages for Ubuntu
        if: ${{ matrix.os == 'ubuntu-22.04' }}
        run: |
          sudo apt-get update
          sudo apt-get install -y libopenblas-dev libatlas-base-dev liblapack-dev gfortran libgmp-dev libmpfr-dev libsuitesparse-dev ccache libmpc-dev
      - name: Install Python packages
        run: |
          python -m pip install spin
          python -m spin setup-submodule
          python -m pip install .[build]
          python -m pip install .[test]
      - name: Prepare compiler cache
        id: prep-ccache
        shell: bash
        run: |
          mkdir -p "${CCACHE_DIR}"
          echo "dir=$CCACHE_DIR" >> $GITHUB_OUTPUT
          NOW=$(date -u +"%F-%T")
          echo "timestamp=${NOW}" >> $GITHUB_OUTPUT
      - name: Setup compiler cache
        uses: actions/cache@v4
        id: cache-ccachev1
        # Reference: https://docs.github.com/en/actions/guides/caching-dependencies-to-speed-up-workflows#matching-a-cache-key
        # NOTE: The caching strategy is designed so that every workflow run gets a unique cache key
        # (even if the same workflow is run multiple times). The restore keys are not unique; on a partial match
        # they return the most recently created cache entry, per the GitHub Actions docs.
        with:
          path: ${{ steps.prep-ccache.outputs.dir }}
          # Restores ccache from either a previous build on this branch or on main
          key: ${{ github.workflow }}-${{ matrix.python-version }}-ccache-linux-${{ steps.prep-ccache.outputs.timestamp }}
          # This evaluates to e.g. `Build and unit tests-3.11-ccache-linux-`, which is not unique. As the CI matrix is
          # expanded, this will need to be updated to be unique so that the cache is not restored from a different job altogether.
          restore-keys: |
            ${{ github.workflow }}-${{ matrix.python-version }}-ccache-linux-
      - name: Setup build and install scikit-tree
        run: |
          ./spin build -j 2 --forcesubmodule
      - name: Ccache performance
        shell: bash -l {0}
        run: ccache -s
      - name: build-path
        run: |
          echo "$PWD/build-install/"
          export INSTALLED_PATH=$PWD/build-install/usr/lib/python${{ matrix.python-version }}/site-packages
      - name: Run unit tests and coverage
        run: |
          ./spin --help
          ./spin coverage --help
          ./spin test --help
          ./spin coverage
          cp $PWD/build-install/usr/lib/python${{ matrix.python-version }}/site-packages/coverage.xml ./coverage.xml
      - name: debug
        run: |
          ls $PWD/build-install/usr/lib/python${{ matrix.python-version }}/site-packages/
          echo "Okay..."
          ls $PWD/build
          ls ./
      - name: Upload coverage stats to codecov
        uses: codecov/codecov-action@v4
        with:
          # spin goes into the INSTALLED path in order to run pytest
          files: ./coverage.xml
          fail_ci_if_error: true
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Save build
        uses: actions/upload-artifact@v4
        with:
          name: sktree-build
          path: $PWD/build
  # build-windows:
  #   name: Meson build Windows
  #   runs-on: windows-2019
  #   steps:
  #     - name: Checkout
  #       uses: actions/checkout@v4
  #       with:
  #         submodules: recursive
  #     - name: Setup Python
  #       uses: actions/setup-python@v5
  #       with:
  #         python-version: "3.10"
  #         architecture: "x64"
  #         cache: "pip"
  #         cache-dependency-path: "pyproject.toml"
  #     - name: install-rtools
  #       run: |
  #         choco install rtools -y --no-progress --force --version=4.0.0.20220206
  #         echo "c:\rtools40\ucrt64\bin;" >> $env:GITHUB_PATH
  #     - name: show-gcc
  #       run: |
  #         gcc --version
  #     - name: Install system dependencies
  #       run: |
  #         # Download and install Miniconda
  #         Invoke-WebRequest -Uri https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe -OutFile miniconda.exe
  #         Start-Process -Wait -FilePath miniconda.exe -ArgumentList '/S', '/AddToPath=1'
  #         # Add Miniconda to the PATH
  #         $env:Path += ";C:\Users\runneradmin\Miniconda3\Scripts;C:\Users\runneradmin\Miniconda3"
  #         # Update conda
  #         conda update -y conda
  #         # Add conda-forge channel and install openblas
  #         conda config --add channels conda-forge
  #         conda install -y openblas
  #     - name: pip-packages
  #       run: |
  #         pip install .[build]
  #         pip install .[test]
  #         pip install spin
  #         pip install numpy==1.22.4
  #     # - name: Download cibw_before_build_win.sh for openblas-libs
  #     #   shell: bash
  #     #   run: |
  #     #     # mkdir tools
  #     #     # mkdir tools/wheels
  #     #     # curl -L https://raw.githubusercontent.com/scipy/scipy/main/tools/wheels/cibw_before_build_win.sh -o cibw_before_build_win.sh
  #     #     # curl -L https://raw.githubusercontent.com/scipy/scipy/main/tools/openblas_support.py -o tools/openblas_support.py
  #     #     # curl -L https://raw.githubusercontent.com/scipy/scipy/main/tools/wheels/LICENSE_win32.txt -o tools/wheels/LICENSE_win32.txt
  #     # - name: Run cibw_before_build_win.sh for openblas-libs
  #     #   shell: bash
  #     #   run: |
  #     #     set -xe
  #     #     ls
  #     #     bash ./tools/setup_windows.sh
  #     # - name: Set PKG_CONFIG_PATH for openblas-libs
  #     #   run: echo "PKG_CONFIG_PATH=c:\opt\64\lib\pkgconfig;" >> $GITHUB_ENV
  #     # - name: Install system dependencies like openblas
  #     #   run: |
  #     #     choco install openblas -y
  #     - name: Build
  #       run: |
  #         echo "SCIPY_USE_PROPACK=1" >> $env:GITHUB_ENV
  #         echo "FORCE_SUBMODULE=True" >> $env:GITHUB_ENV
  #         spin setup_submodule --forcesubmodule
  #         spin build -j 2
  #         # Necessary because GitHub Actions checks out the repo to D:\ while OpenBLAS
  #         # got installed to C:\ higher up. The copying with `--win-cp-openblas` fails
  #         # when things are split over drives.
  #         # cp C:\opt\64\bin\*.dll $pwd\build-install\Lib\site-packages\scikit-tree\.libs\
  #         # python tools\openblas_support.py --write-init $PWD\build-install\Lib\site-packages\scikit-tree\
  #     - name: test
  #       run: |
  #         spin test
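
# Rough local equivalent of the build_and_test job above (an untested sketch;
# assumes the spin commands behave the same outside CI as they do in the steps above):
#   python -m pip install spin
#   python -m spin setup-submodule
#   python -m pip install .[build]
#   python -m pip install .[test]
#   ./spin build -j 2
#   ./spin test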