threadpoolexecutor for distributing MPI #490

name: Unit Test nmma
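# Overview comment (added for readability): a first job downloads and caches the
# SVD model files; the build job then installs dependencies, builds MultiNest,
# and runs the test suite across the Python versions in the matrix.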
on:
  push:
    paths-ignore:
      - 'doc/**'
  pull_request:
    paths-ignore:
      - 'doc/**'
  workflow_dispatch:
jobs:
  download-files:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout branch being tested
        uses: actions/checkout@v3
      - name: Cache svdmodels directory
        uses: actions/cache@v3
        with:
          path: svdmodels
          key: svdmodels-${{ runner.os }}-${{ hashFiles('**/LICENSE') }}
          restore-keys: svdmodels-${{ runner.os }}-
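      # The cache action restores svdmodels/ when a matching key exists; this step
      # records whether the directory is present so the download below can be skipped.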
      - name: Check if SVD models exist
        id: check-artifact-download
        run: |
          if [ -d "svdmodels" ]; then
            echo "Artifacts exist, skipping download."
            echo "exists=true" >> "$GITHUB_OUTPUT"
          else
            echo "Artifacts do not exist, will download."
            echo "exists=false" >> "$GITHUB_OUTPUT"
          fi
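      # Runs only on a cache miss: fetch the SVD models tarball and unpack it into
      # svdmodels/, which the cache step above saves at the end of the job.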
      - name: Download SVD models
        if: steps.check-artifact-download.outputs.exists == 'false'
        run: |
          wget --show-progress -O svdmodels.tar.gz https://enlil.gw.physik.uni-potsdam.de/~jhawar/svdmodels/svdmodels.tar.gz
          mkdir -p svdmodels
          tar -xvf svdmodels.tar.gz -C svdmodels/
          rm svdmodels.tar.gz
        working-directory: ${{ github.workspace }}
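  # Main test job: runs once per Python version in the matrix and reuses the
  # SVD models cached by the download-files job.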
  build:
    runs-on: ubuntu-latest
    needs: download-files
    strategy:
      fail-fast: false
      matrix:
        python-version: ['3.9', '3.10', '3.11']
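    # PostgreSQL service container used by the "Initialize database" step below.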
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_USER: nmma
          POSTGRES_PASSWORD: anything
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Checkout branch being tested
        uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v3
        with:
          python-version: ${{ matrix.python-version }}
      - name: Restore SVD models from cache
        uses: actions/cache@v3
        with:
          path: svdmodels
          key: svdmodels-${{ runner.os }}-${{ hashFiles('**/LICENSE') }}
          restore-keys: svdmodels-${{ runner.os }}-
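      # Determine pip's cache directory and cache it, keyed on the dependency files,
      # to speed up repeated installs.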
      - name: Get pip cache dir
        id: pip-cache
        run: |
          python -m pip install --upgrade pip setuptools wheel
          echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"
      - name: pip cache
        uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-pip-2-${{ hashFiles('**/pyproject.toml', '**/requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-2-
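      # Install system packages (OpenMPI, BLAS/LAPACK, texlive), the Python test
      # dependencies, nmma itself, and build MultiNest from source.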
      - name: Install dependencies
        run: |
          sudo apt-get update
          # sudo apt-get install gfortran swig libhdf5-serial-dev libmpich-dev
          sudo apt install -y openmpi-bin libopenmpi-dev gfortran build-essential libblas3 libblas-dev liblapack3 liblapack-dev libatlas-base-dev texlive texlive-latex-extra texlive-fonts-recommended dvipng cm-super
          python -m pip install --upgrade git+https://github.com/bitranox/wrapt_timeout_decorator.git
          python -m pip install pytest pytest-cov flake8 pytest-aiohttp sqlparse freezegun PyJWT joblib tensorflow afterglowpy coveralls
          python -m pip install .
          git clone https://github.com/JohannesBuchner/MultiNest && cd MultiNest/build && cmake .. && make && cd ../..
          pwd
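      # Lint in two passes: hard failure on syntax errors and undefined names,
      # then a non-blocking style report.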
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics --exclude docs
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics --exclude docs,versioneer.py,nmma/_version.py,nmma/tests,nmma/*/__init__.py
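      # Store the credentials for the postgres service in ~/.pgpass so psql can
      # connect without a password prompt.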
      - name: Initialize database
        run: |
          echo "localhost:5432:*:nmma:anything" > ~/.pgpass
          chmod 600 ~/.pgpass
          psql -U nmma -h localhost -c "GRANT ALL PRIVILEGES ON DATABASE nmma TO nmma;" nmma
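      # LD_LIBRARY_PATH points at the MultiNest build above so its shared library
      # can be found at runtime.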
      - name: Test with pytest
        run: |
          python -m coverage run --source nmma -m pytest nmma/tests/*.py
        env:
          LD_LIBRARY_PATH: .:/home/runner/work/nmma/nmma/MultiNest/lib # for Linux
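      # Upload the coverage report collected above to Coveralls.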
      - name: Run Coveralls
        if: ${{ success() }}
        run: |
          coveralls --service=github
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}