-
Notifications
You must be signed in to change notification settings - Fork 191
1323 lines (1277 loc) · 52.3 KB
/
Tests.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# Distributed under the MIT License.
# See LICENSE.txt for details.

# Continuous integration tests that pull requests are required to pass. This
# workflow can also be dispatched manually to tag and release versions.
name: Tests

# Set any defaults for the runs below.
# - use bash as the default shell since this is almost certainly what
#   is always expected. We use regular expressions in a few places
#   that rely on bash.
defaults:
  run:
    shell: bash

# Note that by default the jobs only run on the base repository, testing pull
# requests and merge commits. Enable GitHub Actions in your fork's repository
# settings to also run the tests on every push to one of your branches.
#
# NOTE: `on` is a YAML 1.1 boolean-looking key; GitHub's parser handles it,
# but generic yamllint may flag it (suppress `truthy` for this key).
on:
  # We run all jobs when pull requests are opened, commits are pushed, or pull
  # requests are re-opened after being closed.
  # The jobs triggered by this event run on the base repository of the pull
  # request, so they have access to its caches.
  pull_request:
  # We run those jobs that require no information about a pull request (e.g.
  # unit tests) also on `push` events. This setup tests merge commits into
  # `develop` and also builds up caches on `develop` that can be re-used by PRs.
  # It also runs the jobs on forks if they have GitHub Actions enabled.
  push:
    branches-ignore:
      - gh-pages
  # Allow running the workflow manually to run tests and optionally release a
  # version on success (see the dev guide on "Automatic versioning")
  workflow_dispatch:
    inputs:
      release_version:
        description: >
          Enter a version name YYYY.MM.DD[.TWEAK] to create a release on
          success
        required: false
        default: ''
      timezone:
        description: >
          Timezone used for validating the version name. The release must be
          approved by the end of the day in the specified timezone. See
          /usr/share/zoneinfo for a list of possible values.
        required: false
        default: 'America/Los_Angeles'
      clear_ccache:
        description: >
          Enter 'yes' without quotes to clear ccache before running
        required: false
        default: ''
      container:
        description: >
          Container to use for builds
        required: false
        default: 'sxscollaboration/spectre:dev'
  # Allow running this workflow as a job in another workflow. This is used to
  # test a new Docker container before publishing it.
  workflow_call:
    inputs:
      container:
        description: >
          Container to use for builds
        required: false
        type: string
        default: 'sxscollaboration/spectre:dev'

# Cancel all other queued or in-progress runs of this workflow when it is
# scheduled, so repeated pushes to a branch or a PR don't block CI. Repeated
# pushes to 'develop' and 'release' are not canceled, so every merge commit is
# tested.
concurrency:
  group: ${{ github.ref }}
  cancel-in-progress: ${{ github.ref != 'refs/heads/develop'
    && github.ref != 'refs/heads/release' }}
jobs:
  # Make sure no commits are prefixed with `fixup` or similar keywords. See
  # `tools/CheckCommits.sh` for details.
  check_commits:
    name: Commits
    # Only run on pull requests since we don't check _all_ commits, but only
    # those that came after the PR's base ref.
    if: github.event_name == 'pull_request'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Check commits
        # `CheckCommits.sh` tests against the local `develop` branch, so that's
        # where we fetch the pull-request's base-branch to. Typically, it is
        # the upstream `sxs-collaboration/spectre/develop` branch.
        #
        # Folded scalar (`run: >`): blank lines separate shell commands;
        # adjacent lines (the `git remote add` + URL) fold into one command.
        run: >
          cd $GITHUB_WORKSPACE

          git remote add upstream
          https://github.com/${{ github.repository }}.git

          git remote -v

          git fetch upstream ${{ github.base_ref }}:develop

          ./tools/CheckCommits.sh
# - Run simple textual checks over files in the repository, e.g. checking for
# a license, line length limits etc. See `tools/CheckFiles.sh` for details.
# - Run format checker for python to make sure the code is formatted correctly
# - Check the metadata are consistent
check_files_and_formatting:
name: Files and formatting
runs-on: ubuntu-latest
container:
image: ${{ inputs.container || 'sxscollaboration/spectre:dev' }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
# Work around https://github.com/actions/checkout/issues/760
- name: Trust checkout
run: |
git config --global --add safe.directory $GITHUB_WORKSPACE
# The action above checks out the `github.ref` by default, which points to
# the merge commit with the target branch for pull-request events. For
# this job we check out the pull-request HEAD instead. It makes
# git-related issues easier to debug because the state matches the local
# repository. It also prevents releases that happened since the
# pull-request branch was last rebased from disrupting tests that involve
# the latest release tag.
- name: Checkout pull-request HEAD
if: github.event_name == 'pull_request'
run: |
git checkout ${{ github.event.pull_request.head.sha }}
# Some tests involve release tags, which may not have been pushed to
# forks. Fetching them here.
- name: Fetch upstream tags on forks
if: github.repository != 'sxs-collaboration/spectre'
run: |
git fetch --tags https://github.com/sxs-collaboration/spectre
- name: Install Python dependencies
run: |
pip3 install -r .github/scripts/requirements-release.txt
pip3 install -r support/Python/dev_requirements.txt
- name: Test tools
run: |
python3 -m unittest discover -p 'Test_CompileReleaseNotes.py' \
tests.tools -v
- name: Check Python formatting
run: |
cd $GITHUB_WORKSPACE
echo "Using 'black' to check Python formatting..."
black --check .
echo "Using 'isort' to check Python formatting..."
isort --check-only .
- name: Test script
run: |
cd $GITHUB_WORKSPACE
./tools/CheckFiles.sh --test
- name: Check files
run: |
cd $GITHUB_WORKSPACE
./tools/CheckFiles.sh
- name: Check metadata
run: |
python3 tools/CheckMetadata.py
- name: Check the metadata is consistent with the releases
# No need to check this on forks. They would need to set a Zenodo token
# for this test. Also disable on PRs because they don't have access to
# the repo's secrets.
if: >
github.repository == 'sxs-collaboration/spectre'
&& github.event_name != 'pull_request'
run: |
python3 .github/scripts/Release.py prepare -vv --check-only \
--zenodo-token ${{ secrets.ZENODO_READONLY_TOKEN }} \
--github-token ${{ secrets.GITHUB_TOKEN }}
python3 .github/scripts/Release.py publish -vv --check-only \
--zenodo-token ${{ secrets.ZENODO_READONLY_TOKEN }} \
--github-token ${{ secrets.GITHUB_TOKEN }} \
--auto-publish
- name: Check release notes
run: |
python3 tools/CompileReleaseNotes.py -vv -o release_notes.md \
--github-token ${{ secrets.GITHUB_TOKEN }}
- name: Upload release notes
uses: actions/upload-artifact@v4
with:
name: release-notes
path: release_notes.md
# GitHub doesn't display artifacts until the workflow has completed, so we
# print the release notes here to be able to review them before approving
# a release
- name: Print release notes
run: |
cat release_notes.md
# Lint with clang-tidy. We check only code that changed relative to the
# nearest common ancestor commit with `sxs-collaboration/spectre/develop`.
clang_tidy:
name: Clang-tidy
if: >
(github.event_name == 'pull_request'
&& github.repository == 'sxs-collaboration/spectre'
&& github.base_ref == 'develop')
|| github.ref != 'refs/heads/develop'
runs-on: ubuntu-latest
container:
image: ${{ inputs.container || 'sxscollaboration/spectre:dev' }}
strategy:
matrix:
build_type: [Debug, Release]
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
# Work around https://github.com/actions/checkout/issues/760
- name: Trust checkout
run: |
git config --global --add safe.directory $GITHUB_WORKSPACE
# Some cpp20 features aren't supported until clang-tidy-16. See:
# https://stackoverflow.com/questions/46114214/lambda-implicit-capture-fails-with-variable-declared-from-structured-binding#comment135007519_46115028
# The clang-tidy-diff tool doesn't report exit codes until version
# 18.1.0, so we grab the version that does:
# https://github.com/llvm/llvm-project/commit/4294bca5e4f6e6e8cfdbd9fbe8751c5e5415fd47
- name: Install clang-tidy-16
run: |
apt-get update -y
apt-get remove -y clang-tidy clang-tidy-14
apt-get install -y clang-tidy-16
ln -s /usr/bin/clang-tidy-16 /usr/bin/clang-tidy
wget https://raw.githubusercontent.com/llvm/llvm-project/4294bca5e4f6e6e8cfdbd9fbe8751c5e5415fd47/clang-tools-extra/clang-tidy/tool/clang-tidy-diff.py
chmod +x clang-tidy-diff.py
mv clang-tidy-diff.py /usr/bin/clang-tidy-diff.py
ln -s /usr/bin/clang-tidy-diff.py /usr/bin/clang-tidy-diff
- name: Configure annotations
# Has to be accessible outside the container, see issue:
# https://github.com/actions/toolkit/issues/305
run: |
cp .github/problem_matchers/ClangTidy.json "$HOME/"
echo "::add-matcher::$HOME/ClangTidy.json"
- name: Fetch sxs-collaboration/spectre/develop
run: >
cd $GITHUB_WORKSPACE
git remote add upstream
https://github.com/sxs-collaboration/spectre.git
git remote -v
git fetch upstream develop
- name: Configure with cmake
run: >
mkdir build && cd build
cmake
-D CMAKE_C_COMPILER=clang-14
-D CMAKE_CXX_COMPILER=clang++-14
-D CMAKE_Fortran_COMPILER=gfortran
-D CHARM_ROOT=${CHARM_ROOT}
-D CMAKE_BUILD_TYPE=${{ matrix.build_type }}
-D OVERRIDE_ARCH=x86-64
-D USE_CCACHE=OFF
-D USE_PCH=OFF
-D DEBUG_SYMBOLS=OFF
-D BUILD_PYTHON_BINDINGS=ON
$GITHUB_WORKSPACE
make -j4 module_All
- name: Check clang-tidy
run: >
UPSTREAM_HASH=$(git merge-base HEAD upstream/develop)
echo "Running clang-tidy relative to: $UPSTREAM_HASH\n"
git diff -U0 $UPSTREAM_HASH |
clang-tidy-diff -path build -p1 -use-color \
-extra-arg=-I/usr/include/hdf5/serial
# Build the documentation and check for problems, then upload as a workflow
# artifact and deploy to gh-pages.
doc_check:
name: Documentation
needs: check_files_and_formatting
runs-on: ubuntu-latest
container:
image: ${{ inputs.container || 'sxscollaboration/spectre:dev' }}
env:
CCACHE_DIR: /work/ccache
CCACHE_READONLY: 1
CCACHE_COMPILERCHECK: content
CCACHE_BASEDIR: $GITHUB_WORKSPACE
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Work around https://github.com/actions/checkout/issues/760
- name: Trust checkout
run: |
git config --global --add safe.directory $GITHUB_WORKSPACE
# These can be installed directly in the Docker container instead
- name: Install Python dependencies
run: |
python3 -m pip install -r support/Python/dev_requirements.txt
- name: Download release notes
uses: actions/download-artifact@v4
id: release-notes
with:
name: release-notes
path: /work
- name: Append release notes to changelog
# The sed command escapes @ symbols at beginning of words (GitHub
# usernames) so they aren't interpreted as Doxygen commands
run: |
echo "" >> docs/Changelog.md
cat /work/release_notes.md | sed 's/\(\B\@\)/\\\1/g' \
>> docs/Changelog.md
- name: Restore ccache
uses: actions/cache/restore@v4
id: restore-ccache
env:
CACHE_KEY_PREFIX: ccache-gcc-11-Debug-pch-ON
with:
path: /work/ccache
key: "${{ env.CACHE_KEY_PREFIX }}-${{ github.run_id }}"
restore-keys: |
${{ env.CACHE_KEY_PREFIX }}-
# - Make sure to use the same build configuration as the unit tests from
# which we restore the ccache.
# - Set `BUILD_TESTING=OFF` to test a CMake configuration with tests
# turned off.
- name: Configure with cmake
run: >
mkdir build && cd build
cmake
-D CMAKE_C_COMPILER=gcc-11
-D CMAKE_CXX_COMPILER=g++-11
-D CMAKE_Fortran_COMPILER=gfortran-11
-D CMAKE_CXX_FLAGS="-Werror"
-D OVERRIDE_ARCH=x86-64
-D CHARM_ROOT=${CHARM_ROOT}
-D CMAKE_BUILD_TYPE=Debug
-D DEBUG_SYMBOLS=OFF
-D USE_PCH=ON
-D USE_XSIMD=ON
-D USE_CCACHE=ON
-D ENABLE_OPENMP=ON
-D BUILD_PYTHON_BINDINGS=ON
-D BUILD_SHARED_LIBS=ON
-D MEMORY_ALLOCATOR=SYSTEM
-D BUILD_DOCS=ON
-D BUILD_TESTING=OFF
$GITHUB_WORKSPACE
- name: Check documentation
working-directory: build
run: |
make doc-check
# Re-build with coverage information on pushes to develop for deployment
# to gh-pages.
- name: Build documentation with coverage
if: github.event_name == 'push' && github.ref == 'refs/heads/develop'
working-directory: build
run: |
make doc-coverage
- name: Build Python docs
working-directory: build
run: |
make py-docs
# Upload as an artifact to make available to deployment and to PRs
- name: Prepare for upload
working-directory: build
run: |
tar -cf docs-html.tar --directory docs/html .
- name: Upload documentation
uses: actions/upload-artifact@v4
with:
name: docs-html
path: build/docs-html.tar
# Deploy to gh-pages on pushes to develop
# See docs: https://github.com/actions/deploy-pages
docs-deploy:
name: Deploy documentation
if: github.event_name == 'push' && github.ref == 'refs/heads/develop'
needs: doc_check
runs-on: ubuntu-latest
permissions:
pages: write
id-token: write
environment:
name: github-pages
url: ${{ steps.deploy.outputs.page_url }}
steps:
- uses: actions/deploy-pages@v4
id: deploy
with:
artifact_name: docs-html
# Build all test executables and run unit tests on a variety of compiler
# configurations.
unit_tests:
name: Unit tests
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
# We have a sparse clang test configuration to reduce the amount of
# GitHub cache space we use. GCC being the production compiler on
# supercomputers means we need to thoroughly test it.
compiler:
- gcc-9
- gcc-10
- gcc-11
build_type: [Debug, Release]
include:
# Generate code coverage report for a single build
# Note: currently disabled because it exceeds the available disk space
# - compiler: gcc-9
# build_type: Debug
# COVERAGE: ON
# TEST_TIMEOUT_FACTOR: 3
# This configuration seems to run consistently slower than newer gcc
# or clang builds, so we increase the test timeout a bit
- compiler: gcc-10
build_type: Debug
TEST_TIMEOUT_FACTOR: 2
# Test 3D rendering with ParaView
# Note: currently disabled because of some unknown upstream issue
# test_3d_rendering: ON
# Need Python version consistent with ParaView
PYTHON_VERSION: "3.9"
# Don't modify the gcc-11 Debug build so its cache can be reused for
# the documentation build
# - compiler: gcc-11
# build_type: Debug
# Test with Python 3.8 so that we retain backwards compatibility. We
# keep track of Python versions on supercomputers in this issue:
# https://github.com/sxs-collaboration/spectre/issues/442
- compiler: gcc-11
build_type: Release
PYTHON_VERSION: "3.8"
# Disable building executable for this build for now because it
# exceeds the available memory. See issue:
# https://github.com/sxs-collaboration/spectre/issues/5472
test_executables: OFF
# Test building with static libraries. Do so with clang in release
# mode because these builds use up little disk space compared to GCC
# builds or clang Debug builds
- compiler: clang-13
build_type: Release
BUILD_SHARED_LIBS: OFF
use_xsimd: OFF
MEMORY_ALLOCATOR: JEMALLOC
# Add a test without PCH to the build matrix, which only builds core
# libraries. Building all the tests without the PCH takes very long
# and the most we would catch is a missing include of something that's
# in the PCH.
# Use this test also to build and test all input files with "normal"
# or higher priority. The other configurations only test input files
# with "high" priority to reduce the total build time.
- compiler: clang-13
build_type: Debug
use_pch: OFF
unit_tests: OFF
input_file_tests_min_priority: "normal"
# Test with ASAN
- compiler: clang-14
build_type: Debug
# When building with ASAN we also need python bindings to be
# disabled because otherwise we get link errors. See issue:
# https://github.com/sxs-collaboration/spectre/issues/1890
# So we are also using this build to test building without Python
# bindings enabled.
ASAN: ON
BUILD_PYTHON_BINDINGS: OFF
MEMORY_ALLOCATOR: JEMALLOC
TEST_TIMEOUT_FACTOR: 2
- compiler: clang-14
build_type: Release
# Test compatibility with oldest supported CMake version
CMAKE_VERSION: "3.18.2"
# Use an MPI version of Charm
CHARM_ROOT: /work/charm_7_0_0/mpi-linux-x86_64-smp-clang
# MPI running tests is a bit slower than multicore
TEST_TIMEOUT_FACTOR: 3
# If MPI should be tested
USE_MPI: ON
# Test `install` target with clang in Release mode because it uses
# little disk space
install: ON
container:
image: ${{ inputs.container || 'sxscollaboration/spectre:dev' }}
env:
# We run unit tests with the following compiler flags:
# - `-Werror`: Treat warnings as error.
# - `-march=x86-64`: Make sure we are building on a consistent
# architecture so caching works. This is necessary because GitHub
# may run the job on different hardware.
CXXFLAGS: "-Werror"
# We make sure to use a fixed absolute path for the ccache directory
CCACHE_DIR: /work/ccache
# Use a separate temp directory to conserve cache space on GitHub
CCACHE_TEMPDIR: /work/ccache-tmp
# Control the max cache size. We evict unused entries in a step below to
# make sure that each build only uses what it need of this max size.
CCACHE_MAXSIZE: "2G"
# Control the compression level. The ccache docs recommend at most level
# 5 to avoid slowing down compilation.
CCACHE_COMPRESS: 1
CCACHE_COMPRESSLEVEL: 5
# We hash the content of the compiler rather than the location and mtime
# to make sure the cache works across the different machines
CCACHE_COMPILERCHECK: content
# Rewrite absolute paths starting with this base dir to relative paths
# before hashing. This is needed to reuse the cache for the formaline
# test below, which builds in a different directory.
CCACHE_BASEDIR: $GITHUB_WORKSPACE
# These vars are to allow running charm with MPI as root inside the
# container which arises from using the "--privileged" flag just below.
OMPI_ALLOW_RUN_AS_ROOT: 1
OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1
# The number of cores to run on. This is given at:
# https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories
NUMBER_OF_CORES: 4
# See https://lists.cs.illinois.edu/lists/arc/charm/2018-10/msg00011.html
# for why we need this
options: --privileged
steps:
- name: Record start time
id: start
run: |
echo "time=$(date +%s)" >> "$GITHUB_OUTPUT"
- name: Checkout repository
uses: actions/checkout@v4
# Work around https://github.com/actions/checkout/issues/760
- name: Trust checkout
run: |
git config --global --add safe.directory $GITHUB_WORKSPACE
- uses: ./.github/actions/parse-compiler
with:
compiler: ${{ matrix.compiler }}
# Install the selected compiler. We don't bundle all of them in the
# container because they take up a lot of space.
- name: Install compiler
run: |
apt-get update -y
if [[ $COMPILER_ID = gcc ]]; then
apt-get install -y $CC $CXX $FC
else
apt-get install -y $CC $FC
fi
# Install specific CMake version if requested
- name: Install CMake version
if: matrix.CMAKE_VERSION
working-directory: /work
run: |
CMAKE_VERSION=${{ matrix.CMAKE_VERSION }}
wget -O cmake-install.sh "https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-linux-x86_64.sh"
sh cmake-install.sh --prefix=/usr --skip-license
rm cmake-install.sh
- name: Install Python version
if: matrix.PYTHON_VERSION
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.PYTHON_VERSION }}
- name: Install Python dependencies
if: matrix.PYTHON_VERSION
run: |
python3 -m pip install -r support/Python/requirements.txt \
-r support/Python/dev_requirements.txt
python3 -m pip list -v
- name: Install ParaView
if: matrix.test_3d_rendering == 'ON'
working-directory: /work
# Can't just `apt-get install python3-paraview` because we need the
# headless build of ParaView. So we download a binary from paraview.org
# (which is built for a specific Python version unfortunately).
run: |
apt-get install -y libglvnd-dev # Needed to find 'libglapi.so'
wget -O paraview.tar.gz --no-check-certificate "https://www.paraview.org/paraview-downloads/download.php?submit=Download&version=v5.10&type=binary&os=Linux&downloadFile=ParaView-5.10.1-osmesa-MPI-Linux-Python3.9-x86_64.tar.gz"
tar -xzf paraview.tar.gz
rm paraview.tar.gz
mv ParaView-* /opt/paraview
echo "/opt/paraview/bin" >> $GITHUB_PATH
# Make 'paraview' Python package available
PYTHONPATH=/opt/paraview/lib/python3.9/site-packages:$PYTHONPATH
# Give system-installed Python packages priority over ParaView's
PYTHONPATH=$pythonLocation/lib/python3.9/site-packages:$PYTHONPATH
echo "PYTHONPATH=$PYTHONPATH" >> $GITHUB_ENV
- name: Install MPI and Charm++
if: matrix.USE_MPI == 'ON'
working-directory: /work
run: |
apt-get install -y libopenmpi-dev
cd /work/charm_7_0_0 && ./build charm++ mpi-linux-x86_64-smp clang \
-j ${NUMBER_OF_CORES} -g0 -O1 --build-shared --with-production
# Assign a unique cache key for every run.
# - We will save the cache using this unique key, but only on the develop
# branch. This way we regularly update the cache without filling up the
# storage space with caches from other branches.
# - To restore the most recent cache we provide a partially-matched
# "restore key".
- name: Restore ccache
uses: actions/cache/restore@v4
id: restore-ccache
env:
CACHE_KEY_PREFIX: "ccache-${{ matrix.compiler }}-\
${{ matrix.build_type }}-pch-${{ matrix.use_pch || 'ON' }}"
with:
path: /work/ccache
key: "${{ env.CACHE_KEY_PREFIX }}-${{ github.run_id }}"
restore-keys: |
${{ env.CACHE_KEY_PREFIX }}-
- name: Configure ccache
# Print the ccache configuration and reset statistics
run: |
ccache -pz
- name: Clear ccache
# Clear ccache if requested
if: >
github.event_name == 'workflow_dispatch'
&& github.event.inputs.clear_ccache == 'yes'
run: |
ccache -C
- name: Configure build with cmake
# Notes on the build configuration:
# - We don't need debug symbols during CI, so we turn them off to reduce
# memory usage.
run: >
mkdir build && cd build
BUILD_PYTHON_BINDINGS=${{ matrix.BUILD_PYTHON_BINDINGS }}
BUILD_SHARED_LIBS=${{ matrix.BUILD_SHARED_LIBS }}
MATRIX_CHARM_ROOT=${{ matrix.CHARM_ROOT }}
ASAN=${{ matrix.ASAN }}
MEMORY_ALLOCATOR=${{ matrix.MEMORY_ALLOCATOR }}
UBSAN_UNDEFINED=${{ matrix.UBSAN_UNDEFINED }}
UBSAN_INTEGER=${{ matrix.UBSAN_INTEGER }}
USE_PCH=${{ matrix.use_pch }}
USE_XSIMD=${{ matrix.use_xsimd }}
COVERAGE=${{ matrix.COVERAGE }}
TEST_TIMEOUT_FACTOR=${{ matrix.TEST_TIMEOUT_FACTOR }}
INPUT_FILE_MIN_PRIO=${{ matrix.input_file_tests_min_priority }}
cmake --version
cmake
-D CMAKE_C_COMPILER=${CC}
-D CMAKE_CXX_COMPILER=${CXX}
-D CMAKE_Fortran_COMPILER=${FC}
-D CMAKE_CXX_FLAGS="${CXXFLAGS} ${{ matrix.EXTRA_CXX_FLAGS }}"
-D OVERRIDE_ARCH=x86-64
-D CHARM_ROOT=${MATRIX_CHARM_ROOT:-$CHARM_ROOT}
-D CMAKE_BUILD_TYPE=${{ matrix.build_type }}
-D DEBUG_SYMBOLS=OFF
-D BACKTRACE_LIB=/usr/local/lib/libbacktrace.a
-D BACKTRACE_HEADER_DIR=/usr/local/include
-D UNIT_TESTS_IN_TEST_EXECUTABLES=OFF
-D SPECTRE_INPUT_FILE_TEST_MIN_PRIORITY=${INPUT_FILE_MIN_PRIO:-'high'}
-D STRIP_SYMBOLS=ON
-D STUB_EXECUTABLE_OBJECT_FILES=ON
-D STUB_LIBRARY_OBJECT_FILES=ON
-D USE_PCH=${USE_PCH:-'ON'}
-D USE_XSIMD=${USE_XSIMD:-'ON'}
-D USE_CCACHE=ON
-D ENABLE_OPENMP=ON
-D COVERAGE=${COVERAGE:-'OFF'}
-D BUILD_PYTHON_BINDINGS=${BUILD_PYTHON_BINDINGS:-'ON'}
-D BUILD_SHARED_LIBS=${BUILD_SHARED_LIBS:-'ON'}
-D ASAN=${ASAN:-'OFF'}
-D UBSAN_UNDEFINED=${UBSAN_UNDEFINED:-'OFF'}
-D UBSAN_INTEGER=${UBSAN_INTEGER:-'OFF'}
-D MEMORY_ALLOCATOR=${MEMORY_ALLOCATOR:-'SYSTEM'}
-D SPECTRE_UNIT_TEST_TIMEOUT_FACTOR=${TEST_TIMEOUT_FACTOR:-'1'}
-D SPECTRE_INPUT_FILE_TEST_TIMEOUT_FACTOR=${TEST_TIMEOUT_FACTOR:-'1'}
-D SPECTRE_PYTHON_TEST_TIMEOUT_FACTOR=${TEST_TIMEOUT_FACTOR:-'1'}
-D CMAKE_INSTALL_PREFIX=/work/spectre_install
-D BUILD_DOCS=OFF
--warn-uninitialized
$GITHUB_WORKSPACE 2>&1 | tee CMakeOutput.txt 2>&1
- name: Check for CMake warnings
working-directory: build
run: |
! grep -A 6 "CMake Warning" ./CMakeOutput.txt
- name: Build unit tests
if: matrix.unit_tests != 'OFF'
working-directory: build
run: |
make -j${NUMBER_OF_CORES} unit-tests
- name: Run unit tests
if: matrix.unit_tests != 'OFF' && matrix.COVERAGE != 'ON'
working-directory: build
run: |
# We get occasional random timeouts, repeat tests to see if
# it is a random timeout or systematic.
#
# We run ctest -L unit before build test-executables to make
# sure that all the unit tests are actually built by the
# unit-tests target.
ctest -j${NUMBER_OF_CORES} -L unit \
--output-on-failure --repeat after-timeout:3
- name: Run unit tests with coverage reporting
if: matrix.COVERAGE == 'ON'
working-directory: build
run: |
make unit-test-coverage
rm -r docs/html/unit-test-coverage
- name: Upload coverage report to codecov.io
if: matrix.COVERAGE == 'ON'
uses: codecov/codecov-action@v4
with:
files: build/tmp/coverage.info
token: ${{ secrets.CODECOV_TOKEN }}
# Display the job as failed if upload fails (defaults to false for
# some reason)
fail_ci_if_error: true
# We currently don't require codecov in our guidelines, so don't fail
# the CI build if codecov fails to upload
continue-on-error: true
# Avoid running out of disk space by cleaning up the build directory
- name: Clean up unit tests
working-directory: build
run: |
pwd
ls | xargs du -sh
du -sh .
rm -f bin/Test_*
# Build the executables in a single thread to reduce memory usage
# sufficiently so they compile on the GitHub-hosted runners
- name: Build executables
if: matrix.COVERAGE != 'ON' && matrix.test_executables != 'OFF'
working-directory: build
run: |
make test-executables
- name: Build Benchmark executable
if: matrix.build_type == 'Release'
working-directory: build
run: |
make -j${NUMBER_OF_CORES} Benchmark
# Delete unused cache entries before uploading the cache
- name: Clean up ccache
if: github.ref == 'refs/heads/develop'
run: |
now=$(date +%s)
job_duration=$((now - ${{ steps.start.outputs.time }}))
ccache --evict-older-than "${job_duration}s"
# Save the cache after everything has been built. Also save on failure or
# on cancellation (`always()`) because a partial cache is better than no
# cache.
- name: Save ccache
if: always() && github.ref == 'refs/heads/develop'
uses: actions/cache/save@v4
with:
path: /work/ccache
key: ${{ steps.restore-ccache.outputs.cache-primary-key }}
- name: Print size of build directory
working-directory: build
run: |
pwd
ls | xargs du -sh
du -sh .
- name: Diagnose ccache
run: |
ccache -s
- name: Run non-unit tests
if: matrix.COVERAGE != 'ON' && matrix.test_executables != 'OFF'
working-directory: build
run: |
# We get occasional random timeouts, repeat tests to see if
# it is a random timeout or systematic
#
# Only use 2 cores because these tests run more slowly.
ctest -j2 -LE unit --output-on-failure \
--repeat after-timeout:3
- name: Install
if: matrix.install == 'ON'
working-directory: build
# Make sure the `install` target runs without error. We could add some
# basic smoke tests here to make sure the installation worked.
run: |
make install
- name: Print size of install directory
if: matrix.install == 'ON'
working-directory: /work/spectre_install
# Remove files post-install to reduce disk space for later on.
run: |
pwd
ls | xargs du -sh
du -sh .
rm -r ./*
- name: Test formaline tar can be built
  # - We only run the formaline tests in debug mode to reduce total build
  #   time in CI. We don't run them with ASAN because then we run out of
  #   disk space.
  # - We do run for all compilers, though, because formaline injects data
  #   at the linking stage, which means we are somewhat tied to the
  #   compiler version.
  # - We make sure to use the same compiler flags as the full build above
  #   so ccache is able to speed up the build.
  if: matrix.build_type == 'Debug' && matrix.ASAN != 'ON'
  working-directory: build
  # `run: >` folds single line breaks into spaces, so consecutive lines
  # form ONE shell command line; a blank line produces a real newline,
  # and more-indented lines keep their line breaks. Separate commands
  # must therefore be separated by blank lines — without them the whole
  # script collapses onto one line (`if` would become arguments to
  # `make`, and the `#` comment below would swallow the rest).
  run: >
    make EvolveBurgers -j${NUMBER_OF_CORES}

    if [ ! -f ./bin/EvolveBurgers ]; then
      echo "Could not find the executable EvolveBurgers";
      echo "which we use for testing formaline";
      exit 1
    fi

    # We disable ASAN's leak sanitizer because Charm++ has false
    # positives that would cause the build to fail. We disable
    # leak sanitizer for the ctest runs inside CMake anyway.

    ASAN_OPTIONS=detect_leaks=0 ./bin/EvolveBurgers
    --dump-source-tree-as spectre_src --dump-only

    mkdir spectre_src;
    mv spectre_src.tar.gz spectre_src;
    cd spectre_src;
    tar xf spectre_src.tar.gz;
    mkdir build-formaline;
    cd build-formaline

    BUILD_PYTHON_BINDINGS=${{ matrix.BUILD_PYTHON_BINDINGS }}
    MATRIX_CHARM_ROOT=${{ matrix.CHARM_ROOT }}
    MEMORY_ALLOCATOR=${{ matrix.MEMORY_ALLOCATOR }};
    USE_PCH=${{ matrix.use_pch }};
    USE_XSIMD=${{ matrix.use_xsimd }}

    cmake
    -D CMAKE_C_COMPILER=${CC}
    -D CMAKE_CXX_COMPILER=${CXX}
    -D CMAKE_Fortran_COMPILER=${FC}
    -D CMAKE_CXX_FLAGS="${CXXFLAGS} ${{ matrix.EXTRA_CXX_FLAGS }}"
    -D OVERRIDE_ARCH=x86-64
    -D BUILD_SHARED_LIBS=ON
    -D CHARM_ROOT=${MATRIX_CHARM_ROOT:-$CHARM_ROOT}
    -D CMAKE_BUILD_TYPE=${{ matrix.build_type }}
    -D DEBUG_SYMBOLS=OFF
    -D UNIT_TESTS_IN_TEST_EXECUTABLES=OFF
    -D STRIP_SYMBOLS=ON
    -D STUB_EXECUTABLE_OBJECT_FILES=ON
    -D STUB_LIBRARY_OBJECT_FILES=ON
    -D USE_PCH=${USE_PCH:-'ON'}
    -D USE_XSIMD=${USE_XSIMD:-'ON'}
    -D USE_CCACHE=ON
    -D BUILD_PYTHON_BINDINGS=${BUILD_PYTHON_BINDINGS:-'ON'}
    -D MEMORY_ALLOCATOR=${MEMORY_ALLOCATOR:-'SYSTEM'}
    -D BUILD_DOCS=OFF
    ..

    make EvolveBurgers -j${NUMBER_OF_CORES}

    # Run on multiple cores to run both the "parse" and "execute" tests
    # simultaneously.

    ctest -j${NUMBER_OF_CORES} -R InputFiles.Burgers.Step.yaml
    --output-on-failure

    cd .. && rm -r build-formaline
- name: Test bundled exporter
  if: matrix.ASAN != 'ON'
  working-directory: build
  # Build the standalone BundledExporter, configure a small external
  # CMake project against the build tree, and check that the resulting
  # binary extracts a known `Psi` value from a test HDF5 volume file.
  run: |
    make -j${NUMBER_OF_CORES} BundledExporter
    mkdir build-test-exporter && cd build-test-exporter
    cmake -D SPECTRE_ROOT=$GITHUB_WORKSPACE/build \
    $GITHUB_WORKSPACE/tests/Unit/IO/Exporter/BundledExporter
    make -j${NUMBER_OF_CORES}
    # Args: <h5 file> <subfile> <obs-id> <var> <dim> <x> <y>, then the
    # expected interpolated value.
    ./TestSpectreExporter \
    $GITHUB_WORKSPACE/tests/Unit/Visualization/Python/VolTestData0.h5 \
    element_data 0 Psi 0 0 0 -0.07059806932542323
    cd .. && rm -r build-test-exporter
- name: Diagnose ccache
  # Print cache hit/miss statistics to help diagnose slow CI builds.
  run: |
    ccache -s
# Build all test executables and run unit tests on macOS
unit_tests_macos:
  name: Unit tests on macOS
  strategy:
    # Keep the other matrix configuration running when one fails.
    fail-fast: false
    matrix:
      include:
        # The number of cores is given at:
        # https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources
        - arch: x86
          macos-version: 13 # Intel
          NUM_CORES: 4
        - arch: arm64
          macos-version: 14 # Apple Silicon
          NUM_CORES: 3
  runs-on: macos-${{ matrix.macos-version }}
  env:
    # We install some low-level dependencies with Homebrew. They get picked up
    # by `spack external find`.
    SPECTRE_BREW_DEPS: >- # Line breaks are spaces, no trailing newline
      autoconf automake boost catch2 ccache cmake gsl hdf5 openblas yaml-cpp
    # We install these packages with Spack and cache them. The full specs are
    # listed below. This list is only needed to create the cache.
    SPECTRE_SPACK_DEPS: blaze charmpp libxsmm
    # NOTE(review): GitHub Actions does not shell-expand `env:` values, so
    # ccache receives the literal string `$HOME/ccache` — this relies on
    # ccache expanding the variable itself; verify.
    CCACHE_DIR: $HOME/ccache
    CCACHE_TEMPDIR: $HOME/ccache-tmp
    CCACHE_MAXSIZE: "2G"
    CCACHE_COMPRESS: 1
    CCACHE_COMPRESSLEVEL: 5
    # Hash compiler binaries by content (not mtime/size) so the cache
    # survives fresh runner images.
    CCACHE_COMPILERCHECK: content
    SPACK_SKIP_MODULES: true
    SPACK_COLOR: always
    NUM_CORES: ${{ matrix.NUM_CORES }}
steps:
  - name: Record start time
    id: start
    # Epoch-seconds timestamp exposed as `steps.start.outputs.time` so
    # later steps can compute elapsed job time.
    run: |
      echo "time=$(date +%s)" >> "$GITHUB_OUTPUT"
- name: Checkout repository
  uses: actions/checkout@v4
- uses: actions/setup-python@v5
  with:
    # Quoted so YAML does not parse the version as the float 3.12
    python-version: '3.12'
- name: Install Homebrew dependencies
  run: |
    # Intentionally unquoted: $SPECTRE_BREW_DEPS word-splits into one
    # package name per argument.
    brew install $SPECTRE_BREW_DEPS
# We install the remaining dependencies with Spack and cache them.
# See the `unit_tests` job above for details on the cache configuration.
- name: Restore dependency cache
  uses: actions/cache/restore@v4
  id: restore-dependencies
  env:
    CACHE_KEY_PREFIX: "dependencies-macos-${{ matrix.macos-version }}"
  with:
    path: ~/dependencies
    # Key is unique per run; the prefix-only restore key below falls
    # back to the newest cache from any earlier run.
    key: "${{ env.CACHE_KEY_PREFIX }}-${{ github.run_id }}"
    restore-keys: |
      ${{ env.CACHE_KEY_PREFIX }}-
- name: Install Spack
  # Pin a specific version of Spack to avoid breaking CI builds when
  # Spack changes.
  run: |
    cd $HOME
    git clone -c feature.manyFiles=true \
      https://github.com/spack/spack.git
    cd spack
    # Pinned commit (see comment above)
    git checkout 7d1de58378fa210b7db887964fcc17187a504ad8
- name: Configure Spack
  # - To avoid re-building packages that are already installed by Homebrew
  #   we let Spack find them.
  # - Add the dependency cache as binary mirror.
  run: |
    source $HOME/spack/share/spack/setup-env.sh
    spack debug report
    spack compiler find && spack compiler list
    # Register Homebrew-installed packages as Spack externals
    spack external find
    spack config get packages
    # Binary mirror backed by the dependency cache restored above
    spack mirror add dependencies file://$HOME/dependencies/spack
# Install the remaining dependencies from source with Spack. We install
# them in an environment that we can activate later. After building the
# dependencies from source we cache them as compressed tarballs.
- name: Install Spack dependencies
  run: |
    source $HOME/spack/share/spack/setup-env.sh
    spack env create spectre
    spack env activate spectre
    # Pinned versions/variants keep the binary cache reusable
    spack add blaze@3.8.2 ~blas ~lapack smp=none
    spack add charmpp@7.0.0 +shared backend=multicore build-target=charm++
    # Use main branch until spack has 2.0 release
    spack add libxsmm@main
    # --reuse prefers already-installed packages and externals over rebuilds
    spack concretize --reuse
    spack install --no-check-signature
    spack find -v
- name: Update dependency cache
  # Only refresh the buildcache on `develop`, not on PR runs.
  if: github.ref == 'refs/heads/develop'
  run: |
    source $HOME/spack/share/spack/setup-env.sh
    # Clear existing buildcache so we don't accumulate old versions of
    # packages in the cache
    rm -rf $HOME/dependencies
    spack buildcache create -ufa -m dependencies $SPECTRE_SPACK_DEPS
  # Allow the buildcache creation to fail without failing the job, since
  # it sometimes runs out of memory
  continue-on-error: true
- name: Save dependency cache
  if: github.ref == 'refs/heads/develop'
  uses: actions/cache/save@v4
  with:
    path: ~/dependencies
    # Reuse the exact key computed by the restore step above so the next
    # run's prefix match finds this cache.
    key: ${{ steps.restore-dependencies.outputs.cache-primary-key }}
# Install remaining pure Python dependencies with pip because the Spack
# package index can be incomplete (it can't mirror all of pip)
- name: Install Python dependencies
run: |
source $HOME/spack/share/spack/setup-env.sh
spack env activate spectre