name: Test terra-jupyter-python
# Perform smoke tests on the terra-jupyter-python Docker image to have some amount of confidence that
# Python package versions are compatible.
#
# To configure the minimal auth needed for these tests to be able to read public data from Google Cloud Platform:
# Step 1: Create a service account per these instructions:
# https://github.com/google-github-actions/setup-gcloud/blob/master/setup-gcloud/README.md
# Step 2: Give the service account the following permissions within the project: BigQuery User
# Step 3: Store its key and project id as GitHub repository secrets TD_GCP_SA_KEY and GCP_PROJECT_ID.
# https://docs.github.com/en/free-pro-team@latest/actions/reference/encrypted-secrets#creating-encrypted-secrets-for-a-repository
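#
# For illustration only, a rough sketch of Steps 1-3 with the gcloud CLI (the service account name
# "terra-docker-ci" and <your-project-id> are placeholders, not values used by this workflow):
#   gcloud iam service-accounts create terra-docker-ci --project <your-project-id>
#   gcloud projects add-iam-policy-binding <your-project-id> \
#     --member "serviceAccount:terra-docker-ci@<your-project-id>.iam.gserviceaccount.com" \
#     --role "roles/bigquery.user"
#   gcloud iam service-accounts keys create key.json \
#     --iam-account terra-docker-ci@<your-project-id>.iam.gserviceaccount.com
# The contents of key.json then go into the TD_GCP_SA_KEY repository secret, and the project id into GCP_PROJECT_ID.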
on:
  pull_request:
    branches: [ master ]
    paths:
    - 'terra-jupyter-python/**'
    - '.github/workflows/test-terra-jupyter-python.yml'

  push:
    # Note: GitHub secrets are not passed to pull requests from forks. For community contributions from
    # regular contributors, it's a good idea for the contributor to configure the GitHub Actions to run correctly
    # in their fork as described above.
    #
    # For occasional contributors, the dev team will merge the PR fork branch to a branch in upstream named
    # test-community-contribution-<PR#> to run all the GitHub Action smoke tests.
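    # For illustration only, one possible way to do that (the remote name "upstream" and the PR number 123
    # are placeholders):
    #   git fetch upstream pull/123/head:test-community-contribution-123
    #   git push upstream test-community-contribution-123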
    branches: [ 'test-community-contribution*' ]
    paths:
    - 'terra-jupyter-python/**'
    - '.github/workflows/test-terra-jupyter-python.yml'

  workflow_dispatch:
    # Allows manual triggering of the workflow on a selected branch via the GitHub Actions tab.
    # GitHub blog demo: https://github.blog/changelog/2020-07-06-github-actions-manual-triggers-with-workflow_dispatch/.
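    # As an alternative to the Actions tab, a run can also be started with the GitHub CLI, for example
    # (the branch name is a placeholder):
    #   gh workflow run test-terra-jupyter-python.yml --ref my-branch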
env:
  GOOGLE_PROJECT: ${{ secrets.GCP_PROJECT_ID }}
  PIP_USER: "false"
  R_LIBS: "/home/jupyter/notebooks/packages"
jobs:
  test_docker_image:
    runs-on: self-hosted

    steps:
    - name: Checkout
      uses: actions/checkout@v3

    - name: Set up Python
      uses: actions/setup-python@v4
      with:
        python-version: '3.10'

    - name: Free up some disk space
      run: sudo rm -rf /usr/share/dotnet

    - id: auth
      uses: google-github-actions/auth@v1
      with:
        credentials_json: ${{ secrets.TD_GCP_SA_KEY }}
        create_credentials_file: true

    - name: Set up Cloud SDK
      uses: google-github-actions/setup-gcloud@v0.3.0
      with:
        project_id: ${{ secrets.GCP_PROJECT_ID }}

    - name: Build Docker image and base images too, if needed
      run: |
        gcloud auth configure-docker
        ./build_smoke_test_image.sh terra-jupyter-python
    - name: Run Python code specific to notebooks with nbconvert
      # Run all notebooks from start to finish, regardless of errors, so that we can capture the
      # result as a workflow artifact.
      # See also https://github.com/marketplace/actions/run-notebook if a more complicated
      # workflow for notebooks is needed in the future.
      run: |
        chmod a+w $GITHUB_WORKSPACE/terra-jupyter-python/tests
        chmod a+r "${{ steps.auth.outputs.credentials_file_path }}"
        docker run \
          --env GOOGLE_PROJECT \
          --env PIP_USER \
          --env PIP_TARGET \
          --env R_LIBS \
          --volume "${{ steps.auth.outputs.credentials_file_path }}":/tmp/credentials.json:ro \
          --env GOOGLE_APPLICATION_CREDENTIALS="/tmp/credentials.json" \
          --volume $GITHUB_WORKSPACE/terra-jupyter-python/tests:/tests \
          --workdir=/tests \
          --entrypoint="" \
          terra-jupyter-python:smoke-test \
          /bin/sh -c 'for nb in *ipynb ; do jupyter nbconvert --to html_embed --ExecutePreprocessor.allow_errors=True --execute "${nb}" ; done'
    - name: Upload workflow artifacts
      uses: actions/upload-artifact@v2
      with:
        name: notebook-execution-results
        path: terra-jupyter-python/tests/*.html
        retention-days: 30
    - name: Test Python code with pytest
      run: |
        chmod a+r "${{ steps.auth.outputs.credentials_file_path }}"
        docker run \
          --env GOOGLE_PROJECT \
          --env PIP_USER \
          --env PIP_TARGET \
          --env R_LIBS \
          --volume "${{ steps.auth.outputs.credentials_file_path }}":/tmp/credentials.json:ro \
          --env GOOGLE_APPLICATION_CREDENTIALS="/tmp/credentials.json" \
          --volume $GITHUB_WORKSPACE/terra-jupyter-python/tests:/tests \
          --workdir=/tests \
          --entrypoint="" \
          terra-jupyter-python:smoke-test \
          /bin/sh -c "pip3 install pytest --user ; pytest"
    - name: Test Python code specific to notebooks with nbconvert
      # Simply `Cell -> Run All` these notebooks and expect no errors in the case of a successful run of the test suite.
      # If the tests throw any exceptions, execution of the notebooks will halt at that point. Look at the workflow
      # artifacts to understand whether there are more failures than just the one that caused this task to halt.
      run: |
        chmod a+w $GITHUB_WORKSPACE/terra-jupyter-python
        chmod a+r "${{ steps.auth.outputs.credentials_file_path }}"
        docker run \
          --env GOOGLE_PROJECT \
          --env PIP_USER \
          --env PIP_TARGET \
          --env R_LIBS \
          --volume "${{ steps.auth.outputs.credentials_file_path }}":/tmp/credentials.json:ro \
          --env GOOGLE_APPLICATION_CREDENTIALS="/tmp/credentials.json" \
          --volume $GITHUB_WORKSPACE/terra-jupyter-python/tests:/tests \
          --workdir=/tests \
          --entrypoint="" \
          terra-jupyter-python:smoke-test \
          /bin/sh -c 'for nb in *ipynb ; do jupyter nbconvert --to html_embed --execute "${nb}" ; done'