forked from ibis-project/ibis
-
Notifications
You must be signed in to change notification settings - Fork 0
/
justfile
308 lines (245 loc) · 8.4 KB
/
justfile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
# Running bare `just` lands on the first recipe, so this prints the recipe list.
# list justfile recipes
default:
    just --list
# Remove all untracked files and directories, preserving the cached
# testing-data checkout so it does not have to be re-downloaded.
# clean untracked files
clean:
    git clean -e 'ci/ibis-testing-data' -fdx
# lock dependencies without updating existing versions
lock:
    #!/usr/bin/env bash
    set -euo pipefail
    # Resolve the lockfile with every extra and the dev/tests/docs groups,
    # without installing ibis itself, then refresh requirements-dev.txt.
    sync_args=(
        --all-extras
        --group dev
        --group tests
        --group docs
        --no-install-project
        --no-install-workspace
    )
    uv sync "${sync_args[@]}"
    just export-deps
# update locked dependencies
update *packages:
    #!/usr/bin/env bash
    set -euo pipefail
    # With no arguments upgrade everything; otherwise upgrade only the
    # packages named on the command line.
    pkgs=({{ packages }})
    sync_args=(--all-extras --group dev --group tests --group docs --no-install-project --no-install-workspace)
    if (( ${#pkgs[@]} == 0 )); then
        sync_args+=(--upgrade)
    else
        for pkg in "${pkgs[@]}"; do
            sync_args+=(--upgrade-package "${pkg}")
        done
    fi
    uv sync "${sync_args[@]}"
    # Keep the exported requirements file in sync with the new lock.
    just export-deps
# Write the locked dev/tests/docs dependency set to requirements-dev.txt in
# requirements.txt format, without hashes or a generated header.
# export locked dependencies
@export-deps:
    uv export \
        --frozen \
        --no-hashes \
        --no-header \
        --format requirements-txt \
        --all-extras \
        --group dev \
        --group tests \
        --group docs > requirements-dev.txt
# Print the backend names registered under the `ibis.backends` entry point
# in pyproject.toml, one per line (requires the `yj` TOML->JSON converter
# and `jq`).
# show all backends
@list-backends:
    yj -tj < pyproject.toml | jq -rcM '.project["entry-points"]["ibis.backends"] | keys | sort[]'
# Reformat the tree with ruff, then apply its auto-fixable lint fixes.
# format code
fmt:
    ruff format .
    ruff check --fix .
# Run only the tests marked `core` (no backend services required); extra
# arguments pass straight through to pytest.
# run all non-backend tests; additional arguments are forwarded to pytest
check *args:
    pytest -m core {{ args }}
# Run pytest under uv with coverage and junit XML reports for CI upload.
# `extras` is interpolated into the `uv run` invocation — presumably
# `--extra <backend>` flags; verify against the CI workflow.
# run pytest for ci; additional arguments are forwarded to pytest
ci-check extras *args:
    uv run --group tests {{ extras }} pytest --junitxml=junit.xml --cov=ibis --cov-report=xml:coverage.xml {{ args }}
# run backend doctests
backend-doctests backend *args:
    #!/usr/bin/env bash
    # Fail fast on any error, matching this file's other shebang recipes;
    # CI may be unset locally, so `-u` is deliberately omitted.
    set -eo pipefail
    args=(pytest --doctest-modules {{ args }})
    # Collect the backend's modules, skipping anything with "test" in its
    # path.
    # NOTE(review): without `shopt -s globstar`, `**.py` behaves exactly
    # like `*.py` — only top-level modules in the backend directory are
    # collected. Confirm whether recursion into subpackages was intended.
    for file in ibis/backends/{{ backend }}/**.py; do
        if grep -qPv '.*test.+' <<< "${file}"; then
            args+=("${file}")
        fi
    done
    # In CI run under uv so every backend dependency is importable;
    # locally use whatever environment is active.
    if [ -n "${CI}" ]; then
        uv run --all-extras --group tests "${args[@]}"
    else
        "${args[@]}"
    fi
# Verify formatting (quietly) and run the linter; fails without modifying
# any files.
# lint code
lint:
    ruff format --check -q .
    ruff check .
# run the test suite for one or more backends
test +backends:
    #!/usr/bin/env bash
    set -euo pipefail
    # Turn the space-separated backend list into a pytest marker
    # expression, e.g. "duckdb sqlite" -> -m "duckdb or sqlite".
    pytest_args=("-m" "$(sed 's/ / or /g' <<< '{{ backends }}')")
    # impala and pyspark are run serially — presumably they don't tolerate
    # xdist parallelism; everything else gets parallel workers with
    # per-group scheduling.
    if ! [[ "{{ backends }}" =~ impala|pyspark ]]; then
        pytest_args+=("-n" "auto" "-q" "--dist" "loadgroup")
    fi
    pytest "${pytest_args[@]}"
# run doctests
doctest *args:
    #!/usr/bin/env bash
    # CI may be unset locally, so `-u` is deliberately omitted.
    set -eo pipefail
    # In CI run pytest under uv with all extras so backend modules import
    # cleanly; locally use the active interpreter.
    if [ -n "${CI}" ]; then
        runner=(uv run --all-extras --group tests)
    else
        runner=(python -m)
    fi
    # Doctest every module under ibis/, excluding tests, __init__ files,
    # generated gen_* modules, and the flink backend.
    # TODO(cpcloud): why doesn't pytest --ignore-glob=test_*.py work?
    "${runner[@]}" pytest --doctest-modules {{ args }} $(
        find \
            ibis \
            -wholename '*.py' \
            -and -not -wholename '*test*.py' \
            -and -not -wholename '*__init__*' \
            -and -not -wholename '*gen_*.py' \
            -and -not -wholename '*ibis/backends/flink/*' # FIXME(deepyaman)
    )
# download testing data
download-data owner="ibis-project" repo="testing-data" rev="master":
    #!/usr/bin/env bash
    set -euo pipefail
    # Clone the testing-data repository into ci/, replacing any prior copy.
    data_dir="{{ justfile_directory() }}/ci/ibis-testing-data"
    rm -rf "${data_dir}"
    clone_args=("https://github.com/{{ owner }}/{{ repo }}")
    # A shallow clone is only valid when staying on the default branch.
    if [ "{{ rev }}" = "master" ]; then
        clone_args+=("--depth" "1")
    fi
    clone_args+=("${data_dir}")
    git clone "${clone_args[@]}"
    # For any other revision, check it out after the full clone.
    if [ "{{ rev }}" != "master" ]; then
        git -C "${data_dir}" checkout "{{ rev }}"
    fi
# download the iceberg jar used for testing pyspark and iceberg integration
download-iceberg-jar pyspark scala="2.12" iceberg="1.6.1":
    #!/usr/bin/env bash
    # CI may be unset locally, so `-u` is deliberately omitted.
    set -eo pipefail
    # In CI, probe through uv so the pyspark extra is installed.
    runner=(python)
    if [ -n "${CI}" ]; then
        runner=(uv run --extra pyspark python)
    fi
    # Ask the interpreter for pyspark's install directory; the jar must
    # land in its jars/ folder to be picked up.
    pyspark="$("${runner[@]}" -c "import pyspark; print(pyspark.__file__.rsplit('/', 1)[0])")"
    pushd "${pyspark}/jars"
    # NOTE: `{{ pyspark }}` below is the recipe argument (the spark version
    # in the jar name), distinct from the shell variable of the same name.
    jar="iceberg-spark-runtime-{{ pyspark }}_{{ scala }}-{{ iceberg }}.jar"
    url="https://search.maven.org/remotecontent?filepath=org/apache/iceberg/iceberg-spark-runtime-{{ pyspark }}_{{ scala }}/{{ iceberg }}/${jar}"
    curl -qSsL -o "${jar}" "${url}"
    # Fails loudly if the download did not produce the expected file.
    ls "${jar}"
# start backends using docker compose; no arguments starts all backends
up *backends:
    #!/usr/bin/env bash
    # CI may be unset locally, so `-u` is deliberately omitted.
    set -eo pipefail
    # In CI, suppress the noisy pull output and color codes; locally keep
    # docker compose's default display.
    compose_args=()
    if [ -n "$CI" ]; then
        compose_args+=(--quiet-pull --no-color)
    fi
    docker compose up --build --wait "${compose_args[@]}" {{ backends }}
# Recreate backend containers from scratch: tear down, prune dangling
# docker state, then bring the services back up.
# stop and remove containers -> clean up dangling volumes -> start backends
reup *backends:
    just down {{ backends }}
    docker system prune --force --volumes
    just up {{ backends }}
# stop and remove containers; clean up networks and volumes
down *backends:
    #!/usr/bin/env bash
    set -euo pipefail
    # With named backends remove just those services; with no arguments
    # tear down the entire compose stack.
    if [ -n "{{ backends }}" ]; then
        docker compose rm {{ backends }} --force --stop --volumes
    else
        docker compose down --volumes --remove-orphans
    fi
# Tear down the given (or all) services, then force-prune docker networks
# and volumes.
# stop all containers, prune networks, and remove all volumes
stop *backends:
    just down {{ backends }}
    docker network prune -f
    docker volume prune -af
# Follow docker compose logs; with no arguments, tails every service.
# tail logs for one or more services
tail *services:
    docker compose logs --follow {{ services }}
# Run only the pytest benchmarks and autosave results for later comparison
# with `benchcmp`.
# run the benchmark suite
bench +args='ibis/tests/benchmarks':
    pytest --benchmark-only --benchmark-enable --benchmark-autosave {{ args }}
# Re-run the benchmarks and compare against the saved run `number`
# produced by a previous `just bench`.
# run benchmarks and compare with a previous run
benchcmp number *args:
    just bench --benchmark-compare {{ number }} {{ args }}
# check for invalid links in a locally built version of the docs
checklinks *args:
    #!/usr/bin/env bash
    set -euo pipefail
    # Run lychee over every rendered HTML page; extra args (e.g. --offline)
    # are forwarded.
    lychee --base docs/_output $(find docs/_output -name '*.html') {{ args }}
# view the changelog for upcoming release (use --pretty to format with glow)
view-changelog flags="":
    #!/usr/bin/env bash
    set -euo pipefail
    # Generate the unreleased changelog with conventional-changelog, then
    # either render it with glow (--pretty) or pass it through unchanged.
    npx -y -p conventional-changelog-cli \
        -- conventional-changelog --config ./.conventionalcommits.js \
        | ([ "{{ flags }}" = "--pretty" ] && glow -p - || cat -)
# Profile an arbitrary command under pyinstrument.
# profile something
profile +args:
    pyinstrument {{ args }}
# Each recipe line runs in its own shell, so the `cd docs` applies only to
# the interlinks step; the build step runs from the project root and
# addresses the config by path instead.
# generate API documentation
docs-apigen *args:
    cd docs && quartodoc interlinks
    quartodoc build {{ args }} --config docs/_quarto.yml
# build documentation
docs-render:
    #!/usr/bin/env bash
    set -euo pipefail
    # Generate the API reference first when docs/reference is missing or
    # empty, then render the site with quarto.
    if [[ ! -d "docs/reference" || -z "$(ls -A docs/reference)" ]]; then
        just docs-apigen
    fi
    quarto render docs
# preview docs
docs-preview:
    #!/usr/bin/env bash
    set -euo pipefail
    # Generate the API reference first when docs/reference is missing or
    # empty, then serve a live preview with quarto.
    if [[ ! -d "docs/reference" || -z "$(ls -A docs/reference)" ]]; then
        just docs-apigen
    fi
    quarto preview docs
# Unconditionally regenerate the API reference (verbose), then preview.
# regen api and preview docs
docs-api-preview:
    just docs-apigen --verbose
    quarto preview docs
# Publish the already-rendered site to netlify without prompting,
# re-rendering, or opening a browser.
# deploy docs to netlify
docs-deploy:
    quarto publish --no-prompt --no-browser --no-render netlify docs
# build jupyterlite repl
build-jupyterlite:
    #!/usr/bin/env bash
    set -euo pipefail
    mkdir -p docs/_output/jupyterlite
    rm -rf dist/
    # Temporarily stamp the pre-release version into the project metadata
    # so the built wheel carries it; the edits are reverted after the build.
    ibis_dev_version="$(just bump-version)"
    uvx --from=toml-cli toml set --toml-path=pyproject.toml project.version "$ibis_dev_version"
    # -E enables extended regex so `+` means "one or more"; in the original
    # basic-regex form `+` was a literal plus, so the pattern never matched
    # a real version string and __init__.py was silently left unstamped.
    sed -i -E "s/__version__ = \".+\"/__version__ = \"$ibis_dev_version\"/" ibis/__init__.py
    uv build --wheel
    # Restore the checked-in version metadata.
    git checkout pyproject.toml ibis/__init__.py
    jupyter lite build \
        --debug \
        --no-libarchive \
        --piplite-wheels "dist/ibis_framework-${ibis_dev_version}-py3-none-any.whl" \
        --piplite-wheels "https://duckdb.github.io/duckdb-pyodide/wheels/duckdb-1.1.2-cp311-cp311-emscripten_3_1_46_wasm32.whl" \
        --apps repl \
        --no-unused-shared-packages \
        --output-dir docs/_output/jupyterlite
    # jupyter lite build can copy from the nix store, and preserves the
    # original write bit; without this the next run of this rule will result in
    # a permission error when the build tries to remove existing files
    chmod -R u+w docs/_output/jupyterlite
# Full pipeline: regenerate API docs, render the site, build the
# jupyterlite repl, then validate links offline.
# run the entire docs build pipeline
docs-build-all:
    just docs-apigen --verbose
    just docs-render
    just build-jupyterlite
    just checklinks docs/_output --offline --no-progress
# Open the project Zulip chat in the terminal client.
# open chat
chat *args:
    zulip-term {{ args }}
# Prints the next pre-release version; `build-jupyterlite` captures this
# recipe's output, so it must emit nothing else.
# bump the version number to the next pre-release version
@bump-version:
    uv run --only-group dev python ci/release/bump_version.py