; tox.ini — read by tox (>=4, with tox-uv) via configparser-style parsing.
; Multi-line values MUST be indented continuation lines.
; See https://tox.wiki/en/stable/

[tox]
requires =
    tox>=4
    tox-uv
; default environments: each supported python, in plotting and flaky flavors
envlist =
    py3{10,11,12,13,14}-{plotting,flaky}
skip_missing_interpreters = true
; ENVIRONMENTS
; ------------

; Base environment: the `plotting:` / `flaky:` prefixes apply a line only when
; the corresponding factor is part of the environment name (tox factor syntax).
[testenv]
extras =
    plotting: plotting
    flaky: plotting
dependency_groups =
    plotting: test
    plotting: rich
    flaky: test
commands =
    plotting: coverage erase
    plotting: {[testenv:latest]commands}
    plotting: {[testenv:test_doc]commands}
    flaky: coverage erase
    flaky: pytest -m "flaky or single_process" {posargs:}
; Shared settings block (not a runnable env): other environments reuse
; {[min_python]base_python} to pin the oldest supported Python.
[min_python]
description = minimum python version
skip_install = false
base_python = 3.10
; Shared settings block: environments reference {[global_var]passenv} to forward
; these host environment variables into the test environment.
[global_var]
passenv =
    CI
    USERNAME
    # Pass user color preferences through
    PY_COLORS
    FORCE_COLOR
    NO_COLOR
    CLICOLOR
    CLICOLOR_FORCE
; COMMANDS
; --------

[testenv:latest]
description = Run tests on latest version of all dependencies (plotting not included).
passenv = {[global_var]passenv}
dependency_groups = test
commands =
    ; run tests:
    ; - in parallel
    ; - with code coverage
    ; - generating report and tsv of tests statistics
    ; - sklearn.exceptions.ConvergenceWarning and FutureWarning raise an error,
    ;   so that they are dealt with when they appear in our dependencies
    ; fast tests run with a timeout; slow tests run without one
    pytest -m "not slow and not flaky and not single_process" -W error::sklearn.exceptions.ConvergenceWarning -W error::FutureWarning --timeout=60 --cov=nilearn --cov-report=xml --cov-report=html --cov-append --report=report.html --numprocesses auto --csv results/pytest_output/pytest_output.csv {posargs:}
    pytest -m "slow and not flaky and not single_process" -W error::sklearn.exceptions.ConvergenceWarning -W error::FutureWarning --cov=nilearn --cov-report=xml --cov-report=html --cov-append --report=report_slow.html --numprocesses auto --csv results/pytest_output/pytest_output_slow_tests.csv {posargs:}
[testenv:flaky]
description = Run flaky tests and those that must be run on a single process.
passenv = {[global_var]passenv}
extras = plotting
dependency_groups = test
commands =
    coverage erase
    pytest -m "flaky or single_process" {posargs:}
[testenv:plotting]
description = Run tests on latest version of all dependencies.
passenv = {[global_var]passenv}
extras = plotting
dependency_groups =
    test
    rich
commands =
    coverage erase
    {[testenv:latest]commands}
    {[testenv:test_doc]commands}
[testenv:test_doc]
description = run tests on doc
passenv = {[global_var]passenv}
extras = plotting
dependency_groups = test
commands =
    pytest --numprocesses auto doc/_additional_doctests.txt --cov-append --report=report_doc.html
    ; TODO find a way to rely on globbing instead of listing a specific folder
    pytest --numprocesses auto --cov-append --doctest-glob='*.rst' doc/manipulating_images/ --report=report_doc.html
[testenv:pre]
description = Run latest and test_doc on pre-release version of all dependencies.
passenv = {[global_var]passenv}
pip_pre = true
extras = plotting
dependency_groups =
    test
    rich
commands =
    coverage erase
    {[testenv:latest]commands}
    {[testenv:test_doc]commands}
[testenv:min]
description = Run tests on minimum version of all dependencies (plotting not included).
base_python = {[min_python]base_python}
passenv = {[global_var]passenv}
dependency_groups =
    test
    min
; tox-uv: resolve direct dependencies to their lowest allowed versions
uv_resolution = lowest-direct
commands =
    coverage erase
    {[testenv:latest]commands}
[testenv:plot_min]
description = Run tests on minimum version of all dependencies (with plotly).
base_python = {[min_python]base_python}
passenv = {[global_var]passenv}
dependency_groups =
    test
    min_plotting
uv_resolution = lowest-direct
commands =
    coverage erase
    {[testenv:latest]commands}
[testenv:pytest_mpl_generate]
description = Generate new baseline of figures to test with pytest-mpl.
    To avoid reproducibility issues,
    these figures are generated with the oldest supported python
    and matplotlib.
base_python = {[min_python]base_python}
passenv = {[global_var]passenv}
dependency_groups =
    test
    min_plotting
uv_resolution = lowest-direct
allowlist_externals =
    rm
commands =
    rm -fr nilearn/plotting/tests/baseline/*.png
    pytest nilearn/plotting/tests/test_baseline_comparisons.py --numprocesses auto --mpl --mpl-generate-path=nilearn/plotting/tests/baseline {posargs:}
    rm -fr nilearn/glm/tests/baseline/*.png
    pytest nilearn/glm/tests/test_baseline_comparisons.py --numprocesses auto --mpl --mpl-generate-path=nilearn/glm/tests/baseline {posargs:}
    rm -fr nilearn/maskers/tests/baseline/*.png
    pytest nilearn/maskers/tests/test_baseline_comparisons.py --numprocesses auto --mpl --mpl-generate-path=nilearn/maskers/tests/baseline {posargs:}
[testenv:pytest_mpl]
description = Run tests with pytest-mpl
    to make sure figures look the same as with expected baseline.
    To avoid reproducibility issues,
    these figures are generated with the oldest supported python
    and matplotlib.
base_python = {[min_python]base_python}
passenv = {[global_var]passenv}
dependency_groups =
    test
    min_plotting
uv_resolution = lowest-direct
commands =
    pytest nilearn/glm/tests/test_baseline_comparisons.py --cov=nilearn --cov-report=xml --cov-report=html --cov-append --numprocesses auto --mpl --mpl-results-path=results --mpl-baseline-path=nilearn/glm/tests/baseline --mpl-generate-summary=html
    pytest nilearn/maskers/tests/test_baseline_comparisons.py --cov=nilearn --cov-report=xml --cov-report=html --cov-append --numprocesses auto --mpl --mpl-results-path=results --mpl-baseline-path=nilearn/maskers/tests/baseline --mpl-generate-summary=html
    pytest nilearn/plotting/tests/test_baseline_comparisons.py --cov=nilearn --cov-report=xml --cov-report=html --cov-append --numprocesses auto --mpl --mpl-results-path=results --mpl-baseline-path=nilearn/plotting/tests/baseline --mpl-generate-summary=html
[testenv:nightly]
description = Run tests on latest python with nightly build version of all dependencies.
base_python = 3.14
passenv = {[global_var]passenv}
setenv =
    PIP_INDEX_URL = {env:PIP_INDEX_URL:https://pypi.anaconda.org/scientific-python-nightly-wheels/simple}
    PIP_EXTRA_INDEX_URL = {env:PIP_EXTRA_INDEX_URL:https://pypi.org/simple}
extras = plotting
dependency_groups = test
pip_pre = true
allowlist_externals =
    pip
; recreating the environment to avoid dependency conflict when not starting from a clean slate
recreate = true
commands =
    ; not using uv for those install (for now)
    pip install --verbose --upgrade git+https://github.com/nipy/nibabel
    pip install --verbose --upgrade --pre --index-url {env:PIP_INDEX_URL} pandas scipy scikit-learn matplotlib numpy
    ; NOTE(review): numpy is already installed by the previous line; this extra
    ; install looks redundant — confirm before removing.
    pip install --verbose --upgrade --pre --index-url {env:PIP_INDEX_URL} numpy
    pip list
    pytest --numprocesses auto --report=report.html --csv results/pytest_output/pytest_output.csv {posargs:}
[testenv:doc]
description = Build doc with minimum supported version of python and all dependencies (plotting included).
base_python = {[min_python]base_python}
passenv =
    {[global_var]passenv}
    PATTERN
dependency_groups =
    doc
    min_plotting
    rich
uv_resolution = lowest-direct
allowlist_externals =
    make
    bash
commands =
    {envpython} maint_tools/show-python-packages-versions.py
    make --directory doc clean
    ; Update the authors file and the names file
    ; in case a contributor has been added to citation.cff
    ; but did not run the maint_tools/citation_cff_maint.py script.
    {envpython} maint_tools/citation_cff_maint.py
    make --directory doc {posargs:}
[testenv:doc_latest]
description = Build doc with latest supported version of python and all dependencies (plotting included).
base_python = 3.14
extras = plotting
dependency_groups =
    doc
    rich
passenv =
    {[global_var]passenv}
    PATTERN
allowlist_externals =
    make
    bash
commands =
    {envpython} maint_tools/show-python-packages-versions.py
    make --directory doc clean
    ; Update the authors file and the names file
    ; in case a contributor has been added to citation.cff
    ; but did not run the maint_tools/citation_cff_maint.py script.
    {envpython} maint_tools/citation_cff_maint.py
    make --directory doc {posargs:}
[testenv:doc_qc]
description = Run a couple quality checks of the docstrings...
extras = plotting
dependency_groups = rich
deps =
    numpydoc>=1.8.0
passenv =
    {[global_var]passenv}
    PATTERN
commands =
    {envpython} maint_tools/missing_default_in_docstring.py
    {envpython} maint_tools/check_docstrings.py
[testenv:plot_test_timing]
description = Plot timing of tests.
; nilearn itself is not needed to plot timings, only the deps below
skip_install = true
deps =
    pandas
    plotly
    kaleido
passenv =
    {[global_var]passenv}
    PATTERN
commands =
    {envpython} maint_tools/plot_test_timing.py
[testenv:linkcheck]
description = check links in doc
base_python = {[min_python]base_python}
extras = plotting
dependency_groups = doc
passenv =
    {[global_var]passenv}
allowlist_externals =
    make
    git
commands =
    git fetch --tags
    make --directory doc clean
    ; Update the authors file and the names file
    ; in case a contributor has been added to citation.cff
    ; but did not run the maint_tools/citation_cff_maint.py script.
    {envpython} maint_tools/citation_cff_maint.py
    make --directory doc linkcheck
[testenv:archi]
description = build figure of the dependencies between Nilearn subpackages
skip_install = false
deps =
    pydeps
allowlist_externals =
    pydeps
commands =
    pydeps nilearn -v --noshow --only nilearn --max-module-depth 2 --exclude-exact nilearn.tests nilearn.conftest