
Commit 95d9617 (parent: 9dd126c)

Cleanups

- prettier
- stricter mypy
- remove split-reduce

25 files changed: +15155 / -288 lines

.github/workflows/benchmarks.yml
Lines changed: 1 addition & 1 deletion

@@ -8,7 +8,7 @@ on:
 jobs:
   benchmark:
     # if: ${{ contains( github.event.pull_request.labels.*.name, 'run-benchmark') && github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch' }} # Run if the PR has been labelled correctly.
-    if: ${{ github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch' }} # Always run.
+    if: ${{ github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch' }} # Always run.
     name: Linux
     runs-on: ubuntu-20.04
     env:

.github/workflows/ci.yaml
Lines changed: 2 additions & 7 deletions

@@ -67,12 +67,7 @@ jobs:
       fail-fast: false
       matrix:
         os: ["ubuntu-latest"]
-        env:
-          [
-            "no-xarray",
-            "no-dask",
-            "minimal-requirements",
-          ]
+        env: ["no-xarray", "no-dask", "minimal-requirements"]
     steps:
       - uses: actions/checkout@v4
         with:

@@ -110,7 +105,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
         with:
-          repository: 'pydata/xarray'
+          repository: "pydata/xarray"
           fetch-depth: 0 # Fetch all history for all branches and tags.
       - name: Set up conda environment
         uses: mamba-org/setup-micromamba@v1

.github/workflows/pypi.yaml
Lines changed: 1 addition & 1 deletion

@@ -12,7 +12,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v4
         with:
-          python-version: '3.x'
+          python-version: "3.x"
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip

.gitignore
Lines changed: 1 addition & 0 deletions

@@ -1,6 +1,7 @@
 docs/source/generated/
 html/
 .asv/
+asv_bench/pkgs/

 # Byte-compiled / optimized / DLL files
 __pycache__/

.pre-commit-config.yaml
Lines changed: 53 additions & 48 deletions

@@ -1,56 +1,61 @@
 ci:
-  autoupdate_schedule: quarterly
+  autoupdate_schedule: quarterly

 repos:
-  - repo: https://github.com/astral-sh/ruff-pre-commit
-    # Ruff version.
-    rev: 'v0.0.292'
-    hooks:
-      - id: ruff
-        args: ["--fix", "--show-fixes"]
-
-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
-    hooks:
-      - id: check-yaml
-      - id: trailing-whitespace
-      - id: end-of-file-fixer
-      - id: check-docstring-first
-
-  - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 23.9.1
-    hooks:
-      - id: black
-
-  - repo: https://github.com/executablebooks/mdformat
-    rev: 0.7.17
-    hooks:
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: "v0.0.292"
+    hooks:
+      - id: ruff
+        args: ["--fix", "--show-fixes"]
+
+  - repo: https://github.com/pre-commit/mirrors-prettier
+    rev: "v3.0.3"
+    hooks:
+      - id: prettier
+
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.5.0
+    hooks:
+      - id: check-yaml
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: check-docstring-first
+
+  - repo: https://github.com/psf/black-pre-commit-mirror
+    rev: 23.9.1
+    hooks:
+      - id: black
+
+  - repo: https://github.com/executablebooks/mdformat
+    rev: 0.7.17
+    hooks:
       - id: mdformat
         additional_dependencies:
           - mdformat-black
           - mdformat-myst

-  - repo: https://github.com/nbQA-dev/nbQA
-    rev: 1.7.0
-    hooks:
-      - id: nbqa-black
-      - id: nbqa-ruff
-        args: [--fix]
-
-  - repo: https://github.com/kynan/nbstripout
-    rev: 0.6.1
-    hooks:
-      - id: nbstripout
-        args: [--extra-keys=metadata.kernelspec metadata.language_info.version]
-
-  - repo: https://github.com/codespell-project/codespell
-    rev: v2.2.6
-    hooks:
-      - id: codespell
-        additional_dependencies:
-          - tomli
-
-  - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.14
-    hooks:
-      - id: validate-pyproject
+  - repo: https://github.com/nbQA-dev/nbQA
+    rev: 1.7.0
+    hooks:
+      - id: nbqa-black
+      - id: nbqa-ruff
+        args: [--fix]
+
+  - repo: https://github.com/kynan/nbstripout
+    rev: 0.6.1
+    hooks:
+      - id: nbstripout
+        args: [--extra-keys=metadata.kernelspec metadata.language_info.version]
+
+  - repo: https://github.com/codespell-project/codespell
+    rev: v2.2.6
+    hooks:
+      - id: codespell
+        additional_dependencies:
+          - tomli
+
+  - repo: https://github.com/abravalheri/validate-pyproject
+    rev: v0.15
+    hooks:
+      - id: validate-pyproject
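
The substantive changes in this file are the new prettier hook, the pre-commit-hooks bump (v4.4.0 to v4.5.0), and the validate-pyproject bump (v0.14 to v0.15); the rest is quote and layout normalization. To reproduce the reformatting locally, a minimal sketch, assuming pre-commit is installed and you are at the repository root:

    # One-time setup: install the git hooks defined in .pre-commit-config.yaml
    pre-commit install

    # Run only the newly added prettier hook over every file
    pre-commit run prettier --all-files

    # Or run the full hook suite, as CI would
    pre-commit run --all-files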

asv_bench/asv.conf.json
Lines changed: 96 additions & 96 deletions

@@ -1,98 +1,98 @@
 {
-  // The version of the config file format. Do not change, unless
-  // you know what you are doing.
-  "version": 1,
-
-  // The name of the project being benchmarked
-  "project": "flox",
-
-  // The project's homepage
-  "project_url": "http://flox.readthedocs.io/",
-
-  // The URL or local path of the source code repository for the
-  // project being benchmarked
-  "repo": "..",
-
-  // The Python project's subdirectory in your repo. If missing or
-  // the empty string, the project is assumed to be located at the root
-  // of the repository.
-  // "repo_subdir": "",
-
-  // Customizable commands for building, installing, and
-  // uninstalling the project. See asv.conf.json documentation.
-  //
-  // "install_command": ["in-dir={env_dir} python -mpip install {wheel_file}"],
-  // "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"],
-  // "build_command": [
-  //     "python setup.py build",
-  //     "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}"
-  // ],
-
-  // List of branches to benchmark. If not provided, defaults to "master"
-  // (for git) or "default" (for mercurial).
-  "branches": ["main"], // for git
-  "dvcs": "git",
-
-  // timeout in seconds for installing any dependencies in environment
-  // defaults to 10 min
-  "install_timeout": 600,
-
-  // the base URL to show a commit for the project.
-  "show_commit_url": "http://github.com/xarray-contrib/flox/commit/",
-
-  // The Pythons you'd like to test against. If not provided, defaults
-  // to the current version of Python used to run `asv`.
-  // "pythons": ["3.9"],
-
-  "environment_type": "mamba",
-  "conda_channels": ["conda-forge"],
-  "conda_environment_file": "../ci/benchmark.yml",
-
-  // The directory (relative to the current directory) that benchmarks are
-  // stored in. If not provided, defaults to "benchmarks"
-  "benchmark_dir": "benchmarks",
-
-  // The directory (relative to the current directory) to cache the Python
-  // environments in. If not provided, defaults to "env"
-  "env_dir": ".asv/env",
-
-  // The directory (relative to the current directory) that raw benchmark
-  // results are stored in. If not provided, defaults to "results".
-  "results_dir": ".asv/results",
-
-  // The directory (relative to the current directory) that the html tree
-  // should be written to. If not provided, defaults to "html".
-  "html_dir": ".asv/html",
-
-  // The number of characters to retain in the commit hashes.
-  // "hash_length": 8,
-
-  // `asv` will cache results of the recent builds in each
-  // environment, making them faster to install next time. This is
-  // the number of builds to keep, per environment.
-  // "build_cache_size": 2,
-
-  // The commits after which the regression search in `asv publish`
-  // should start looking for regressions. Dictionary whose keys are
-  // regexps matching to benchmark names, and values corresponding to
-  // the commit (exclusive) after which to start looking for
-  // regressions. The default is to start from the first commit
-  // with results. If the commit is `null`, regression detection is
-  // skipped for the matching benchmark.
-  //
-  // "regressions_first_commits": {
-  //    "some_benchmark": "352cdf", // Consider regressions only after this commit
-  //    "another_benchmark": null, // Skip regression detection altogether
-  // },
-
-  // The thresholds for relative change in results, after which `asv
-  // publish` starts reporting regressions. Dictionary of the same
-  // form as in ``regressions_first_commits``, with values
-  // indicating the thresholds. If multiple entries match, the
-  // maximum is taken. If no entry matches, the default is 5%.
-  //
-  // "regressions_thresholds": {
-  //    "some_benchmark": 0.01, // Threshold of 1%
-  //    "another_benchmark": 0.5, // Threshold of 50%
-  // },
+  // The version of the config file format. Do not change, unless
+  // you know what you are doing.
+  "version": 1,
+
+  // The name of the project being benchmarked
+  "project": "flox",
+
+  // The project's homepage
+  "project_url": "http://flox.readthedocs.io/",
+
+  // The URL or local path of the source code repository for the
+  // project being benchmarked
+  "repo": "..",
+
+  // The Python project's subdirectory in your repo. If missing or
+  // the empty string, the project is assumed to be located at the root
+  // of the repository.
+  // "repo_subdir": "",
+
+  // Customizable commands for building, installing, and
+  // uninstalling the project. See asv.conf.json documentation.
+  //
+  // "install_command": ["in-dir={env_dir} python -mpip install {wheel_file}"],
+  // "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"],
+  // "build_command": [
+  //     "python setup.py build",
+  //     "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}"
+  // ],
+
+  // List of branches to benchmark. If not provided, defaults to "master"
+  // (for git) or "default" (for mercurial).
+  "branches": ["main"], // for git
+  "dvcs": "git",
+
+  // timeout in seconds for installing any dependencies in environment
+  // defaults to 10 min
+  "install_timeout": 600,
+
+  // the base URL to show a commit for the project.
+  "show_commit_url": "http://github.com/xarray-contrib/flox/commit/",
+
+  // The Pythons you'd like to test against. If not provided, defaults
+  // to the current version of Python used to run `asv`.
+  // "pythons": ["3.9"],
+
+  "environment_type": "mamba",
+  "conda_channels": ["conda-forge"],
+  "conda_environment_file": "../ci/benchmark.yml",
+
+  // The directory (relative to the current directory) that benchmarks are
+  // stored in. If not provided, defaults to "benchmarks"
+  "benchmark_dir": "benchmarks",
+
+  // The directory (relative to the current directory) to cache the Python
+  // environments in. If not provided, defaults to "env"
+  "env_dir": ".asv/env",
+
+  // The directory (relative to the current directory) that raw benchmark
+  // results are stored in. If not provided, defaults to "results".
+  "results_dir": ".asv/results",
+
+  // The directory (relative to the current directory) that the html tree
+  // should be written to. If not provided, defaults to "html".
+  "html_dir": ".asv/html"
+
+  // The number of characters to retain in the commit hashes.
+  // "hash_length": 8,
+
+  // `asv` will cache results of the recent builds in each
+  // environment, making them faster to install next time. This is
+  // the number of builds to keep, per environment.
+  // "build_cache_size": 2,
+
+  // The commits after which the regression search in `asv publish`
+  // should start looking for regressions. Dictionary whose keys are
+  // regexps matching to benchmark names, and values corresponding to
+  // the commit (exclusive) after which to start looking for
+  // regressions. The default is to start from the first commit
+  // with results. If the commit is `null`, regression detection is
+  // skipped for the matching benchmark.
+  //
+  // "regressions_first_commits": {
+  //    "some_benchmark": "352cdf", // Consider regressions only after this commit
+  //    "another_benchmark": null, // Skip regression detection altogether
+  // },
+
+  // The thresholds for relative change in results, after which `asv
+  // publish` starts reporting regressions. Dictionary of the same
+  // form as in ``regressions_first_commits``, with values
+  // indicating the thresholds. If multiple entries match, the
+  // maximum is taken. If no entry matches, the default is 5%.
+  //
+  // "regressions_thresholds": {
+  //    "some_benchmark": 0.01, // Threshold of 1%
+  //    "another_benchmark": 0.5, // Threshold of 50%
+  // },
 }

asv_bench/benchmarks/README_CI.md
Lines changed: 1 addition & 1 deletion

@@ -13,7 +13,7 @@ The `asv` suite can be run for any PR on GitHub Actions (check workflow `.github
 We use `asv continuous` to run the job, which runs a relative performance measurement. This means that there's no state to be saved and that regressions are only caught in terms of performance ratio (absolute numbers are available but they are not useful since we do not use stable hardware over time). `asv continuous` will:

 - Compile `scikit-image` for _both_ commits. We use `ccache` to speed up the process, and `mamba` is used to create the build environments.
-- Run the benchmark suite for both commits, _twice_ (since `processes=2` by default).
+- Run the benchmark suite for both commits, _twice_ (since `processes=2` by default).
 - Generate a report table with performance ratios:
   - `ratio=1.0` -> performance didn't change.
   - `ratio<1.0` -> PR made it slower.
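
The `asv continuous` workflow this README describes compares two commits directly rather than storing a baseline. As an illustration only (the refs and the 1.1 factor below are arbitrary choices, not taken from this repository's benchmark workflow), a local run from the asv_bench/ directory might look like:

    cd asv_bench
    # Benchmark main and the current HEAD; -f 1.1 flags ratios that change by more than 10%
    asv continuous -f 1.1 main HEAD
    # Compare recorded results for the two commits afterwards (requires results for both)
    asv compare main HEAD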

ci/docs.yml
Lines changed: 1 addition & 1 deletion

@@ -18,4 +18,4 @@ dependencies:
   - jupyter
   - sphinx-codeautolink
   - pip:
-      - -e ..
+      - -e ..

ci/environment.yml
Lines changed: 2 additions & 2 deletions

@@ -9,7 +9,7 @@ dependencies:
   - netcdf4
   - pandas
   - numpy>=1.20
-  - lxml # for mypy coverage report
+  - lxml # for mypy coverage report
   - matplotlib
   - pip
   - pytest

@@ -24,4 +24,4 @@ dependencies:
   - numba
   - scipy
   - pip:
-      - numbagg>=0.3
+      - numbagg>=0.3

ci/upstream-dev-env.yml
Lines changed: 5 additions & 5 deletions

@@ -14,8 +14,8 @@ dependencies:
   - pytest-xdist
   - pip
   - pip:
-      - git+https://github.com/pydata/xarray
-      - git+https://github.com/pandas-dev/pandas
-      - git+https://github.com/dask/dask
-      - git+https://github.com/ml31415/numpy-groupies
-      - git+https://github.com/numbagg/numbagg
+      - git+https://github.com/pydata/xarray
+      - git+https://github.com/pandas-dev/pandas
+      - git+https://github.com/dask/dask
+      - git+https://github.com/ml31415/numpy-groupies
+      - git+https://github.com/numbagg/numbagg
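
These ci/*.yml files are conda environment specifications consumed by the GitHub Actions workflows. As a sketch of how one would be used locally (the environment name here is an arbitrary choice, and conda works in place of mamba):

    # Build an environment tracking in-development versions of the dependencies
    mamba env create -n flox-upstream-dev -f ci/upstream-dev-env.yml
    conda activate flox-upstream-dev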
