Skip to content

Python 3.13 in CI #425

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits into the base branch from the source branch (branch names not captured)
Mar 25, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/ci-additional.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ jobs:

env:
CONDA_ENV_FILE: ci/environment.yml
PYTHON_VERSION: "3.12"
PYTHON_VERSION: "3.13"

steps:
- uses: actions/checkout@v4
Expand Down Expand Up @@ -95,7 +95,7 @@ jobs:
shell: bash -l {0}
env:
CONDA_ENV_FILE: ci/environment.yml
PYTHON_VERSION: "3.12"
PYTHON_VERSION: "3.13"

steps:
- uses: actions/checkout@v4
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,14 +26,14 @@ jobs:
matrix:
os: ["ubuntu-latest"]
env: ["environment"]
python-version: ["3.10", "3.12"]
python-version: ["3.10", "3.13"]
include:
- os: "windows-latest"
env: "environment"
python-version: "3.12"
python-version: "3.13"
- os: "ubuntu-latest"
env: "no-dask" # "no-xarray", "no-numba"
python-version: "3.12"
python-version: "3.13"
- os: "ubuntu-latest"
env: "minimal-requirements"
python-version: "3.10"
Expand Down
4 changes: 0 additions & 4 deletions .github/workflows/testpypi-release.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,6 @@ jobs:

- uses: actions/setup-python@v5
name: Install Python
with:
python-version: "3.12"

- name: Install dependencies
run: |
Expand Down Expand Up @@ -64,8 +62,6 @@ jobs:
steps:
- uses: actions/setup-python@v5
name: Install Python
with:
python-version: "3.12"
- uses: actions/download-artifact@v4
with:
name: releases
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/upstream-dev-ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.12"]
python-version: ["3.13"]
steps:
- uses: actions/checkout@v4
with:
Expand Down
7 changes: 5 additions & 2 deletions flox/aggregations.py
Original file line number Diff line number Diff line change
Expand Up @@ -590,6 +590,7 @@ class Scan:
identity: Any
# dtype of result
dtype: Any = None
preserves_dtype: bool = False
# "Mode" of applying binary op.
# for np.add we apply the op directly to the `state` array and the `current` array.
# for ffill, bfill we concat `state` to `current` and then run the scan again.
Expand Down Expand Up @@ -719,16 +720,18 @@ def scan_binary_op(left_state: ScanState, right_state: ScanState, *, agg: Scan)
reduction="nanlast",
scan="ffill",
# Important: this must be NaN, otherwise ffill does not work.
identity=np.nan,
identity=dtypes.NA,
mode="concat_then_scan",
preserves_dtype=True,
)
bfill = Scan(
"bfill",
binary_op=None,
reduction="nanlast",
scan="ffill",
# Important: this must be NaN, otherwise bfill does not work.
identity=np.nan,
identity=dtypes.NA,
preserves_dtype=True,
mode="concat_then_scan",
preprocess=reverse,
finalize=reverse,
Expand Down
17 changes: 10 additions & 7 deletions flox/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -2823,9 +2823,6 @@ def groupby_scan(
# nothing to do, no NaNs!
return array

is_bool_array = np.issubdtype(array.dtype, bool)
array = array.astype(np.int_) if is_bool_array else array

if expected_groups is not None:
raise NotImplementedError("Setting `expected_groups` and binning is not supported yet.")
expected_groups = _validate_expected_groups(nby, expected_groups)
Expand Down Expand Up @@ -2855,6 +2852,11 @@ def groupby_scan(
if array.dtype.kind in "Mm":
cast_to = array.dtype
array = array.view(np.int64)
elif array.dtype.kind == "b":
array = array.view(np.int8)
cast_to = None
if agg.preserves_dtype:
cast_to = bool
else:
cast_to = None

Expand All @@ -2869,6 +2871,7 @@ def groupby_scan(
agg.dtype = np.result_type(array.dtype, np.uint)
else:
agg.dtype = array.dtype if dtype is None else dtype
agg.identity = xrdtypes._get_fill_value(agg.dtype, agg.identity)

(single_axis,) = axis_ # type: ignore[misc]
# avoid some roundoff error when we can.
Expand All @@ -2887,7 +2890,7 @@ def groupby_scan(

if not has_dask:
final_state = chunk_scan(inp, axis=single_axis, agg=agg, dtype=agg.dtype)
result = _finalize_scan(final_state)
result = _finalize_scan(final_state, dtype=agg.dtype)
else:
result = dask_groupby_scan(inp.array, inp.group_idx, axes=axis_, agg=agg)

Expand Down Expand Up @@ -2940,9 +2943,9 @@ def _zip(group_idx: np.ndarray, array: np.ndarray) -> AlignedArrays:
return AlignedArrays(group_idx=group_idx, array=array)


def _finalize_scan(block: ScanState) -> np.ndarray:
def _finalize_scan(block: ScanState, dtype) -> np.ndarray:
assert block.result is not None
return block.result.array
return block.result.array.astype(dtype, copy=False)


def dask_groupby_scan(array, by, axes: T_Axes, agg: Scan) -> DaskArray:
Expand Down Expand Up @@ -2985,7 +2988,7 @@ def dask_groupby_scan(array, by, axes: T_Axes, agg: Scan) -> DaskArray:
)

# 3. Unzip and extract the final result array, discard groups
result = map_blocks(_finalize_scan, accumulated, dtype=agg.dtype)
result = map_blocks(partial(_finalize_scan, dtype=agg.dtype), accumulated, dtype=agg.dtype)

assert result.chunks == array.chunks

Expand Down
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ classifiers = [
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
]
dependencies = [
"pandas>=1.5",
Expand Down
Loading