1 change: 1 addition & 0 deletions .gitignore
@@ -109,3 +109,4 @@ doc/source/quickstart/.ipynb_checkpoints/
dist
.python-version
answer_nosetests.xml
.venv/
11 changes: 6 additions & 5 deletions yt/_typing.py
@@ -1,6 +1,7 @@
from typing import Any, Optional, TypeAlias

import numpy as np
import numpy.typing as npt
import unyt as un

FieldDescT = tuple[str, tuple[str, list[str], str | None]]
@@ -12,12 +13,12 @@
FieldKey = tuple[FieldType, FieldName]
ImplicitFieldKey = FieldName
AnyFieldKey = FieldKey | ImplicitFieldKey
DomainDimensions = tuple[int, ...] | list[int] | np.ndarray
DomainDimensions = tuple[int, ...] | list[int] | npt.NDArray

ParticleCoordinateTuple = tuple[
str, # particle type
tuple[np.ndarray, np.ndarray, np.ndarray], # xyz
float | np.ndarray, # hsml
tuple[npt.NDArray, npt.NDArray, npt.NDArray], # xyz
float | npt.NDArray, # hsml
]

# Geometry specific types
@@ -33,5 +34,5 @@
# np.ndarray[...] syntax is runtime-valid from numpy 1.22, we quote it until our minimal
# runtime requirement is bumped to, or beyond this version

MaskT = Optional["np.ndarray[Any, np.dtype[np.bool_]]"]
AlphaT = Optional["np.ndarray[Any, np.dtype[np.float64]]"]
MaskT = Optional["npt.NDArray[np.bool_]"]
AlphaT = Optional["npt.NDArray[np.float64]"]
7 changes: 4 additions & 3 deletions yt/frontends/artio/data_structures.py
@@ -3,6 +3,7 @@
from collections import defaultdict

import numpy as np
import numpy.typing as npt

from yt.data_objects.field_data import YTFieldData
from yt.data_objects.index_subobjects.octree_subset import OctreeSubset
@@ -339,10 +340,10 @@ def _read_fluid_fields(self, fields, dobj, chunk=None):

def _icoords_to_fcoords(
self,
icoords: np.ndarray,
ires: np.ndarray,
icoords: npt.NDArray,
ires: npt.NDArray,
axes: tuple[int, ...] | None = None,
) -> tuple[np.ndarray, np.ndarray]:
) -> tuple[npt.NDArray, npt.NDArray]:
"""
Accepts icoords and ires and returns appropriate fcoords and fwidth.
Mostly useful for cases where we have irregularly spaced or structured
13 changes: 7 additions & 6 deletions yt/frontends/ramses/hilbert.py
@@ -1,6 +1,7 @@
from typing import Any, Optional

import numpy as np
import numpy.typing as npt

from yt.data_objects.selection_objects.region import YTRegion
from yt.geometry.selection_routines import (
@@ -49,8 +50,8 @@


def hilbert3d(
ijk: "np.ndarray[Any, np.dtype[np.int64]]", bit_length: int
) -> "np.ndarray[Any, np.dtype[np.float64]]":
ijk: "npt.NDArray[np.int64]", bit_length: int
) -> "npt.NDArray[np.int64]":
Contributor comment:

@cphyc mind double-checking my fix here? The Hilbert index arrays will always be int arrays from what I can see.

(also if you're up for reviewing the rest of the PR it'd be welcome!)
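
To illustrate the dtype argument, here is a minimal sketch (a Morton-style bit interleave standing in for the actual Hilbert computation; the values are made up):

```python
import numpy as np
import numpy.typing as npt

# Integer grid indices, as hilbert3d receives them (values made up).
ijk: npt.NDArray[np.int64] = np.array([[0, 0, 0], [1, 0, 1]], dtype=np.int64)

# A Morton-style interleave stands in for the real Hilbert computation:
# shifts and bitwise-or on int64 arrays stay int64, so annotating the
# returned keys as npt.NDArray[np.int64] is consistent.
keys = (ijk[:, 0] << 2) | (ijk[:, 1] << 1) | ijk[:, 2]
assert keys.dtype == np.int64
```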

"""Compute the order using Hilbert indexing.

Arguments
@@ -70,11 +71,11 @@ def hilbert3d(
def get_intersecting_cpus(
ds,
region: YTRegion,
LE: Optional["np.ndarray[Any, np.dtype[np.float64]]"] = None,
LE: Optional["npt.NDArray[np.float64]"] = None,
Contributor comment:

oh! nothing to change here, but I just remembered: the Any in the original np.ndarray[Any, np.dtype[np.float64]] is a reference to the array shape. So the update here is equivalent, since npt.NDArray[dtype] is a type alias for np.ndarray[tuple[Any, ...], dtype].
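
For reference, a small sketch of that equivalence (illustrative only, not part of the diff; both annotations describe a float64 array of arbitrary shape):

```python
from typing import Any

import numpy as np
import numpy.typing as npt

# Explicit spelling: the first parameter is the (unconstrained) shape,
# the second is the dtype.
explicit: "np.ndarray[Any, np.dtype[np.float64]]" = np.zeros(3)

# Alias spelling used throughout this PR; NDArray fills in the shape
# parameter itself, so only the dtype is written out.
aliased: npt.NDArray[np.float64] = np.zeros(3)
```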

dx: float = 1.0,
dx_cond: float | None = None,
factor: float = 4.0,
bound_keys: Optional["np.ndarray[Any, np.dtype[np.float64]]"] = None,
bound_keys: Optional["npt.NDArray[np.float64]"] = None,
) -> set[int]:
"""
Find the subset of CPUs that intersect the bbox in a recursive fashion.
@@ -119,8 +120,8 @@ def get_intersecting_cpus(

def get_cpu_list_cuboid(
ds,
X: "np.ndarray[Any, np.dtype[np.float64]]",
bound_keys: "np.ndarray[Any, np.dtype[np.float64]]",
X: "npt.NDArray[np.float64]",
bound_keys: "npt.NDArray[np.float64]",
) -> set[int]:
"""
Return the list of the CPU intersecting with the cuboid containing the positions.
11 changes: 6 additions & 5 deletions yt/frontends/ramses/io.py
@@ -3,6 +3,7 @@
from typing import TYPE_CHECKING, Union

import numpy as np
import numpy.typing as npt
from unyt import unyt_array

from yt._maintenance.deprecation import issue_deprecation_warning
@@ -37,7 +38,7 @@ def convert_ramses_ages(ds, conformal_ages):


def convert_ramses_conformal_time_to_physical_time(
ds, conformal_time: np.ndarray
ds, conformal_time: npt.NDArray
) -> unyt_array:
"""
Convert conformal times (as defined in RAMSES) to physical times.
@@ -82,7 +83,7 @@ def _ramses_particle_binary_file_handler(
subset: "RAMSESDomainSubset",
fields: list[FieldKey],
count: int,
) -> dict[FieldKey, np.ndarray]:
) -> dict[FieldKey, npt.NDArray]:
"""General file handler for binary file, called by _read_particle_subset

Parameters
@@ -96,7 +97,7 @@
count: integer
The number of elements to count
"""
tr = {}
tr: dict[FieldKey, npt.NDArray] = {}
ds = subset.domain.ds
foffsets = particle_handler.field_offsets
fname = particle_handler.fname
@@ -130,7 +131,7 @@ def _ramses_particle_csv_file_handler(
subset: "RAMSESDomainSubset",
fields: list[FieldKey],
count: int,
) -> dict[FieldKey, np.ndarray]:
) -> dict[FieldKey, npt.NDArray]:
"""General file handler for csv file, called by _read_particle_subset

Parameters
Expand All @@ -146,7 +147,7 @@ def _ramses_particle_csv_file_handler(
"""
from yt.utilities.on_demand_imports import _pandas as pd

tr = {}
tr: dict[FieldKey, npt.NDArray] = {}
ds = subset.domain.ds
foffsets = particle_handler.field_offsets
fname = particle_handler.fname
11 changes: 6 additions & 5 deletions yt/frontends/ramses/particle_handlers.py
@@ -7,6 +7,7 @@
from typing import TYPE_CHECKING, Any

import numpy as np
import numpy.typing as npt

from yt._typing import FieldKey
from yt.config import ytcfg
@@ -71,7 +72,7 @@ class ParticleFileHandler(abc.ABC, HandlerMixin):
# assumed to be `self`).
reader: Callable[
["RAMSESDomainSubset", list[FieldKey], int],
dict[FieldKey, np.ndarray],
dict[FieldKey, npt.NDArray],
]

# Name of the config section (if any)
@@ -162,7 +163,7 @@ def header(self) -> dict[str, Any]:
self.read_header()
return self._header

def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, np.ndarray]):
def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, npt.NDArray]):
"""
This function allows custom code to be called to handle special cases,
such as the particle birth time.
@@ -173,7 +174,7 @@ def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, np.ndarray]):
----------
field : FieldKey
The field name.
data_dict : dict[FieldKey, np.ndarray]
data_dict : dict[FieldKey, npt.NDArray]
A dictionary containing the data.

By default, this function does nothing.
@@ -346,7 +347,7 @@ def birth_file_fname(self):
def has_birth_file(self):
return os.path.exists(self.birth_file_fname)

def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, np.ndarray]):
def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, npt.NDArray]):
_ptype, fname = field
if not (fname == "particle_birth_time" and self.ds.cosmological_simulation):
return
@@ -492,7 +493,7 @@ def read_header(self):
self._field_offsets = field_offsets
self._field_types = _pfields

def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, np.ndarray]):
def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, npt.NDArray]):
_ptype, fname = field
if not (fname == "particle_birth_time" and self.ds.cosmological_simulation):
return
5 changes: 3 additions & 2 deletions yt/frontends/rockstar/data_structures.py
@@ -4,6 +4,7 @@
from typing import Any, Optional

import numpy as np
import numpy.typing as npt

from yt.data_objects.static_output import ParticleDataset
from yt.frontends.halo_catalog.data_structures import HaloCatalogFile
@@ -21,7 +22,7 @@ class RockstarBinaryFile(HaloCatalogFile):
header: dict
_position_offset: int
_member_offset: int
_Npart: "np.ndarray[Any, np.dtype[np.int64]]"
_Npart: "npt.NDArray[np.int64]"
_ids_halos: list[int]
_file_size: int

@@ -49,7 +50,7 @@ def __init__(self, ds, io, filename, file_id, range):

def _read_member(
self, ihalo: int
) -> Optional["np.ndarray[Any, np.dtype[np.int64]]"]:
) -> Optional["npt.NDArray[np.int64]"]:
if ihalo not in self._ids_halos:
return None

5 changes: 3 additions & 2 deletions yt/frontends/stream/misc.py
@@ -1,12 +1,13 @@
import numpy as np
import numpy.typing as npt

from yt._typing import DomainDimensions


def _validate_cell_widths(
cell_widths: list[np.ndarray],
cell_widths: list[npt.NDArray],
domain_dimensions: DomainDimensions,
) -> list[np.ndarray]:
) -> list[npt.NDArray]:
# check dimensionality
if (nwids := len(cell_widths)) != (ndims := len(domain_dimensions)):
raise ValueError(
5 changes: 3 additions & 2 deletions yt/geometry/coordinates/coordinate_handler.py
@@ -10,6 +10,7 @@
from yt.funcs import fix_unitary, is_sequence, parse_center_array, validate_width_tuple
from yt.units.yt_array import YTArray, YTQuantity
from yt.utilities.exceptions import YTCoordinateNotImplemented, YTInvalidWidthError
import numpy.typing as npt


def _unknown_coord(data):
@@ -158,7 +159,7 @@ def pixelize(
periodic=True,
*,
return_mask: Literal[False],
) -> "np.ndarray[Any, np.dtype[np.float64]]": ...
) -> "npt.NDArray[np.float64]": ...

@overload
def pixelize(
@@ -173,7 +174,7 @@ def pixelize(
*,
return_mask: Literal[True],
) -> tuple[
"np.ndarray[Any, np.dtype[np.float64]]", "np.ndarray[Any, np.dtype[np.bool_]]"
"npt.NDArray[np.float64]", "npt.NDArray[np.bool_]"
]: ...

@abc.abstractmethod
7 changes: 4 additions & 3 deletions yt/geometry/geometry_handler.py
@@ -3,6 +3,7 @@
import weakref

import numpy as np
import numpy.typing as npt

from yt._maintenance.deprecation import issue_deprecation_warning
from yt.config import ytcfg
@@ -51,10 +52,10 @@ def _detect_output_fields(self):

def _icoords_to_fcoords(
self,
icoords: np.ndarray,
ires: np.ndarray,
icoords: npt.NDArray,
ires: npt.NDArray,
axes: tuple[int, ...] | None = None,
) -> tuple[np.ndarray, np.ndarray]:
) -> tuple[npt.NDArray, npt.NDArray]:
# What's the use of raising NotImplementedError for this, when it's an
# abstract base class? Well, only *some* of the subclasses have it --
# and for those that *don't*, we should not be calling it -- and since
7 changes: 4 additions & 3 deletions yt/geometry/grid_geometry_handler.py
@@ -3,6 +3,7 @@
from collections import defaultdict

import numpy as np
import numpy.typing as npt

from yt.arraytypes import blankRecordArray
from yt.config import ytcfg
@@ -447,10 +448,10 @@ def _chunk_io(

def _icoords_to_fcoords(
self,
icoords: np.ndarray,
ires: np.ndarray,
icoords: npt.NDArray,
ires: npt.NDArray,
axes: tuple[int, ...] | None = None,
) -> tuple[np.ndarray, np.ndarray]:
) -> tuple[npt.NDArray, npt.NDArray]:
"""
Accepts icoords and ires and returns appropriate fcoords and fwidth.
Mostly useful for cases where we have irregularly spaced or structured
7 changes: 4 additions & 3 deletions yt/geometry/oct_geometry_handler.py
@@ -1,4 +1,5 @@
import numpy as np
import numpy.typing as npt

from yt.fields.field_detector import FieldDetector
from yt.geometry.geometry_handler import Index
@@ -119,10 +120,10 @@ def _mesh_sampling_particle_field(data):

def _icoords_to_fcoords(
self,
icoords: np.ndarray,
ires: np.ndarray,
icoords: npt.NDArray,
ires: npt.NDArray,
axes: tuple[int, ...] | None = None,
) -> tuple[np.ndarray, np.ndarray]:
) -> tuple[npt.NDArray, npt.NDArray]:
"""
Accepts icoords and ires and returns appropriate fcoords and fwidth.
Mostly useful for cases where we have irregularly spaced or structured
7 changes: 4 additions & 3 deletions yt/loaders.py
@@ -15,6 +15,7 @@
from urllib.parse import urlsplit

import numpy as np
import numpy.typing as npt
from more_itertools import always_iterable

from yt._maintenance.deprecation import (
@@ -687,7 +688,7 @@ def load_amr_grids(


def load_particles(
data: Mapping[AnyFieldKey, np.ndarray | tuple[np.ndarray, str]],
data: Mapping[AnyFieldKey, npt.NDArray | tuple[npt.NDArray, str]],
length_unit=None,
bbox=None,
sim_time=None,
@@ -826,7 +827,7 @@ def parse_unit(unit, dimension):
field_units, data, _ = process_data(data)
sfh = StreamDictFieldHandler()

pdata: dict[AnyFieldKey, np.ndarray | tuple[np.ndarray, str]] = {}
pdata: dict[AnyFieldKey, npt.NDArray | tuple[npt.NDArray, str]] = {}
for key in data.keys():
field: FieldKey
if not isinstance(key, tuple):
@@ -1816,7 +1817,7 @@ def load_hdf5_file(
fn: Union[str, "os.PathLike[str]"],
root_node: str | None = "/",
fields: list[str] | None = None,
bbox: np.ndarray | None = None,
bbox: npt.NDArray | None = None,
nchunks: int = 0,
dataset_arguments: dict | None = None,
):