Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
bf12877
Add aggregate_time support to convert_and_aggregate
FabianHofmann Mar 13, 2026
5543e1a
add missing docstrings
FabianHofmann Mar 16, 2026
e0c511c
Add mypy type checking with strict settings and fix all type errors a…
FabianHofmann Mar 16, 2026
9228ca7
Enable ruff rules B, SIM, RET, C4, TC, NPY, G, PTH, RUF100 and fix al…
FabianHofmann Mar 16, 2026
820e7e2
Include test directory in mypy checking and fix all test type errors
FabianHofmann Mar 16, 2026
b1edf3f
Remove examples/ from mypy exclude (no .py files to check, notebooks …
FabianHofmann Mar 16, 2026
c5a7a8d
Include doc/ in mypy checking and fix type errors in conf.py and char…
FabianHofmann Mar 16, 2026
8153e27
Bump mypy python_version to 3.11, enabling proper xarray Self types a…
FabianHofmann Mar 16, 2026
16cd01c
Fix mypy errors: remove unused type: ignore comments, fix type annota…
FabianHofmann Mar 16, 2026
6591a14
Enable ruff DOC/D417 rules for docstring-signature alignment and fix …
FabianHofmann Mar 16, 2026
80d6cdb
Address PR review: add 'legacy' default, drop False, extract _aggrega…
FabianHofmann Mar 24, 2026
5a2fba9
update release notes
FabianHofmann Mar 24, 2026
7fe656b
Merge branch 'feat/aggregate_time' into refac/type-annotation
FabianHofmann Mar 24, 2026
0f1d464
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 24, 2026
dfdcc97
Add docstrings to era5/convert modules and replace str with Literal t…
FabianHofmann Mar 26, 2026
fd68cd3
Enforce D103 (missing docstring in public function) via ruff
FabianHofmann Mar 26, 2026
2d0497c
Add docstrings to all dataset modules (sarah, ncep, cordex, gebco) an…
FabianHofmann Mar 26, 2026
2486e59
Merge branch 'master' into refac/type-annotation
FabianHofmann Mar 26, 2026
67f7366
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 26, 2026
aef7b64
Ignore D103 in test/ and examples/, fix duplicate _aggregate_time def…
FabianHofmann Mar 26, 2026
182b93e
Remove commented-out D103 ignore line
FabianHofmann Mar 26, 2026
0c807ce
Fix docstring lint errors: D205, D401, D100, D101, D105, DOC201
FabianHofmann Mar 26, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,13 @@ repos:
# Run the formatter.
- id: ruff-format

# Type checking with mypy
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.15.0
hooks:
- id: mypy
additional_dependencies: ['types-PyYAML', 'types-requests']

# Find common spelling mistakes in comments and docstrings
- repo: https://github.com/codespell-project/codespell
rev: v2.4.2
Expand Down
18 changes: 9 additions & 9 deletions atlite/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,16 +33,16 @@
__version__ = version("atlite")
# e.g. "0.17.0" # TODO, in the network structure it should use the dev version
# Extract the "major.minor(.patch)" prefix; pre-release/dev suffixes are dropped.
match = re.match(r"(\d+\.\d+(\.\d+)?)", __version__)
assert match, f"Could not determine release_version of atlite: {__version__}"
release_version = match.group(0)

# Public API of the package. Entries must be strings (names), not the
# objects themselves, so tools like `from atlite import *` and linters
# resolve them correctly.
__all__ = [
    "Cutout",
    "ExclusionContainer",
    "compute_indicatormatrix",
    "regrid",
    "cspinstallations",
    "solarpanels",
    "windturbines",
    "__version__",
]
54 changes: 54 additions & 0 deletions atlite/_types.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
# SPDX-FileCopyrightText: Contributors to atlite <https://github.com/pypsa/atlite>
#
# SPDX-License-Identifier: MIT

from __future__ import annotations

from pathlib import Path
from typing import Any, Literal, TypeAlias, TypedDict

import geopandas as gpd
import numpy as np
import scipy.sparse as sp
import xarray as xr
from pyproj import CRS
from shapely.geometry.base import BaseGeometry

# --- Array / geospatial shorthands ---------------------------------------
# Short aliases so signatures across the package stay compact and uniform.
NDArray: TypeAlias = np.ndarray[Any, np.dtype[np.floating[Any]]]
NDArrayInt: TypeAlias = np.ndarray[Any, np.dtype[np.signedinteger[Any]]]
NDArrayBool: TypeAlias = np.ndarray[Any, np.dtype[np.bool_]]
DataArray: TypeAlias = xr.DataArray
Dataset: TypeAlias = xr.Dataset
PathLike: TypeAlias = str | Path
NumericArray: TypeAlias = NDArray | DataArray
Number: TypeAlias = int | float | np.number[Any]
GeoDataFrame: TypeAlias = gpd.GeoDataFrame
GeoSeries: TypeAlias = gpd.GeoSeries
Geometry: TypeAlias = BaseGeometry
# NOTE(review): presumably anything accepted by pyproj's CRS constructor — confirm
CrsLike: TypeAlias = str | int | CRS | dict[str, Any] | None
SparseMatrix: TypeAlias = sp.lil_matrix | sp.csr_matrix

# --- Closed sets of string options ---------------------------------------
# Literal aliases for option parameters, enabling static checking of
# accepted string values instead of plain `str`.
TrackingType: TypeAlias = (
    Literal["horizontal", "tilted_horizontal", "vertical", "dual"] | None
)
ClearskyModel: TypeAlias = Literal["simple", "enhanced"]
TrigonModel: TypeAlias = Literal["simple", "perez"]
IrradiationType: TypeAlias = Literal["total", "direct", "diffuse", "ground"]
HeatPumpSource: TypeAlias = Literal["air", "soil"]
OrientationName: TypeAlias = Literal["latitude_optimal", "constant", "latitude"]
DataFormat: TypeAlias = Literal["grib", "netcdf"]


class ERA5RetrievalParams(TypedDict, total=False):
    """Keyword parameters of an ERA5 CDS retrieval request.

    ``total=False`` makes every key optional; callers supply only the
    subset relevant to the request being built.
    """

    # NOTE(review): presumably the CDS product name, e.g. "reanalysis" — confirm
    product: str
    # Bounding box of the request; assumes CDS order [north, west, south, east] — TODO confirm
    area: list[float]
    grid: str
    chunks: dict[str, int] | None
    tmpdir: str | Path | None
    lock: Any | None
    data_format: Literal["grib", "netcdf"]
    # Temporal selection; single strings or lists are both accepted by CDS.
    year: list[str]
    month: list[str] | str
    day: list[str] | str
    time: str | list[str]
    variable: str | list[str]
49 changes: 39 additions & 10 deletions atlite/aggregate.py
Original file line number Diff line number Diff line change
@@ -1,29 +1,58 @@
# SPDX-FileCopyrightText: Contributors to atlite <https://github.com/pypsa/atlite>
#
# SPDX-License-Identifier: MIT
"""
Functions for aggregating results.
"""
"""Functions for aggregating results."""

from __future__ import annotations

from typing import TYPE_CHECKING, cast

import dask
import xarray as xr

if TYPE_CHECKING:
import pandas as pd
from scipy.sparse import spmatrix

from atlite._types import DataArray


def aggregate_matrix(
    da: DataArray,
    matrix: spmatrix,
    index: pd.Index,
) -> DataArray:
    """
    Aggregate spatial data with a sparse matrix.

    Parameters
    ----------
    da : xarray.DataArray
        DataArray with spatial dimensions ``y`` and ``x``.
    matrix : scipy.sparse.spmatrix
        Aggregation matrix mapping flattened spatial cells to ``index``.
    index : pandas.Index
        Index defining the aggregated dimension.

    Returns
    -------
    xarray.DataArray
        Aggregated data indexed by ``index`` and, if present, time.
    """
    # apply_ufunc needs a named output core dimension.
    if index.name is None:
        index = index.rename("dim_0")
    if isinstance(da.data, dask.array.core.Array):
        # Lazy path: collapse (y, x) into one chunk so the matrix product
        # sees the full spatial axis per block.
        da = da.stack(spatial=("y", "x"))
        da = da.chunk({"spatial": -1})
        result = xr.apply_ufunc(
            lambda da: da * matrix.T,
            da,
            input_core_dims=[["spatial"]],
            output_core_dims=[[index.name]],
            dask="parallelized",
            output_dtypes=[da.dtype],
            dask_gufunc_kwargs={"output_sizes": {index.name: index.size}},
        ).assign_coords(**{index.name: index})
        return cast("DataArray", result)
    # Eager path: plain sparse matrix-vector product over the stacked axis.
    da = da.stack(spatial=("y", "x")).transpose("spatial", "time")
    return xr.DataArray(matrix * da, [index, da.coords["time"]])
Loading
Loading