Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions RELEASE_NOTES.rst
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,10 @@ Upcoming Release
``pip install git+https://github.com/pypsa/atlite``.


* Add ``aggregate_time={"sum", "mean", False}`` to ``convert_and_aggregate`` for temporal
  aggregation with and without spatial aggregation, and deprecate
  ``capacity_factor``/``capacity_factor_timeseries`` in favor of the new parameter.

`v0.5.0 <https://github.com/PyPSA/atlite/releases/tag/v0.5.0>`__ (13th March 2026)
=======================================================================================

Expand Down
101 changes: 75 additions & 26 deletions atlite/convert.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,11 @@

import datetime as dt
import logging
import warnings
from collections import namedtuple
from operator import itemgetter
from pathlib import Path
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Literal

import geopandas as gpd
import numpy as np
Expand Down Expand Up @@ -43,8 +44,6 @@
logger = logging.getLogger(__name__)

if TYPE_CHECKING:
from typing import Literal

from atlite.resource import TurbineConfig


Expand All @@ -58,6 +57,7 @@ def convert_and_aggregate(
shapes_crs=4326,
per_unit=False,
return_capacity=False,
aggregate_time: Literal["sum", "mean", False] | None = None,
capacity_factor=False,
capacity_factor_timeseries=False,
show_progress=False,
Expand Down Expand Up @@ -93,12 +93,17 @@ def convert_and_aggregate(
return_capacity : boolean
Additionally returns the installed capacity at each bus corresponding
to ``layout`` (defaults to False).
aggregate_time : "sum", "mean", False, or None
Controls temporal aggregation of results. ``"sum"`` sums over time,
``"mean"`` averages over time, ``False`` returns full timeseries.
``None`` keeps the historical default behavior: time-summed results
without spatial aggregation and full timeseries with spatial
aggregation. Replaces the deprecated ``capacity_factor`` and
``capacity_factor_timeseries`` parameters.
capacity_factor : boolean
If True, the static capacity factor of the chosen resource for each
grid cell is computed.
Deprecated. Use ``aggregate_time="mean"`` instead.
capacity_factor_timeseries : boolean
If True, the capacity factor time series of the chosen resource for
each grid cell is computed.
Deprecated. Use ``aggregate_time=False`` instead (which is the default).
show_progress : boolean, default False
Whether to show a progress bar.
dask_kwargs : dict, default {}
Expand All @@ -116,17 +121,21 @@ def convert_and_aggregate(

**With aggregation** (``matrix``, ``shapes``, or ``layout`` given):
Time-series of renewable generation aggregated to buses, with
dimensions ``(bus, time)``.
dimensions ``(bus, time)``. If ``aggregate_time`` is set, the time
dimension is reduced accordingly.

**Without aggregation** (none of the above given):

- ``capacity_factor_timeseries=True``: per-cell capacity factor
time series with dimensions ``(time, y, x)`` in p.u. Individual
locations can be extracted with
``result.sel(x=lon, y=lat, method="nearest")``.
- ``capacity_factor=True``: time-averaged capacity factor per cell
with dimensions ``(y, x)`` in p.u.
- Otherwise: total energy sum per cell with dimensions ``(y, x)``.
- ``aggregate_time=False``: per-cell timeseries ``(time, y, x)``.
- ``aggregate_time="mean"``: time-averaged per cell ``(y, x)``.
- ``aggregate_time="sum"``: time-summed per cell ``(y, x)``.

Legacy behavior (deprecated):

- ``capacity_factor_timeseries=True``: equivalent to
``aggregate_time=False``.
- ``capacity_factor=True``: equivalent to ``aggregate_time="mean"``.
- No flags: historical default behavior.

units : xr.DataArray (optional)
The installed units per bus in MW corresponding to ``layout``
Expand All @@ -138,6 +147,41 @@ def convert_and_aggregate(
pv : Generate solar PV generation time-series.

"""
if (
aggregate_time is not None
and aggregate_time is not False
and aggregate_time
not in (
"sum",
"mean",
)
):
raise ValueError(
f"aggregate_time must be 'sum', 'mean', False, or None, got {aggregate_time!r}"
)

if capacity_factor or capacity_factor_timeseries:
if aggregate_time is not None and aggregate_time is not False:
raise ValueError(
"Cannot use 'aggregate_time' together with deprecated "
"'capacity_factor' or 'capacity_factor_timeseries'."
)
if capacity_factor:
warnings.warn(
"capacity_factor is deprecated. Use aggregate_time='mean' instead.",
FutureWarning,
stacklevel=2,
)
aggregate_time = "mean"
if capacity_factor_timeseries:
warnings.warn(
"capacity_factor_timeseries is deprecated. "
"Use aggregate_time=False instead.",
FutureWarning,
stacklevel=2,
)
aggregate_time = False

func_name = convert_func.__name__.replace("convert_", "")
logger.info(f"Convert and aggregate '{func_name}'.")
da = convert_func(cutout.data, **convert_kwds)
Expand All @@ -150,16 +194,15 @@ def convert_and_aggregate(
"One of `matrix`, `shapes` and `layout` must be "
"given for `per_unit` or `return_capacity`"
)
if capacity_factor or capacity_factor_timeseries:
if capacity_factor_timeseries:
res = da.rename("capacity factor")
else:
res = da.mean("time").rename("capacity factor")
res.attrs["units"] = "p.u."
return maybe_progressbar(res, show_progress, **dask_kwargs)
else:

effective_aggregate_time = "sum" if aggregate_time is None else aggregate_time
if effective_aggregate_time == "mean":
res = da.mean("time")
elif effective_aggregate_time == "sum":
res = da.sum("time", keep_attrs=True)
return maybe_progressbar(res, show_progress, **dask_kwargs)
else:
res = da
return maybe_progressbar(res, show_progress, **dask_kwargs)

if matrix is not None:
if shapes is not None:
Expand Down Expand Up @@ -216,6 +259,12 @@ def convert_and_aggregate(
else:
results.attrs["units"] = "MW"

effective_aggregate_time = False if aggregate_time is None else aggregate_time
if effective_aggregate_time == "mean":
results = results.mean("time")
elif effective_aggregate_time == "sum":
results = results.sum("time", keep_attrs=True)

if return_capacity:
return maybe_progressbar(results, show_progress, **dask_kwargs), capacity
else:
Expand Down Expand Up @@ -666,7 +715,7 @@ def wind(
Get per-cell capacity factor time series (no aggregation):

>>> cf = cutout.wind(turbine="Vestas_V112_3MW",
... capacity_factor_timeseries=True)
... aggregate_time=False)
>>> cf.dims
('time', 'y', 'x')
>>> location_cf = cf.sel(x=6.9, y=53.1, method="nearest")
Expand Down Expand Up @@ -850,7 +899,7 @@ def pv(cutout, panel, orientation, tracking=None, clearsky_model=None, **params)
Get per-cell capacity factor time series (no aggregation):

>>> cf = cutout.pv(panel="CSi", orientation="latitude_optimal",
... capacity_factor_timeseries=True)
... aggregate_time=False)
>>> location_cf = cf.sel(x=6.9, y=53.1, method="nearest")

References
Expand Down
181 changes: 181 additions & 0 deletions test/test_aggregate_time.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,181 @@
# SPDX-FileCopyrightText: Contributors to atlite <https://github.com/pypsa/atlite>
#
# SPDX-License-Identifier: MIT

import warnings

import numpy as np
import pytest
import xarray as xr

from atlite.convert import convert_and_aggregate


class MockCutout:
    """Minimal stand-in for :class:`atlite.Cutout`.

    Exposes only what ``convert_and_aggregate`` reads: the underlying
    ``data`` dataset and a ``grid`` DataFrame with one row per grid cell.
    Cells are ordered with ``x`` varying fastest (outer loop over ``y``),
    matching the cell order atlite expects.
    """

    def __init__(self, data):
        # Local import keeps pandas out of the module namespace; hoisted to
        # the top of the method instead of sitting between statements.
        import pandas as pd

        self.data = data
        grid_coords = np.array(
            [(x, y) for y in data.y.values for x in data.x.values]
        )
        self.grid = pd.DataFrame(grid_coords, columns=["x", "y"])


def identity_convert(ds, **_unused_kwargs):
    """Trivial convert function: pass the ``var`` field through unchanged."""
    variable = ds["var"]
    return variable


@pytest.fixture
def cutout():
    """Mock cutout with 24 hourly steps over a 3x4 (y, x) grid of random data.

    Uses a local :class:`numpy.random.Generator` seeded at 42 instead of
    ``np.random.seed`` — the legacy call mutates global RNG state shared
    with every other test in the session.
    """
    rng = np.random.default_rng(42)
    times = xr.date_range("2020-01-01", periods=24, freq="h")
    data = xr.Dataset(
        {
            "var": xr.DataArray(
                rng.random((24, 3, 4)),
                dims=["time", "y", "x"],
                coords={
                    "time": times,
                    "y": [50.0, 51.0, 52.0],
                    "x": [5.0, 6.0, 7.0, 8.0],
                },
            )
        }
    )
    return MockCutout(data)


class TestAggregateTimeNoSpatial:
    """Temporal aggregation without matrix/shapes/layout (per-cell results)."""

    def test_aggregate_time_false_returns_timeseries(self, cutout):
        res = convert_and_aggregate(cutout, identity_convert, aggregate_time=False)
        assert "time" in res.dims

    def test_aggregate_time_mean(self, cutout):
        res = convert_and_aggregate(cutout, identity_convert, aggregate_time="mean")
        reference = cutout.data["var"].mean("time")
        assert "time" not in res.dims
        np.testing.assert_allclose(res.values, reference.values)

    def test_aggregate_time_sum(self, cutout):
        res = convert_and_aggregate(cutout, identity_convert, aggregate_time="sum")
        reference = cutout.data["var"].sum("time")
        assert "time" not in res.dims
        np.testing.assert_allclose(res.values, reference.values)

    def test_default_no_spatial_aggregates_over_time(self, cutout):
        # Historical default without spatial aggregation: sum over time.
        res = convert_and_aggregate(cutout, identity_convert)
        assert "time" not in res.dims
        xr.testing.assert_identical(res, cutout.data["var"].sum("time"))


class TestAggregateTimeWithSpatial:
    """Temporal aggregation combined with spatial aggregation via a layout."""

    @staticmethod
    def _uniform_layout(cutout, value=1.0):
        # Uniform capacity layout covering every grid cell; extracted because
        # the same construction was duplicated in each test.
        return xr.DataArray(
            np.full((3, 4), value),
            dims=["y", "x"],
            coords={"y": cutout.data.y, "x": cutout.data.x},
        )

    def test_aggregate_time_mean_with_layout(self, cutout):
        layout = self._uniform_layout(cutout)
        result_ts = convert_and_aggregate(
            cutout, identity_convert, layout=layout, aggregate_time=False
        )
        result_mean = convert_and_aggregate(
            cutout, identity_convert, layout=layout, aggregate_time="mean"
        )
        assert "time" in result_ts.dims
        assert "time" not in result_mean.dims
        np.testing.assert_allclose(result_mean.values, result_ts.mean("time").values)

    def test_aggregate_time_sum_with_layout(self, cutout):
        layout = self._uniform_layout(cutout)
        result_ts = convert_and_aggregate(
            cutout, identity_convert, layout=layout, aggregate_time=False
        )
        result_sum = convert_and_aggregate(
            cutout, identity_convert, layout=layout, aggregate_time="sum"
        )
        assert "time" not in result_sum.dims
        np.testing.assert_allclose(result_sum.values, result_ts.sum("time").values)

    def test_aggregate_time_with_per_unit(self, cutout):
        # Non-unit layout so per-unit normalization actually divides by 2.
        layout = self._uniform_layout(cutout, value=2.0)
        result_pu = convert_and_aggregate(
            cutout,
            identity_convert,
            layout=layout,
            per_unit=True,
            aggregate_time="mean",
        )
        assert "time" not in result_pu.dims

        result_pu_ts = convert_and_aggregate(
            cutout,
            identity_convert,
            layout=layout,
            per_unit=True,
            aggregate_time=False,
        )
        np.testing.assert_allclose(result_pu.values, result_pu_ts.mean("time").values)


class TestDeprecatedParams:
    """Deprecated capacity_factor flags map onto aggregate_time with warnings."""

    def test_capacity_factor_warns(self, cutout):
        # pytest.warns also asserts the category is FutureWarning, which the
        # previous manual catch_warnings check never verified.
        with pytest.warns(FutureWarning, match="capacity_factor is deprecated"):
            result = convert_and_aggregate(
                cutout, identity_convert, capacity_factor=True
            )
        # capacity_factor=True behaves like aggregate_time="mean".
        assert "time" not in result.dims

    def test_capacity_factor_timeseries_warns(self, cutout):
        with pytest.warns(
            FutureWarning, match="capacity_factor_timeseries is deprecated"
        ):
            result = convert_and_aggregate(
                cutout, identity_convert, capacity_factor_timeseries=True
            )
        # capacity_factor_timeseries=True behaves like aggregate_time=False.
        assert "time" in result.dims

    def test_capacity_factor_with_aggregate_time_raises(self, cutout):
        # Mixing the new parameter with a deprecated flag is an error.
        with pytest.raises(ValueError, match="Cannot use"):
            convert_and_aggregate(
                cutout,
                identity_convert,
                capacity_factor=True,
                aggregate_time="mean",
            )


class TestInvalidArgs:
    """aggregate_time values outside {"sum", "mean", False, None} are rejected."""

    def test_invalid_aggregate_time_value(self, cutout):
        with pytest.raises(ValueError, match="aggregate_time must be"):
            convert_and_aggregate(
                cutout, identity_convert, aggregate_time="invalid"
            )

    def test_aggregate_time_true_raises(self, cutout):
        # True is explicitly rejected even though it is truthy.
        with pytest.raises(ValueError, match="aggregate_time must be"):
            convert_and_aggregate(
                cutout, identity_convert, aggregate_time=True
            )
Loading