diff --git a/CHANGELOG.md b/CHANGELOG.md index 3b632e9f3..73fd9dc9d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -54,7 +54,29 @@ Until here --> ## [7.0.0] - Unreleased -**Summary**: Performance release with **up to 67x faster model building** for large systems through batched/vectorized operations. +**Summary**: Performance release with **up to 67x faster model building** for large systems through batched/vectorized operations. Renames `label` to `id` across the API with deprecation support, introduces `IdList[T]` as the standard container, and redesigns the `Flow` constructor. + +### ✨ Added + +- **`IdList[T]` container** (`flixopt/id_list.py`): New generic frozen ordered container replacing `FlowContainer`, `ElementContainer`, `ResultsContainer`, and `CarrierContainer`. Provides dict-like access by primary key, short-key fallback, or positional index, with helpful error messages including close-match suggestions. + +### 💥 Breaking Changes + +- **`label` renamed to `id`**: All element constructors now use `id` instead of `label`. The old `label` parameter and `.label` / `.label_full` properties are deprecated and will be removed in v8.0.0. Use `.id` everywhere. +- **`Flow` constructor redesigned**: `bus` is now the first positional argument; `flow_id` (optional) sets the short name, defaulting to the bus name. Old forms `Flow(label, bus)` and `Flow(label, bus=...)` remain deprecated until v8.0.0. +- **`Flow.id` returns qualified name**: `Flow.id` now returns `component(flow_id)` (e.g., `Boiler(Q_fu)`) instead of just the short name. Use `flow.flow_id` for the short name. +- **`Flow.flow_id`**: New public property for the short flow identifier (e.g., `'Q_fu'`). This replaces the internal `_short_id` for Flow objects. +- **Container classes replaced**: `FlowContainer`, `ElementContainer`, `ResultsContainer` replaced by `IdList`. `EffectCollection` and `CarrierContainer` now inherit from `IdList`. 
Access patterns (`[]`, `in`, `keys()`, `values()`, `items()`, `get()`) are preserved. + +### 🗑️ Deprecated + +The following items are deprecated and will be removed in **v8.0.0**: + +- `Element(label=...)` — use `Element(id=...)` instead +- `Flow(id=...)` — use `Flow(flow_id=...)` instead +- `.label` property — use `.id` instead +- `.label_full` property — use `.id` instead +- `Flow(label, bus)` positional form — use `Flow(bus, flow_id=...)` instead ### 🚀 Performance diff --git a/flixopt/__init__.py b/flixopt/__init__.py index ecfb33dff..1bded9aaa 100644 --- a/flixopt/__init__.py +++ b/flixopt/__init__.py @@ -31,10 +31,11 @@ ) from .config import CONFIG from .core import TimeSeriesData -from .effects import PENALTY_EFFECT_LABEL, Effect +from .effects import PENALTY_EFFECT_ID, PENALTY_EFFECT_LABEL, Effect from .elements import Bus, Flow from .flow_system import FlowSystem from .flow_system_status import FlowSystemStatus +from .id_list import IdList from .interface import InvestParameters, Piece, Piecewise, PiecewiseConversion, PiecewiseEffects, StatusParameters from .optimization import Optimization, SegmentedOptimization from .plot_result import PlotResult @@ -48,6 +49,8 @@ 'Flow', 'Bus', 'Effect', + 'IdList', + 'PENALTY_EFFECT_ID', 'PENALTY_EFFECT_LABEL', 'Source', 'Sink', diff --git a/flixopt/batched.py b/flixopt/batched.py index 7f7d5882c..4bc732b62 100644 --- a/flixopt/batched.py +++ b/flixopt/batched.py @@ -21,9 +21,9 @@ from .core import PlausibilityError from .features import fast_isnull, fast_notnull, stack_along_dim +from .id_list import IdList, element_id_list from .interface import InvestParameters, StatusParameters from .modeling import _scalar_safe_isel_drop -from .structure import ElementContainer if TYPE_CHECKING: from .components import LinearConverter, Transmission @@ -541,12 +541,12 @@ def __init__( self._dim_name = dim_name self._effect_ids = effect_ids self._timesteps_extra = timesteps_extra - self._by_label = {s.label_full: s for s in storages} 
+ self._by_id = {s.id: s for s in storages} @cached_property def ids(self) -> list[str]: - """All storage IDs (label_full).""" - return [s.label_full for s in self._storages] + """All storage IDs.""" + return [s.id for s in self._storages] @property def element_ids(self) -> list[str]: @@ -559,13 +559,13 @@ def dim_name(self) -> str: return self._dim_name @cached_property - def elements(self) -> ElementContainer: - """ElementContainer of storages.""" - return ElementContainer(self._storages) + def elements(self) -> IdList: + """IdList of storages.""" + return element_id_list(self._storages) def __getitem__(self, label: str): - """Get a storage by its label_full.""" - return self._by_label[label] + """Get a storage by its id.""" + return self._by_id[label] def __len__(self) -> int: return len(self._storages) @@ -575,29 +575,29 @@ def __len__(self) -> int: @cached_property def with_investment(self) -> list[str]: """IDs of storages with investment parameters.""" - return [s.label_full for s in self._storages if isinstance(s.capacity_in_flow_hours, InvestParameters)] + return [s.id for s in self._storages if isinstance(s.capacity_in_flow_hours, InvestParameters)] @cached_property def with_optional_investment(self) -> list[str]: """IDs of storages with optional (non-mandatory) investment.""" - return [sid for sid in self.with_investment if not self._by_label[sid].capacity_in_flow_hours.mandatory] + return [sid for sid in self.with_investment if not self._by_id[sid].capacity_in_flow_hours.mandatory] @cached_property def with_mandatory_investment(self) -> list[str]: """IDs of storages with mandatory investment.""" - return [sid for sid in self.with_investment if self._by_label[sid].capacity_in_flow_hours.mandatory] + return [sid for sid in self.with_investment if self._by_id[sid].capacity_in_flow_hours.mandatory] @cached_property def with_balanced(self) -> list[str]: """IDs of storages with balanced charging/discharging flow sizes.""" - return [s.label_full for s in 
self._storages if s.balanced] + return [s.id for s in self._storages if s.balanced] # === Investment Data === @cached_property def invest_params(self) -> dict[str, InvestParameters]: - """Investment parameters for storages with investment, keyed by label_full.""" - return {sid: self._by_label[sid].capacity_in_flow_hours for sid in self.with_investment} + """Investment parameters for storages with investment, keyed by id.""" + return {sid: self._by_id[sid].capacity_in_flow_hours for sid in self.with_investment} @cached_property def investment_data(self) -> InvestmentData | None: @@ -640,12 +640,12 @@ def relative_maximum_charge_state(self) -> xr.DataArray: @cached_property def charging_flow_ids(self) -> list[str]: """Flow IDs for charging flows, aligned with self.ids.""" - return [s.charging.label_full for s in self._storages] + return [s.charging.id for s in self._storages] @cached_property def discharging_flow_ids(self) -> list[str]: """Flow IDs for discharging flows, aligned with self.ids.""" - return [s.discharging.label_full for s in self._storages] + return [s.discharging.id for s in self._storages] # === Capacity and Charge State Bounds === @@ -775,7 +775,7 @@ def validate(self) -> None: for storage in self._storages: storage.validate_config() - sid = storage.label_full + sid = storage.id # Capacity required for non-default relative bounds (DataArray checks) if storage.capacity_in_flow_hours is None: @@ -841,7 +841,7 @@ class FlowsData: """Batched data container for all flows with indexed access. Provides: - - Element lookup by label: `flows['Boiler(gas_in)']` or `flows.get('label')` + - Element lookup by id: `flows['Boiler(gas_in)']` or `flows.get('id')` - Categorizations as list[str]: `flows.with_status`, `flows.with_investment` - Batched parameters as xr.DataArray with flow dimension @@ -855,15 +855,15 @@ def __init__(self, flows: list[Flow], flow_system: FlowSystem): flows: List of all Flow elements. flow_system: Parent FlowSystem for model coordinates. 
""" - self.elements: ElementContainer[Flow] = ElementContainer(flows) + self.elements: IdList = element_id_list(flows) self._fs = flow_system def __getitem__(self, label: str) -> Flow: - """Get a flow by its label_full.""" + """Get a flow by its id.""" return self.elements[label] def get(self, label: str, default: Flow | None = None) -> Flow | None: - """Get a flow by label, returning default if not found.""" + """Get a flow by id, returning default if not found.""" return self.elements.get(label, default) def __len__(self) -> int: @@ -875,7 +875,7 @@ def __iter__(self): @property def ids(self) -> list[str]: - """List of all flow IDs (label_full).""" + """List of all flow IDs.""" return list(self.elements.keys()) @property @@ -890,7 +890,7 @@ def _ids_index(self) -> pd.Index: def _categorize(self, condition) -> list[str]: """Return IDs of flows matching condition(flow) -> bool.""" - return [f.label_full for f in self.elements.values() if condition(f)] + return [f.id for f in self.elements.values() if condition(f)] def _mask(self, condition) -> xr.DataArray: """Return boolean DataArray mask for condition(flow) -> bool.""" @@ -901,7 +901,7 @@ def _mask(self, condition) -> xr.DataArray: ) # === Flow Categorizations === - # All return list[str] of label_full IDs. + # All return list[str] of element IDs. 
@cached_property def with_status(self) -> list[str]: @@ -1101,12 +1101,12 @@ def with_previous_flow_rate(self) -> list[str]: @cached_property def invest_params(self) -> dict[str, InvestParameters]: - """Investment parameters for flows with investment, keyed by label_full.""" + """Investment parameters for flows with investment, keyed by id.""" return {fid: self[fid].size for fid in self.with_investment} @cached_property def status_params(self) -> dict[str, StatusParameters]: - """Status parameters for flows with status, keyed by label_full.""" + """Status parameters for flows with status, keyed by id.""" return {fid: self[fid].status_parameters for fid in self.with_status} @cached_property @@ -1415,7 +1415,7 @@ def effects_per_startup(self) -> xr.DataArray | None: @cached_property def previous_states(self) -> dict[str, xr.DataArray]: - """Previous status for flows with previous_flow_rate, keyed by label_full. + """Previous status for flows with previous_flow_rate, keyed by id. Returns: Dict mapping flow_id -> binary DataArray (time dimension). 
@@ -1653,7 +1653,7 @@ def __init__(self, effect_collection: EffectCollection): @cached_property def effect_ids(self) -> list[str]: - return [e.label for e in self._effects] + return [e.id for e in self._effects] @property def element_ids(self) -> list[str]: @@ -1671,11 +1671,11 @@ def effect_index(self) -> pd.Index: @property def objective_effect_id(self) -> str: - return self._collection.objective_effect.label + return self._collection.objective_effect.id @property def penalty_effect_id(self) -> str: - return self._collection.penalty_effect.label + return self._collection.penalty_effect.id def _effect_values(self, attr_name: str, default: float) -> list: """Extract per-effect attribute values, substituting default for None.""" @@ -1731,17 +1731,17 @@ def effects_with_over_periods(self) -> list[Effect]: @property def period_weights(self) -> dict[str, xr.DataArray]: - """Get period weights for each effect, keyed by effect label.""" + """Get period weights for each effect, keyed by effect id.""" result = {} for effect in self._effects: effect_weights = effect.period_weights default_weights = effect._flow_system.period_weights if effect_weights is not None: - result[effect.label] = effect_weights + result[effect.id] = effect_weights elif default_weights is not None: - result[effect.label] = default_weights + result[effect.id] = default_weights else: - result[effect.label] = effect._fit_coords(name='period_weights', data=1, dims=['period']) + result[effect.id] = effect._fit_coords(name='period_weights', data=1, dims=['period']) return result def effects(self) -> list[Effect]: @@ -1749,7 +1749,7 @@ def effects(self) -> list[Effect]: return self._effects def __getitem__(self, label: str) -> Effect: - """Look up an effect by label (delegates to the collection).""" + """Look up an effect by id (delegates to the collection).""" return self._collection[label] def values(self): @@ -1801,7 +1801,7 @@ class BusesData: def __init__(self, buses: list[Bus]): self._buses = buses - 
self.elements: ElementContainer = ElementContainer(buses) + self.elements: IdList = element_id_list(buses) @property def element_ids(self) -> list[str]: @@ -1814,7 +1814,7 @@ def dim_name(self) -> str: @cached_property def with_imbalance(self) -> list[str]: """IDs of buses allowing imbalance.""" - return [b.label_full for b in self._buses if b.allows_imbalance] + return [b.id for b in self._buses if b.allows_imbalance] @cached_property def imbalance_elements(self) -> list[Bus]: @@ -1827,9 +1827,9 @@ def balance_coefficients(self) -> dict[tuple[str, str], float]: coefficients = {} for bus in self._buses: for f in bus.inputs.values(): - coefficients[(bus.label_full, f.label_full)] = 1.0 + coefficients[(bus.id, f.id)] = 1.0 for f in bus.outputs.values(): - coefficients[(bus.label_full, f.label_full)] = -1.0 + coefficients[(bus.id, f.id)] = -1.0 return coefficients def validate(self) -> None: @@ -1846,7 +1846,7 @@ def validate(self) -> None: zero_penalty = np.all(np.equal(bus.imbalance_penalty_per_flow_hour, 0)) if zero_penalty: logger.warning( - f'In Bus {bus.label_full}, the imbalance_penalty_per_flow_hour is 0. Use "None" or a value > 0.' + f'In Bus {bus.id}, the imbalance_penalty_per_flow_hour is 0. Use "None" or a value > 0.' 
) @@ -1866,7 +1866,7 @@ def __init__( self._flows_data = flows_data self._effect_ids = effect_ids self._timestep_duration = timestep_duration - self.elements: ElementContainer = ElementContainer(components_with_status) + self.elements: IdList = element_id_list(components_with_status) @property def element_ids(self) -> list[str]: @@ -1898,7 +1898,7 @@ def with_prevent_simultaneous(self) -> list[Component]: @cached_property def status_params(self) -> dict[str, StatusParameters]: """Dict of component_id -> StatusParameters.""" - return {c.label: c.status_parameters for c in self._components_with_status} + return {c.id: c.status_parameters for c in self._components_with_status} @cached_property def previous_status_dict(self) -> dict[str, xr.DataArray]: @@ -1907,7 +1907,7 @@ def previous_status_dict(self) -> dict[str, xr.DataArray]: for c in self._components_with_status: prev = self._get_previous_status_for_component(c) if prev is not None: - result[c.label] = prev + result[c.id] = prev return result def _get_previous_status_for_component(self, component) -> xr.DataArray | None: @@ -2004,7 +2004,7 @@ def __init__(self, converters: list[LinearConverter], flow_ids: list[str], times self._converters = converters self._flow_ids = flow_ids self._timesteps = timesteps - self.elements: ElementContainer = ElementContainer(converters) + self.elements: IdList = element_id_list(converters) @property def element_ids(self) -> list[str]: @@ -2029,7 +2029,7 @@ def with_piecewise(self) -> list[LinearConverter]: @cached_property def factor_element_ids(self) -> list[str]: """Element IDs for converters with linear conversion factors.""" - return [c.label for c in self.with_factors] + return [c.id for c in self.with_factors] @cached_property def max_equations(self) -> int: @@ -2073,17 +2073,17 @@ def signed_coefficients(self) -> dict[tuple[str, str], float | xr.DataArray]: intermediate: dict[tuple[str, str], list[tuple[int, float | xr.DataArray]]] = defaultdict(list) for conv in 
self.with_factors: - flow_map = {fl.label: fl.label_full for fl in conv.flows.values()} + flow_map = {fl.flow_id: fl.id for fl in conv.flows.values()} # +1 for inputs, -1 for outputs - flow_signs = {f.label_full: 1.0 for f in conv.inputs.values() if f.label_full in all_flow_ids_set} - flow_signs.update({f.label_full: -1.0 for f in conv.outputs.values() if f.label_full in all_flow_ids_set}) + flow_signs = {f.id: 1.0 for f in conv.inputs.values() if f.id in all_flow_ids_set} + flow_signs.update({f.id: -1.0 for f in conv.outputs.values() if f.id in all_flow_ids_set}) for eq_idx, conv_factors in enumerate(conv.conversion_factors): for flow_label, coeff in conv_factors.items(): flow_id = flow_map.get(flow_label) sign = flow_signs.get(flow_id, 0.0) if flow_id else 0.0 if sign != 0.0: - intermediate[(conv.label, flow_id)].append((eq_idx, coeff * sign)) + intermediate[(conv.id, flow_id)].append((eq_idx, coeff * sign)) # Stack each (converter, flow) pair's per-equation values into a DataArray result: dict[tuple[str, str], float | xr.DataArray] = {} @@ -2112,12 +2112,12 @@ def n_equations_per_converter(self) -> xr.DataArray: @cached_property def piecewise_element_ids(self) -> list[str]: """Element IDs for converters with piecewise conversion.""" - return [c.label for c in self.with_piecewise] + return [c.id for c in self.with_piecewise] @cached_property def piecewise_segment_counts_dict(self) -> dict[str, int]: """Dict mapping converter_id -> number of segments.""" - return {c.label: len(list(c.piecewise_conversion.piecewises.values())[0]) for c in self.with_piecewise} + return {c.id: len(list(c.piecewise_conversion.piecewises.values())[0]) for c in self.with_piecewise} @cached_property def piecewise_max_segments(self) -> int: @@ -2145,7 +2145,7 @@ def piecewise_flow_breakpoints(self) -> dict[str, tuple[xr.DataArray, xr.DataArr all_flow_ids: set[str] = set() for conv in self.with_piecewise: for flow_label in conv.piecewise_conversion.piecewises: - flow_id = 
conv.flows[flow_label].label_full + flow_id = conv.flows[flow_label].id all_flow_ids.add(flow_id) result = {} @@ -2155,15 +2155,15 @@ def piecewise_flow_breakpoints(self) -> dict[str, tuple[xr.DataArray, xr.DataArr # Check if this converter has this flow found = False for flow_label, piecewise in conv.piecewise_conversion.piecewises.items(): - if conv.flows[flow_label].label_full == flow_id: + if conv.flows[flow_label].id == flow_id: starts = [p.start for p in piecewise] ends = [p.end for p in piecewise] - breakpoints[conv.label] = (starts, ends) + breakpoints[conv.id] = (starts, ends) found = True break if not found: # This converter doesn't have this flow - use NaN - breakpoints[conv.label] = ( + breakpoints[conv.id] = ( [np.nan] * self.piecewise_max_segments, [np.nan] * self.piecewise_max_segments, ) @@ -2238,7 +2238,7 @@ class TransmissionsData: def __init__(self, transmissions: list[Transmission], flow_ids: list[str]): self._transmissions = transmissions self._flow_ids = flow_ids - self.elements: ElementContainer = ElementContainer(transmissions) + self.elements: IdList = element_id_list(transmissions) @property def element_ids(self) -> list[str]: @@ -2261,12 +2261,12 @@ def balanced(self) -> list[Transmission]: @cached_property def bidirectional_ids(self) -> list[str]: """Element IDs for bidirectional transmissions.""" - return [t.label for t in self.bidirectional] + return [t.id for t in self.bidirectional] @cached_property def balanced_ids(self) -> list[str]: """Element IDs for balanced transmissions.""" - return [t.label for t in self.balanced] + return [t.id for t in self.balanced] # === Flow Masks for Batched Selection === @@ -2274,14 +2274,14 @@ def _build_flow_mask(self, transmission_ids: list[str], flow_getter) -> xr.DataA """Build (transmission, flow) mask: 1 if flow belongs to transmission. Args: - transmission_ids: List of transmission labels to include. - flow_getter: Function that takes a transmission and returns its flow label_full. 
+ transmission_ids: List of transmission ids to include. + flow_getter: Function that takes a transmission and returns its flow id. """ all_flow_ids = self._flow_ids mask_data = np.zeros((len(transmission_ids), len(all_flow_ids))) for t_idx, t_id in enumerate(transmission_ids): - t = next(t for t in self._transmissions if t.label == t_id) + t = next(t for t in self._transmissions if t.id == t_id) flow_id = flow_getter(t) if flow_id in all_flow_ids: f_idx = all_flow_ids.index(flow_id) @@ -2296,32 +2296,32 @@ def _build_flow_mask(self, transmission_ids: list[str], flow_getter) -> xr.DataA @cached_property def in1_mask(self) -> xr.DataArray: """(transmission, flow) mask: 1 if flow is in1 for transmission.""" - return self._build_flow_mask(self.element_ids, lambda t: t.in1.label_full) + return self._build_flow_mask(self.element_ids, lambda t: t.in1.id) @cached_property def out1_mask(self) -> xr.DataArray: """(transmission, flow) mask: 1 if flow is out1 for transmission.""" - return self._build_flow_mask(self.element_ids, lambda t: t.out1.label_full) + return self._build_flow_mask(self.element_ids, lambda t: t.out1.id) @cached_property def in2_mask(self) -> xr.DataArray: """(transmission, flow) mask for bidirectional: 1 if flow is in2.""" - return self._build_flow_mask(self.bidirectional_ids, lambda t: t.in2.label_full) + return self._build_flow_mask(self.bidirectional_ids, lambda t: t.in2.id) @cached_property def out2_mask(self) -> xr.DataArray: """(transmission, flow) mask for bidirectional: 1 if flow is out2.""" - return self._build_flow_mask(self.bidirectional_ids, lambda t: t.out2.label_full) + return self._build_flow_mask(self.bidirectional_ids, lambda t: t.out2.id) @cached_property def balanced_in1_mask(self) -> xr.DataArray: """(transmission, flow) mask for balanced: 1 if flow is in1.""" - return self._build_flow_mask(self.balanced_ids, lambda t: t.in1.label_full) + return self._build_flow_mask(self.balanced_ids, lambda t: t.in1.id) @cached_property def 
balanced_in2_mask(self) -> xr.DataArray: """(transmission, flow) mask for balanced: 1 if flow is in2.""" - return self._build_flow_mask(self.balanced_ids, lambda t: t.in2.label_full) + return self._build_flow_mask(self.balanced_ids, lambda t: t.in2.id) # === Loss Properties === @@ -2362,9 +2362,7 @@ def has_absolute_losses_mask(self) -> xr.DataArray: @cached_property def transmissions_with_abs_losses(self) -> list[str]: """Element IDs for transmissions with absolute losses.""" - return [ - t.label for t in self._transmissions if t.absolute_losses is not None and np.any(t.absolute_losses != 0) - ] + return [t.id for t in self._transmissions if t.absolute_losses is not None and np.any(t.absolute_losses != 0)] def validate(self) -> None: """Validate all transmissions (config + DataArray checks). @@ -2382,7 +2380,7 @@ def validate(self) -> None: errors: list[str] = [] for transmission in self._transmissions: - tid = transmission.label_full + tid = transmission.id # Balanced size compatibility (DataArray check) if transmission.balanced: diff --git a/flixopt/carrier.py b/flixopt/carrier.py index 8a663eca9..ca4ac0de0 100644 --- a/flixopt/carrier.py +++ b/flixopt/carrier.py @@ -7,7 +7,8 @@ from __future__ import annotations -from .structure import ContainerMixin, Interface, register_class_for_io +from .id_list import IdList +from .structure import Interface, register_class_for_io @register_class_for_io @@ -125,7 +126,7 @@ def __str__(self): return self.name -class CarrierContainer(ContainerMixin['Carrier']): +class CarrierContainer(IdList['Carrier']): """Container for Carrier objects. Uses carrier.name for keying. Provides dict-like access to carriers @@ -146,14 +147,10 @@ class CarrierContainer(ContainerMixin['Carrier']): ``` """ - def __init__(self, carriers: list[Carrier] | dict[str, Carrier] | None = None): + def __init__(self, carriers: list[Carrier] | None = None): """Initialize a CarrierContainer. Args: carriers: Initial carriers to add. 
""" - super().__init__(elements=carriers, element_type_name='carriers') - - def _get_label(self, carrier: Carrier) -> str: - """Extract name from Carrier for keying.""" - return carrier.name + super().__init__(carriers, key_fn=lambda c: c.name, display_name='carriers') diff --git a/flixopt/comparison.py b/flixopt/comparison.py index 9f26b290b..8b6c0fe1c 100644 --- a/flixopt/comparison.py +++ b/flixopt/comparison.py @@ -375,7 +375,7 @@ def inputs(self) -> xr.Dataset: Returns: xr.Dataset with all input parameters. Variable naming follows - the pattern ``{element.label_full}|{parameter_name}``. + the pattern ``{element.id}|{parameter_name}``. Examples: ```python diff --git a/flixopt/components.py b/flixopt/components.py index 08dba94fe..9837bc7bf 100644 --- a/flixopt/components.py +++ b/flixopt/components.py @@ -54,7 +54,7 @@ class LinearConverter(Component): See Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. inputs: list of input Flows that feed into the converter. outputs: list of output Flows that are produced by the converter. status_parameters: Information about active and inactive state of LinearConverter. @@ -62,7 +62,7 @@ class LinearConverter(Component): status variable (binary) in all Flows! If possible, use StatusParameters in a single Flow instead to keep the number of binary variables low. conversion_factors: Linear relationships between flows expressed as a list of - dictionaries. Each dictionary maps flow labels to their coefficients in one + dictionaries. Each dictionary maps flow ids to their coefficients in one linear equation. The number of conversion factors must be less than the total number of flows to ensure degrees of freedom > 0. Either 'conversion_factors' OR 'piecewise_conversion' can be used, but not both. 
@@ -79,7 +79,7 @@ class LinearConverter(Component): ```python heat_exchanger = LinearConverter( - label='primary_hx', + id='primary_hx', inputs=[hot_water_in], outputs=[hot_water_out], conversion_factors=[{'hot_water_in': 0.95, 'hot_water_out': 1}], @@ -90,7 +90,7 @@ class LinearConverter(Component): ```python heat_pump = LinearConverter( - label='air_source_hp', + id='air_source_hp', inputs=[electricity_in], outputs=[heat_output], conversion_factors=[{'electricity_in': 3, 'heat_output': 1}], @@ -101,7 +101,7 @@ class LinearConverter(Component): ```python chp_unit = LinearConverter( - label='gas_chp', + id='gas_chp', inputs=[natural_gas], outputs=[electricity_out, heat_out], conversion_factors=[ @@ -115,7 +115,7 @@ class LinearConverter(Component): ```python electrolyzer = LinearConverter( - label='pem_electrolyzer', + id='pem_electrolyzer', inputs=[electricity_in, water_in], outputs=[hydrogen_out, oxygen_out], conversion_factors=[ @@ -130,7 +130,7 @@ class LinearConverter(Component): ```python variable_efficiency_converter = LinearConverter( - label='variable_converter', + id='variable_converter', inputs=[fuel_in], outputs=[power_out], piecewise_conversion=PiecewiseConversion( @@ -171,16 +171,17 @@ class LinearConverter(Component): def __init__( self, - label: str, - inputs: list[Flow], - outputs: list[Flow], + id: str | None = None, + inputs: list[Flow] | None = None, + outputs: list[Flow] | None = None, status_parameters: StatusParameters | None = None, conversion_factors: list[dict[str, Numeric_TPS]] | None = None, piecewise_conversion: PiecewiseConversion | None = None, meta_data: dict | None = None, color: str | None = None, + **kwargs, ): - super().__init__(label, inputs, outputs, status_parameters, meta_data=meta_data, color=color) + super().__init__(id, inputs, outputs, status_parameters, meta_data=meta_data, color=color, **kwargs) self.conversion_factors = conversion_factors or [] self.piecewise_conversion = piecewise_conversion @@ -214,15 +215,15 @@ def 
validate_config(self) -> None: for flow in conversion_factor: if flow not in self.flows: raise PlausibilityError( - f'{self.label}: Flow {flow} in conversion_factors is not in inputs/outputs' + f'{self.id}: Flow {flow} in conversion_factors is not in inputs/outputs' ) if self.piecewise_conversion: for flow in self.flows.values(): if isinstance(flow.size, InvestParameters) and flow.size.fixed_size is None: logger.warning( f'Using a Flow with variable size (InvestParameters without fixed_size) ' - f'and a piecewise_conversion in {self.label_full} is uncommon. Please verify intent ' - f'({flow.label_full}).' + f'and a piecewise_conversion in {self.id} is uncommon. Please verify intent ' + f'({flow.id}).' ) def _plausibility_checks(self) -> None: @@ -244,9 +245,9 @@ def _transform_conversion_factors(self) -> list[dict[str, xr.DataArray]]: transformed_dict = {} for flow, values in conversion_factor.items(): # TODO: Might be better to use the label of the component instead of the flow - ts = self._fit_coords(f'{self.flows[flow].label_full}|conversion_factor{idx}', values) + ts = self._fit_coords(f'{self.flows[flow].id}|conversion_factor{idx}', values) if ts is None: - raise PlausibilityError(f'{self.label_full}: conversion factor for flow "{flow}" must not be None') + raise PlausibilityError(f'{self.id}: conversion factor for flow "{flow}" must not be None') transformed_dict[flow] = ts list_of_conversion_factors.append(transformed_dict) return list_of_conversion_factors @@ -274,7 +275,7 @@ class Storage(Component): See Args: - label: Element identifier used in the FlowSystem. + id: Element identifier used in the FlowSystem. charging: Incoming flow for loading the storage. discharging: Outgoing flow for unloading the storage. capacity_in_flow_hours: Storage capacity in flow-hours (kWh, m³, kg). 
@@ -317,7 +318,7 @@ class Storage(Component): ```python battery = Storage( - label='lithium_battery', + id='lithium_battery', charging=battery_charge_flow, discharging=battery_discharge_flow, capacity_in_flow_hours=100, # 100 kWh capacity @@ -333,7 +334,7 @@ class Storage(Component): ```python thermal_storage = Storage( - label='hot_water_tank', + id='hot_water_tank', charging=heat_input, discharging=heat_output, capacity_in_flow_hours=500, # 500 kWh thermal capacity @@ -351,7 +352,7 @@ class Storage(Component): ```python pumped_hydro = Storage( - label='pumped_hydro', + id='pumped_hydro', charging=pump_flow, discharging=turbine_flow, capacity_in_flow_hours=InvestParameters( @@ -371,7 +372,7 @@ class Storage(Component): ```python fuel_storage = Storage( - label='natural_gas_storage', + id='natural_gas_storage', charging=gas_injection, discharging=gas_withdrawal, capacity_in_flow_hours=10000, # 10,000 m³ storage volume @@ -408,9 +409,9 @@ class Storage(Component): def __init__( self, - label: str, - charging: Flow, - discharging: Flow, + id: str | None = None, + charging: Flow | None = None, + discharging: Flow | None = None, capacity_in_flow_hours: Numeric_PS | InvestParameters | None = None, relative_minimum_charge_state: Numeric_TPS = 0, relative_maximum_charge_state: Numeric_TPS = 1, @@ -427,15 +428,17 @@ def __init__( cluster_mode: Literal['independent', 'cyclic', 'intercluster', 'intercluster_cyclic'] = 'intercluster_cyclic', meta_data: dict | None = None, color: str | None = None, + **kwargs, ): # TODO: fixed_relative_chargeState implementieren super().__init__( - label, + id, inputs=[charging], outputs=[discharging], prevent_simultaneous_flows=[charging, discharging] if prevent_simultaneous_charge_and_discharge else None, meta_data=meta_data, color=color, + **kwargs, ) self.charging = charging @@ -521,12 +524,12 @@ def validate_config(self) -> None: if self.capacity_in_flow_hours is None: if self.relative_minimum_final_charge_state is not None: raise 
PlausibilityError( - f'Storage "{self.label_full}" has relative_minimum_final_charge_state but no capacity_in_flow_hours. ' + f'Storage "{self.id}" has relative_minimum_final_charge_state but no capacity_in_flow_hours. ' f'A capacity is required for relative final charge state constraints.' ) if self.relative_maximum_final_charge_state is not None: raise PlausibilityError( - f'Storage "{self.label_full}" has relative_maximum_final_charge_state but no capacity_in_flow_hours. ' + f'Storage "{self.id}" has relative_maximum_final_charge_state but no capacity_in_flow_hours. ' f'A capacity is required for relative final charge state constraints.' ) @@ -536,7 +539,7 @@ def validate_config(self) -> None: self.discharging.size, InvestParameters ): raise PlausibilityError( - f'Balancing charging and discharging Flows in {self.label_full} is only possible with Investments.' + f'Balancing charging and discharging Flows in {self.id} is only possible with Investments.' ) def _plausibility_checks(self) -> None: @@ -551,7 +554,7 @@ def __repr__(self) -> str: # Use build_repr_from_init directly to exclude charging and discharging return fx_io.build_repr_from_init( self, - excluded_params={'self', 'label', 'charging', 'discharging', 'kwargs'}, + excluded_params={'self', 'id', 'charging', 'discharging', 'kwargs'}, skip_default_size=True, ) + fx_io.format_flow_details(self) @@ -571,7 +574,7 @@ class Transmission(Component): operation with flow direction constraints. Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. in1: The primary inflow (side A). Pass InvestParameters here for capacity optimization. out1: The primary outflow (side B). in2: Optional secondary inflow (side B) for bidirectional operation. 
@@ -594,7 +597,7 @@ class Transmission(Component): ```python power_line = Transmission( - label='110kv_line', + id='110kv_line', in1=substation_a_out, out1=substation_b_in, relative_losses=0.03, # 3% line losses @@ -605,7 +608,7 @@ class Transmission(Component): ```python gas_pipeline = Transmission( - label='interstate_pipeline', + id='interstate_pipeline', in1=compressor_station_a, out1=distribution_hub_b, in2=compressor_station_b, @@ -620,7 +623,7 @@ class Transmission(Component): ```python heating_network = Transmission( - label='dh_main_line', + id='dh_main_line', in1=Flow( label='heat_supply', bus=central_plant_bus, @@ -640,7 +643,7 @@ class Transmission(Component): ```python conveyor_belt = Transmission( - label='material_transport', + id='material_transport', in1=loading_station, out1=unloading_station, absolute_losses=25, # 25 kW motor power when running @@ -669,9 +672,9 @@ class Transmission(Component): def __init__( self, - label: str, - in1: Flow, - out1: Flow, + id: str | None = None, + in1: Flow | None = None, + out1: Flow | None = None, in2: Flow | None = None, out2: Flow | None = None, relative_losses: Numeric_TPS | None = None, @@ -681,9 +684,10 @@ def __init__( balanced: bool = False, meta_data: dict | None = None, color: str | None = None, + **kwargs, ): super().__init__( - label, + id, inputs=[flow for flow in (in1, in2) if flow is not None], outputs=[flow for flow in (out1, out2) if flow is not None], status_parameters=status_parameters, @@ -692,6 +696,7 @@ def __init__( else [in1, in2], meta_data=meta_data, color=color, + **kwargs, ) self.in1 = in1 self.out1 = out1 @@ -749,9 +754,7 @@ def _propagate_status_parameters(self) -> None: for flow in input_flows: if flow.status_parameters is None: flow.status_parameters = StatusParameters() - flow.status_parameters.link_to_flow_system( - self._flow_system, f'{flow.label_full}|status_parameters' - ) + flow.status_parameters.link_to_flow_system(self._flow_system, f'{flow.id}|status_parameters') rel_min 
= flow.relative_minimum needs_update = ( rel_min is None @@ -823,7 +826,7 @@ def _create_prevent_simultaneous_constraints(self) -> None: ) def storage(self, label: str) -> Storage: - """Get a storage by its label_full.""" + """Get a storage by its id.""" return self.elements[label] @property @@ -1069,8 +1072,8 @@ def _add_balanced_flow_sizes_constraint(self) -> None: discharge_ids = [] for sid in balanced_ids: s = self.data[sid] - cid = s.charging.label_full - did = s.discharging.label_full + cid = s.charging.id + did = s.discharging.id if cid in investment_ids_set and did in investment_ids_set: charge_ids.append(cid) discharge_ids.append(did) @@ -1118,7 +1121,7 @@ def _add_batched_initial_final_constraints(self, charge_state) -> None: # Batched numeric initial constraint if storages_numeric_initial: - ids = [s.label_full for s, _ in storages_numeric_initial] + ids = [s.id for s, _ in storages_numeric_initial] values = stack_along_dim([v for _, v in storages_numeric_initial], self.dim_name, ids) cs_initial = charge_state.sel({dim: ids}).isel(time=0) self.model.add_constraints( @@ -1128,7 +1131,7 @@ def _add_batched_initial_final_constraints(self, charge_state) -> None: # Batched equals_final constraint if storages_equals_final: - ids = [s.label_full for s in storages_equals_final] + ids = [s.id for s in storages_equals_final] cs_subset = charge_state.sel({dim: ids}) self.model.add_constraints( cs_subset.isel(time=0) == cs_subset.isel(time=-1), @@ -1137,7 +1140,7 @@ def _add_batched_initial_final_constraints(self, charge_state) -> None: # Batched max final constraint if storages_max_final: - ids = [s.label_full for s, _ in storages_max_final] + ids = [s.id for s, _ in storages_max_final] values = stack_along_dim([v for _, v in storages_max_final], self.dim_name, ids) cs_final = charge_state.sel({dim: ids}).isel(time=-1) self.model.add_constraints( @@ -1147,7 +1150,7 @@ def _add_batched_initial_final_constraints(self, charge_state) -> None: # Batched min final 
constraint if storages_min_final: - ids = [s.label_full for s, _ in storages_min_final] + ids = [s.id for s, _ in storages_min_final] values = stack_along_dim([v for _, v in storages_min_final], self.dim_name, ids) cs_final = charge_state.sel({dim: ids}).isel(time=-1) self.model.add_constraints( @@ -1164,7 +1167,7 @@ def _add_batched_cluster_cyclic_constraints(self, charge_state) -> None: if not cyclic_storages: return - ids = [s.label_full for s in cyclic_storages] + ids = [s.id for s in cyclic_storages] cs_subset = charge_state.sel({self.dim_name: ids}) self.model.add_constraints( cs_subset.isel(time=0) == cs_subset.isel(time=-2), @@ -1322,18 +1325,18 @@ def _add_initial_final_constraints_legacy(self, storage, cs) -> None: if isinstance(storage.initial_charge_state, str): # 'equals_final' self.model.add_constraints( cs.isel(time=0) == cs.isel(time=-1), - name=f'storage|{storage.label}|initial_charge_state', + name=f'storage|{storage.id}|initial_charge_state', ) else: self.model.add_constraints( cs.isel(time=0) == storage.initial_charge_state, - name=f'storage|{storage.label}|initial_charge_state', + name=f'storage|{storage.id}|initial_charge_state', ) if storage.maximal_final_charge_state is not None: self.model.add_constraints( cs.isel(time=-1) >= storage.minimal_final_charge_state, - name=f'storage|{storage.label}|final_charge_min', + name=f'storage|{storage.id}|final_charge_min', ) logger.debug(f'StoragesModel created constraints for {len(self.elements)} storages') @@ -1706,14 +1709,14 @@ def _add_cyclic_or_initial_constraints(self) -> None: for storage in self.elements.values(): if storage.cluster_mode == 'intercluster_cyclic': - cyclic_ids.append(storage.label_full) + cyclic_ids.append(storage.id) else: initial = storage.initial_charge_state if initial is not None: if isinstance(initial, str) and initial == 'equals_final': - cyclic_ids.append(storage.label_full) + cyclic_ids.append(storage.id) else: - initial_fixed_ids.append(storage.label_full) + 
initial_fixed_ids.append(storage.id) initial_values.append(initial) # Add cyclic constraints @@ -1779,9 +1782,9 @@ def _add_upper_bound_constraint(self, combined: xr.DataArray, sample_name: str) for storage in self.elements.values(): if isinstance(storage.capacity_in_flow_hours, InvestParameters): - invest_ids.append(storage.label_full) + invest_ids.append(storage.id) elif storage.capacity_in_flow_hours is not None: - fixed_ids.append(storage.label_full) + fixed_ids.append(storage.id) fixed_caps.append(storage.capacity_in_flow_hours) # Investment storages: combined <= size @@ -1954,7 +1957,7 @@ class SourceAndSink(Component): or bidirectional grid connections where buying and selling occur at the same location. Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. inputs: Input-flows into the SourceAndSink representing consumption/demand side. outputs: Output-flows from the SourceAndSink representing supply/generation side. 
prevent_simultaneous_flow_rates: If True, prevents simultaneous input and output @@ -1968,7 +1971,7 @@ class SourceAndSink(Component): ```python electricity_market = SourceAndSink( - label='grid_connection', + id='grid_connection', inputs=[electricity_purchase], # Buy from grid outputs=[electricity_sale], # Sell to grid prevent_simultaneous_flow_rates=True, # Can't buy and sell simultaneously @@ -1979,7 +1982,7 @@ class SourceAndSink(Component): ```python gas_storage_facility = SourceAndSink( - label='underground_gas_storage', + id='underground_gas_storage', inputs=[gas_injection_flow], # Inject gas into storage outputs=[gas_withdrawal_flow], # Withdraw gas from storage prevent_simultaneous_flow_rates=True, # Injection or withdrawal, not both @@ -1990,7 +1993,7 @@ class SourceAndSink(Component): ```python dh_connection = SourceAndSink( - label='district_heating_tie', + id='district_heating_tie', inputs=[heat_purchase_flow], # Purchase heat from network outputs=[heat_sale_flow], # Sell excess heat to network prevent_simultaneous_flow_rates=False, # May allow simultaneous flows @@ -2001,7 +2004,7 @@ class SourceAndSink(Component): ```python waste_heat_exchange = SourceAndSink( - label='industrial_heat_hub', + id='industrial_heat_hub', inputs=[ waste_heat_input_a, # Receive waste heat from process A waste_heat_input_b, # Receive waste heat from process B @@ -2032,23 +2035,25 @@ class SourceAndSink(Component): def __init__( self, - label: str, + id: str | None = None, inputs: list[Flow] | None = None, outputs: list[Flow] | None = None, prevent_simultaneous_flow_rates: bool = True, meta_data: dict | None = None, color: str | None = None, + **kwargs, ): - # Convert dict to list for deserialization compatibility (FlowContainers serialize as dicts) + # Convert dict to list for deserialization compatibility (IdLists serialize as dicts) _inputs_list = list(inputs.values()) if isinstance(inputs, dict) else (inputs or []) _outputs_list = list(outputs.values()) if 
isinstance(outputs, dict) else (outputs or []) super().__init__( - label, + id, inputs=_inputs_list, outputs=_outputs_list, prevent_simultaneous_flows=_inputs_list + _outputs_list if prevent_simultaneous_flow_rates else None, meta_data=meta_data, color=color, + **kwargs, ) self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates @@ -2063,7 +2068,7 @@ class Source(Component): unlimited supply capability subject to flow constraints, demand patterns and effects. Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. outputs: Output-flows from the source. Can be single flow or list of flows for sources providing multiple commodities or services. meta_data: Used to store additional information about the Element. Not used @@ -2075,14 +2080,14 @@ class Source(Component): Simple electricity grid connection: ```python - grid_source = Source(label='electrical_grid', outputs=[grid_electricity_flow]) + grid_source = Source(id='electrical_grid', outputs=[grid_electricity_flow]) ``` Natural gas supply with cost and capacity constraints: ```python gas_supply = Source( - label='gas_network', + id='gas_network', outputs=[ Flow( label='natural_gas_flow', @@ -2098,7 +2103,7 @@ class Source(Component): ```python multi_fuel_plant = Source( - label='flexible_generator', + id='flexible_generator', outputs=[coal_electricity, gas_electricity, biomass_electricity], prevent_simultaneous_flow_rates=True, # Can only use one fuel at a time ) @@ -2108,7 +2113,7 @@ class Source(Component): ```python solar_farm = Source( - label='solar_pv', + id='solar_pv', outputs=[ Flow( label='solar_power', @@ -2131,19 +2136,21 @@ class Source(Component): def __init__( self, - label: str, + id: str | None = None, outputs: list[Flow] | None = None, meta_data: dict | None = None, prevent_simultaneous_flow_rates: bool = False, color: str | None = None, + **kwargs, ): self.prevent_simultaneous_flow_rates = 
prevent_simultaneous_flow_rates super().__init__( - label, + id, outputs=outputs, meta_data=meta_data, prevent_simultaneous_flows=outputs if prevent_simultaneous_flow_rates else None, color=color, + **kwargs, ) @@ -2157,7 +2164,7 @@ class Sink(Component): unlimited consumption capability subject to flow constraints, demand patterns and effects. Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. inputs: Input-flows into the sink. Can be single flow or list of flows for sinks consuming multiple commodities or services. meta_data: Used to store additional information about the Element. Not used @@ -2169,14 +2176,14 @@ class Sink(Component): Simple electrical demand: ```python - electrical_load = Sink(label='building_load', inputs=[electricity_demand_flow]) + electrical_load = Sink(id='building_load', inputs=[electricity_demand_flow]) ``` Heat demand with time-varying profile: ```python heat_demand = Sink( - label='district_heating_load', + id='district_heating_load', inputs=[ Flow( label='heat_consumption', @@ -2192,7 +2199,7 @@ class Sink(Component): ```python flexible_building = Sink( - label='smart_building', + id='smart_building', inputs=[electricity_heating, gas_heating, heat_pump_heating], prevent_simultaneous_flow_rates=True, # Can only use one heating mode ) @@ -2202,7 +2209,7 @@ class Sink(Component): ```python factory_load = Sink( - label='manufacturing_plant', + id='manufacturing_plant', inputs=[ Flow( label='electricity_process', @@ -2226,16 +2233,17 @@ class Sink(Component): def __init__( self, - label: str, + id: str | None = None, inputs: list[Flow] | None = None, meta_data: dict | None = None, prevent_simultaneous_flow_rates: bool = False, color: str | None = None, + **kwargs, ): """Initialize a Sink (consumes flow from the system). Args: - label: Unique element label. + id: Unique element id. inputs: Input flows for the sink. 
meta_data: Arbitrary metadata attached to the element. prevent_simultaneous_flow_rates: If True, prevents simultaneous nonzero flow rates @@ -2245,9 +2253,10 @@ def __init__( self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates super().__init__( - label, + id, inputs=inputs, meta_data=meta_data, prevent_simultaneous_flows=inputs if prevent_simultaneous_flow_rates else None, color=color, + **kwargs, ) diff --git a/flixopt/config.py b/flixopt/config.py index c79681cdc..aa4ca4bac 100644 --- a/flixopt/config.py +++ b/flixopt/config.py @@ -20,7 +20,7 @@ COLORLOG_AVAILABLE = False escape_codes = None -__all__ = ['CONFIG', 'MultilineFormatter', 'SUCCESS_LEVEL', 'DEPRECATION_REMOVAL_VERSION'] +__all__ = ['CONFIG', 'MultilineFormatter', 'SUCCESS_LEVEL', 'DEPRECATION_REMOVAL_V7', 'DEPRECATION_REMOVAL_V8'] if COLORLOG_AVAILABLE: __all__.append('ColoredMultilineFormatter') @@ -29,8 +29,9 @@ SUCCESS_LEVEL = 25 logging.addLevelName(SUCCESS_LEVEL, 'SUCCESS') -# Deprecation removal version - update this when planning the next major version -DEPRECATION_REMOVAL_VERSION = '7.0.0' +# Deprecation removal versions — split by the release that introduced them +DEPRECATION_REMOVAL_V7 = '7.0.0' # v6.x deprecations (statistics→stats, Optimization class, topology renames, …) +DEPRECATION_REMOVAL_V8 = '8.0.0' # v7.x deprecations (label→id, Flow constructor, .label/.label_full, …) class MultilineFormatter(logging.Formatter): diff --git a/flixopt/effects.py b/flixopt/effects.py index 1a2c8ccdf..46ce1a403 100644 --- a/flixopt/effects.py +++ b/flixopt/effects.py @@ -16,9 +16,9 @@ import xarray as xr from .core import PlausibilityError +from .id_list import IdList from .structure import ( Element, - ElementContainer, FlowSystemModel, register_class_for_io, ) @@ -30,8 +30,11 @@ logger = logging.getLogger('flixopt') -# Penalty effect label constant -PENALTY_EFFECT_LABEL = 'Penalty' +# Penalty effect ID constant +PENALTY_EFFECT_ID = 'Penalty' + +# Deprecated alias 
+PENALTY_EFFECT_LABEL = PENALTY_EFFECT_ID @register_class_for_io @@ -47,7 +50,7 @@ class Effect(Element): See Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. unit: The unit of the effect (e.g., '€', 'kg_CO2', 'kWh_primary', 'm²'). This is informative only and does not affect optimization. description: Descriptive name explaining what this effect represents. @@ -93,7 +96,7 @@ class Effect(Element): ```python cost_effect = Effect( - label='system_costs', + id='system_costs', unit='€', description='Total system costs', is_objective=True, @@ -104,7 +107,7 @@ class Effect(Element): ```python co2_effect = Effect( - label='CO2', + id='CO2', unit='kg_CO2', description='Carbon dioxide emissions', maximum_total=100_000, # 100 t CO2 per period @@ -115,7 +118,7 @@ class Effect(Element): ```python co2_effect = Effect( - label='CO2', + id='CO2', unit='kg_CO2', description='Carbon dioxide emissions', maximum_over_periods=1_000_000, # 1000 t CO2 total across all periods @@ -126,7 +129,7 @@ class Effect(Element): ```python land_use = Effect( - label='land_usage', + id='land_usage', unit='m²', description='Land area requirement', maximum_total=50_000, # Maximum 5 hectares per period @@ -137,7 +140,7 @@ class Effect(Element): ```python primary_energy = Effect( - label='primary_energy', + id='primary_energy', unit='kWh_primary', description='Primary energy consumption', ) @@ -147,7 +150,7 @@ class Effect(Element): ```python cost_effect = Effect( - label='system_costs', + id='system_costs', unit='€', description='Total system costs', is_objective=True, @@ -162,7 +165,7 @@ class Effect(Element): ```python water_usage = Effect( - label='water_consumption', + id='water_consumption', unit='m³', description='Industrial water usage', minimum_per_hour=10, # Minimum 10 m³/h for process stability @@ -187,8 +190,8 @@ class Effect(Element): def __init__( self, - label: str, - unit: str, + id: str | 
None = None, + unit: str = '', description: str = '', meta_data: dict | None = None, is_standard: bool = False, @@ -206,16 +209,18 @@ def __init__( maximum_total: Numeric_PS | None = None, minimum_over_periods: Numeric_S | None = None, maximum_over_periods: Numeric_S | None = None, + **kwargs, ): - super().__init__(label, meta_data=meta_data) + super().__init__(id, meta_data=meta_data, **kwargs) self.unit = unit self.description = description self.is_standard = is_standard - # Validate that Penalty cannot be set as objective - if is_objective and label == PENALTY_EFFECT_LABEL: + # Validate that Penalty cannot be set as objective (compare resolved self.id, not the id argument, + # so the check is not bypassed when a deprecated label is used with id=None) + if is_objective and self.id == PENALTY_EFFECT_ID: raise ValueError( - f'The Penalty effect ("{PENALTY_EFFECT_LABEL}") cannot be set as the objective effect. ' + f'The Penalty effect ("{PENALTY_EFFECT_ID}") cannot be set as the objective effect. ' f'Please use a different effect as the optimization objective.' ) @@ -242,9 +247,9 @@ def __init__( def link_to_flow_system(self, flow_system, prefix: str = '') -> None: """Link this effect to a FlowSystem. - Elements use their label_full as prefix by default, ignoring the passed prefix. + Elements use their id as prefix by default, ignoring the passed prefix. 
""" - super().link_to_flow_system(flow_system, self.label_full) + super().link_to_flow_system(flow_system, self.id) def transform_data(self) -> None: self.minimum_per_hour = self._fit_coords(f'{self.prefix}|minimum_per_hour', self.minimum_per_hour) @@ -301,7 +306,7 @@ def validate_config(self) -> None: self.minimum_over_periods is not None or self.maximum_over_periods is not None ) and self.flow_system.periods is None: raise PlausibilityError( - f"Effect '{self.label}': minimum_over_periods and maximum_over_periods require " + f"Effect '{self.id}': minimum_over_periods and maximum_over_periods require " f"the FlowSystem to have a 'period' dimension. Please define periods when creating " f'the FlowSystem, or remove these constraints.' ) @@ -532,7 +537,7 @@ def _merge_coords(base_dict: dict, model_coords) -> dict: return effects_with_over_periods = self.data.effects_with_over_periods if effects_with_over_periods: - over_periods_ids = [e.label for e in effects_with_over_periods] + over_periods_ids = [e.id for e in effects_with_over_periods] over_periods_coords = xr.Coordinates( _merge_coords( {'effect': over_periods_ids}, @@ -557,12 +562,12 @@ def _merge_coords(base_dict: dict, model_coords) -> dict: # Create constraint: total_over_periods == weighted sum for each effect # Can't use xr.concat with LinearExpression objects, so create individual constraints for e in effects_with_over_periods: - total_e = self.total.sel(effect=e.label) - weights_e = self.data.period_weights[e.label] + total_e = self.total.sel(effect=e.id) + weights_e = self.data.period_weights[e.id] weighted_total = (total_e * weights_e).sum('period') self.model.add_constraints( - self.total_over_periods.sel(effect=e.label) == weighted_total, - name=f'effect|total_over_periods|{e.label}', + self.total_over_periods.sel(effect=e.id) == weighted_total, + name=f'effect|total_over_periods|{e.id}', ) def _as_expression(self, expr) -> linopy.LinearExpression: @@ -717,16 +722,16 @@ def 
_add_share_between_effects(self): so they appear in the share variables and can be reconstructed by statistics. """ for target_effect in self.data.values(): - target_id = target_effect.label + target_id = target_effect.id # 1. temporal: <- receiving temporal shares from other effects for source_effect, time_series in target_effect.share_from_temporal.items(): - source_id = self.data[source_effect].label + source_id = self.data[source_effect].id source_per_timestep = self.get_per_timestep(source_id) expr = (source_per_timestep * time_series).expand_dims(effect=[target_id], contributor=[source_id]) self.add_temporal_contribution(expr) # 2. periodic: <- receiving periodic shares from other effects for source_effect, factor in target_effect.share_from_periodic.items(): - source_id = self.data[source_effect].label + source_id = self.data[source_effect].id source_periodic = self.get_periodic(source_id) expr = (source_periodic * factor).expand_dims(effect=[target_id], contributor=[source_id]) self.add_periodic_contribution(expr) @@ -741,7 +746,7 @@ def _set_objective(self): ) -class EffectCollection(ElementContainer[Effect]): +class EffectCollection(IdList[Effect]): """ Handling all Effects """ @@ -754,7 +759,7 @@ def __init__(self, *effects: Effect, truncate_repr: int | None = None): *effects: Effects to register in the collection. truncate_repr: Maximum number of items to show in repr. If None, show all items. Default: None """ - super().__init__(element_type_name='effects', truncate_repr=truncate_repr) + super().__init__(key_fn=lambda e: e.id, display_name='effects', truncate_repr=truncate_repr) self._standard_effect: Effect | None = None self._objective_effect: Effect | None = None self._penalty_effect: Effect | None = None @@ -767,66 +772,66 @@ def _create_penalty_effect(self) -> Effect: Only creates if user hasn't already defined a Penalty effect. 
""" # Check if user has already defined a Penalty effect - if PENALTY_EFFECT_LABEL in self: - self._penalty_effect = self[PENALTY_EFFECT_LABEL] - logger.info(f'Using user-defined Penalty Effect: {PENALTY_EFFECT_LABEL}') + if PENALTY_EFFECT_ID in self: + self._penalty_effect = self[PENALTY_EFFECT_ID] + logger.info(f'Using user-defined Penalty Effect: {PENALTY_EFFECT_ID}') return self._penalty_effect # Auto-create penalty effect self._penalty_effect = Effect( - label=PENALTY_EFFECT_LABEL, + id=PENALTY_EFFECT_ID, unit='penalty_units', description='Penalty for constraint violations and modeling artifacts', is_standard=False, is_objective=False, ) self.add(self._penalty_effect) # Add to container - logger.info(f'Auto-created Penalty Effect: {PENALTY_EFFECT_LABEL}') + logger.info(f'Auto-created Penalty Effect: {PENALTY_EFFECT_ID}') return self._penalty_effect def add_effects(self, *effects: Effect) -> None: for effect in list(effects): if effect in self: - raise ValueError(f'Effect with label "{effect.label=}" already added!') + raise ValueError(f'Effect with id "{effect.id=}" already added!') if effect.is_standard: self.standard_effect = effect if effect.is_objective: self.objective_effect = effect - self.add(effect) # Use the inherited add() method from ElementContainer - logger.info(f'Registered new Effect: {effect.label}') + self.add(effect) + logger.info(f'Registered new Effect: {effect.id}') def create_effect_values_dict(self, effect_values_user: Numeric_TPS | Effect_TPS | None) -> Effect_TPS | None: """Converts effect values into a dictionary. If a scalar is provided, it is associated with a default effect type. 
Examples: ```python - effect_values_user = 20 -> {'': 20} - effect_values_user = {None: 20} -> {'': 20} + effect_values_user = 20 -> {'': 20} + effect_values_user = {None: 20} -> {'': 20} effect_values_user = None -> None effect_values_user = {'effect1': 20, 'effect2': 0.3} -> {'effect1': 20, 'effect2': 0.3} ``` Returns: - A dictionary keyed by effect label, or None if input is None. - Note: a standard effect must be defined when passing scalars or None labels. + A dictionary keyed by effect id, or None if input is None. + Note: a standard effect must be defined when passing scalars or None ids. """ - def get_effect_label(eff: str | None) -> str: - """Get the label of an effect""" + def get_effect_id(eff: str | None) -> str: + """Get the id of an effect""" if eff is None: - return self.standard_effect.label + return self.standard_effect.id if isinstance(eff, Effect): raise TypeError( f'Effect objects are no longer accepted when specifying EffectValues. ' - f'Use the label string instead. Got: {eff.label_full}' + f'Use the id string instead. Got: {eff.id}' ) return eff if effect_values_user is None: return None if isinstance(effect_values_user, dict): - return {get_effect_label(effect): value for effect, value in effect_values_user.items()} - return {self.standard_effect.label: effect_values_user} + return {get_effect_id(effect): value for effect, value in effect_values_user.items()} + return {self.standard_effect.id: effect_values_user} def validate_config(self) -> None: """Deprecated: Validation is now handled by EffectsData.validate(). @@ -846,10 +851,10 @@ def _plausibility_checks(self) -> None: def __getitem__(self, effect: str | Effect | None) -> Effect: """ - Get an effect by label, or return the standard effect if None is passed + Get an effect by id, or return the standard effect if None is passed Raises: - KeyError: If no effect with the given label is found. + KeyError: If no effect with the given id is found. KeyError: If no standard effect is specified. 
""" if effect is None: @@ -860,7 +865,7 @@ def __getitem__(self, effect: str | Effect | None) -> Effect: else: raise KeyError(f'Effect {effect} not found!') try: - return super().__getitem__(effect) # Leverage ContainerMixin suggestions + return super().__getitem__(effect) except KeyError as e: # Extract the original message and append context for cleaner output original_msg = str(e).strip('\'"') @@ -870,11 +875,11 @@ def __iter__(self) -> Iterator[str]: return iter(self.keys()) # Iterate over keys like a normal dict def __contains__(self, item: str | Effect) -> bool: - """Check if the effect exists. Checks for label or object""" + """Check if the effect exists. Checks for id or object""" if isinstance(item, str): - return super().__contains__(item) # Check if the label exists + return super().__contains__(item) # Check if the id exists elif isinstance(item, Effect): - return item.label_full in self and self[item.label_full] is item + return item.id in self and self[item.id] is item return False @property @@ -882,14 +887,14 @@ def standard_effect(self) -> Effect: if self._standard_effect is None: raise KeyError( 'No standard-effect specified! Either set an effect through is_standard=True ' - 'or pass a mapping when specifying effect values: {effect_label: value}.' + 'or pass a mapping when specifying effect values: {effect_id: value}.' ) return self._standard_effect @standard_effect.setter def standard_effect(self, value: Effect) -> None: if self._standard_effect is not None: - raise ValueError(f'A standard-effect already exists! ({self._standard_effect.label=})') + raise ValueError(f'A standard-effect already exists! 
({self._standard_effect.id=})') self._standard_effect = value @property @@ -901,13 +906,13 @@ def objective_effect(self) -> Effect: @objective_effect.setter def objective_effect(self, value: Effect) -> None: # Check Penalty first to give users a more specific error message - if value.label == PENALTY_EFFECT_LABEL: + if value.id == PENALTY_EFFECT_ID: raise ValueError( - f'The Penalty effect ("{PENALTY_EFFECT_LABEL}") cannot be set as the objective effect. ' + f'The Penalty effect ("{PENALTY_EFFECT_ID}") cannot be set as the objective effect. ' f'Please use a different effect as the optimization objective.' ) if self._objective_effect is not None: - raise ValueError(f'An objective-effect already exists! ({self._objective_effect.label=})') + raise ValueError(f'An objective-effect already exists! ({self._objective_effect.id=})') self._objective_effect = value @property @@ -922,14 +927,14 @@ def penalty_effect(self) -> Effect: return self._penalty_effect # Check if user has defined a Penalty effect - if PENALTY_EFFECT_LABEL in self: - self._penalty_effect = self[PENALTY_EFFECT_LABEL] + if PENALTY_EFFECT_ID in self: + self._penalty_effect = self[PENALTY_EFFECT_ID] return self._penalty_effect # Not yet created - will be created during modeling raise KeyError( f'Penalty effect not yet created. It will be auto-created during modeling, ' - f'or you can define your own using: Effect("{PENALTY_EFFECT_LABEL}", ...)' + f'or you can define your own using: Effect("{PENALTY_EFFECT_ID}", ...)' ) def calculate_effect_share_factors( diff --git a/flixopt/elements.py b/flixopt/elements.py index df1dc4a99..243b285c2 100644 --- a/flixopt/elements.py +++ b/flixopt/elements.py @@ -13,7 +13,7 @@ import xarray as xr from . 
import io as fx_io -from .config import CONFIG +from .config import CONFIG, DEPRECATION_REMOVAL_V7 from .core import PlausibilityError from .features import ( MaskHelpers, @@ -22,6 +22,7 @@ sparse_multiply_sum, sparse_weighted_sum, ) +from .id_list import IdList, flow_id_list from .interface import InvestParameters, StatusParameters from .modeling import ModelingUtilitiesAbstract from .structure import ( @@ -29,7 +30,6 @@ ComponentVarName, ConverterVarName, Element, - FlowContainer, FlowSystemModel, FlowVarName, TransmissionVarName, @@ -79,7 +79,7 @@ def _add_prevent_simultaneous_constraints( ) mask = MaskHelpers.build_mask( row_dim='component', - row_ids=[c.label for c in with_prevent], + row_ids=[c.id for c in with_prevent], col_dim='flow', col_ids=flows_model.element_ids, membership=membership, @@ -106,7 +106,7 @@ class Component(Element): enabling the modeling of complex energy system topologies and operational constraints. Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. inputs: list of input Flows feeding into the component. These represent energy/material consumption by the component. outputs: list of output Flows leaving the component. 
These represent @@ -141,21 +141,22 @@ class Component(Element): def __init__( self, - label: str, + id: str | None = None, inputs: list[Flow] | dict[str, Flow] | None = None, outputs: list[Flow] | dict[str, Flow] | None = None, status_parameters: StatusParameters | None = None, prevent_simultaneous_flows: list[Flow] | None = None, meta_data: dict | None = None, color: str | None = None, + **kwargs, ): - super().__init__(label, meta_data=meta_data, color=color) + super().__init__(id, meta_data=meta_data, color=color, **kwargs) self.status_parameters = status_parameters if isinstance(prevent_simultaneous_flows, dict): prevent_simultaneous_flows = list(prevent_simultaneous_flows.values()) self.prevent_simultaneous_flows: list[Flow] = prevent_simultaneous_flows or [] - # FlowContainers serialize as dicts, but constructor expects lists + # IdLists serialize as dicts, but constructor expects lists if isinstance(inputs, dict): inputs = list(inputs.values()) if isinstance(outputs, dict): @@ -165,29 +166,29 @@ def __init__( _outputs = outputs or [] # Check uniqueness on raw lists (before connecting) - all_flow_labels = [flow.label for flow in _inputs + _outputs] - if len(set(all_flow_labels)) != len(all_flow_labels): - duplicates = {label for label in all_flow_labels if all_flow_labels.count(label) > 1} - raise ValueError(f'Flow names must be unique! "{self.label_full}" got 2 or more of: {duplicates}') + all_flow_ids = [flow.flow_id for flow in _inputs + _outputs] + if len(set(all_flow_ids)) != len(all_flow_ids): + duplicates = {fid for fid in all_flow_ids if all_flow_ids.count(fid) > 1} + raise ValueError(f'Flow names must be unique! 
"{self.id}" got 2 or more of: {duplicates}') - # Connect flows (sets component name / label_full) before creating FlowContainers + # Connect flows (sets component name) before creating IdLists self._connect_flows(_inputs, _outputs) - # Now label_full is set, so FlowContainer can key by it - self.inputs: FlowContainer = FlowContainer(_inputs, element_type_name='inputs') - self.outputs: FlowContainer = FlowContainer(_outputs, element_type_name='outputs') + # Now flow.id is qualified, so IdList can key by it + self.inputs: IdList = flow_id_list(_inputs, display_name='inputs') + self.outputs: IdList = flow_id_list(_outputs, display_name='outputs') @cached_property - def flows(self) -> FlowContainer: - """All flows (inputs and outputs) as a FlowContainer.""" + def flows(self) -> IdList: + """All flows (inputs and outputs) as an IdList.""" return self.inputs + self.outputs def link_to_flow_system(self, flow_system, prefix: str = '') -> None: """Propagate flow_system reference to nested Interface objects and flows. - Elements use their label_full as prefix by default, ignoring the passed prefix. + Elements use their id_full as prefix by default, ignoring the passed prefix. 
""" - super().link_to_flow_system(flow_system, self.label_full) + super().link_to_flow_system(flow_system, self.id) if self.status_parameters is not None: self.status_parameters.link_to_flow_system(flow_system, self._sub_prefix('status_parameters')) for flow in self.flows.values(): @@ -215,27 +216,23 @@ def _propagate_status_parameters(self) -> None: for flow in self.flows.values(): if flow.status_parameters is None: flow.status_parameters = StatusParameters() - flow.status_parameters.link_to_flow_system( - self._flow_system, f'{flow.label_full}|status_parameters' - ) + flow.status_parameters.link_to_flow_system(self._flow_system, f'{flow.id}|status_parameters') if self.prevent_simultaneous_flows: for flow in self.prevent_simultaneous_flows: if flow.status_parameters is None: flow.status_parameters = StatusParameters() - flow.status_parameters.link_to_flow_system( - self._flow_system, f'{flow.label_full}|status_parameters' - ) + flow.status_parameters.link_to_flow_system(self._flow_system, f'{flow.id}|status_parameters') - def _check_unique_flow_labels(self, inputs: list = None, outputs: list = None): + def _check_unique_flow_ids(self, inputs: list = None, outputs: list = None): if inputs is None: inputs = list(self.inputs.values()) if outputs is None: outputs = list(self.outputs.values()) - all_flow_labels = [flow.label for flow in inputs + outputs] + all_flow_ids = [flow.flow_id for flow in inputs + outputs] - if len(set(all_flow_labels)) != len(all_flow_labels): - duplicates = {label for label in all_flow_labels if all_flow_labels.count(label) > 1} - raise ValueError(f'Flow names must be unique! "{self.label_full}" got 2 or more of: {duplicates}') + if len(set(all_flow_ids)) != len(all_flow_ids): + duplicates = {fid for fid in all_flow_ids if all_flow_ids.count(fid) > 1} + raise ValueError(f'Flow names must be unique! "{self.id}" got 2 or more of: {duplicates}') def validate_config(self) -> None: """Validate configuration consistency. 
@@ -243,15 +240,15 @@ def validate_config(self) -> None: Called BEFORE transformation via FlowSystem._run_config_validation(). These are simple checks that don't require DataArray operations. """ - self._check_unique_flow_labels() + self._check_unique_flow_ids() # Component with status_parameters requires all flows to have sizes set # (status_parameters are propagated to flows in _do_modeling, which need sizes for big-M constraints) if self.status_parameters is not None: - flows_without_size = [flow.label for flow in self.flows.values() if flow.size is None] + flows_without_size = [flow.flow_id for flow in self.flows.values() if flow.size is None] if flows_without_size: raise PlausibilityError( - f'Component "{self.label_full}" has status_parameters, but the following flows have no size: ' + f'Component "{self.id}" has status_parameters, but the following flows have no size: ' f'{flows_without_size}. All flows need explicit sizes when the component uses status_parameters ' f'(required for big-M constraints).' ) @@ -267,21 +264,19 @@ def _connect_flows(self, inputs=None, outputs=None): outputs = list(self.outputs.values()) # Inputs for flow in inputs: - if flow.component not in ('UnknownComponent', self.label_full): + if flow.component not in ('UnknownComponent', self.id): raise ValueError( - f'Flow "{flow.label}" already assigned to component "{flow.component}". ' - f'Cannot attach to "{self.label_full}".' + f'Flow "{flow.id}" already assigned to component "{flow.component}". Cannot attach to "{self.id}".' ) - flow.component = self.label_full + flow.component = self.id flow.is_input_in_component = True # Outputs for flow in outputs: - if flow.component not in ('UnknownComponent', self.label_full): + if flow.component not in ('UnknownComponent', self.id): raise ValueError( - f'Flow "{flow.label}" already assigned to component "{flow.component}". ' - f'Cannot attach to "{self.label_full}".' + f'Flow "{flow.id}" already assigned to component "{flow.component}". 
Cannot attach to "{self.id}".' ) - flow.component = self.label_full + flow.component = self.id flow.is_input_in_component = False # Validate prevent_simultaneous_flows: only allow local flows @@ -294,16 +289,16 @@ def _connect_flows(self, inputs=None, outputs=None): local = set(inputs + outputs) foreign = [f for f in self.prevent_simultaneous_flows if f not in local] if foreign: - names = ', '.join(f.label_full for f in foreign) + names = ', '.join(f.id for f in foreign) raise ValueError( - f'prevent_simultaneous_flows for "{self.label_full}" must reference its own flows. ' + f'prevent_simultaneous_flows for "{self.id}" must reference its own flows. ' f'Foreign flows detected: {names}' ) def __repr__(self) -> str: """Return string representation with flow information.""" return fx_io.build_repr_from_init( - self, excluded_params={'self', 'label', 'inputs', 'outputs', 'kwargs'}, skip_default_size=True + self, excluded_params={'self', 'id', 'inputs', 'outputs', 'kwargs'}, skip_default_size=True ) + fx_io.format_flow_details(self) @@ -373,33 +368,39 @@ class Bus(Element): def __init__( self, - label: str, + id: str | None = None, carrier: str | None = None, imbalance_penalty_per_flow_hour: Numeric_TPS | None = None, meta_data: dict | None = None, **kwargs, ): - super().__init__(label, meta_data=meta_data) - imbalance_penalty_per_flow_hour = self._handle_deprecated_kwarg( - kwargs, 'excess_penalty_per_flow_hour', 'imbalance_penalty_per_flow_hour', imbalance_penalty_per_flow_hour - ) - self._validate_kwargs(kwargs) + # Handle Bus-specific deprecated kwarg before passing kwargs to super + old_penalty = kwargs.pop('excess_penalty_per_flow_hour', None) + super().__init__(id, meta_data=meta_data, **kwargs) + if old_penalty is not None: + imbalance_penalty_per_flow_hour = self._handle_deprecated_kwarg( + {'excess_penalty_per_flow_hour': old_penalty}, + 'excess_penalty_per_flow_hour', + 'imbalance_penalty_per_flow_hour', + imbalance_penalty_per_flow_hour, + 
removal_version=DEPRECATION_REMOVAL_V7, + ) self.carrier = carrier.lower() if carrier else None # Store as lowercase string self.imbalance_penalty_per_flow_hour = imbalance_penalty_per_flow_hour - self.inputs: FlowContainer = FlowContainer(element_type_name='inputs') - self.outputs: FlowContainer = FlowContainer(element_type_name='outputs') + self.inputs: IdList = flow_id_list(display_name='inputs') + self.outputs: IdList = flow_id_list(display_name='outputs') @property - def flows(self) -> FlowContainer: - """All flows (inputs and outputs) as a FlowContainer.""" + def flows(self) -> IdList: + """All flows (inputs and outputs) as an IdList.""" return self.inputs + self.outputs def link_to_flow_system(self, flow_system, prefix: str = '') -> None: """Propagate flow_system reference to nested flows. - Elements use their label_full as prefix by default, ignoring the passed prefix. + Elements use their id_full as prefix by default, ignoring the passed prefix. """ - super().link_to_flow_system(flow_system, self.label_full) + super().link_to_flow_system(flow_system, self.id) for flow in self.flows.values(): flow.link_to_flow_system(flow_system) @@ -415,9 +416,7 @@ def validate_config(self) -> None: These are simple checks that don't require DataArray operations. """ if len(self.inputs) == 0 and len(self.outputs) == 0: - raise ValueError( - f'Bus "{self.label_full}" has no Flows connected to it. Please remove it from the FlowSystem' - ) + raise ValueError(f'Bus "{self.id}" has no Flows connected to it. Please remove it from the FlowSystem') def _plausibility_checks(self) -> None: """Legacy validation method - delegates to validate_config(). @@ -472,8 +471,8 @@ class Flow(Element): See Args: - label: Unique flow identifier within its component. - bus: Bus label this flow connects to. + bus: Bus this flow connects to (string id). First positional argument. + flow_id: Unique flow identifier within its component. Defaults to the bus name. size: Flow capacity. 
Scalar, InvestParameters, or None (unbounded). relative_minimum: Minimum flow rate as fraction of size (0-1). Default: 0. relative_maximum: Maximum flow rate as fraction of size. Default: 1. @@ -498,20 +497,19 @@ class Flow(Element): ```python generator_output = Flow( - label='electricity_out', - bus='electricity_grid', + 'electricity_grid', + flow_id='electricity_out', size=100, # 100 MW capacity relative_minimum=0.4, # Cannot operate below 40 MW effects_per_flow_hour={'fuel_cost': 45, 'co2_emissions': 0.8}, ) ``` - Investment decision for battery capacity: + Investment decision for battery capacity (flow_id defaults to bus name): ```python battery_flow = Flow( - label='electricity_storage', - bus='electricity_grid', + 'electricity_grid', size=InvestParameters( minimum_size=10, # Minimum 10 MWh maximum_size=100, # Maximum 100 MWh @@ -524,8 +522,8 @@ class Flow(Element): ```python heat_pump = Flow( - label='heat_output', - bus='heating_network', + 'heating_network', + flow_id='heat_output', size=50, # 50 kW thermal relative_minimum=0.3, # Minimum 15 kW output when active effects_per_flow_hour={'electricity_cost': 25, 'maintenance': 2}, @@ -542,8 +540,8 @@ class Flow(Element): ```python solar_generation = Flow( - label='solar_power', - bus='electricity_grid', + 'electricity_grid', + flow_id='solar_power', size=25, # 25 MW installed capacity fixed_relative_profile=np.array([0, 0.1, 0.4, 0.8, 0.9, 0.7, 0.3, 0.1, 0]), effects_per_flow_hour={'maintenance_costs': 5}, # €5/MWh maintenance @@ -554,8 +552,8 @@ class Flow(Element): ```python production_line = Flow( - label='product_output', - bus='product_market', + 'product_market', + flow_id='product_output', size=1000, # 1000 units/hour capacity load_factor_min=0.6, # Must achieve 60% annual utilization load_factor_max=0.85, # Cannot exceed 85% for maintenance @@ -587,8 +585,9 @@ class Flow(Element): def __init__( self, - label: str, - bus: str, + *args, + bus: str | None = None, + flow_id: str | None = None, size: 
Numeric_PS | InvestParameters | None = None, fixed_relative_profile: Numeric_TPS | None = None, relative_minimum: Numeric_TPS = 0, @@ -603,8 +602,78 @@ def __init__( load_factor_max: Numeric_PS | None = None, previous_flow_rate: Scalar | list[Scalar] | None = None, meta_data: dict | None = None, + label: str | None = None, + id: str | None = None, + **kwargs, ): - super().__init__(label, meta_data=meta_data) + # --- Resolve positional args + deprecation bridge --- + import warnings + + from .config import DEPRECATION_REMOVAL_V8 + + # Handle deprecated 'id' kwarg (use flow_id instead) + if id is not None: + warnings.warn( + f'Flow(id=...) is deprecated. Use Flow(flow_id=...) instead. ' + f'Will be removed in v{DEPRECATION_REMOVAL_V8}.', + DeprecationWarning, + stacklevel=2, + ) + if flow_id is not None: + raise ValueError('Either id or flow_id can be specified, but not both.') + flow_id = id + + if len(args) == 2: + # Old API: Flow(label, bus) + warnings.warn( + f'Flow(label, bus) positional form is deprecated. ' + f'Use Flow(bus, flow_id=...) instead. Will be removed in v{DEPRECATION_REMOVAL_V8}.', + DeprecationWarning, + stacklevel=2, + ) + if flow_id is None and label is None: + flow_id = args[0] + if bus is None: + bus = args[1] + elif len(args) == 1: + if bus is not None: + # Old API: Flow(label, bus=...) + warnings.warn( + f'Flow(label, bus=...) positional form is deprecated. ' + f'Use Flow(bus, flow_id=...) instead. Will be removed in v{DEPRECATION_REMOVAL_V8}.', + DeprecationWarning, + stacklevel=2, + ) + if flow_id is None and label is None: + flow_id = args[0] + else: + # New API: Flow(bus) — bus is the positional arg + bus = args[0] + elif len(args) > 2: + raise TypeError(f'Flow() takes at most 2 positional arguments ({len(args)} given)') + + # Handle deprecated label kwarg + if label is not None: + warnings.warn( + f'The "label" argument is deprecated. Use "flow_id" instead. 
' + f'Will be removed in v{DEPRECATION_REMOVAL_V8}.', + DeprecationWarning, + stacklevel=2, + ) + if flow_id is not None: + raise ValueError('Either label or flow_id can be specified, but not both.') + flow_id = label + + # Default flow_id to bus name + if flow_id is None: + if bus is None: + raise TypeError('Flow() requires a bus argument.') + flow_id = bus if isinstance(bus, str) else str(bus) + + if bus is None: + raise TypeError('Flow() requires a bus argument.') + + super().__init__(flow_id, meta_data=meta_data, **kwargs) self.size = size self.relative_minimum = relative_minimum self.relative_maximum = relative_maximum @@ -613,7 +682,6 @@ def __init__( self.load_factor_min = load_factor_min self.load_factor_max = load_factor_max - # self.positive_gradient = TimeSeries('positive_gradient', positive_gradient, self) self.effects_per_flow_hour = effects_per_flow_hour if effects_per_flow_hour is not None else {} self.flow_hours_max = flow_hours_max self.flow_hours_min = flow_hours_min @@ -627,17 +695,17 @@ def __init__( self.is_input_in_component: bool | None = None if isinstance(bus, Bus): raise TypeError( - f'Bus {bus.label} is passed as a Bus object to Flow {self.label}. ' - f'This is no longer supported. Add the Bus to the FlowSystem and pass its label (string) to the Flow.' + f'Bus {bus.id} is passed as a Bus object to Flow {self.id}. ' + f'This is no longer supported. Add the Bus to the FlowSystem and pass its id (string) to the Flow.' ) self.bus = bus def link_to_flow_system(self, flow_system, prefix: str = '') -> None: """Propagate flow_system reference to nested Interface objects. - Elements use their label_full as prefix by default, ignoring the passed prefix. + Elements use their id_full as prefix by default, ignoring the passed prefix. 
""" - super().link_to_flow_system(flow_system, self.label_full) + super().link_to_flow_system(flow_system, self.id) if self.status_parameters is not None: self.status_parameters.link_to_flow_system(flow_system, self._sub_prefix('status_parameters')) if isinstance(self.size, InvestParameters): @@ -685,26 +753,26 @@ def validate_config(self) -> None: # Size is required when using StatusParameters (for big-M constraints) if self.status_parameters is not None and self.size is None: raise PlausibilityError( - f'Flow "{self.label_full}" has status_parameters but no size defined. ' + f'Flow "{self.id}" has status_parameters but no size defined. ' f'A size is required when using status_parameters to bound the flow rate.' ) if self.size is None and self.fixed_relative_profile is not None: raise PlausibilityError( - f'Flow "{self.label_full}" has a fixed_relative_profile but no size defined. ' + f'Flow "{self.id}" has a fixed_relative_profile but no size defined. ' f'A size is required because flow_rate = size * fixed_relative_profile.' ) # Size is required for load factor constraints (total_flow_hours / size) if self.size is None and self.load_factor_min is not None: raise PlausibilityError( - f'Flow "{self.label_full}" has load_factor_min but no size defined. ' + f'Flow "{self.id}" has load_factor_min but no size defined. ' f'A size is required because the constraint is total_flow_hours >= size * load_factor_min * hours.' ) if self.size is None and self.load_factor_max is not None: raise PlausibilityError( - f'Flow "{self.label_full}" has load_factor_max but no size defined. ' + f'Flow "{self.id}" has load_factor_max but no size defined. ' f'A size is required because the constraint is total_flow_hours <= size * load_factor_max * hours.' 
) @@ -725,7 +793,7 @@ def validate_config(self) -> None: # Warning: fixed_relative_profile + status_parameters is unusual if self.fixed_relative_profile is not None and self.status_parameters is not None: logger.warning( - f'Flow {self.label_full} has both a fixed_relative_profile and status_parameters. ' + f'Flow {self.id} has both a fixed_relative_profile and status_parameters. ' f'This will allow the flow to be switched active and inactive, effectively differing from the fixed_flow_rate.' ) @@ -737,8 +805,25 @@ def _plausibility_checks(self) -> None: self.validate_config() @property - def label_full(self) -> str: - return f'{self.component}({self.label})' + def flow_id(self) -> str: + """The short flow identifier (e.g. ``'Heat'``). + + This is the user-facing name. Defaults to the bus name if not set explicitly. + """ + return self._short_id + + @flow_id.setter + def flow_id(self, value: str) -> None: + self._short_id = self._valid_id(value) + + @property + def id(self) -> str: + """The qualified identifier: ``component(flow_id)``.""" + return f'{self.component}({self._short_id})' + + @id.setter + def id(self, value: str) -> None: + self._short_id = self._valid_id(value) # ========================================================================= # Type-Level Model Access (for FlowsModel integration) @@ -761,23 +846,23 @@ def flow_rate_from_type_model(self) -> linopy.Variable | None: """ if self._flows_model is None: return None - return self._flows_model.get_variable(FlowVarName.RATE, self.label_full) + return self._flows_model.get_variable(FlowVarName.RATE, self.id) @property def total_flow_hours_from_type_model(self) -> linopy.Variable | None: """Get total_flow_hours from FlowsModel (if using type-level modeling).""" if self._flows_model is None: return None - return self._flows_model.get_variable(FlowVarName.TOTAL_FLOW_HOURS, self.label_full) + return self._flows_model.get_variable(FlowVarName.TOTAL_FLOW_HOURS, self.id) @property def 
status_from_type_model(self) -> linopy.Variable | None: """Get status from FlowsModel (if using type-level modeling).""" if self._flows_model is None or FlowVarName.STATUS not in self._flows_model: return None - if self.label_full not in self._flows_model.status_ids: + if self.id not in self._flows_model.status_ids: return None - return self._flows_model.get_variable(FlowVarName.STATUS, self.label_full) + return self._flows_model.get_variable(FlowVarName.STATUS, self.id) @property def size_is_fixed(self) -> bool: @@ -1586,7 +1671,7 @@ def get_previous_status(self, flow: Flow) -> xr.DataArray | None: Returns: DataArray of previous status (time dimension), or None if no previous status. """ - fid = flow.label_full + fid = flow.id return self.data.previous_states.get(fid) @@ -1686,7 +1771,7 @@ def create_constraints(self) -> None: balance = sparse_multiply_sum(flow_rate, self.data.balance_coefficients, sum_dim=flow_dim, group_dim=bus_dim) if self.buses_with_imbalance: - imbalance_ids = [b.label_full for b in self.buses_with_imbalance] + imbalance_ids = [b.id for b in self.buses_with_imbalance] is_imbalance = xr.DataArray( [b in imbalance_ids for b in bus_ids], dims=[bus_dim], coords={bus_dim: bus_ids} ) @@ -1715,7 +1800,7 @@ def collect_penalty_share_specs(self) -> list[tuple[str, xr.DataArray]]: dim = self.dim_name penalty_specs = [] for bus in self.buses_with_imbalance: - bus_label = bus.label_full + bus_label = bus.id imbalance_penalty = bus.imbalance_penalty_per_flow_hour * self.model.timestep_duration virtual_supply = self[BusVarName.VIRTUAL_SUPPLY].sel({dim: bus_label}) @@ -1823,8 +1908,8 @@ def create_constraints(self) -> None: flow_sum = sparse_weighted_sum(flow_status, mask, sum_dim='flow', group_dim='component') # Separate single-flow vs multi-flow components - single_flow_ids = [c.label for c in self.components if len(c.inputs) + len(c.outputs) == 1] - multi_flow_ids = [c.label for c in self.components if len(c.inputs) + len(c.outputs) > 1] + 
single_flow_ids = [c.id for c in self.components if len(c.inputs) + len(c.outputs) == 1] + multi_flow_ids = [c.id for c in self.components if len(c.inputs) + len(c.outputs) > 1] # Single-flow: exact equality if single_flow_ids: @@ -1878,7 +1963,7 @@ def previous_status_batched(self) -> xr.DataArray | None: for da in previous_status ] comp_prev_status = xr.concat(padded, dim='flow').any(dim='flow').astype(int) - comp_prev_status = comp_prev_status.expand_dims({self.dim_name: [component.label]}) + comp_prev_status = comp_prev_status.expand_dims({self.dim_name: [component.id]}) previous_arrays.append(comp_prev_status) components_with_previous.append(component) @@ -2447,7 +2532,7 @@ def create_constraints(self) -> None: efficiency_expr_2 = in2_rate * (1 - rel_losses_bidir) # Add absolute losses for bidirectional if any have them - bidir_with_abs = [t.label for t in d.bidirectional if t.label in d.transmissions_with_abs_losses] + bidir_with_abs = [t.id for t in d.bidirectional if t.id in d.transmissions_with_abs_losses] if bidir_with_abs: flow_status = self._flows_model[FlowVarName.STATUS] in2_status = (flow_status * d.in2_mask).sum('flow') diff --git a/flixopt/features.py b/flixopt/features.py index 90428a02f..5e3bb73e3 100644 --- a/flixopt/features.py +++ b/flixopt/features.py @@ -842,12 +842,12 @@ def build_flow_membership( get_flows: Function that returns list of flows for an element. Returns: - Dict mapping element label -> list of flow label_full. + Dict mapping element id -> list of flow ids. Example: >>> membership = MaskHelpers.build_flow_membership(storages, lambda s: s.inputs + s.outputs) """ - return {e.label: [f.label_full for f in get_flows(e)] for e in elements} + return {e.id: [f.id for f in get_flows(e)] for e in elements} class PiecewiseBuilder: diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py index 62df3a10b..707db260b 100644 --- a/flixopt/flow_system.py +++ b/flixopt/flow_system.py @@ -17,7 +17,7 @@ from . 
import io as fx_io from .batched import BatchedAccessor from .components import Storage -from .config import CONFIG, DEPRECATION_REMOVAL_VERSION +from .config import CONFIG, DEPRECATION_REMOVAL_V7 from .core import ( ConversionError, DataConverter, @@ -27,13 +27,13 @@ from .effects import Effect, EffectCollection from .elements import Bus, Component, Flow from .flow_system_status import FlowSystemStatus, get_status, invalidate_to_status +from .id_list import IdList, element_id_list from .model_coordinates import ModelCoordinates from .optimize_accessor import OptimizeAccessor from .statistics_accessor import StatisticsAccessor from .structure import ( CompositeContainerMixin, Element, - ElementContainer, FlowSystemModel, Interface, ) @@ -83,7 +83,7 @@ class LegacySolutionWrapper: 'invested': ('flow', 'invested'), } - # Storage-specific mappings (no parentheses in label, e.g., 'Battery|size') + # Storage-specific mappings (no parentheses in id, e.g., 'Battery|size') _LEGACY_STORAGE_VAR_MAP = { 'size': ('storage', 'size'), 'invested': ('storage', 'invested'), @@ -152,33 +152,33 @@ def __getitem__(self, key): if '|' in key: parts = key.rsplit('|', 1) if len(parts) == 2: - element_label, var_suffix = parts + element_id, var_suffix = parts - # Try flow variables first (labels have parentheses like 'Src(heat)') + # Try flow variables first (ids have parentheses like 'Src(heat)') if var_suffix in self._LEGACY_VAR_MAP: dim, var_name = self._LEGACY_VAR_MAP[var_suffix] new_key = f'{dim}|{var_name}' - if new_key in ds and dim in ds.coords and element_label in ds.coords[dim].values: + if new_key in ds and dim in ds.coords and element_id in ds.coords[dim].values: warnings.warn( f"Legacy solution access: solution['{key}'] is deprecated. 
" - f"Use solution['{new_key}'].sel({dim}='{element_label}') instead.", + f"Use solution['{new_key}'].sel({dim}='{element_id}') instead.", DeprecationWarning, stacklevel=2, ) - return ds[new_key].sel({dim: element_label}) + return ds[new_key].sel({dim: element_id}) - # Try storage variables (labels without parentheses like 'Battery') + # Try storage variables (ids without parentheses like 'Battery') if var_suffix in self._LEGACY_STORAGE_VAR_MAP: dim, var_name = self._LEGACY_STORAGE_VAR_MAP[var_suffix] new_key = f'{dim}|{var_name}' - if new_key in ds and dim in ds.coords and element_label in ds.coords[dim].values: + if new_key in ds and dim in ds.coords and element_id in ds.coords[dim].values: warnings.warn( f"Legacy solution access: solution['{key}'] is deprecated. " - f"Use solution['{new_key}'].sel({dim}='{element_label}') instead.", + f"Use solution['{new_key}'].sel({dim}='{element_id}') instead.", DeprecationWarning, stacklevel=2, ) - return ds[new_key].sel({dim: element_label}) + return ds[new_key].sel({dim: element_id}) raise e @@ -210,7 +210,7 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]): This is the main container class that users work with to build and manage their energy or material flow system. FlowSystem provides both direct container access (via .components, .buses, .effects, .flows) and a unified - dict-like interface for accessing any element by label across all container types. + dict-like interface for accessing any element by id across all container types. Args: timesteps: The timesteps of the model. @@ -230,13 +230,17 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]): how many original timesteps each cluster represents. Multiply with timestep_duration for proper time aggregation in clustered models. scenario_independent_sizes: Controls whether investment sizes are equalized across scenarios. - - True: All sizes are shared/equalized across scenarios + Targets investable flows and storages (i.e. 
elements with ``InvestParameters``). + - True: All investable sizes are shared/equalized across scenarios - False: All sizes are optimized separately per scenario - - list[str]: Only specified components (by label_full) are equalized across scenarios + - list[str]: Only the specified investable flow/storage ids are equalized. + Use the qualified flow id format ``"Component(flow_id)"`` (e.g. ``["Boiler(Q_fu)", "Battery"]``). scenario_independent_flow_rates: Controls whether flow rates are equalized across scenarios. + Targets flow rate variables of individual flows. - True: All flow rates are shared/equalized across scenarios - False: All flow rates are optimized separately per scenario - - list[str]: Only specified flows (by label_full) are equalized across scenarios + - list[str]: Only the specified flow ids are equalized. + Use the qualified flow id format ``"Component(flow_id)"`` (e.g. ``["Grid(elec)"]``). Examples: Creating a FlowSystem and accessing elements: @@ -254,7 +258,7 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]): Unified dict-like access (recommended for most cases): - >>> # Access any element by label, regardless of type + >>> # Access any element by id, regardless of type >>> boiler = flow_system['Boiler'] # Returns Component >>> heat_bus = flow_system['Heat'] # Returns Bus >>> costs = flow_system['costs'] # Returns Effect @@ -264,25 +268,25 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]): ... print('Boiler found in system') >>> >>> # Iterate over all elements - >>> for label in flow_system.keys(): - ... element = flow_system[label] - ... print(f'{label}: {type(element).__name__}') + >>> for element_id in flow_system.keys(): + ... element = flow_system[element_id] + ... 
print(f'{element_id}: {type(element).__name__}') >>> - >>> # Get all element labels and objects - >>> all_labels = list(flow_system.keys()) + >>> # Get all element ids and objects + >>> all_ids = list(flow_system.keys()) >>> all_elements = list(flow_system.values()) - >>> for label, element in flow_system.items(): - ... print(f'{label}: {element}') + >>> for element_id, element in flow_system.items(): + ... print(f'{element_id}: {element}') Direct container access for type-specific operations: >>> # Access specific container when you need type filtering >>> for component in flow_system.components.values(): - ... print(f'{component.label}: {len(component.inputs)} inputs') + ... print(f'{component.id}: {len(component.inputs)} inputs') >>> >>> # Access buses directly >>> for bus in flow_system.buses.values(): - ... print(f'{bus.label}') + ... print(f'{bus.id}') >>> >>> # Flows are automatically collected from all components @@ -302,17 +306,17 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]): >>> # - FlowSystem._dataset_isel(dataset, time=..., period=..., scenario=...) >>> # - flow_system._dataset_resample(dataset, freq=..., method=..., **kwargs) >>> for flow in flow_system.flows.values(): - ... print(f'{flow.label_full}: {flow.size}') + ... print(f'{flow.id}: {flow.size}') >>> >>> # Access effects >>> for effect in flow_system.effects.values(): - ... print(f'{effect.label}') + ... print(f'{effect.id}') Notes: - The dict-like interface (`flow_system['element']`) searches across all containers - (components, buses, effects, flows) to find the element with the matching label. - - Element labels must be unique across all container types. Attempting to add - elements with duplicate labels will raise an error, ensuring each label maps to exactly one element. + (components, buses, effects, flows) to find the element with the matching id. + - Element ids must be unique across all container types. 
Attempting to add + elements with duplicate ids will raise an error, ensuring each id maps to exactly one element. - Direct container access (`.components`, `.buses`, `.effects`, `.flows`) is useful when you need type-specific filtering or operations. - The `.flows` container is automatically populated from all component inputs and outputs. @@ -354,10 +358,8 @@ def __init__( ) # Element collections - self.components: ElementContainer[Component] = ElementContainer( - element_type_name='components', truncate_repr=10 - ) - self.buses: ElementContainer[Bus] = ElementContainer(element_type_name='buses', truncate_repr=10) + self.components: IdList[Component] = element_id_list(display_name='components', truncate_repr=10) + self.buses: IdList[Bus] = element_id_list(display_name='buses', truncate_repr=10) self.effects: EffectCollection = EffectCollection(truncate_repr=10) self.model: FlowSystemModel | None = None @@ -365,8 +367,8 @@ def __init__( self._used_in_optimization = False self._network_app = None - self._flows_cache: ElementContainer[Flow] | None = None - self._storages_cache: ElementContainer[Storage] | None = None + self._flows_cache: IdList[Flow] | None = None + self._storages_cache: IdList[Storage] | None = None # Solution dataset - populated after optimization or loaded from file self._solution: xr.Dataset | None = None @@ -412,18 +414,18 @@ def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]: # Extract from components components_structure = {} - for comp_label, component in self.components.items(): + for comp_id, component in self.components.items(): comp_structure, comp_arrays = component._create_reference_structure() all_extracted_arrays.update(comp_arrays) - components_structure[comp_label] = comp_structure + components_structure[comp_id] = comp_structure reference_structure['components'] = components_structure # Extract from buses buses_structure = {} - for bus_label, bus in self.buses.items(): + for bus_id, bus in 
self.buses.items(): bus_structure, bus_arrays = bus._create_reference_structure() all_extracted_arrays.update(bus_arrays) - buses_structure[bus_label] = bus_structure + buses_structure[bus_id] = bus_structure reference_structure['buses'] = buses_structure # Extract from effects @@ -431,7 +433,7 @@ def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]: for effect in self.effects.values(): effect_structure, effect_arrays = effect._create_reference_structure() all_extracted_arrays.update(effect_arrays) - effects_structure[effect.label] = effect_structure + effects_structure[effect.id] = effect_structure reference_structure['effects'] = effects_structure return reference_structure, all_extracted_arrays @@ -616,7 +618,7 @@ def from_old_results(cls, folder: str | pathlib.Path, name: str) -> FlowSystem: This method will be removed in v6. """ warnings.warn( - f'from_old_results() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. ' + f'from_old_results() is deprecated and will be removed in v{DEPRECATION_REMOVAL_V7}. ' 'This utility is only for migrating results from flixopt versions before v5.', DeprecationWarning, stacklevel=2, @@ -684,7 +686,7 @@ def from_old_dataset(cls, path: str | pathlib.Path) -> FlowSystem: This method will be removed in v6. """ warnings.warn( - f'from_old_dataset() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. ' + f'from_old_dataset() is deprecated and will be removed in v{DEPRECATION_REMOVAL_V7}. 
' 'This utility is only for migrating FlowSystems from flixopt versions before v5.', DeprecationWarning, stacklevel=2, @@ -920,8 +922,8 @@ def _assign_element_colors(self) -> None: from .color_processing import process_colors # Collect elements without colors (components only - buses use carrier colors) - # Use label_full for consistent keying with ElementContainer - elements_without_colors = [comp.label_full for comp in self.components.values() if comp.color is None] + # Use id for consistent keying with IdList + elements_without_colors = [comp.id for comp in self.components.values() if comp.color is None] if not elements_without_colors: return @@ -931,9 +933,9 @@ def _assign_element_colors(self) -> None: color_mapping = process_colors(colorscale, elements_without_colors) # Assign colors to elements - for label_full, color in color_mapping.items(): - self.components[label_full].color = color - logger.debug(f"Auto-assigned color '{color}' to component '{label_full}'") + for element_id, color in color_mapping.items(): + self.components[element_id].color = color + logger.debug(f"Auto-assigned color '{color}' to component '{element_id}'") def add_elements(self, *elements: Element) -> None: """ @@ -983,7 +985,7 @@ def add_elements(self, *elements: Element) -> None: # Log registration element_type = type(new_element).__name__ - logger.info(f'Registered new {element_type}: {new_element.label_full}') + logger.info(f'Registered new {element_type}: {new_element.id}') def add_carriers(self, *carriers: Carrier) -> None: """Register a custom carrier for this FlowSystem. @@ -1036,11 +1038,11 @@ def add_carriers(self, *carriers: Carrier) -> None: self._carriers.add(carrier) logger.debug(f'Adding carrier {carrier} to FlowSystem') - def get_carrier(self, label: str) -> Carrier | None: + def get_carrier(self, element_id: str) -> Carrier | None: """Get the carrier for a bus or flow. Args: - label: Bus label (e.g., 'Fernwärme') or flow label (e.g., 'Boiler(Q_th)'). 
+ element_id: Bus id (e.g., 'Fernwärme') or flow id (e.g., 'Boiler(Q_th)'). Returns: Carrier or None if not found. @@ -1053,13 +1055,13 @@ def get_carrier(self, label: str) -> Carrier | None: """ self._require_status(FlowSystemStatus.CONNECTED, 'get carrier') - # Try as bus label - bus = self.buses.get(label) + # Try as bus id + bus = self.buses.get(element_id) if bus and bus.carrier: return self._carriers.get(bus.carrier.lower()) - # Try as flow label - flow = self.flows.get(label) + # Try as flow id + flow = self.flows.get(element_id) if flow and flow.bus: bus = self.buses.get(flow.bus) if bus and bus.carrier: @@ -1074,10 +1076,10 @@ def carriers(self) -> CarrierContainer: @property def flow_carriers(self) -> dict[str, str]: - """Cached mapping of flow labels to carrier names. + """Cached mapping of flow ids to carrier names. Returns: - Dict mapping flow label to carrier name (lowercase). + Dict mapping flow id to carrier name (lowercase). Flows without a carrier are not included. Raises: @@ -1087,10 +1089,10 @@ def flow_carriers(self) -> dict[str, str]: if self._flow_carriers is None: self._flow_carriers = {} - for flow_label, flow in self.flows.items(): + for flow_id, flow in self.flows.items(): bus = self.buses.get(flow.bus) if bus and bus.carrier: - self._flow_carriers[flow_label] = bus.carrier.lower() + self._flow_carriers[flow_id] = bus.carrier.lower() return self._flow_carriers @@ -1103,7 +1105,7 @@ def create_model(self, normalize_weights: bool | None = None) -> FlowSystemModel """ if normalize_weights is not None: warnings.warn( - f'\n\nnormalize_weights parameter is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. ' + f'\n\nnormalize_weights parameter is deprecated and will be removed in {DEPRECATION_REMOVAL_V7}. 
' 'Scenario weights are now always normalized when set on FlowSystem.\n', DeprecationWarning, stacklevel=2, @@ -1139,7 +1141,7 @@ def build_model(self, normalize_weights: bool | None = None) -> FlowSystem: """ if normalize_weights is not None: warnings.warn( - f'\n\nnormalize_weights parameter is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. ' + f'\n\nnormalize_weights parameter is deprecated and will be removed in {DEPRECATION_REMOVAL_V7}. ' 'Scenario weights are now always normalized when set on FlowSystem.\n', DeprecationWarning, stacklevel=2, @@ -1601,7 +1603,7 @@ def plot_network( Visualizes the network structure of a FlowSystem using PyVis. """ warnings.warn( - f'plot_network() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. ' + f'plot_network() is deprecated and will be removed in v{DEPRECATION_REMOVAL_V7}. ' 'Use flow_system.topology.plot() instead.', DeprecationWarning, stacklevel=2, @@ -1615,7 +1617,7 @@ def start_network_app(self) -> None: Visualizes the network structure using Dash and Cytoscape. """ warnings.warn( - f'start_network_app() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. ' + f'start_network_app() is deprecated and will be removed in v{DEPRECATION_REMOVAL_V7}. ' 'Use flow_system.topology.start_app() instead.', DeprecationWarning, stacklevel=2, @@ -1629,7 +1631,7 @@ def stop_network_app(self) -> None: Stop the network visualization server. """ warnings.warn( - f'stop_network_app() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. ' + f'stop_network_app() is deprecated and will be removed in v{DEPRECATION_REMOVAL_V7}. ' 'Use flow_system.topology.stop_app() instead.', DeprecationWarning, stacklevel=2, @@ -1643,7 +1645,7 @@ def network_infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, Get network topology information as dictionaries. """ warnings.warn( - f'network_infos() is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. 
' + f'network_infos() is deprecated and will be removed in v{DEPRECATION_REMOVAL_V7}. ' 'Use flow_system.topology.infos() instead.', DeprecationWarning, stacklevel=2, @@ -1652,14 +1654,14 @@ def network_infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, def _check_if_element_is_unique(self, element: Element) -> None: """ - checks if element or label of element already exists in list + checks if element or id of element already exists in list Args: element: new element to check """ - # check if name is already used: - if element.label_full in self: - raise ValueError(f'Label of Element {element.label_full} already used in another element!') + # check if id is already used: + if element.id in self: + raise ValueError(f'ID of Element {element.id} already used in another element!') def _check_if_element_already_assigned(self, element: Element) -> None: """ @@ -1673,7 +1675,7 @@ def _check_if_element_already_assigned(self, element: Element) -> None: """ if element._flow_system is not None and element._flow_system is not self: raise ValueError( - f'Element "{element.label_full}" is already assigned to another FlowSystem. ' + f'Element "{element.id}" is already assigned to another FlowSystem. ' f'Each element can only belong to one FlowSystem at a time. ' f'To use this element in multiple systems, create a copy: ' f'flow_system.add_elements(element.copy())' @@ -1729,7 +1731,7 @@ def _validate_system_integrity(self) -> None: if flow.bus not in self.buses: available_buses = list(self.buses.keys()) raise ValueError( - f'Flow "{flow.label_full}" references bus "{flow.bus}" which does not exist in FlowSystem. ' + f'Flow "{flow.id}" references bus "{flow.bus}" which does not exist in FlowSystem. ' f'Available buses: {available_buses}. ' f'Did you forget to add the bus using flow_system.add_elements(Bus("{flow.bus}"))?' ) @@ -1761,19 +1763,18 @@ def _connect_network(self): """Connects the network of components and buses. 
Can be rerun without changes if no elements were added""" for component in self.components.values(): for flow in component.flows.values(): - flow.component = component.label_full - flow.is_input_in_component = flow.label_full in component.inputs + flow.component = component.id + flow.is_input_in_component = flow.id in component.inputs # Connect Buses bus = self.buses.get(flow.bus) if bus is None: raise KeyError( - f'Bus {flow.bus} not found in the FlowSystem, but used by "{flow.label_full}". ' - f'Please add it first.' + f'Bus {flow.bus} not found in the FlowSystem, but used by "{flow.id}". Please add it first.' ) - if flow.is_input_in_component and flow.label_full not in bus.outputs: + if flow.is_input_in_component and flow.id not in bus.outputs: bus.outputs.add(flow) - elif not flow.is_input_in_component and flow.label_full not in bus.inputs: + elif not flow.is_input_in_component and flow.id not in bus.inputs: bus.inputs.add(flow) # Count flows manually to avoid triggering cache rebuild @@ -1846,7 +1847,7 @@ def __eq__(self, other: FlowSystem): return True - def _get_container_groups(self) -> dict[str, ElementContainer]: + def _get_container_groups(self) -> dict[str, IdList]: """Return ordered container groups for CompositeContainerMixin.""" return { 'Components': self.components, @@ -1856,26 +1857,26 @@ def _get_container_groups(self) -> dict[str, ElementContainer]: } @property - def flows(self) -> ElementContainer[Flow]: + def flows(self) -> IdList[Flow]: if self._flows_cache is None: flows = [f for c in self.components.values() for f in c.flows.values()] # Deduplicate by id and sort for reproducibility - flows = sorted({id(f): f for f in flows}.values(), key=lambda f: f.label_full.lower()) - self._flows_cache = ElementContainer(flows, element_type_name='flows', truncate_repr=10) + flows = sorted({id(f): f for f in flows}.values(), key=lambda f: f.id.lower()) + self._flows_cache = element_id_list(flows, display_name='flows', truncate_repr=10) return 
self._flows_cache @property - def storages(self) -> ElementContainer[Storage]: - """All storage components as an ElementContainer. + def storages(self) -> IdList[Storage]: + """All storage components as an IdList. Returns: - ElementContainer containing all Storage components in the FlowSystem, - sorted by label for reproducibility. + IdList containing all Storage components in the FlowSystem, + sorted by id for reproducibility. """ if self._storages_cache is None: storages = [c for c in self.components.values() if isinstance(c, Storage)] - storages = sorted(storages, key=lambda s: s.label_full.lower()) - self._storages_cache = ElementContainer(storages, element_type_name='storages', truncate_repr=10) + storages = sorted(storages, key=lambda s: s.id.lower()) + self._storages_cache = element_id_list(storages, display_name='storages', truncate_repr=10) return self._storages_cache # --- Forwarding properties for model coordinate state --- @@ -2010,7 +2011,7 @@ def coords(self) -> dict[FlowSystemDimensions, pd.Index]: Dict mapping dimension names to coordinate arrays. """ warnings.warn( - f'FlowSystem.coords is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. ' + f'FlowSystem.coords is deprecated and will be removed in v{DEPRECATION_REMOVAL_V7}. 
' 'Use FlowSystem.indexes instead.', DeprecationWarning, stacklevel=2, @@ -2104,7 +2105,7 @@ def _validate_scenario_parameter(self, value: bool | list[str], param_name: str, Args: value: The value to validate param_name: Name of the parameter (for error messages) - element_type: Type of elements expected in list (e.g., 'component label_full', 'flow label_full') + element_type: Type of elements expected in list (e.g., 'Element.id', 'Flow.id') Raises: TypeError: If value is not bool or list[str] @@ -2120,52 +2121,56 @@ def _validate_scenario_parameter(self, value: bool | list[str], param_name: str, @property def scenario_independent_sizes(self) -> bool | list[str]: - """ - Controls whether investment sizes are equalized across scenarios. + """Controls whether investment sizes are equalized across scenarios. + + Targets investable flows and storages (i.e. elements with ``InvestParameters``). Returns: - bool or list[str]: Configuration for scenario-independent sizing + bool or list[str]: Configuration for scenario-independent sizing. + When a list, entries are qualified flow/storage ids, e.g. ``["Boiler(Q_fu)", "Battery"]``. """ return self._scenario_independent_sizes @scenario_independent_sizes.setter def scenario_independent_sizes(self, value: bool | list[str]) -> None: - """ - Set whether investment sizes should be equalized across scenarios. + """Set whether investment sizes should be equalized across scenarios. Args: - value: True (all equalized), False (all vary), or list of component label_full strings to equalize + value: True (all equalized), False (all vary), or list of investable + flow/storage ids to equalize (e.g. ``["Boiler(Q_fu)", "Battery"]``). 
Raises: TypeError: If value is not bool or list[str] ValueError: If list contains non-string elements """ - self._validate_scenario_parameter(value, 'scenario_independent_sizes', 'Element.label_full') + self._validate_scenario_parameter(value, 'scenario_independent_sizes', 'Element.id') self._scenario_independent_sizes = value @property def scenario_independent_flow_rates(self) -> bool | list[str]: - """ - Controls whether flow rates are equalized across scenarios. + """Controls whether flow rates are equalized across scenarios. + + Targets flow rate variables of individual flows. Returns: - bool or list[str]: Configuration for scenario-independent flow rates + bool or list[str]: Configuration for scenario-independent flow rates. + When a list, entries are qualified flow ids, e.g. ``["Grid(elec)"]``. """ return self._scenario_independent_flow_rates @scenario_independent_flow_rates.setter def scenario_independent_flow_rates(self, value: bool | list[str]) -> None: - """ - Set whether flow rates should be equalized across scenarios. + """Set whether flow rates should be equalized across scenarios. Args: - value: True (all equalized), False (all vary), or list of flow label_full strings to equalize + value: True (all equalized), False (all vary), or list of qualified + flow ids to equalize (e.g. ``["Grid(elec)"]``). Raises: TypeError: If value is not bool or list[str] ValueError: If list contains non-string elements """ - self._validate_scenario_parameter(value, 'scenario_independent_flow_rates', 'Flow.label_full') + self._validate_scenario_parameter(value, 'scenario_independent_flow_rates', 'Flow.id') self._scenario_independent_flow_rates = value @classmethod @@ -2207,7 +2212,7 @@ def _dataset_sel( xr.Dataset: Selected dataset """ warnings.warn( - f'\n_dataset_sel() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. ' + f'\n_dataset_sel() is deprecated and will be removed in {DEPRECATION_REMOVAL_V7}. 
' 'Use TransformAccessor._dataset_sel() instead.', DeprecationWarning, stacklevel=2, @@ -2244,7 +2249,7 @@ def sel( FlowSystem: New FlowSystem with selected data (no solution). """ warnings.warn( - f'\nsel() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. ' + f'\nsel() is deprecated and will be removed in {DEPRECATION_REMOVAL_V7}. ' 'Use flow_system.transform.sel() instead.', DeprecationWarning, stacklevel=2, @@ -2279,7 +2284,7 @@ def _dataset_isel( xr.Dataset: Selected dataset """ warnings.warn( - f'\n_dataset_isel() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. ' + f'\n_dataset_isel() is deprecated and will be removed in {DEPRECATION_REMOVAL_V7}. ' 'Use TransformAccessor._dataset_isel() instead.', DeprecationWarning, stacklevel=2, @@ -2316,7 +2321,7 @@ def isel( FlowSystem: New FlowSystem with selected data (no solution). """ warnings.warn( - f'\nisel() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. ' + f'\nisel() is deprecated and will be removed in {DEPRECATION_REMOVAL_V7}. ' 'Use flow_system.transform.isel() instead.', DeprecationWarning, stacklevel=2, @@ -2353,7 +2358,7 @@ def _dataset_resample( xr.Dataset: Resampled dataset """ warnings.warn( - f'\n_dataset_resample() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. ' + f'\n_dataset_resample() is deprecated and will be removed in {DEPRECATION_REMOVAL_V7}. ' 'Use TransformAccessor._dataset_resample() instead.', DeprecationWarning, stacklevel=2, @@ -2385,7 +2390,7 @@ def _resample_by_dimension_groups( Will be removed in v6.0.0. """ warnings.warn( - f'\n_resample_by_dimension_groups() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. ' + f'\n_resample_by_dimension_groups() is deprecated and will be removed in {DEPRECATION_REMOVAL_V7}. 
' 'Use TransformAccessor._resample_by_dimension_groups() instead.', DeprecationWarning, stacklevel=2, @@ -2419,7 +2424,7 @@ def resample( FlowSystem: New resampled FlowSystem (no solution). """ warnings.warn( - f'\nresample() is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. ' + f'\nresample() is deprecated and will be removed in {DEPRECATION_REMOVAL_V7}. ' 'Use flow_system.transform.resample() instead.', DeprecationWarning, stacklevel=2, diff --git a/flixopt/id_list.py b/flixopt/id_list.py new file mode 100644 index 000000000..96797e9d3 --- /dev/null +++ b/flixopt/id_list.py @@ -0,0 +1,230 @@ +"""Generic frozen ordered container for named elements. + +IdList provides dict-like access by key (string) or position (int), +with helpful error messages including close-match suggestions. +""" + +from __future__ import annotations + +import re +from difflib import get_close_matches +from typing import TYPE_CHECKING, Generic, TypeVar + +from . import io as fx_io + +if TYPE_CHECKING: + from collections.abc import Callable, Iterator + +T = TypeVar('T') + + +# Precompiled regex pattern for natural sorting +_NATURAL_SPLIT = re.compile(r'(\d+)') + + +def _natural_sort_key(text: str) -> list: + """Sort key for natural ordering (e.g., bus1, bus2, bus10 instead of bus1, bus10, bus2).""" + return [int(c) if c.isdigit() else c.lower() for c in _NATURAL_SPLIT.split(text)] + + +class IdList(Generic[T]): + """Generic frozen ordered container for named elements. + + Backed by ``dict[str, T]`` internally. Provides dict-like access by + primary key, optional short-key fallback, or positional index. + + Args: + elements: Initial elements to add. + key_fn: Callable extracting the primary key from an element. + short_key_fn: Optional callable for fallback lookup (e.g. short id). + display_name: Name shown in repr/error messages (e.g. 'inputs', 'components'). 
+
+    Examples:
+        >>> il = IdList([bus_a, bus_b], key_fn=lambda b: b.id, display_name='buses')
+        >>> il['BusA']
+        >>> il[0]
+        >>> len(il)
+        2
+    """
+
+    __slots__ = ('_data', '_key_fn', '_short_key_fn', '_display_name', '_truncate_repr')
+
+    def __init__(
+        self,
+        elements: list[T] | None = None,
+        *,
+        key_fn: Callable[[T], str],
+        short_key_fn: Callable[[T], str] | None = None,
+        display_name: str = 'elements',
+        truncate_repr: int | None = None,
+    ) -> None:
+        self._data: dict[str, T] = {}
+        self._key_fn = key_fn
+        self._short_key_fn = short_key_fn
+        self._display_name = display_name
+        self._truncate_repr = truncate_repr
+        if elements:
+            for elem in elements:
+                self.add(elem)
+
+    # --- mutation (build phase) -------------------------------------------
+
+    def add(self, element: T) -> None:
+        """Add *element* to the container (build phase).
+
+        Raises:
+            ValueError: If the key already exists.
+        """
+        key = self._key_fn(element)
+        if key in self._data:
+            item_name = element.__class__.__name__
+            raise ValueError(
+                f'{item_name} with id "{key}" already exists in {self._display_name}. '
+                f'Each {item_name.lower()} must have a unique id.'
+ ) + self._data[key] = element + + # --- read access ------------------------------------------------------ + + def __getitem__(self, key: str | int) -> T: + """Get element by primary key, short key, or positional index.""" + if isinstance(key, int): + try: + return list(self._data.values())[key] + except IndexError: + raise IndexError( + f'{self._display_name.capitalize()} index {key} out of range ' + f'(container has {len(self._data)} items)' + ) from None + + # Primary key lookup + if key in self._data: + return self._data[key] + + # Short-key fallback + if self._short_key_fn is not None: + matches = [elem for elem in self._data.values() if self._short_key_fn(elem) == key] + if len(matches) == 1: + return matches[0] + if len(matches) > 1: + full_ids = [self._key_fn(elem) for elem in matches] + raise ValueError( + f'Ambiguous short key {key!r} in {self._display_name}: ' + f'matches {len(matches)} elements: {full_ids}. ' + f'Use the full identifier to resolve the ambiguity.' + ) + + # Error with suggestions + self._raise_key_error(key) + + def __contains__(self, key: object) -> bool: + if not isinstance(key, str): + return False + if key in self._data: + return True + if self._short_key_fn is not None: + return any(self._short_key_fn(elem) == key for elem in self._data.values()) + return False + + def __len__(self) -> int: + return len(self._data) + + def __iter__(self) -> Iterator[str]: + """Iterate over primary keys.""" + return iter(self._data) + + def __bool__(self) -> bool: + return bool(self._data) + + def keys(self) -> list[str]: + return list(self._data.keys()) + + def values(self) -> list[T]: + return list(self._data.values()) + + def items(self) -> list[tuple[str, T]]: + return list(self._data.items()) + + def get(self, key: str, default: T | None = None) -> T | None: + """Get element by primary key, returning *default* if not found.""" + if key in self._data: + return self._data[key] + if self._short_key_fn is not None: + for elem in 
self._data.values(): + if self._short_key_fn(elem) == key: + return elem + return default + + # --- combination ------------------------------------------------------ + + def __add__(self, other: IdList[T]) -> IdList[T]: + """Return a new IdList combining elements from both lists.""" + result = IdList( + key_fn=self._key_fn, + short_key_fn=self._short_key_fn, + display_name=self._display_name, + ) + for elem in self._data.values(): + result.add(elem) + for elem in other._data.values(): + result.add(elem) + return result + + # --- repr ------------------------------------------------------------- + + def _get_repr(self, max_items: int | None = None) -> str: + limit = max_items if max_items is not None else self._truncate_repr + count = len(self._data) + title = f'{self._display_name.capitalize()} ({count} item{"s" if count != 1 else ""})' + + if not self._data: + r = fx_io.format_title_with_underline(title) + r += '\n' + else: + r = fx_io.format_title_with_underline(title) + sorted_names = sorted(self._data.keys(), key=_natural_sort_key) + if limit is not None and limit > 0 and len(sorted_names) > limit: + for name in sorted_names[:limit]: + r += f' * {name}\n' + r += f' ... (+{len(sorted_names) - limit} more)\n' + else: + for name in sorted_names: + r += f' * {name}\n' + return r + + def __repr__(self) -> str: + return self._get_repr() + + # --- helpers ---------------------------------------------------------- + + def _raise_key_error(self, key: str) -> None: + """Raise a KeyError with helpful suggestions.""" + suggestions = get_close_matches(key, self._data.keys(), n=3, cutoff=0.6) + # Also check short keys for suggestions + if self._short_key_fn is not None: + short_keys = [self._short_key_fn(e) for e in self._data.values()] + suggestions += get_close_matches(key, short_keys, n=3, cutoff=0.6) + + error_msg = f'"{key}" not found in {self._display_name}.' + if suggestions: + error_msg += f' Did you mean: {", ".join(suggestions)}?' 
+ else: + available = list(self._data.keys()) + if len(available) <= 5: + error_msg += f' Available: {", ".join(available)}' + else: + error_msg += f' Available: {", ".join(available[:5])} ... (+{len(available) - 5} more)' + raise KeyError(error_msg) from None + + +# --- factory helpers ------------------------------------------------------- + + +def flow_id_list(flows: list | None = None, **kw) -> IdList: + """Create an IdList keyed by ``flow.id`` with short-key fallback to ``flow.flow_id``.""" + return IdList(flows, key_fn=lambda f: f.id, short_key_fn=lambda f: f.flow_id, **kw) + + +def element_id_list(elements: list | None = None, **kw) -> IdList: + """Create an IdList keyed by ``element.id``.""" + return IdList(elements, key_fn=lambda e: e.id, **kw) diff --git a/flixopt/io.py b/flixopt/io.py index 6de7df7a5..8bd777a21 100644 --- a/flixopt/io.py +++ b/flixopt/io.py @@ -1225,8 +1225,9 @@ def _format_value_for_repr(value) -> str: def build_repr_from_init( obj: object, excluded_params: set[str] | None = None, - label_as_positional: bool = True, + id_as_positional: bool = True, skip_default_size: bool = False, + label_as_positional: bool | None = None, ) -> str: """Build a repr string from __init__ signature, showing non-default parameter values. @@ -1236,40 +1237,45 @@ def build_repr_from_init( Args: obj: The object to create repr for - excluded_params: Set of parameter names to exclude (e.g., {'self', 'inputs', 'outputs'}) - Default excludes 'self', 'label', and 'kwargs' - label_as_positional: If True and 'label' param exists, show it as first positional arg + excluded_params: Set of parameter names to exclude (e.g., {'self', 'inputs', 'outputs'}). + Default excludes 'self', 'id', 'label', and 'kwargs'. + 'label' is always excluded as a deprecated alias for 'id'. + id_as_positional: If True and 'id' param exists, show it as first positional arg skip_default_size: Deprecated. Previously skipped size=CONFIG.Modeling.big, now size=None is default. 
+ label_as_positional: Deprecated alias for id_as_positional. Returns: - Formatted repr string like: ClassName("label", param=value) + Formatted repr string like: ClassName("id", param=value) """ + if label_as_positional is not None: + id_as_positional = label_as_positional + if excluded_params is None: - excluded_params = {'self', 'label', 'kwargs'} + excluded_params = {'self', 'id', 'label', 'kwargs'} else: - # Always exclude 'self' - excluded_params = excluded_params | {'self'} + # Always exclude 'self' and deprecated 'label' + excluded_params = excluded_params | {'self', 'label'} try: # Get the constructor arguments and their current values init_signature = inspect.signature(obj.__init__) init_params = init_signature.parameters - # Check if this has a 'label' parameter - if so, show it first as positional - has_label = 'label' in init_params and label_as_positional + # Check if this has an 'id' parameter - if so, show it first as positional + has_id = 'id' in init_params and id_as_positional # Build kwargs for non-default parameters kwargs_parts = [] - label_value = None + id_value = None for param_name, param in init_params.items(): # Skip *args and **kwargs if param.kind in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD): continue - # Handle label separately if showing as positional (check BEFORE excluded_params) - if param_name == 'label' and has_label: - label_value = getattr(obj, param_name, None) + # Handle id separately if showing as positional (check BEFORE excluded_params) + if param_name == 'id' and has_id: + id_value = getattr(obj, param_name, None) continue # Now check if parameter should be excluded @@ -1341,17 +1347,13 @@ def build_repr_from_init( value_repr = _format_value_for_repr(value) kwargs_parts.append(f'{param_name}={value_repr}') - # Build args string with label first as positional if present - if has_label and label_value is not None: - # Use label_full if available, otherwise label - if hasattr(obj, 'label_full'): - 
label_repr = repr(obj.label_full) - else: - label_repr = repr(label_value) + # Build args string with id first as positional if present + if has_id and id_value is not None: + id_repr = repr(id_value) - if len(label_repr) > 50: - label_repr = label_repr[:47] + '...' - args_str = label_repr + if len(id_repr) > 50: + id_repr = id_repr[:47] + '...' + args_str = id_repr if kwargs_parts: args_str += ', ' + ', '.join(kwargs_parts) else: diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py index c5c9afd4d..0212e73e4 100644 --- a/flixopt/linear_converters.py +++ b/flixopt/linear_converters.py @@ -30,7 +30,7 @@ class Boiler(LinearConverter): relationships for thermal generation applications. Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. thermal_efficiency: Thermal efficiency factor (0-1 range). Defines the ratio of thermal output to fuel input energy content. fuel_flow: Fuel input-flow representing fuel consumption. 
@@ -44,7 +44,7 @@ class Boiler(LinearConverter): ```python gas_boiler = Boiler( - label='natural_gas_boiler', + id='natural_gas_boiler', thermal_efficiency=0.85, # 85% thermal efficiency fuel_flow=natural_gas_flow, thermal_flow=hot_water_flow, @@ -55,7 +55,7 @@ class Boiler(LinearConverter): ```python biomass_boiler = Boiler( - label='wood_chip_boiler', + id='wood_chip_boiler', thermal_efficiency=seasonal_efficiency_profile, # Time-varying efficiency fuel_flow=biomass_flow, thermal_flow=district_heat_flow, @@ -75,29 +75,31 @@ class Boiler(LinearConverter): def __init__( self, - label: str, + id: str | None = None, thermal_efficiency: Numeric_TPS | None = None, fuel_flow: Flow | None = None, thermal_flow: Flow | None = None, status_parameters: StatusParameters | None = None, meta_data: dict | None = None, color: str | None = None, + **kwargs, ): # Validate required parameters if fuel_flow is None: - raise ValueError(f"'{label}': fuel_flow is required and cannot be None") + raise ValueError(f"'{id}': fuel_flow is required and cannot be None") if thermal_flow is None: - raise ValueError(f"'{label}': thermal_flow is required and cannot be None") + raise ValueError(f"'{id}': thermal_flow is required and cannot be None") if thermal_efficiency is None: - raise ValueError(f"'{label}': thermal_efficiency is required and cannot be None") + raise ValueError(f"'{id}': thermal_efficiency is required and cannot be None") super().__init__( - label, + id, inputs=[fuel_flow], outputs=[thermal_flow], status_parameters=status_parameters, meta_data=meta_data, color=color, + **kwargs, ) self.fuel_flow = fuel_flow self.thermal_flow = thermal_flow @@ -105,12 +107,12 @@ def __init__( @property def thermal_efficiency(self): - return self.conversion_factors[0][self.fuel_flow.label] + return self.conversion_factors[0][self.fuel_flow.flow_id] @thermal_efficiency.setter def thermal_efficiency(self, value): - check_bounds(value, 'thermal_efficiency', self.label_full, 0, 1) - 
self.conversion_factors = [{self.fuel_flow.label: value, self.thermal_flow.label: 1}] + check_bounds(value, 'thermal_efficiency', self.id, 0, 1) + self.conversion_factors = [{self.fuel_flow.flow_id: value, self.thermal_flow.flow_id: 1}] @register_class_for_io @@ -124,7 +126,7 @@ class Power2Heat(LinearConverter): conversion relationships for electric heating applications. Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. thermal_efficiency: Thermal efficiency factor (0-1 range). For resistance heating this is typically close to 1.0 (nearly 100% efficiency), but may be lower for electrode boilers or systems with distribution losses. @@ -139,7 +141,7 @@ class Power2Heat(LinearConverter): ```python electric_heater = Power2Heat( - label='resistance_heater', + id='resistance_heater', thermal_efficiency=0.98, # 98% efficiency (small losses) electrical_flow=electricity_flow, thermal_flow=space_heating_flow, @@ -150,7 +152,7 @@ class Power2Heat(LinearConverter): ```python electrode_boiler = Power2Heat( - label='electrode_steam_boiler', + id='electrode_steam_boiler', thermal_efficiency=0.95, # 95% efficiency including boiler losses electrical_flow=industrial_electricity, thermal_flow=process_steam_flow, @@ -172,29 +174,31 @@ class Power2Heat(LinearConverter): def __init__( self, - label: str, + id: str | None = None, thermal_efficiency: Numeric_TPS | None = None, electrical_flow: Flow | None = None, thermal_flow: Flow | None = None, status_parameters: StatusParameters | None = None, meta_data: dict | None = None, color: str | None = None, + **kwargs, ): # Validate required parameters if electrical_flow is None: - raise ValueError(f"'{label}': electrical_flow is required and cannot be None") + raise ValueError(f"'{id}': electrical_flow is required and cannot be None") if thermal_flow is None: - raise ValueError(f"'{label}': thermal_flow is required and cannot be None") + raise 
ValueError(f"'{id}': thermal_flow is required and cannot be None") if thermal_efficiency is None: - raise ValueError(f"'{label}': thermal_efficiency is required and cannot be None") + raise ValueError(f"'{id}': thermal_efficiency is required and cannot be None") super().__init__( - label, + id, inputs=[electrical_flow], outputs=[thermal_flow], status_parameters=status_parameters, meta_data=meta_data, color=color, + **kwargs, ) self.electrical_flow = electrical_flow @@ -203,12 +207,12 @@ def __init__( @property def thermal_efficiency(self): - return self.conversion_factors[0][self.electrical_flow.label] + return self.conversion_factors[0][self.electrical_flow.flow_id] @thermal_efficiency.setter def thermal_efficiency(self, value): - check_bounds(value, 'thermal_efficiency', self.label_full, 0, 1) - self.conversion_factors = [{self.electrical_flow.label: value, self.thermal_flow.label: 1}] + check_bounds(value, 'thermal_efficiency', self.id, 0, 1) + self.conversion_factors = [{self.electrical_flow.flow_id: value, self.thermal_flow.flow_id: 1}] @register_class_for_io @@ -222,7 +226,7 @@ class HeatPump(LinearConverter): conversion relationships for heat pump applications. Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. cop: Coefficient of Performance (typically 1-20 range). Defines the ratio of thermal output to electrical input. COP > 1 indicates the heat pump extracts additional energy from the environment. 
@@ -237,7 +241,7 @@ class HeatPump(LinearConverter): ```python air_hp = HeatPump( - label='air_source_heat_pump', + id='air_source_heat_pump', cop=3.5, # COP of 3.5 (350% efficiency) electrical_flow=electricity_flow, thermal_flow=heating_flow, @@ -248,7 +252,7 @@ class HeatPump(LinearConverter): ```python ground_hp = HeatPump( - label='geothermal_heat_pump', + id='geothermal_heat_pump', cop=temperature_dependent_cop, # Time-varying COP based on ground temp electrical_flow=electricity_flow, thermal_flow=radiant_heating_flow, @@ -269,30 +273,32 @@ class HeatPump(LinearConverter): def __init__( self, - label: str, + id: str | None = None, cop: Numeric_TPS | None = None, electrical_flow: Flow | None = None, thermal_flow: Flow | None = None, status_parameters: StatusParameters | None = None, meta_data: dict | None = None, color: str | None = None, + **kwargs, ): # Validate required parameters if electrical_flow is None: - raise ValueError(f"'{label}': electrical_flow is required and cannot be None") + raise ValueError(f"'{id}': electrical_flow is required and cannot be None") if thermal_flow is None: - raise ValueError(f"'{label}': thermal_flow is required and cannot be None") + raise ValueError(f"'{id}': thermal_flow is required and cannot be None") if cop is None: - raise ValueError(f"'{label}': cop is required and cannot be None") + raise ValueError(f"'{id}': cop is required and cannot be None") super().__init__( - label, + id, inputs=[electrical_flow], outputs=[thermal_flow], conversion_factors=[], status_parameters=status_parameters, meta_data=meta_data, color=color, + **kwargs, ) self.electrical_flow = electrical_flow self.thermal_flow = thermal_flow @@ -300,12 +306,12 @@ def __init__( @property def cop(self): - return self.conversion_factors[0][self.electrical_flow.label] + return self.conversion_factors[0][self.electrical_flow.flow_id] @cop.setter def cop(self, value): - check_bounds(value, 'cop', self.label_full, 1, 20) - self.conversion_factors = 
[{self.electrical_flow.label: value, self.thermal_flow.label: 1}] + check_bounds(value, 'cop', self.id, 1, 20) + self.conversion_factors = [{self.electrical_flow.flow_id: value, self.thermal_flow.flow_id: 1}] @register_class_for_io @@ -319,7 +325,7 @@ class CoolingTower(LinearConverter): has no thermal outputs as the heat is rejected to the environment. Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. specific_electricity_demand: Auxiliary electricity demand per unit of cooling power (dimensionless, typically 0.01-0.05 range). Represents the fraction of thermal power that must be supplied as electricity for fans and pumps. @@ -334,7 +340,7 @@ class CoolingTower(LinearConverter): ```python cooling_tower = CoolingTower( - label='process_cooling_tower', + id='process_cooling_tower', specific_electricity_demand=0.025, # 2.5% auxiliary power electrical_flow=cooling_electricity, thermal_flow=waste_heat_flow, @@ -345,7 +351,7 @@ class CoolingTower(LinearConverter): ```python condenser_cooling = CoolingTower( - label='power_plant_cooling', + id='power_plant_cooling', specific_electricity_demand=0.015, # 1.5% auxiliary power electrical_flow=auxiliary_electricity, thermal_flow=condenser_waste_heat, @@ -368,27 +374,29 @@ class CoolingTower(LinearConverter): def __init__( self, - label: str, - specific_electricity_demand: Numeric_TPS, + id: str | None = None, + specific_electricity_demand: Numeric_TPS | None = None, electrical_flow: Flow | None = None, thermal_flow: Flow | None = None, status_parameters: StatusParameters | None = None, meta_data: dict | None = None, color: str | None = None, + **kwargs, ): # Validate required parameters if electrical_flow is None: - raise ValueError(f"'{label}': electrical_flow is required and cannot be None") + raise ValueError(f"'{id}': electrical_flow is required and cannot be None") if thermal_flow is None: - raise ValueError(f"'{label}': 
thermal_flow is required and cannot be None") + raise ValueError(f"'{id}': thermal_flow is required and cannot be None") super().__init__( - label, + id, inputs=[electrical_flow, thermal_flow], outputs=[], status_parameters=status_parameters, meta_data=meta_data, color=color, + **kwargs, ) self.electrical_flow = electrical_flow @@ -397,12 +405,12 @@ def __init__( @property def specific_electricity_demand(self): - return self.conversion_factors[0][self.thermal_flow.label] + return self.conversion_factors[0][self.thermal_flow.flow_id] @specific_electricity_demand.setter def specific_electricity_demand(self, value): - check_bounds(value, 'specific_electricity_demand', self.label_full, 0, 1) - self.conversion_factors = [{self.electrical_flow.label: -1, self.thermal_flow.label: value}] + check_bounds(value, 'specific_electricity_demand', self.id, 0, 1) + self.conversion_factors = [{self.electrical_flow.flow_id: -1, self.thermal_flow.flow_id: value}] @register_class_for_io @@ -416,7 +424,7 @@ class CHP(LinearConverter): cogeneration applications. Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. thermal_efficiency: Thermal efficiency factor (0-1 range). Defines the fraction of fuel energy converted to useful thermal output. electrical_efficiency: Electrical efficiency factor (0-1 range). 
Defines the fraction of fuel @@ -433,7 +441,7 @@ class CHP(LinearConverter): ```python gas_chp = CHP( - label='natural_gas_chp', + id='natural_gas_chp', thermal_efficiency=0.45, # 45% thermal efficiency electrical_efficiency=0.35, # 35% electrical efficiency (80% total) fuel_flow=natural_gas_flow, @@ -446,7 +454,7 @@ class CHP(LinearConverter): ```python industrial_chp = CHP( - label='industrial_chp', + id='industrial_chp', thermal_efficiency=0.40, electrical_efficiency=0.38, fuel_flow=fuel_gas_flow, @@ -472,7 +480,7 @@ class CHP(LinearConverter): def __init__( self, - label: str, + id: str | None = None, thermal_efficiency: Numeric_TPS | None = None, electrical_efficiency: Numeric_TPS | None = None, fuel_flow: Flow | None = None, @@ -481,27 +489,29 @@ def __init__( status_parameters: StatusParameters | None = None, meta_data: dict | None = None, color: str | None = None, + **kwargs, ): # Validate required parameters if fuel_flow is None: - raise ValueError(f"'{label}': fuel_flow is required and cannot be None") + raise ValueError(f"'{id}': fuel_flow is required and cannot be None") if electrical_flow is None: - raise ValueError(f"'{label}': electrical_flow is required and cannot be None") + raise ValueError(f"'{id}': electrical_flow is required and cannot be None") if thermal_flow is None: - raise ValueError(f"'{label}': thermal_flow is required and cannot be None") + raise ValueError(f"'{id}': thermal_flow is required and cannot be None") if thermal_efficiency is None: - raise ValueError(f"'{label}': thermal_efficiency is required and cannot be None") + raise ValueError(f"'{id}': thermal_efficiency is required and cannot be None") if electrical_efficiency is None: - raise ValueError(f"'{label}': electrical_efficiency is required and cannot be None") + raise ValueError(f"'{id}': electrical_efficiency is required and cannot be None") super().__init__( - label, + id, inputs=[fuel_flow], outputs=[thermal_flow, electrical_flow], conversion_factors=[{}, {}], 
status_parameters=status_parameters, meta_data=meta_data, color=color, + **kwargs, ) self.fuel_flow = fuel_flow @@ -513,28 +523,28 @@ def __init__( check_bounds( electrical_efficiency + thermal_efficiency, 'thermal_efficiency+electrical_efficiency', - self.label_full, + self.id, 0, 1, ) @property def thermal_efficiency(self): - return self.conversion_factors[0][self.fuel_flow.label] + return self.conversion_factors[0][self.fuel_flow.flow_id] @thermal_efficiency.setter def thermal_efficiency(self, value): - check_bounds(value, 'thermal_efficiency', self.label_full, 0, 1) - self.conversion_factors[0] = {self.fuel_flow.label: value, self.thermal_flow.label: 1} + check_bounds(value, 'thermal_efficiency', self.id, 0, 1) + self.conversion_factors[0] = {self.fuel_flow.flow_id: value, self.thermal_flow.flow_id: 1} @property def electrical_efficiency(self): - return self.conversion_factors[1][self.fuel_flow.label] + return self.conversion_factors[1][self.fuel_flow.flow_id] @electrical_efficiency.setter def electrical_efficiency(self, value): - check_bounds(value, 'electrical_efficiency', self.label_full, 0, 1) - self.conversion_factors[1] = {self.fuel_flow.label: value, self.electrical_flow.label: 1} + check_bounds(value, 'electrical_efficiency', self.id, 0, 1) + self.conversion_factors[1] = {self.fuel_flow.flow_id: value, self.electrical_flow.flow_id: 1} @register_class_for_io @@ -548,7 +558,7 @@ class HeatPumpWithSource(LinearConverter): heat source extraction and electrical consumption with their interdependent relationships. Args: - label: The label of the Element. Used to identify it in the FlowSystem. + id: The id of the Element. Used to identify it in the FlowSystem. cop: Coefficient of Performance (typically 1-20 range). Defines the ratio of thermal output to electrical input. The heat source extraction is automatically calculated as heat_source_flow = thermal_flow × (COP-1)/COP. 
@@ -565,7 +575,7 @@ class HeatPumpWithSource(LinearConverter): ```python ground_source_hp = HeatPumpWithSource( - label='geothermal_heat_pump', + id='geothermal_heat_pump', cop=4.5, # High COP due to stable ground temperature electrical_flow=electricity_flow, heat_source_flow=ground_heat_extraction, # Heat extracted from ground loop @@ -577,7 +587,7 @@ class HeatPumpWithSource(LinearConverter): ```python waste_heat_pump = HeatPumpWithSource( - label='waste_heat_pump', + id='waste_heat_pump', cop=temperature_dependent_cop, # Varies with temperature of heat source electrical_flow=electricity_consumption, heat_source_flow=industrial_heat_extraction, # Heat extracted from a industrial process or waste water @@ -605,7 +615,7 @@ class HeatPumpWithSource(LinearConverter): def __init__( self, - label: str, + id: str | None = None, cop: Numeric_TPS | None = None, electrical_flow: Flow | None = None, heat_source_flow: Flow | None = None, @@ -613,24 +623,26 @@ def __init__( status_parameters: StatusParameters | None = None, meta_data: dict | None = None, color: str | None = None, + **kwargs, ): # Validate required parameters if electrical_flow is None: - raise ValueError(f"'{label}': electrical_flow is required and cannot be None") + raise ValueError(f"'{id}': electrical_flow is required and cannot be None") if heat_source_flow is None: - raise ValueError(f"'{label}': heat_source_flow is required and cannot be None") + raise ValueError(f"'{id}': heat_source_flow is required and cannot be None") if thermal_flow is None: - raise ValueError(f"'{label}': thermal_flow is required and cannot be None") + raise ValueError(f"'{id}': thermal_flow is required and cannot be None") if cop is None: - raise ValueError(f"'{label}': cop is required and cannot be None") + raise ValueError(f"'{id}': cop is required and cannot be None") super().__init__( - label, + id, inputs=[electrical_flow, heat_source_flow], outputs=[thermal_flow], status_parameters=status_parameters, meta_data=meta_data, 
color=color, + **kwargs, ) self.electrical_flow = electrical_flow self.heat_source_flow = heat_source_flow @@ -639,16 +651,16 @@ def __init__( @property def cop(self): - return self.conversion_factors[0][self.electrical_flow.label] + return self.conversion_factors[0][self.electrical_flow.flow_id] @cop.setter def cop(self, value): - check_bounds(value, 'cop', self.label_full, 1, 20) + check_bounds(value, 'cop', self.id, 1, 20) if np.any(np.asarray(value) == 1): - raise ValueError(f'{self.label_full}.cop must be strictly !=1 for HeatPumpWithSource.') + raise ValueError(f'{self.id}.cop must be strictly !=1 for HeatPumpWithSource.') self.conversion_factors = [ - {self.electrical_flow.label: value, self.thermal_flow.label: 1}, - {self.heat_source_flow.label: value / (value - 1), self.thermal_flow.label: 1}, + {self.electrical_flow.flow_id: value, self.thermal_flow.flow_id: 1}, + {self.heat_source_flow.flow_id: value / (value - 1), self.thermal_flow.flow_id: 1}, ] diff --git a/flixopt/network_app.py b/flixopt/network_app.py index 32b0af2cd..db8ed612f 100644 --- a/flixopt/network_app.py +++ b/flixopt/network_app.py @@ -158,7 +158,7 @@ def get_shape(element): # Add nodes with attributes for node in nodes: graph.add_node( - node.label_full, + node.id, color=VisualizationConfig.DEFAULT_COLORS[get_element_type(node)], shape=get_shape(node), element_type=get_element_type(node), @@ -171,7 +171,7 @@ def get_shape(element): graph.add_edge( u_of_edge=edge.bus if edge.is_input_in_component else edge.component, v_of_edge=edge.component if edge.is_input_in_component else edge.bus, - label=edge.label_full, + label=edge.id, parameters=edge.__str__().replace(')', '\n)'), ) except Exception as e: diff --git a/flixopt/optimization.py b/flixopt/optimization.py index 006678586..c6d354ef4 100644 --- a/flixopt/optimization.py +++ b/flixopt/optimization.py @@ -23,8 +23,8 @@ from . 
import io as fx_io from .components import Storage -from .config import CONFIG, DEPRECATION_REMOVAL_VERSION, SUCCESS_LEVEL -from .effects import PENALTY_EFFECT_LABEL +from .config import CONFIG, DEPRECATION_REMOVAL_V7, SUCCESS_LEVEL +from .effects import PENALTY_EFFECT_ID from .results import Results, SegmentedResults from .structure import BusVarName, FlowVarName, StorageVarName @@ -109,7 +109,7 @@ def _initialize_optimization_common( # normalize_weights is deprecated but kept for backwards compatibility if normalize_weights is not None: warnings.warn( - f'\n\nnormalize_weights parameter is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. ' + f'\n\nnormalize_weights parameter is deprecated and will be removed in {DEPRECATION_REMOVAL_V7}. ' 'Scenario weights are now always normalized when set on FlowSystem.\n', DeprecationWarning, stacklevel=3, @@ -176,7 +176,7 @@ def __init__( normalize_weights: bool = True, ): warnings.warn( - f'Optimization is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. ' + f'Optimization is deprecated and will be removed in v{DEPRECATION_REMOVAL_V7}. ' 'Use FlowSystem.optimize(solver) or FlowSystem.build_model() + FlowSystem.solve(solver) instead. 
' 'Access results via FlowSystem.solution.', DeprecationWarning, @@ -299,7 +299,7 @@ def main_results(self) -> dict[str, int | float | dict]: effects_model = self.model.effects try: - penalty_effect_id = PENALTY_EFFECT_LABEL + penalty_effect_id = PENALTY_EFFECT_ID penalty_section = { 'temporal': effects_model.temporal.sel(effect=penalty_effect_id).solution.values, 'periodic': effects_model.periodic.sel(effect=penalty_effect_id).solution.values, @@ -310,10 +310,10 @@ def main_results(self) -> dict[str, int | float | dict]: # Get effect totals from type-level model effects_section = {} - for effect in sorted(self.flow_system.effects.values(), key=lambda e: e.label_full.upper()): - if effect.label_full != PENALTY_EFFECT_LABEL: - effect_id = effect.label - effects_section[f'{effect.label} [{effect.unit}]'] = { + for effect in sorted(self.flow_system.effects.values(), key=lambda e: e.id.upper()): + if effect.id != PENALTY_EFFECT_ID: + effect_id = effect.id + effects_section[f'{effect.id} [{effect.unit}]'] = { 'temporal': effects_model.temporal.sel(effect=effect_id).solution.values, 'periodic': effects_model.periodic.sel(effect=effect_id).solution.values, 'total': effects_model.total.sel(effect=effect_id).solution.values, @@ -353,15 +353,15 @@ def main_results(self) -> dict[str, int | float | dict]: if buses_model is not None: for bus in self.flow_system.buses.values(): if bus.allows_imbalance: - virtual_supply = buses_model.get_variable(BusVarName.VIRTUAL_SUPPLY, bus.label_full) - virtual_demand = buses_model.get_variable(BusVarName.VIRTUAL_DEMAND, bus.label_full) + virtual_supply = buses_model.get_variable(BusVarName.VIRTUAL_SUPPLY, bus.id) + virtual_demand = buses_model.get_variable(BusVarName.VIRTUAL_DEMAND, bus.id) if virtual_supply is not None and virtual_demand is not None: supply_sum = virtual_supply.solution.sum().item() demand_sum = virtual_demand.solution.sum().item() if supply_sum > 1e-3 or demand_sum > 1e-3: buses_with_excess.append( { - bus.label_full: { + 
bus.id: { 'virtual_supply': virtual_supply.solution.sum('time'), 'virtual_demand': virtual_demand.solution.sum('time'), } @@ -530,7 +530,7 @@ def __init__( folder: pathlib.Path | None = None, ): warnings.warn( - f'SegmentedOptimization is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. ' + f'SegmentedOptimization is deprecated and will be removed in v{DEPRECATION_REMOVAL_V7}. ' 'A replacement API for segmented optimization will be provided in a future release.', DeprecationWarning, stacklevel=2, @@ -575,9 +575,9 @@ def __init__( self.flow_system._connect_network() # Connect network to ensure that all Flows know their Component # Storing all original start values self._original_start_values = { - **{flow.label_full: flow.previous_flow_rate for flow in self.flow_system.flows.values()}, + **{flow.id: flow.previous_flow_rate for flow in self.flow_system.flows.values()}, **{ - comp.label_full: comp.initial_charge_state + comp.id: comp.initial_charge_state for comp in self.flow_system.components.values() if isinstance(comp, Storage) }, @@ -739,22 +739,22 @@ def _transfer_start_values(self, i: int): current_model = self.sub_optimizations[i - 1].model flows_model = current_model._flows_model for current_flow in current_flow_system.flows.values(): - next_flow = next_flow_system.flows[current_flow.label_full] - flow_rate = flows_model.get_variable(FlowVarName.RATE, current_flow.label_full) + next_flow = next_flow_system.flows[current_flow.id] + flow_rate = flows_model.get_variable(FlowVarName.RATE, current_flow.id) next_flow.previous_flow_rate = flow_rate.solution.sel( time=slice(start_previous_values, end_previous_values) ).values - start_values_of_this_segment[current_flow.label_full] = next_flow.previous_flow_rate + start_values_of_this_segment[current_flow.id] = next_flow.previous_flow_rate # Get previous charge state from type-level model storages_model = current_model._storages_model for current_comp in current_flow_system.components.values(): - 
next_comp = next_flow_system.components[current_comp.label_full] + next_comp = next_flow_system.components[current_comp.id] if isinstance(next_comp, Storage): if storages_model is not None: - charge_state = storages_model.get_variable(StorageVarName.CHARGE, current_comp.label_full) + charge_state = storages_model.get_variable(StorageVarName.CHARGE, current_comp.id) next_comp.initial_charge_state = charge_state.solution.sel(time=start).item() - start_values_of_this_segment[current_comp.label_full] = next_comp.initial_charge_state + start_values_of_this_segment[current_comp.id] = next_comp.initial_charge_state self._transfered_start_values.append(start_values_of_this_segment) diff --git a/flixopt/optimize_accessor.py b/flixopt/optimize_accessor.py index dd1b79f82..639498611 100644 --- a/flixopt/optimize_accessor.py +++ b/flixopt/optimize_accessor.py @@ -112,10 +112,10 @@ def __call__( if normalize_weights is not None: import warnings - from .config import DEPRECATION_REMOVAL_VERSION + from .config import DEPRECATION_REMOVAL_V7 warnings.warn( - f'\n\nnormalize_weights parameter is deprecated and will be removed in {DEPRECATION_REMOVAL_VERSION}. ' + f'\n\nnormalize_weights parameter is deprecated and will be removed in {DEPRECATION_REMOVAL_V7}. 
' 'Scenario weights are now always normalized when set on FlowSystem.\n', DeprecationWarning, stacklevel=2, @@ -366,14 +366,14 @@ def _check_no_investments(self, segment_fs: FlowSystem) -> None: # Check flows for InvestParameters for flow in segment_fs.flows.values(): if isinstance(flow.size, InvestParameters): - invest_elements.append(flow.label_full) + invest_elements.append(flow.id) # Check storages for InvestParameters from .components import Storage for comp in segment_fs.components.values(): if isinstance(comp, Storage) and isinstance(comp.capacity, InvestParameters): - invest_elements.append(comp.label_full) + invest_elements.append(comp.id) if invest_elements: raise ValueError( diff --git a/flixopt/results.py b/flixopt/results.py index 921efd3ba..774723069 100644 --- a/flixopt/results.py +++ b/flixopt/results.py @@ -16,10 +16,11 @@ from . import io as fx_io from . import plotting from .color_processing import process_colors -from .config import CONFIG, DEPRECATION_REMOVAL_VERSION, SUCCESS_LEVEL +from .config import CONFIG, DEPRECATION_REMOVAL_V7, DEPRECATION_REMOVAL_V8, SUCCESS_LEVEL from .flow_system import FlowSystem +from .id_list import IdList from .model_coordinates import ModelCoordinates -from .structure import CompositeContainerMixin, ResultsContainer +from .structure import CompositeContainerMixin if TYPE_CHECKING: import matplotlib.pyplot as plt @@ -96,9 +97,9 @@ class Results(CompositeContainerMixin['ComponentResults | BusResults | EffectRes name: Unique identifier for this optimization model: Original linopy optimization model (if available) folder: Directory path for result storage and loading - components: Dictionary mapping component labels to ComponentResults objects - buses: Dictionary mapping bus labels to BusResults objects - effects: Dictionary mapping effect names to EffectResults objects + components: IdList mapping component ids to ComponentResults objects (supports short-key fallback) + buses: IdList mapping bus ids to BusResults 
objects (supports short-key fallback) + effects: IdList mapping effect ids to EffectResults objects (supports short-key fallback) timesteps_extra: Extended time index including boundary conditions timestep_duration: Duration of each timestep in hours for proper energy calculations @@ -155,7 +156,7 @@ class Results(CompositeContainerMixin['ComponentResults | BusResults | EffectRes Design Patterns: **Factory Methods**: Use `from_file()` and `from_optimization()` for creation or access directly from `Optimization.results` - **Dictionary Access**: Use `results[element_label]` for element-specific results + **IdList Access**: Use `results[element_id]` for element-specific results (with short-key fallback) **Lazy Loading**: Results objects created on-demand for memory efficiency **Unified Interface**: Consistent API across different result types @@ -237,7 +238,7 @@ def __init__( model: Linopy optimization model. """ warnings.warn( - f'Results is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. ' + f'Results is deprecated and will be removed in v{DEPRECATION_REMOVAL_V7}. ' 'Access results directly via FlowSystem.solution after optimization, or use the ' '.plot accessor on FlowSystem and its components (e.g., flow_system.plot.heatmap(...)). 
' 'To load old result files, use FlowSystem.from_old_results(folder, name).', @@ -252,24 +253,28 @@ def __init__( self.model = model self.folder = pathlib.Path(folder) if folder is not None else pathlib.Path.cwd() / 'results' - # Create ResultsContainers for better access patterns + # Create IdLists for better access patterns components_dict = { label: ComponentResults(self, **infos) for label, infos in _get_solution_attr(self.solution, 'Components').items() } - self.components = ResultsContainer( - elements=components_dict, element_type_name='component results', truncate_repr=10 + self.components = IdList( + list(components_dict.values()), key_fn=lambda r: r.id, display_name='component results', truncate_repr=10 ) buses_dict = { label: BusResults(self, **infos) for label, infos in _get_solution_attr(self.solution, 'Buses').items() } - self.buses = ResultsContainer(elements=buses_dict, element_type_name='bus results', truncate_repr=10) + self.buses = IdList( + list(buses_dict.values()), key_fn=lambda r: r.id, display_name='bus results', truncate_repr=10 + ) effects_dict = { label: EffectResults(self, **infos) for label, infos in _get_solution_attr(self.solution, 'Effects').items() } - self.effects = ResultsContainer(elements=effects_dict, element_type_name='effect results', truncate_repr=10) + self.effects = IdList( + list(effects_dict.values()), key_fn=lambda r: r.id, display_name='effect results', truncate_repr=10 + ) flows_attr = _get_solution_attr(self.solution, 'Flows') if not flows_attr: @@ -283,7 +288,9 @@ def __init__( else: flows_dict = {label: FlowResults(self, **infos) for label, infos in flows_attr.items()} self._has_flow_data = True - self.flows = ResultsContainer(elements=flows_dict, element_type_name='flow results', truncate_repr=10) + self.flows = IdList( + list(flows_dict.values()), key_fn=lambda r: r.id, display_name='flow results', truncate_repr=10 + ) self.timesteps_extra = self.solution.indexes['time'] self.timestep_duration = 
ModelCoordinates.calculate_timestep_duration(self.timesteps_extra) @@ -310,7 +317,7 @@ def __init__( self.colors: dict[str, str] = {} - def _get_container_groups(self) -> dict[str, ResultsContainer]: + def _get_container_groups(self) -> dict[str, IdList]: """Return ordered container groups for CompositeContainerMixin.""" return { 'Components': self.components, @@ -584,31 +591,31 @@ def flow_rates( **Note**: The new API differs from this method: - - Returns ``xr.Dataset`` (not ``DataArray``) with flow labels as variable names + - Returns ``xr.Dataset`` (not ``DataArray``) with flow ids as variable names - No ``'flow'`` dimension - each flow is a separate variable - No filtering parameters - filter using these alternatives:: - # Select specific flows by label + # Select specific flows by id ds = results.plot.all_flow_rates ds[['Boiler(Q_th)', 'CHP(Q_th)']] - # Filter by substring in label + # Filter by substring in id ds[[v for v in ds.data_vars if 'Boiler' in v]] # Filter by bus (start/end) - get flows connected to a bus - results['Fernwärme'].inputs # list of input flow labels - results['Fernwärme'].outputs # list of output flow labels + results['Fernwärme'].inputs # list of input flow ids + results['Fernwärme'].outputs # list of output flow ids ds[results['Fernwärme'].inputs] # Dataset with only inputs to bus # Filter by component - get flows of a component - results['Boiler'].inputs # list of input flow labels - results['Boiler'].outputs # list of output flow labels + results['Boiler'].inputs # list of input flow ids + results['Boiler'].outputs # list of output flow ids """ warnings.warn( 'results.flow_rates() is deprecated. ' 'Use results.plot.all_flow_rates instead (returns Dataset, not DataArray). ' - 'Note: The new API has no filtering parameters and uses flow labels as variable names. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', + 'Note: The new API has no filtering parameters and uses flow ids as variable names. 
' + f'Will be removed in v{DEPRECATION_REMOVAL_V7}.', DeprecationWarning, stacklevel=2, ) @@ -617,7 +624,7 @@ def flow_rates( if self._flow_rates is None: self._flow_rates = self._assign_flow_coords( xr.concat( - [flow.flow_rate.rename(flow.label) for flow in self.flows.values()], + [flow.flow_rate.rename(flow.id) for flow in self.flows.values()], dim=pd.Index(self.flows.keys(), name='flow'), ) ).rename('flow_rates') @@ -638,25 +645,25 @@ def flow_hours( **Note**: The new API differs from this method: - - Returns ``xr.Dataset`` (not ``DataArray``) with flow labels as variable names + - Returns ``xr.Dataset`` (not ``DataArray``) with flow ids as variable names - No ``'flow'`` dimension - each flow is a separate variable - No filtering parameters - filter using these alternatives:: - # Select specific flows by label + # Select specific flows by id ds = results.plot.all_flow_hours ds[['Boiler(Q_th)', 'CHP(Q_th)']] - # Filter by substring in label + # Filter by substring in id ds[[v for v in ds.data_vars if 'Boiler' in v]] # Filter by bus (start/end) - get flows connected to a bus - results['Fernwärme'].inputs # list of input flow labels - results['Fernwärme'].outputs # list of output flow labels + results['Fernwärme'].inputs # list of input flow ids + results['Fernwärme'].outputs # list of output flow ids ds[results['Fernwärme'].inputs] # Dataset with only inputs to bus # Filter by component - get flows of a component - results['Boiler'].inputs # list of input flow labels - results['Boiler'].outputs # list of output flow labels + results['Boiler'].inputs # list of input flow ids + results['Boiler'].outputs # list of output flow ids Flow hours represent the total energy/material transferred over time, calculated by multiplying flow rates by the duration of each timestep. @@ -680,8 +687,8 @@ def flow_hours( warnings.warn( 'results.flow_hours() is deprecated. ' 'Use results.plot.all_flow_hours instead (returns Dataset, not DataArray). 
' - 'Note: The new API has no filtering parameters and uses flow labels as variable names. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', + 'Note: The new API has no filtering parameters and uses flow ids as variable names. ' + f'Will be removed in v{DEPRECATION_REMOVAL_V7}.', DeprecationWarning, stacklevel=2, ) @@ -704,31 +711,31 @@ def sizes( **Note**: The new API differs from this method: - - Returns ``xr.Dataset`` (not ``DataArray``) with flow labels as variable names + - Returns ``xr.Dataset`` (not ``DataArray``) with flow ids as variable names - No ``'flow'`` dimension - each flow is a separate variable - No filtering parameters - filter using these alternatives:: - # Select specific flows by label + # Select specific flows by id ds = results.plot.all_sizes ds[['Boiler(Q_th)', 'CHP(Q_th)']] - # Filter by substring in label + # Filter by substring in id ds[[v for v in ds.data_vars if 'Boiler' in v]] # Filter by bus (start/end) - get flows connected to a bus - results['Fernwärme'].inputs # list of input flow labels - results['Fernwärme'].outputs # list of output flow labels + results['Fernwärme'].inputs # list of input flow ids + results['Fernwärme'].outputs # list of output flow ids ds[results['Fernwärme'].inputs] # Dataset with only inputs to bus # Filter by component - get flows of a component - results['Boiler'].inputs # list of input flow labels - results['Boiler'].outputs # list of output flow labels + results['Boiler'].inputs # list of input flow ids + results['Boiler'].outputs # list of output flow ids """ warnings.warn( 'results.sizes() is deprecated. ' 'Use results.plot.all_sizes instead (returns Dataset, not DataArray). ' - 'Note: The new API has no filtering parameters and uses flow labels as variable names. ' - f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.', + 'Note: The new API has no filtering parameters and uses flow ids as variable names. 
' + f'Will be removed in v{DEPRECATION_REMOVAL_V7}.', DeprecationWarning, stacklevel=2, ) @@ -737,7 +744,7 @@ def sizes( if self._sizes is None: self._sizes = self._assign_flow_coords( xr.concat( - [flow.size.rename(flow.label) for flow in self.flows.values()], + [flow.size.rename(flow.id) for flow in self.flows.values()], dim=pd.Index(self.flows.keys(), name='flow'), ) ).rename('flow_sizes') @@ -1282,12 +1289,22 @@ def to_file( class _ElementResults: def __init__(self, results: Results, label: str, variables: list[str], constraints: list[str]): self._results = results - self.label = label + self.id = label self.variable_names = variables self._constraint_names = constraints self.solution = self._results.solution[self.variable_names] + @property + def label(self) -> str: + """Deprecated. Use `.id` instead.""" + warnings.warn( + f'`label` is deprecated and will be removed in v{DEPRECATION_REMOVAL_V8}. Use `.id` instead.', + DeprecationWarning, + stacklevel=2, + ) + return self.id + @property def variables(self) -> linopy.Variables: """Get element variables (requires linopy model). 
@@ -1313,7 +1330,7 @@ def constraints(self) -> linopy.Constraints: def __repr__(self) -> str: """Return string representation with element info and dataset preview.""" class_name = self.__class__.__name__ - header = f'{class_name}: "{self.label}"' + header = f'{class_name}: "{self.id}"' sol = self.solution.copy(deep=False) sol.attrs = {} return f'{header}\n{"-" * len(header)}\n{repr(sol)}' @@ -1510,9 +1527,7 @@ def plot_node_balance( ) suffix = '--' + '-'.join(suffix_parts) if suffix_parts else '' - title = ( - f'{self.label} (flow rates){suffix}' if unit_type == 'flow_rate' else f'{self.label} (flow hours){suffix}' - ) + title = f'{self.id} (flow rates){suffix}' if unit_type == 'flow_rate' else f'{self.id} (flow hours){suffix}' if engine == 'plotly': figure_like = plotting.with_plotly( @@ -1662,7 +1677,7 @@ def plot_node_balance_pie( suffix_parts.extend(auto_suffix_parts) suffix = '--' + '-'.join(sorted(set(suffix_parts))) if suffix_parts else '' - title = f'{self.label} (total flow hours){suffix}' + title = f'{self.id} (total flow hours){suffix}' if engine == 'plotly': figure_like = plotting.dual_pie_with_plotly( @@ -1766,13 +1781,13 @@ def is_storage(self) -> bool: @property def _charge_state(self) -> str: - return f'{self.label}|charge_state' + return f'{self.id}|charge_state' @property def charge_state(self) -> xr.DataArray: """Get storage charge state solution.""" if not self.is_storage: - raise ValueError(f'Cant get charge_state. "{self.label}" is not a storage') + raise ValueError(f'Cant get charge_state. "{self.id}" is not a storage') return self.solution[self._charge_state] def plot_charge_state( @@ -1861,7 +1876,7 @@ def plot_charge_state( overlay_color = plot_kwargs.pop('charge_state_line_color', 'black') if not self.is_storage: - raise ValueError(f'Cant plot charge_state. "{self.label}" is not a storage') + raise ValueError(f'Cant plot charge_state. 
"{self.id}" is not a storage') # Get node balance and charge state ds = self.node_balance(with_last_timestep=True).fillna(0) @@ -1872,7 +1887,7 @@ def plot_charge_state( charge_state_da, _ = _apply_selection_to_data(charge_state_da, select=select, drop=True) suffix = '--' + '-'.join(suffix_parts) if suffix_parts else '' - title = f'Operation Balance of {self.label}{suffix}' + title = f'Operation Balance of {self.id}{suffix}' if engine == 'plotly': # Plot flows (node balance) with the specified mode @@ -1993,7 +2008,7 @@ def node_balance_with_charge_state( ValueError: If component is not a storage. """ if not self.is_storage: - raise ValueError(f'Cant get charge_state. "{self.label}" is not a storage') + raise ValueError(f'Cant get charge_state. "{self.id}" is not a storage') variable_names = self.inputs + self.outputs + [self._charge_state] return sanitize_dataset( ds=self.solution[variable_names], @@ -2018,7 +2033,7 @@ def get_shares_from(self, element: str) -> xr.Dataset: """Get effect shares from specific element. Args: - element: Element label to get shares from. + element: Element id to get shares from. Returns: xr.Dataset: Element shares to this effect. @@ -2044,21 +2059,21 @@ def __init__( @property def flow_rate(self) -> xr.DataArray: - return self.solution[f'{self.label}|flow_rate'] + return self.solution[f'{self.id}|flow_rate'] @property def flow_hours(self) -> xr.DataArray: - return (self.flow_rate * self._results.timestep_duration).rename(f'{self.label}|flow_hours') + return (self.flow_rate * self._results.timestep_duration).rename(f'{self.id}|flow_hours') @property def size(self) -> xr.DataArray: - name = f'{self.label}|size' + name = f'{self.id}|size' if name in self.solution: return self.solution[name] try: - return self._results.flow_system.flows[self.label].size.rename(name) + return self._results.flow_system.flows[self.id].size.rename(name) except _FlowSystemRestorationError: - logger.critical(f'Size of flow {self.label}.size not availlable. 
Returning NaN') + logger.critical(f'Size of flow {self.id}.size not available. Returning NaN') return xr.DataArray(np.nan).rename(name) @@ -2222,7 +2237,7 @@ def __init__( folder: pathlib.Path | None = None, ): warnings.warn( - f'SegmentedResults is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. ' + f'SegmentedResults is deprecated and will be removed in v{DEPRECATION_REMOVAL_V7}. ' 'A replacement API for segmented optimization will be provided in a future release.', DeprecationWarning, stacklevel=2, ) diff --git a/flixopt/statistics_accessor.py b/flixopt/statistics_accessor.py index a086f33e7..7f4b1ed35 100644 --- a/flixopt/statistics_accessor.py +++ b/flixopt/statistics_accessor.py @@ -1529,8 +1529,8 @@ def balance( else: raise KeyError(f"'{node}' not found in buses or components") - input_labels = [f.label_full for f in element.inputs.values()] - output_labels = [f.label_full for f in element.outputs.values()] + input_labels = [f.id for f in element.inputs.values()] + output_labels = [f.id for f in element.outputs.values()] all_labels = input_labels + output_labels filtered_labels = _filter_by_labels(all_labels, include, exclude) @@ -1656,11 +1656,11 @@ def carrier_balance( for bus in carrier_buses: for flow in bus.inputs.values(): - input_labels.append(flow.label_full) - component_inputs.setdefault(flow.component, []).append(flow.label_full) + input_labels.append(flow.id) + component_inputs.setdefault(flow.component, []).append(flow.id) for flow in bus.outputs.values(): - output_labels.append(flow.label_full) - component_outputs.setdefault(flow.component, []).append(flow.label_full) + output_labels.append(flow.id) + component_outputs.setdefault(flow.component, []).append(flow.id) all_labels = input_labels + output_labels filtered_labels = _filter_by_labels(all_labels, include, exclude) @@ -1888,7 +1888,7 @@ def flows( if components and comp_label not in components: continue - matching_labels.append(flow.label_full) + 
matching_labels.append(flow.id) selected_flows = [lbl for lbl in matching_labels if lbl in available_flows] da = source_da.sel(flow=selected_flows) @@ -2383,8 +2383,8 @@ def storage( raise ValueError(f"'{storage}' is not a storage (no charge_state variable found)") # Get flow data - input_labels = [f.label_full for f in component.inputs.values()] - output_labels = [f.label_full for f in component.outputs.values()] + input_labels = [f.id for f in component.inputs.values()] + output_labels = [f.id for f in component.outputs.values()] all_labels = input_labels + output_labels source_da = self._stats.flow_rates if unit == 'flow_rate' else self._stats.flow_hours diff --git a/flixopt/structure.py b/flixopt/structure.py index fe8eaf4f4..67e2d3d2c 100644 --- a/flixopt/structure.py +++ b/flixopt/structure.py @@ -29,8 +29,9 @@ import xarray as xr from . import io as fx_io -from .config import DEPRECATION_REMOVAL_VERSION +from .config import DEPRECATION_REMOVAL_V8 from .core import FlowSystemDimensions, TimeSeriesData, get_dataarray_stats +from .id_list import IdList if TYPE_CHECKING: # for type checking and preventing circular imports from collections.abc import Collection @@ -401,8 +402,8 @@ class TypeModel(ABC): Attributes: model: The FlowSystemModel to create variables/constraints in. data: Data object providing element_ids, dim_name, and elements. - elements: ElementContainer of elements this model manages. - element_ids: List of element identifiers (label_full). + elements: IdList of elements this model manages. + element_ids: List of element identifiers. dim_name: Dimension name for this element type (e.g., 'flow', 'storage'). 
Example: @@ -430,8 +431,8 @@ def __init__(self, model: FlowSystemModel, data): self._constraints: dict[str, linopy.Constraint] = {} @property - def elements(self) -> ElementContainer: - """ElementContainer of elements in this model.""" + def elements(self) -> IdList: + """IdList of elements in this model.""" return self.data.elements @property @@ -791,21 +792,21 @@ def _find_constraints_for_element(element_id: str, dim_name: str) -> list[str]: # Populate flows for flow in self.flow_system.flows.values(): - flow._variable_names = _find_vars_for_element(flow.label_full, 'flow') - flow._constraint_names = _find_constraints_for_element(flow.label_full, 'flow') + flow._variable_names = _find_vars_for_element(flow.id, 'flow') + flow._constraint_names = _find_constraints_for_element(flow.id, 'flow') # Populate buses for bus in self.flow_system.buses.values(): - bus._variable_names = _find_vars_for_element(bus.label_full, 'bus') - bus._constraint_names = _find_constraints_for_element(bus.label_full, 'bus') + bus._variable_names = _find_vars_for_element(bus.id, 'bus') + bus._constraint_names = _find_constraints_for_element(bus.id, 'bus') # Populate storages from .components import Storage for comp in self.flow_system.components.values(): if isinstance(comp, Storage): - comp._variable_names = _find_vars_for_element(comp.label_full, 'storage') - comp._constraint_names = _find_constraints_for_element(comp.label_full, 'storage') + comp._variable_names = _find_vars_for_element(comp.id, 'storage') + comp._constraint_names = _find_constraints_for_element(comp.id, 'storage') # Also add flow variables (storages have charging/discharging flows) for flow in comp.flows.values(): comp._variable_names.extend(flow._variable_names) @@ -815,8 +816,8 @@ def _find_constraints_for_element(element_id: str, dim_name: str) -> list[str]: comp._variable_names = [] comp._constraint_names = [] # Add component-level variables (status, etc.) 
- comp._variable_names.extend(_find_vars_for_element(comp.label_full, 'component')) - comp._constraint_names.extend(_find_constraints_for_element(comp.label_full, 'component')) + comp._variable_names.extend(_find_vars_for_element(comp.id, 'component')) + comp._constraint_names.extend(_find_constraints_for_element(comp.id, 'component')) # Add flow variables for flow in comp.flows.values(): comp._variable_names.extend(flow._variable_names) @@ -824,8 +825,8 @@ def _find_constraints_for_element(element_id: str, dim_name: str) -> list[str]: # Populate effects for effect in self.flow_system.effects.values(): - effect._variable_names = _find_vars_for_element(effect.label, 'effect') - effect._constraint_names = _find_constraints_for_element(effect.label, 'effect') + effect._variable_names = _find_vars_for_element(effect.id, 'effect') + effect._constraint_names = _find_constraints_for_element(effect.id, 'effect') def _build_results_structure(self) -> dict[str, dict]: """Build results structure for all elements using type-level models.""" @@ -838,45 +839,45 @@ def _build_results_structure(self) -> dict[str, dict]: } # Components - for comp in sorted(self.flow_system.components.values(), key=lambda c: c.label_full.upper()): - flow_labels = [f.label_full for f in comp.flows.values()] - results['Components'][comp.label_full] = { - 'label': comp.label_full, + for comp in sorted(self.flow_system.components.values(), key=lambda c: c.id.upper()): + flow_ids = [f.id for f in comp.flows.values()] + results['Components'][comp.id] = { + 'id': comp.id, 'variables': comp._variable_names, 'constraints': comp._constraint_names, 'inputs': ['flow|rate'] * len(comp.inputs), 'outputs': ['flow|rate'] * len(comp.outputs), - 'flows': flow_labels, + 'flows': flow_ids, } # Buses - for bus in sorted(self.flow_system.buses.values(), key=lambda b: b.label_full.upper()): + for bus in sorted(self.flow_system.buses.values(), key=lambda b: b.id.upper()): input_vars = ['flow|rate'] * len(bus.inputs) 
output_vars = ['flow|rate'] * len(bus.outputs) if bus.allows_imbalance: input_vars.append('bus|virtual_supply') output_vars.append('bus|virtual_demand') - results['Buses'][bus.label_full] = { - 'label': bus.label_full, + results['Buses'][bus.id] = { + 'id': bus.id, 'variables': bus._variable_names, 'constraints': bus._constraint_names, 'inputs': input_vars, 'outputs': output_vars, - 'flows': [f.label_full for f in bus.flows.values()], + 'flows': [f.id for f in bus.flows.values()], } # Effects - for effect in sorted(self.flow_system.effects.values(), key=lambda e: e.label_full.upper()): - results['Effects'][effect.label_full] = { - 'label': effect.label_full, + for effect in sorted(self.flow_system.effects.values(), key=lambda e: e.id.upper()): + results['Effects'][effect.id] = { + 'id': effect.id, 'variables': effect._variable_names, 'constraints': effect._constraint_names, } # Flows - for flow in sorted(self.flow_system.flows.values(), key=lambda f: f.label_full.upper()): - results['Flows'][flow.label_full] = { - 'label': flow.label_full, + for flow in sorted(self.flow_system.flows.values(), key=lambda f: f.id.upper()): + results['Flows'][flow.id] = { + 'id': flow.id, 'variables': flow._variable_names, 'constraints': flow._constraint_names, 'start': flow.bus if flow.is_input_in_component else flow.component, @@ -984,28 +985,28 @@ def _add_scenario_equality_for_parameter_type( if 'scenario' not in batched_var.dims: return # No scenario dimension, nothing to equalize - all_flow_labels = list(batched_var.coords['flow'].values) + all_flow_ids = list(batched_var.coords['flow'].values) if config is True: # All flows should be scenario-independent - flows_to_constrain = all_flow_labels + flows_to_constrain = all_flow_ids else: # Only those in the list should be scenario-independent - flows_to_constrain = [f for f in config if f in all_flow_labels] + flows_to_constrain = [f for f in config if f in all_flow_ids] # Validate that all specified flows exist - missing = [f for 
f in config if f not in all_flow_labels] + missing = [f for f in config if f not in all_flow_ids] if missing: param_name = ( 'scenario_independent_sizes' if parameter_type == 'size' else 'scenario_independent_flow_rates' ) - logger.warning(f'{param_name} contains labels not in {batched_var_name}: {missing}') + logger.warning(f'{param_name} contains ids not in {batched_var_name}: {missing}') logger.debug(f'Adding scenario equality constraints for {len(flows_to_constrain)} {parameter_type} variables') - for flow_label in flows_to_constrain: - var_slice = batched_var.sel(flow=flow_label) + for flow_id in flows_to_constrain: + var_slice = batched_var.sel(flow=flow_id) self.add_constraints( var_slice.isel(scenario=0) == var_slice.isel(scenario=slice(1, None)), - name=f'{flow_label}|{parameter_type}|scenario_independent', + name=f'{flow_id}|{parameter_type}|scenario_independent', ) def _add_scenario_equality_constraints(self): @@ -1268,7 +1269,7 @@ def link_to_flow_system(self, flow_system, prefix: str = '') -> None: # In a Model class if flow.status_parameters is None: flow.status_parameters = StatusParameters() - flow.status_parameters.link_to_flow_system(self._model.flow_system, f'{flow.label_full}') + flow.status_parameters.link_to_flow_system(self._model.flow_system, f'{flow.id}') ``` """ self._flow_system = flow_system @@ -1349,11 +1350,22 @@ def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]: reference_structure = {'__class__': self.__class__.__name__} all_extracted_arrays = {} + # Deprecated init params that should not be serialized (they alias other params) + _deprecated_init_params = {'label', 'label_as_positional'} + # On Flow, 'id' is deprecated in favor of 'flow_id' + if 'flow_id' in self._cached_init_params: + _deprecated_init_params.add('id') + for name in self._cached_init_params: - if name == 'self': # Skip self and timesteps. 
Timesteps are directly stored in Datasets + if name == 'self' or name in _deprecated_init_params: continue - value = getattr(self, name, None) + # For 'id' or 'flow_id' param, use _short_id to get the raw constructor value + # (Flow.id property returns qualified name, but constructor expects short name) + if name in ('id', 'flow_id') and hasattr(self, '_short_id'): + value = self._short_id + else: + value = getattr(self, name, None) if value is None: continue @@ -1439,6 +1451,16 @@ def _extract_dataarrays_recursive(self, obj, context_name: str = '') -> tuple[An processed_items.append(processed_item) return processed_items, extracted_arrays + # Handle IdList containers (treat as dict for serialization) + elif isinstance(obj, IdList): + processed_dict = {} + for key, value in obj.items(): + key_context = f'{context_name}.{key}' if context_name else str(key) + processed_value, nested_arrays = self._extract_dataarrays_recursive(value, key_context) + extracted_arrays.update(nested_arrays) + processed_dict[key] = processed_value + return processed_dict, extracted_arrays + # Handle dictionaries elif isinstance(obj, dict): processed_dict = {} @@ -1472,6 +1494,7 @@ def _handle_deprecated_kwarg( transform: callable = None, check_conflict: bool = True, additional_warning_message: str = '', + removal_version: str | None = None, ) -> Any: """ Handle a deprecated keyword argument by issuing a warning and returning the appropriate value. @@ -1488,6 +1511,7 @@ def _handle_deprecated_kwarg( Note: For parameters with non-None default values (e.g., bool parameters with default=False), set check_conflict=False since we cannot distinguish between an explicit value and the default. additional_warning_message: Add a custom message which gets appended with a line break to the default warning. + removal_version: Target removal version string. Defaults to DEPRECATION_REMOVAL_V8. 
Returns: The value to use (either from old parameter or current_value) @@ -1508,10 +1532,13 @@ def _handle_deprecated_kwarg( """ import warnings + if removal_version is None: + removal_version = DEPRECATION_REMOVAL_V8 + old_value = kwargs.pop(old_name, None) if old_value is not None: # Build base warning message - base_warning = f'The use of the "{old_name}" argument is deprecated. Use the "{new_name}" argument instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.' + base_warning = f'The use of the "{old_name}" argument is deprecated. Use the "{new_name}" argument instead. Will be removed in v{removal_version}.' # Append additional message on a new line if provided if additional_warning_message: @@ -1684,6 +1711,15 @@ def _resolve_reference_structure(cls, structure, arrays_dict: dict[str, xr.DataA deferred_attrs = {k: v for k, v in resolved_nested_data.items() if k in deferred_attr_names} constructor_data = {k: v for k, v in resolved_nested_data.items() if k not in deferred_attr_names} + # Handle renamed parameters from old serialized data + if 'label' in constructor_data and 'label' not in init_params: + # label → id for most elements, label → flow_id for Flow + new_key = 'flow_id' if 'flow_id' in init_params else 'id' + constructor_data[new_key] = constructor_data.pop('label') + if 'id' in constructor_data and 'id' not in init_params and 'flow_id' in init_params: + # id → flow_id for Flow (from recently serialized data) + constructor_data['flow_id'] = constructor_data.pop('id') + # Check for unknown parameters - these could be typos or renamed params unknown_params = set(constructor_data.keys()) - init_params if unknown_params: @@ -1931,7 +1967,7 @@ def to_json(self, path: str | pathlib.Path): def __repr__(self): """Return a detailed string representation for debugging.""" - return fx_io.build_repr_from_init(self, excluded_params={'self', 'label', 'kwargs'}) + return fx_io.build_repr_from_init(self, excluded_params={'self', 'id', 'label', 'kwargs'}) def 
copy(self) -> Interface: """ @@ -1957,7 +1993,7 @@ def __deepcopy__(self, memo): class Element(Interface): - """This class is the basic Element of flixopt. Every Element has a label""" + """This class is the basic Element of flixopt. Every Element has an id.""" # Attributes that are serialized but set after construction (not passed to child __init__) # These are internal state populated during modeling, not user-facing parameters @@ -1965,21 +2001,26 @@ class Element(Interface): def __init__( self, - label: str, + id: str | None = None, meta_data: dict | None = None, color: str | None = None, _variable_names: list[str] | None = None, _constraint_names: list[str] | None = None, + **kwargs, ): """ Args: - label: The label of the element + id: The id of the element meta_data: used to store more information about the Element. Is not used internally, but saved in the results. Only use python native types. color: Optional color for visualizations (e.g., '#FF6B6B'). If not provided, a color will be automatically assigned during FlowSystem.connect_and_transform(). _variable_names: Internal. Variable names for this element (populated after modeling). _constraint_names: Internal. Constraint names for this element (populated after modeling). """ - self.label = Element._valid_label(label) + id = self._handle_deprecated_kwarg(kwargs, 'label', 'id', id) + if id is None: + raise TypeError(f'{self.__class__.__name__}.__init__() requires an "id" argument.') + self._validate_kwargs(kwargs) + self._short_id: str = Element._valid_id(id) self.meta_data = meta_data if meta_data is not None else {} self.color = color self._flow_system: FlowSystem | None = None @@ -1992,9 +2033,57 @@ def _plausibility_checks(self) -> None: This is run after all data is transformed to the correct format/type""" raise NotImplementedError('Every Element needs a _plausibility_checks() method') + @property + def id(self) -> str: + """The unique identifier of this element. 
+ + For most elements this is the name passed to the constructor. + For flows this returns the qualified form: ``component(short_id)``. + """ + return self._short_id + + @id.setter + def id(self, value: str) -> None: + self._short_id = Element._valid_id(value) + + @property + def label(self) -> str: + """Deprecated: Use ``id`` instead.""" + warnings.warn( + f'Accessing ".label" is deprecated. Use ".id" instead. Will be removed in v{DEPRECATION_REMOVAL_V8}.', + DeprecationWarning, + stacklevel=2, + ) + return self._short_id + + @label.setter + def label(self, value: str) -> None: + warnings.warn( + f'Setting ".label" is deprecated. Use ".id" instead. Will be removed in v{DEPRECATION_REMOVAL_V8}.', + DeprecationWarning, + stacklevel=2, + ) + self._short_id = Element._valid_id(value) + @property def label_full(self) -> str: - return self.label + """Deprecated: Use ``id`` instead.""" + warnings.warn( + f'Accessing ".label_full" is deprecated. Use ".id" instead. Will be removed in v{DEPRECATION_REMOVAL_V8}.', + DeprecationWarning, + stacklevel=2, + ) + return self.id + + @property + def id_full(self) -> str: + """Deprecated: Use ``id`` instead.""" + warnings.warn( + f'Accessing ".id_full" is deprecated. Use ".id" instead. Will be removed in v{DEPRECATION_REMOVAL_V8}.', + DeprecationWarning, + stacklevel=2, + ) + return self.id @property def solution(self) -> xr.Dataset: @@ -2007,11 +2096,11 @@ def solution(self) -> xr.Dataset: ValueError: If no solution is available (optimization not run or not solved). """ if self._flow_system is None: - raise ValueError(f'Element "{self.label}" is not linked to a FlowSystem.') + raise ValueError(f'Element "{self.id}" is not linked to a FlowSystem.') if self._flow_system.solution is None: - raise ValueError(f'No solution available for "{self.label}". Run optimization first or load results.') + raise ValueError(f'No solution available for "{self.id}". 
Run optimization first or load results.') if not self._variable_names: - raise ValueError(f'No variable names available for "{self.label}". Element may not have been modeled yet.') + raise ValueError(f'No variable names available for "{self.id}". Element may not have been modeled yet.') full_solution = self._flow_system.solution data_vars = {} for var_name in self._variable_names: @@ -2022,8 +2111,8 @@ def solution(self) -> xr.Dataset: for dim in var.dims: if dim in ('time', 'period', 'scenario', 'cluster'): continue - if self.label_full in var.coords[dim].values: - var = var.sel({dim: self.label_full}, drop=True) + if self.id in var.coords[dim].values: + var = var.sel({dim: self.id}, drop=True) break data_vars[var_name] = var return xr.Dataset(data_vars) @@ -2047,25 +2136,35 @@ def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]: def __repr__(self) -> str: """Return string representation.""" - return fx_io.build_repr_from_init(self, excluded_params={'self', 'label', 'kwargs'}, skip_default_size=True) + return fx_io.build_repr_from_init(self, excluded_params={'self', 'id', 'kwargs'}, skip_default_size=True) @staticmethod - def _valid_label(label: str) -> str: - """Checks if the label is valid. If not, it is replaced by the default label. + def _valid_id(id: str) -> str: + """Checks if the id is valid. Raises: - ValueError: If the label is not valid. + ValueError: If the id is not valid. """ not_allowed = ['(', ')', '|', '->', '\\', '-slash-'] # \\ is needed to check for \ - if any([sign in label for sign in not_allowed]): + if any([sign in id for sign in not_allowed]): raise ValueError( - f'Label "{label}" is not valid. Labels cannot contain the following characters: {not_allowed}. ' + f'Id "{id}" is not valid. Ids cannot contain the following characters: {not_allowed}. ' f'Use any other symbol instead' ) - if label.endswith(' '): - logger.error(f'Label "{label}" ends with a space. 
This will be removed.') - return label.rstrip() - return label + if id.endswith(' '): + logger.error(f'Id "{id}" ends with a space. This will be removed.') + return id.rstrip() + return id + + @staticmethod + def _valid_label(label: str) -> str: + """Deprecated: Use ``_valid_id`` instead.""" + warnings.warn( + f'_valid_label is deprecated. Use _valid_id instead. Will be removed in v{DEPRECATION_REMOVAL_V8}.', + DeprecationWarning, + stacklevel=2, + ) + return Element._valid_id(label) # Precompiled regex pattern for natural sorting @@ -2244,31 +2343,31 @@ def __repr__(self) -> str: class FlowContainer(ContainerMixin[T]): - """Container for Flow objects with dual access: by index or by label_full. + """Container for Flow objects with dual access: by index or by id. Supports: - - container['Boiler(Q_th)'] # label_full-based access - - container['Q_th'] # short-label access (when all flows share same component) + - container['Boiler(Q_th)'] # id-based access + - container['Q_th'] # short-id access (when all flows share same component) - container[0] # index-based access - container.add(flow) - for flow in container.values() - container1 + container2 # concatenation Examples: - >>> boiler = Boiler(label='Boiler', inputs=[Flow('Q_th', bus=heat_bus)]) + >>> boiler = Boiler(id='Boiler', inputs=[Flow('heat_bus')]) >>> boiler.inputs[0] # Index access - >>> boiler.inputs['Boiler(Q_th)'] # Full label access - >>> boiler.inputs['Q_th'] # Short label access (same component) + >>> boiler.inputs['Boiler(heat_bus)'] # Full id access + >>> boiler.inputs['heat_bus'] # Short id access (same component) >>> for flow in boiler.inputs.values(): - ... print(flow.label_full) + ... 
print(flow.id) """ def _get_label(self, flow: T) -> str: - """Extract label_full from Flow.""" - return flow.label_full + """Extract id from Flow.""" + return flow.id def __getitem__(self, key: str | int) -> T: - """Get flow by label_full, short label, or index.""" + """Get flow by id, short id, or index.""" if isinstance(key, int): try: return list(self.values())[key] @@ -2278,7 +2377,7 @@ def __getitem__(self, key: str | int) -> T: if dict.__contains__(self, key): return super().__getitem__(key) - # Try short-label match if all flows share the same component + # Try short-id match if all flows share the same component if len(self) > 0: components = {flow.component for flow in self.values()} if len(components) == 1: @@ -2290,7 +2389,7 @@ def __getitem__(self, key: str | int) -> T: raise KeyError(f"'{key}' not found in {self._element_type_name}") def __contains__(self, key: object) -> bool: - """Check if key exists (supports label_full or short label).""" + """Check if key exists (supports id or short id).""" if not isinstance(key, str): return False if dict.__contains__(self, key): @@ -2308,24 +2407,24 @@ class ElementContainer(ContainerMixin[T]): """ Container for Element objects (Component, Bus, Flow, Effect). - Uses element.label_full for keying. + Uses element.id for keying. """ def _get_label(self, element: T) -> str: - """Extract label_full from Element.""" - return element.label_full + """Extract id from Element.""" + return element.id class ResultsContainer(ContainerMixin[T]): """ Container for Results objects (ComponentResults, BusResults, etc). - Uses element.label for keying. + Uses element.id for keying. """ def _get_label(self, element: T) -> str: - """Extract label from Results object.""" - return element.label + """Extract id from Results object.""" + return element.id T_element = TypeVar('T_element') @@ -2383,12 +2482,12 @@ def _get_container_groups(self): interface while preserving their individual functionality. 
""" - def _get_container_groups(self) -> dict[str, ContainerMixin[Any]]: + def _get_container_groups(self) -> dict[str, IdList[Any]]: """ Return ordered dict of container groups to aggregate. Returns: - Dictionary mapping group names to container objects (e.g., ElementContainer, ResultsContainer). + Dictionary mapping group names to IdList containers. Group names should be capitalized (e.g., 'Components', 'Buses'). Order determines display order in __repr__. @@ -2422,17 +2521,17 @@ def __getitem__(self, key: str) -> T_element: return container[key] # Element not found - provide helpful error - all_elements = {} + all_keys: list[str] = [] for container in self._get_container_groups().values(): - all_elements.update(container) + all_keys.extend(container.keys()) - suggestions = get_close_matches(key, all_elements.keys(), n=3, cutoff=0.6) + suggestions = get_close_matches(key, all_keys, n=3, cutoff=0.6) error_msg = f'Element "{key}" not found.' if suggestions: error_msg += f' Did you mean: {", ".join(suggestions)}?' else: - available = list(all_elements.keys()) + available = all_keys if len(available) <= 5: error_msg += f' Available: {", ".join(available)}' else: diff --git a/flixopt/topology_accessor.py b/flixopt/topology_accessor.py index 3c20b6660..88f4124a1 100644 --- a/flixopt/topology_accessor.py +++ b/flixopt/topology_accessor.py @@ -19,7 +19,7 @@ import xarray as xr from .color_processing import ColorType, hex_to_rgba, process_colors -from .config import CONFIG, DEPRECATION_REMOVAL_VERSION +from .config import CONFIG, DEPRECATION_REMOVAL_V7 from .flow_system_status import FlowSystemStatus from .plot_result import PlotResult @@ -169,10 +169,10 @@ def carrier_colors(self) -> dict[str, str]: @property def component_colors(self) -> dict[str, str]: - """Cached mapping of component label to hex color. + """Cached mapping of component id to hex color. Returns: - Dict mapping component labels to hex color strings. + Dict mapping component ids to hex color strings. 
Only components with a color defined are included. Examples: @@ -185,12 +185,12 @@ def component_colors(self) -> dict[str, str]: @property def flow_colors(self) -> dict[str, str]: - """Cached mapping of flow label_full to hex color (from parent component). + """Cached mapping of flow id to hex color (from parent component). Flow colors are derived from their parent component's color. Returns: - Dict mapping flow labels (e.g., 'Boiler(Q_th)') to hex color strings. + Dict mapping flow ids (e.g., 'Boiler(Q_th)') to hex color strings. Only flows whose parent component has a color defined are included. Examples: @@ -202,17 +202,17 @@ def flow_colors(self) -> dict[str, str]: self._flow_colors = {} for flow in self._fs.flows.values(): if flow.component in component_colors: - self._flow_colors[flow.label_full] = component_colors[flow.component] + self._flow_colors[flow.id] = component_colors[flow.component] return self._flow_colors @property def bus_colors(self) -> dict[str, str]: - """Cached mapping of bus label to hex color (from carrier). + """Cached mapping of bus id to hex color (from carrier). Bus colors are derived from their associated carrier's color. Returns: - Dict mapping bus labels to hex color strings. + Dict mapping bus ids to hex color strings. Only buses with a carrier that has a color defined are included. Examples: @@ -245,25 +245,25 @@ def carrier_units(self) -> dict[str, str]: @cached_property def effect_units(self) -> dict[str, str]: - """Mapping of effect label to unit string. + """Mapping of effect id to unit string. Returns: - Dict mapping effect labels to unit strings. + Dict mapping effect ids to unit strings. Effects without a unit defined return an empty string. 
Examples: >>> fs.topology.effect_units {'costs': '€', 'CO2': 'kg'} """ - return {effect.label: effect.unit or '' for effect in self._fs.effects.values()} + return {effect.id: effect.unit or '' for effect in self._fs.effects.values()} @cached_property def flows(self) -> xr.DataArray: """DataArray with 'flow' dimension and metadata coordinates. Coordinates on the 'flow' dimension: - - component: Parent component label - - bus: Connected bus label + - component: Parent component id + - bus: Connected bus id - carrier: Carrier name (lowercase) - unit: Unit string from carrier - is_input: Whether the flow is an input to its component @@ -310,7 +310,7 @@ def flows(self) -> xr.DataArray: carrier_units = self.carrier_units for flow in self._fs.flows.values(): - flow_labels.append(flow.label_full) + flow_labels.append(flow.id) components.append(flow.component) buses.append(flow.bus) bus_obj = self._fs.buses.get(flow.bus) @@ -438,8 +438,8 @@ def infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, str]]]: Returns: Tuple of (nodes_dict, edges_dict) where: - - nodes_dict maps node labels to their properties (label, class, infos) - - edges_dict maps edge labels to their properties (label, start, end, infos) + - nodes_dict maps node ids to their properties (label, class, infos) + - edges_dict maps edge ids to their properties (label, start, end, infos) Examples: >>> nodes, edges = flow_system.topology.infos() @@ -452,8 +452,8 @@ def infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, str]]]: self._fs.connect_and_transform() nodes = { - node.label_full: { - 'label': node.label, + node.id: { + 'label': node.id, 'class': 'Bus' if isinstance(node, Bus) else 'Component', 'infos': node.__str__(), } @@ -466,9 +466,9 @@ def infos(self) -> tuple[dict[str, dict[str, str]], dict[str, dict[str, str]]]: edges = {} for flow in self._fs.flows.values(): - carrier_name = flow_carriers.get(flow.label_full) - edges[flow.label_full] = { - 'label': flow.label, + 
carrier_name = flow_carriers.get(flow.id) + edges[flow.id] = { + 'label': flow.flow_id, 'start': flow.bus if flow.is_input_in_component else flow.component, 'end': flow.component if flow.is_input_in_component else flow.bus, 'infos': flow.__str__(), @@ -539,9 +539,9 @@ def plot( # Collect node hover info (format repr for HTML display) node_hover: dict[str, str] = {} for comp in self._fs.components.values(): - node_hover[comp.label] = repr(comp).replace('\n', '<br>') + node_hover[comp.id] = repr(comp).replace('\n', '<br>') for bus in self._fs.buses.values(): - node_hover[bus.label] = repr(bus).replace('\n', '<br>') + node_hover[bus.id] = repr(bus).replace('\n', '<br>') # Use cached colors for efficient lookup flow_carriers = self._fs.flow_carriers @@ -563,11 +563,11 @@ def plot( links['source'].append(source) links['target'].append(target) links['value'].append(1) # Equal width for all links (no solution data) - links['label'].append(flow.label_full) + links['label'].append(flow.id) links['customdata'].append(repr(flow).replace('\n', '<br>')) # Flow repr for hover # Get carrier color for this flow (subtle/semi-transparent) using cached colors - carrier_name = flow_carriers.get(flow.label_full) + carrier_name = flow_carriers.get(flow.id) color = carrier_colors.get(carrier_name) if carrier_name else None links['color'].append(hex_to_rgba(color, alpha=0.4) if color else hex_to_rgba('', alpha=0.4)) @@ -581,7 +581,7 @@ def plot( # If user provided colors, process them for buses if colors is not None: - bus_labels = [bus.label for bus in self._fs.buses.values()] + bus_labels = [bus.id for bus in self._fs.buses.values()] bus_color_map = process_colors(colors, bus_labels) else: bus_color_map = bus_colors_cached @@ -689,7 +689,7 @@ def plot_legacy( and annotated with node information. """ warnings.warn( - f'This method is deprecated and will be removed in v{DEPRECATION_REMOVAL_VERSION}. ' + f'This method is deprecated and will be removed in v{DEPRECATION_REMOVAL_V7}. ' 'Use flow_system.topology.plot() instead.', DeprecationWarning, stacklevel=2, diff --git a/flixopt/transform_accessor.py b/flixopt/transform_accessor.py index 0bc8b089d..fc0780cad 100644 --- a/flixopt/transform_accessor.py +++ b/flixopt/transform_accessor.py @@ -1381,7 +1381,7 @@ def fix_sizes( # Check flows for flow in new_fs.flows.values(): - if flow.label_full == base_name and isinstance(flow.size, InvestParameters): + if flow.id == base_name and isinstance(flow.size, InvestParameters): flow.size.fixed_size = fixed_value flow.size.mandatory = True found = True @@ -1392,9 +1392,7 @@ if not found: for component in new_fs.components.values(): if hasattr(component, 'capacity_in_flow_hours'): - if component.label == base_name and isinstance( - component.capacity_in_flow_hours, InvestParameters - ): + if component.id == base_name and isinstance(component.capacity_in_flow_hours, InvestParameters): component.capacity_in_flow_hours.fixed_size = fixed_value component.capacity_in_flow_hours.mandatory = True found = True diff --git 
a/tests/conftest.py b/tests/conftest.py index c519ef34b..970b8f285 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -151,14 +151,14 @@ def simple(): 'Boiler', thermal_efficiency=0.5, thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=50, relative_minimum=5 / 50, relative_maximum=1, status_parameters=fx.StatusParameters(), ), - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), ) @staticmethod @@ -169,8 +169,8 @@ def complex(): thermal_efficiency=0.5, status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 0, 'CO2': 1000}), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', load_factor_max=1.0, load_factor_min=0.1, relative_minimum=5 / 50, @@ -193,7 +193,7 @@ def complex(): ), flow_hours_max=1e6, ), - fuel_flow=fx.Flow('Q_fu', bus='Gas', size=200, relative_minimum=0, relative_maximum=1), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu', size=200, relative_minimum=0, relative_maximum=1), ) class CHPs: @@ -205,10 +205,10 @@ def simple(): thermal_efficiency=0.5, electrical_efficiency=0.4, electrical_flow=fx.Flow( - 'P_el', bus='Strom', size=60, relative_minimum=5 / 60, status_parameters=fx.StatusParameters() + 'Strom', flow_id='P_el', size=60, relative_minimum=5 / 60, status_parameters=fx.StatusParameters() ), - thermal_flow=fx.Flow('Q_th', bus='Fernwärme'), - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), ) @staticmethod @@ -219,9 +219,11 @@ def base(): thermal_efficiency=0.5, electrical_efficiency=0.4, status_parameters=fx.StatusParameters(effects_per_startup=0.01), - electrical_flow=fx.Flow('P_el', bus='Strom', size=60, relative_minimum=5 / 60, previous_flow_rate=10), - thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=1e3), - fuel_flow=fx.Flow('Q_fu', bus='Gas', size=1e3), + electrical_flow=fx.Flow( + 'Strom', flow_id='P_el', size=60, relative_minimum=5 / 60, 
previous_flow_rate=10 + ), + thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=1e3), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu', size=1e3), ) class LinearConverters: @@ -230,10 +232,10 @@ def piecewise(): """Piecewise converter from flow_system_piecewise_conversion""" return fx.LinearConverter( 'KWK', - inputs=[fx.Flow('Q_fu', bus='Gas', size=200)], + inputs=[fx.Flow('Gas', flow_id='Q_fu', size=200)], outputs=[ - fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10), - fx.Flow('Q_th', bus='Fernwärme', size=100), + fx.Flow('Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10), + fx.Flow('Fernwärme', flow_id='Q_th', size=100), ], piecewise_conversion=fx.PiecewiseConversion( { @@ -250,10 +252,10 @@ def segments(timesteps_length): """Segments converter with time-varying piecewise conversion""" return fx.LinearConverter( 'KWK', - inputs=[fx.Flow('Q_fu', bus='Gas', size=200)], + inputs=[fx.Flow('Gas', flow_id='Q_fu', size=200)], outputs=[ - fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10), - fx.Flow('Q_th', bus='Fernwärme', size=100), + fx.Flow('Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10), + fx.Flow('Fernwärme', flow_id='Q_th', size=100), ], piecewise_conversion=fx.PiecewiseConversion( { @@ -284,11 +286,11 @@ def simple(timesteps_length=9): return fx.Storage( 'Speicher', charging=fx.Flow( - 'Q_th_load', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th_load', size=fx.InvestParameters(fixed_size=1e4, mandatory=True), # Investment for testing sizes ), - discharging=fx.Flow('Q_th_unload', bus='Fernwärme', size=1e4), + discharging=fx.Flow('Fernwärme', flow_id='Q_th_unload', size=1e4), capacity_in_flow_hours=fx.InvestParameters(effects_of_investment=20, fixed_size=30, mandatory=True), initial_charge_state=0, relative_maximum_charge_state=1 / 100 * np.array(charge_state_values), @@ -318,8 +320,8 @@ def complex(): ) return fx.Storage( 'Speicher', - 
charging=fx.Flow('Q_th_load', bus='Fernwärme', size=1e4), - discharging=fx.Flow('Q_th_unload', bus='Fernwärme', size=1e4), + charging=fx.Flow('Fernwärme', flow_id='Q_th_load', size=1e4), + discharging=fx.Flow('Fernwärme', flow_id='Q_th_unload', size=1e4), capacity_in_flow_hours=invest_speicher, initial_charge_state=0, maximal_final_charge_state=10, @@ -376,21 +378,24 @@ class Sinks: def heat_load(thermal_profile): """Create thermal heat load sink""" return fx.Sink( - 'Wärmelast', inputs=[fx.Flow('Q_th_Last', bus='Fernwärme', size=1, fixed_relative_profile=thermal_profile)] + 'Wärmelast', + inputs=[fx.Flow('Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_profile)], ) @staticmethod def electricity_feed_in(electrical_price_profile): """Create electricity feed-in sink""" return fx.Sink( - 'Einspeisung', inputs=[fx.Flow('P_el', bus='Strom', effects_per_flow_hour=-1 * electrical_price_profile)] + 'Einspeisung', + inputs=[fx.Flow('Strom', flow_id='P_el', effects_per_flow_hour=-1 * electrical_price_profile)], ) @staticmethod def electricity_load(electrical_profile): """Create electrical load sink (for flow_system_long)""" return fx.Sink( - 'Stromlast', inputs=[fx.Flow('P_el_Last', bus='Strom', size=1, fixed_relative_profile=electrical_profile)] + 'Stromlast', + inputs=[fx.Flow('Strom', flow_id='P_el_Last', size=1, fixed_relative_profile=electrical_profile)], ) @@ -408,7 +413,7 @@ def gas_with_costs_and_co2(): def gas_with_costs(): """Simple gas tariff without CO2""" return fx.Source( - 'Gastarif', outputs=[fx.Flow(label='Q_Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': 0.04})] + 'Gastarif', outputs=[fx.Flow('Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': 0.04})] ) @@ -598,23 +603,27 @@ def flow_system_long(): Effects.co2(), Effects.primary_energy(), fx.Sink( - 'Wärmelast', inputs=[fx.Flow('Q_th_Last', bus='Fernwärme', size=1, fixed_relative_profile=thermal_load_ts)] + 'Wärmelast', + inputs=[fx.Flow('Fernwärme', 
flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_load_ts)], ), fx.Sink( - 'Stromlast', inputs=[fx.Flow('P_el_Last', bus='Strom', size=1, fixed_relative_profile=electrical_load_ts)] + 'Stromlast', + inputs=[fx.Flow('Strom', flow_id='P_el_Last', size=1, fixed_relative_profile=electrical_load_ts)], ), fx.Source( 'Kohletarif', - outputs=[fx.Flow('Q_Kohle', bus='Kohle', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})], + outputs=[fx.Flow('Kohle', flow_id='Q_Kohle', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})], ), fx.Source( 'Gastarif', - outputs=[fx.Flow('Q_Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})], + outputs=[ + fx.Flow('Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3}) + ], ), - fx.Sink('Einspeisung', inputs=[fx.Flow('P_el', bus='Strom', size=1000, effects_per_flow_hour=p_feed_in)]), + fx.Sink('Einspeisung', inputs=[fx.Flow('Strom', flow_id='P_el', size=1000, effects_per_flow_hour=p_feed_in)]), fx.Source( 'Stromtarif', - outputs=[fx.Flow('P_el', bus='Strom', size=1000, effects_per_flow_hour={'costs': p_sell, 'CO2': 0.3})], + outputs=[fx.Flow('Strom', flow_id='P_el', size=1000, effects_per_flow_hour={'costs': p_sell, 'CO2': 0.3})], ), ) @@ -622,10 +631,10 @@ def flow_system_long(): fx.linear_converters.Boiler( 'Kessel', thermal_efficiency=0.85, - thermal_flow=fx.Flow(label='Q_th', bus='Fernwärme'), + thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th'), fuel_flow=fx.Flow( - label='Q_fu', - bus='Gas', + 'Gas', + flow_id='Q_fu', size=95, relative_minimum=12 / 95, previous_flow_rate=0, @@ -637,14 +646,14 @@ def flow_system_long(): thermal_efficiency=(eta_th := 0.58), electrical_efficiency=(eta_el := 0.22), status_parameters=fx.StatusParameters(effects_per_startup=24000), - fuel_flow=fx.Flow('Q_fu', bus='Kohle', size=(fuel_size := 288), relative_minimum=87 / fuel_size), - electrical_flow=fx.Flow('P_el', bus='Strom', size=fuel_size * eta_el), - 
thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=fuel_size * eta_th), + fuel_flow=fx.Flow('Kohle', flow_id='Q_fu', size=(fuel_size := 288), relative_minimum=87 / fuel_size), + electrical_flow=fx.Flow('Strom', flow_id='P_el', size=fuel_size * eta_el), + thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=fuel_size * eta_th), ), fx.Storage( 'Speicher', - charging=fx.Flow('Q_th_load', size=137, bus='Fernwärme'), - discharging=fx.Flow('Q_th_unload', size=158, bus='Fernwärme'), + charging=fx.Flow('Fernwärme', flow_id='Q_th_load', size=137), + discharging=fx.Flow('Fernwärme', flow_id='Q_th_unload', size=158), capacity_in_flow_hours=684, initial_charge_state=137, minimal_final_charge_state=137, diff --git a/tests/flow_system/test_flow_system_resample.py b/tests/flow_system/test_flow_system_resample.py index dd5e19176..360b1bfc1 100644 --- a/tests/flow_system/test_flow_system_resample.py +++ b/tests/flow_system/test_flow_system_resample.py @@ -18,12 +18,10 @@ def simple_fs(): ) fs.add_elements( fx.Sink( - label='demand', - inputs=[fx.Flow(label='in', bus='heat', fixed_relative_profile=np.linspace(10, 20, 24), size=1)], - ), - fx.Source( - label='source', outputs=[fx.Flow(label='out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})] + 'demand', + inputs=[fx.Flow('heat', flow_id='in', fixed_relative_profile=np.linspace(10, 20, 24), size=1)], ), + fx.Source('source', outputs=[fx.Flow('heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]), ) return fs @@ -43,16 +41,16 @@ def complex_fs(): # Storage fs.add_elements( fx.Storage( - label='battery', - charging=fx.Flow('charge', bus='elec', size=10), - discharging=fx.Flow('discharge', bus='elec', size=10), + 'battery', + charging=fx.Flow('elec', flow_id='charge', size=10), + discharging=fx.Flow('elec', flow_id='discharge', size=10), capacity_in_flow_hours=fx.InvestParameters(fixed_size=100), ) ) # Piecewise converter converter = fx.linear_converters.Boiler( - 'boiler', thermal_efficiency=0.9, 
fuel_flow=fx.Flow('gas', bus='elec'), thermal_flow=fx.Flow('heat', bus='heat') + 'boiler', thermal_efficiency=0.9, fuel_flow=fx.Flow('elec', flow_id='gas'), thermal_flow=fx.Flow('heat') ) converter.thermal_flow.size = 100 fs.add_elements(converter) @@ -60,11 +58,11 @@ def complex_fs(): # Component with investment fs.add_elements( fx.Source( - label='pv', + 'pv', outputs=[ fx.Flow( - 'gen', - bus='elec', + 'elec', + flow_id='gen', size=fx.InvestParameters(maximum_size=1000, effects_of_investment_per_size={'costs': 100}), ) ], @@ -102,8 +100,8 @@ def test_resample_methods(method, expected): fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) fs.add_elements( fx.Sink( - label='s', - inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.array([10.0, 20.0, 30.0, 40.0]), size=1)], + 's', + inputs=[fx.Flow('b', flow_id='in', fixed_relative_profile=np.array([10.0, 20.0, 30.0, 40.0]), size=1)], ) ) @@ -146,9 +144,7 @@ def test_with_dimensions(simple_fs, dim_name, dim_value): """Test resampling preserves period/scenario dimensions.""" fs = fx.FlowSystem(simple_fs.timesteps, **{dim_name: dim_value}) fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) - fs.add_elements( - fx.Sink(label='d', inputs=[fx.Flow(label='in', bus='h', fixed_relative_profile=np.ones(24), size=1)]) - ) + fs.add_elements(fx.Sink('d', inputs=[fx.Flow('h', flow_id='in', fixed_relative_profile=np.ones(24), size=1)])) fs_r = fs.resample('2h', method='mean') assert getattr(fs_r, dim_name) is not None @@ -199,10 +195,8 @@ def test_modeling(with_dim): fs = fx.FlowSystem(ts, **kwargs) fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) fs.add_elements( - fx.Sink( - label='d', inputs=[fx.Flow(label='in', bus='h', fixed_relative_profile=np.linspace(10, 30, 48), size=1)] - ), - fx.Source(label='s', 
outputs=[fx.Flow(label='out', bus='h', size=100, effects_per_flow_hour={'costs': 0.05})]), + fx.Sink('d', inputs=[fx.Flow('h', flow_id='in', fixed_relative_profile=np.linspace(10, 30, 48), size=1)]), + fx.Source('s', outputs=[fx.Flow('h', flow_id='out', size=100, effects_per_flow_hour={'costs': 0.05})]), ) fs_r = fs.resample('4h', method='mean') @@ -218,10 +212,8 @@ def test_model_structure_preserved(): fs = fx.FlowSystem(ts) fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) fs.add_elements( - fx.Sink( - label='d', inputs=[fx.Flow(label='in', bus='h', fixed_relative_profile=np.linspace(10, 30, 48), size=1)] - ), - fx.Source(label='s', outputs=[fx.Flow(label='out', bus='h', size=100, effects_per_flow_hour={'costs': 0.05})]), + fx.Sink('d', inputs=[fx.Flow('h', flow_id='in', fixed_relative_profile=np.linspace(10, 30, 48), size=1)]), + fx.Source('s', outputs=[fx.Flow('h', flow_id='out', size=100, effects_per_flow_hour={'costs': 0.05})]), ) fs.build_model() @@ -264,9 +256,7 @@ def test_frequencies(freq, exp_len): ts = pd.date_range('2023-01-01', periods=168, freq='h') fs = fx.FlowSystem(ts) fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) - fs.add_elements( - fx.Sink(label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.ones(168), size=1)]) - ) + fs.add_elements(fx.Sink('s', inputs=[fx.Flow('b', flow_id='in', fixed_relative_profile=np.ones(168), size=1)])) assert len(fs.resample(freq, method='mean').timesteps) == exp_len @@ -276,9 +266,7 @@ def test_irregular_timesteps_error(): ts = pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time') fs = fx.FlowSystem(ts) fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) - fs.add_elements( - fx.Sink(label='s', inputs=[fx.Flow(label='in', bus='b', 
fixed_relative_profile=np.ones(3), size=1)]) - ) + fs.add_elements(fx.Sink('s', inputs=[fx.Flow('b', flow_id='in', fixed_relative_profile=np.ones(3), size=1)])) with pytest.raises(ValueError, match='Resampling created gaps'): fs.transform.resample('1h', method='mean') @@ -290,9 +278,7 @@ def test_irregular_timesteps_with_fill_gaps(): fs = fx.FlowSystem(ts) fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)) fs.add_elements( - fx.Sink( - label='s', inputs=[fx.Flow(label='in', bus='b', fixed_relative_profile=np.array([1.0, 2.0, 4.0]), size=1)] - ) + fx.Sink('s', inputs=[fx.Flow('b', flow_id='in', fixed_relative_profile=np.array([1.0, 2.0, 4.0]), size=1)]) ) # Test with ffill diff --git a/tests/plotting/test_solution_and_plotting.py b/tests/plotting/test_solution_and_plotting.py index 9494306d9..50cc2ec59 100644 --- a/tests/plotting/test_solution_and_plotting.py +++ b/tests/plotting/test_solution_and_plotting.py @@ -154,8 +154,8 @@ def test_element_solution_raises_for_unlinked_element(self): boiler = fx.linear_converters.Boiler( 'TestBoiler', thermal_efficiency=0.9, - thermal_flow=fx.Flow('Q_th', bus='Heat'), - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + thermal_flow=fx.Flow('Heat', flow_id='Q_th'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), ) with pytest.raises(ValueError, match='not linked to a FlowSystem'): _ = boiler.solution diff --git a/tests/superseded/math/test_flow.py b/tests/superseded/math/test_flow.py index 61c5dde2d..fa9d558cb 100644 --- a/tests/superseded/math/test_flow.py +++ b/tests/superseded/math/test_flow.py @@ -13,7 +13,7 @@ def test_flow_minimal(self, basic_flow_system_linopy_coords, coords_config): """Test that flow model constraints are correctly generated.""" flow_system, coords_config = basic_flow_system_linopy_coords, coords_config - flow = fx.Flow('Wärme', bus='Fernwärme', size=100) + flow = fx.Flow('Fernwärme', flow_id='Wärme', size=100) flow_system.add_elements(fx.Sink('Sink', 
inputs=[flow])) @@ -34,8 +34,8 @@ def test_flow(self, basic_flow_system_linopy_coords, coords_config): timesteps = flow_system.timesteps flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=100, relative_minimum=np.linspace(0, 0.5, timesteps.size), relative_maximum=np.linspace(0.5, 1, timesteps.size), @@ -69,7 +69,7 @@ def test_effects_per_flow_hour(self, basic_flow_system_linopy_coords, coords_con co2_per_flow_hour = np.linspace(4, 5, timesteps.size) flow = fx.Flow( - 'Wärme', bus='Fernwärme', effects_per_flow_hour={'costs': costs_per_flow_hour, 'CO2': co2_per_flow_hour} + 'Fernwärme', flow_id='Wärme', effects_per_flow_hour={'costs': costs_per_flow_hour, 'CO2': co2_per_flow_hour} ) flow_system.add_elements(fx.Sink('Sink', inputs=[flow]), fx.Effect('CO2', 't', '')) model = create_linopy_model(flow_system) @@ -93,8 +93,8 @@ def test_flow_invest(self, basic_flow_system_linopy_coords, coords_config): timesteps = flow_system.timesteps flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=fx.InvestParameters(minimum_size=20, maximum_size=100, mandatory=True), relative_minimum=np.linspace(0.1, 0.5, timesteps.size), relative_maximum=np.linspace(0.5, 1, timesteps.size), @@ -127,8 +127,8 @@ def test_flow_invest_optional(self, basic_flow_system_linopy_coords, coords_conf timesteps = flow_system.timesteps flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=fx.InvestParameters(minimum_size=20, maximum_size=100, mandatory=False), relative_minimum=np.linspace(0.1, 0.5, timesteps.size), relative_maximum=np.linspace(0.5, 1, timesteps.size), @@ -159,8 +159,8 @@ def test_flow_invest_optional_wo_min_size(self, basic_flow_system_linopy_coords, timesteps = flow_system.timesteps flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=fx.InvestParameters(maximum_size=100, mandatory=False), relative_minimum=np.linspace(0.1, 0.5, timesteps.size), 
relative_maximum=np.linspace(0.5, 1, timesteps.size), @@ -182,8 +182,8 @@ def test_flow_invest_wo_min_size_non_optional(self, basic_flow_system_linopy_coo timesteps = flow_system.timesteps flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=fx.InvestParameters(maximum_size=100, mandatory=True), relative_minimum=np.linspace(0.1, 0.5, timesteps.size), relative_maximum=np.linspace(0.5, 1, timesteps.size), @@ -207,8 +207,8 @@ def test_flow_invest_fixed_size(self, basic_flow_system_linopy_coords, coords_co flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=fx.InvestParameters(fixed_size=75, mandatory=True), relative_minimum=0.2, relative_maximum=0.9, @@ -237,11 +237,11 @@ def test_flow_invest_with_effects(self, basic_flow_system_linopy_coords, coords_ flow_system, coords_config = basic_flow_system_linopy_coords, coords_config # Create effects - co2 = fx.Effect(label='CO2', unit='ton', description='CO2 emissions') + co2 = fx.Effect('CO2', unit='ton', description='CO2 emissions') flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=fx.InvestParameters( minimum_size=20, maximum_size=100, @@ -264,8 +264,8 @@ def test_flow_invest_divest_effects(self, basic_flow_system_linopy_coords, coord flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=fx.InvestParameters( minimum_size=20, maximum_size=100, @@ -289,8 +289,8 @@ def test_flow_on(self, basic_flow_system_linopy_coords, coords_config): flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=100, relative_minimum=0.2, relative_maximum=0.8, @@ -329,8 +329,8 @@ def test_effects_per_active_hour(self, basic_flow_system_linopy_coords, coords_c 
co2_per_running_hour = np.linspace(4, 5, timesteps.size) flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=100, status_parameters=fx.StatusParameters( effects_per_active_hour={'costs': costs_per_running_hour, 'CO2': co2_per_running_hour} @@ -353,8 +353,8 @@ def test_consecutive_on_hours(self, basic_flow_system_linopy_coords, coords_conf flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=100, previous_flow_rate=0, # Required to get initial constraint status_parameters=fx.StatusParameters( @@ -387,8 +387,8 @@ def test_consecutive_on_hours_previous(self, basic_flow_system_linopy_coords, co flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=100, status_parameters=fx.StatusParameters( min_uptime=2, # Must run for at least 2 hours when active @@ -414,8 +414,8 @@ def test_consecutive_off_hours(self, basic_flow_system_linopy_coords, coords_con flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=100, previous_flow_rate=0, # Required to get initial constraint (was OFF for 1h, so previous_downtime=1) status_parameters=fx.StatusParameters( @@ -448,8 +448,8 @@ def test_consecutive_off_hours_previous(self, basic_flow_system_linopy_coords, c flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=100, status_parameters=fx.StatusParameters( min_downtime=4, # Must stay inactive for at least 4 hours when shut down @@ -475,8 +475,8 @@ def test_switch_on_constraints(self, basic_flow_system_linopy_coords, coords_con flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( - 'Wärme', - 
bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=100, previous_flow_rate=0, # Required to get initial constraint status_parameters=fx.StatusParameters( @@ -513,8 +513,8 @@ def test_on_hours_limits(self, basic_flow_system_linopy_coords, coords_config): flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=100, status_parameters=fx.StatusParameters( active_hours_min=20, # Minimum 20 hours of operation @@ -544,8 +544,8 @@ class TestFlowOnInvestModel: def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_config): flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=fx.InvestParameters(minimum_size=20, maximum_size=200, mandatory=False), relative_minimum=0.2, relative_maximum=0.8, @@ -574,8 +574,8 @@ def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_c def test_flow_on_invest_non_optional(self, basic_flow_system_linopy_coords, coords_config): flow_system, coords_config = basic_flow_system_linopy_coords, coords_config flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=fx.InvestParameters(minimum_size=20, maximum_size=200, mandatory=True), relative_minimum=0.2, relative_maximum=0.8, @@ -613,8 +613,8 @@ def test_fixed_relative_profile(self, basic_flow_system_linopy_coords, coords_co profile = np.sin(np.linspace(0, 2 * np.pi, len(timesteps))) * 0.5 + 0.5 # Values between 0 and 1 flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', size=100, fixed_relative_profile=profile, ) @@ -638,8 +638,8 @@ def test_fixed_profile_with_investment(self, basic_flow_system_linopy_coords, co profile = np.sin(np.linspace(0, 2 * np.pi, len(timesteps))) * 0.5 + 0.5 flow = fx.Flow( - 'Wärme', - bus='Fernwärme', + 'Fernwärme', + flow_id='Wärme', 
size=fx.InvestParameters(minimum_size=50, maximum_size=200, mandatory=False), fixed_relative_profile=profile, ) diff --git a/tests/superseded/math/test_linear_converter.py b/tests/superseded/math/test_linear_converter.py index 978194a92..2057581e4 100644 --- a/tests/superseded/math/test_linear_converter.py +++ b/tests/superseded/math/test_linear_converter.py @@ -14,12 +14,12 @@ def test_basic_linear_converter(self, basic_flow_system_linopy_coords, coords_co flow_system, coords_config = basic_flow_system_linopy_coords, coords_config # Create input and output flows - input_flow = fx.Flow('input', bus='input_bus', size=100) - output_flow = fx.Flow('output', bus='output_bus', size=100) + input_flow = fx.Flow('input_bus', flow_id='input', size=100) + output_flow = fx.Flow('output_bus', flow_id='output', size=100) # Create a simple linear converter with constant conversion factor converter = fx.LinearConverter( - label='Converter', + 'Converter', inputs=[input_flow], outputs=[output_flow], conversion_factors=[{input_flow.label: 0.8, output_flow.label: 1.0}], @@ -48,12 +48,12 @@ def test_linear_converter_time_varying(self, basic_flow_system_linopy_coords, co varying_efficiency = np.linspace(0.7, 0.9, len(timesteps)) # Create input and output flows - input_flow = fx.Flow('input', bus='input_bus', size=100) - output_flow = fx.Flow('output', bus='output_bus', size=100) + input_flow = fx.Flow('input_bus', flow_id='input', size=100) + output_flow = fx.Flow('output_bus', flow_id='output', size=100) # Create a linear converter with time-varying conversion factor converter = fx.LinearConverter( - label='Converter', + 'Converter', inputs=[input_flow], outputs=[output_flow], conversion_factors=[{input_flow.label: varying_efficiency, output_flow.label: 1.0}], @@ -78,14 +78,14 @@ def test_linear_converter_multiple_factors(self, basic_flow_system_linopy_coords flow_system, coords_config = basic_flow_system_linopy_coords, coords_config # Create flows - input_flow1 = fx.Flow('input1', 
bus='input_bus1', size=100) - input_flow2 = fx.Flow('input2', bus='input_bus2', size=100) - output_flow1 = fx.Flow('output1', bus='output_bus1', size=100) - output_flow2 = fx.Flow('output2', bus='output_bus2', size=100) + input_flow1 = fx.Flow('input_bus1', flow_id='input1', size=100) + input_flow2 = fx.Flow('input_bus2', flow_id='input2', size=100) + output_flow1 = fx.Flow('output_bus1', flow_id='output1', size=100) + output_flow2 = fx.Flow('output_bus2', flow_id='output2', size=100) # Create a linear converter with multiple inputs/outputs and conversion factors converter = fx.LinearConverter( - label='Converter', + 'Converter', inputs=[input_flow1, input_flow2], outputs=[output_flow1, output_flow2], conversion_factors=[ @@ -111,8 +111,8 @@ def test_linear_converter_with_status(self, basic_flow_system_linopy_coords, coo flow_system, coords_config = basic_flow_system_linopy_coords, coords_config # Create input and output flows - input_flow = fx.Flow('input', bus='input_bus', size=100) - output_flow = fx.Flow('output', bus='output_bus', size=100) + input_flow = fx.Flow('input_bus', flow_id='input', size=100) + output_flow = fx.Flow('output_bus', flow_id='output', size=100) # Create StatusParameters status_params = fx.StatusParameters( @@ -121,7 +121,7 @@ def test_linear_converter_with_status(self, basic_flow_system_linopy_coords, coo # Create a linear converter with StatusParameters converter = fx.LinearConverter( - label='Converter', + 'Converter', inputs=[input_flow], outputs=[output_flow], conversion_factors=[{input_flow.label: 0.8, output_flow.label: 1.0}], @@ -158,14 +158,14 @@ def test_linear_converter_multidimensional(self, basic_flow_system_linopy_coords flow_system, coords_config = basic_flow_system_linopy_coords, coords_config # Create a more complex setup with multiple flows - input_flow1 = fx.Flow('fuel', bus='fuel_bus', size=100) - input_flow2 = fx.Flow('electricity', bus='electricity_bus', size=50) - output_flow1 = fx.Flow('heat', bus='heat_bus', 
size=70) - output_flow2 = fx.Flow('cooling', bus='cooling_bus', size=30) + input_flow1 = fx.Flow('fuel_bus', flow_id='fuel', size=100) + input_flow2 = fx.Flow('electricity_bus', flow_id='electricity', size=50) + output_flow1 = fx.Flow('heat_bus', flow_id='heat', size=70) + output_flow2 = fx.Flow('cooling_bus', flow_id='cooling', size=30) # Create a CHP-like converter with more complex connections converter = fx.LinearConverter( - label='MultiConverter', + 'MultiConverter', inputs=[input_flow1, input_flow2], outputs=[output_flow1, output_flow2], conversion_factors=[ @@ -205,14 +205,14 @@ def test_edge_case_time_varying_conversion(self, basic_flow_system_linopy_coords ) # Create input and output flows - input_flow = fx.Flow('electricity', bus='electricity_bus', size=100) - output_flow = fx.Flow('heat', bus='heat_bus', size=500) # Higher maximum to allow for COP of 5 + input_flow = fx.Flow('electricity_bus', flow_id='electricity', size=100) + output_flow = fx.Flow('heat_bus', flow_id='heat', size=500) # Higher maximum to allow for COP of 5 conversion_factors = [{input_flow.label: fluctuating_cop, output_flow.label: np.ones(len(timesteps))}] # Create the converter converter = fx.LinearConverter( - label='VariableConverter', inputs=[input_flow], outputs=[output_flow], conversion_factors=conversion_factors + 'VariableConverter', inputs=[input_flow], outputs=[output_flow], conversion_factors=conversion_factors ) # Add to flow system @@ -229,8 +229,8 @@ def test_piecewise_conversion(self, basic_flow_system_linopy_coords, coords_conf flow_system, coords_config = basic_flow_system_linopy_coords, coords_config # Create input and output flows - input_flow = fx.Flow('input', bus='input_bus', size=100) - output_flow = fx.Flow('output', bus='output_bus', size=100) + input_flow = fx.Flow('input_bus', flow_id='input', size=100) + output_flow = fx.Flow('output_bus', flow_id='output', size=100) # Create pieces for piecewise conversion # For input flow: two pieces from 0-50 and 50-100 
@@ -246,7 +246,7 @@ def test_piecewise_conversion(self, basic_flow_system_linopy_coords, coords_conf # Create a linear converter with piecewise conversion converter = fx.LinearConverter( - label='Converter', inputs=[input_flow], outputs=[output_flow], piecewise_conversion=piecewise_conversion + 'Converter', inputs=[input_flow], outputs=[output_flow], piecewise_conversion=piecewise_conversion ) # Add to flow system @@ -269,8 +269,8 @@ def test_piecewise_conversion_with_status(self, basic_flow_system_linopy_coords, flow_system, coords_config = basic_flow_system_linopy_coords, coords_config # Create input and output flows - input_flow = fx.Flow('input', bus='input_bus', size=100) - output_flow = fx.Flow('output', bus='output_bus', size=100) + input_flow = fx.Flow('input_bus', flow_id='input', size=100) + output_flow = fx.Flow('output_bus', flow_id='output', size=100) # Create pieces for piecewise conversion input_pieces = [fx.Piece(start=0, end=50), fx.Piece(start=50, end=100)] @@ -289,7 +289,7 @@ def test_piecewise_conversion_with_status(self, basic_flow_system_linopy_coords, # Create a linear converter with piecewise conversion and status parameters converter = fx.LinearConverter( - label='Converter', + 'Converter', inputs=[input_flow], outputs=[output_flow], piecewise_conversion=piecewise_conversion, diff --git a/tests/superseded/test_functional.py b/tests/superseded/test_functional.py index 3561248a9..2826379b5 100644 --- a/tests/superseded/test_functional.py +++ b/tests/superseded/test_functional.py @@ -70,10 +70,10 @@ def flow_system_base(timesteps: pd.DatetimeIndex) -> fx.FlowSystem: flow_system.add_elements(fx.Effect('costs', '€', 'Kosten', is_standard=True, is_objective=True)) flow_system.add_elements( fx.Sink( - label='Wärmelast', - inputs=[fx.Flow(label='Wärme', bus='Fernwärme', fixed_relative_profile=data.thermal_demand, size=1)], + 'Wärmelast', + inputs=[fx.Flow('Fernwärme', flow_id='Wärme', fixed_relative_profile=data.thermal_demand, size=1)], ), - 
fx.Source(label='Gastarif', outputs=[fx.Flow(label='Gas', bus='Gas', effects_per_flow_hour=1)]), + fx.Source('Gastarif', outputs=[fx.Flow('Gas', flow_id='Gas', effects_per_flow_hour=1)]), ) return flow_system @@ -84,8 +84,8 @@ def flow_system_minimal(timesteps) -> fx.FlowSystem: fx.linear_converters.Boiler( 'Boiler', thermal_efficiency=0.5, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), - thermal_flow=fx.Flow('Q_th', bus='Fernwärme'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), + thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th'), ) ) return flow_system @@ -140,10 +140,10 @@ def test_fixed_size(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler', thermal_efficiency=0.5, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=fx.InvestParameters(fixed_size=1000, effects_of_investment=10, effects_of_investment_per_size=1), ), ) @@ -179,10 +179,10 @@ def test_optimize_size(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler', thermal_efficiency=0.5, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=fx.InvestParameters(effects_of_investment=10, effects_of_investment_per_size=1, maximum_size=100), ), ) @@ -218,10 +218,10 @@ def test_size_bounds(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler', thermal_efficiency=0.5, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=fx.InvestParameters( minimum_size=40, maximum_size=100, effects_of_investment=10, effects_of_investment_per_size=1 ), @@ -259,10 +259,10 @@ def test_optional_invest(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler', thermal_efficiency=0.5, - 
fuel_flow=fx.Flow('Q_fu', bus='Gas'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=fx.InvestParameters( mandatory=False, minimum_size=40, @@ -275,10 +275,10 @@ def test_optional_invest(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler_optional', thermal_efficiency=0.5, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=fx.InvestParameters( mandatory=False, minimum_size=50, @@ -336,8 +336,8 @@ def test_on(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler', thermal_efficiency=0.5, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), - thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=100, status_parameters=fx.StatusParameters()), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), + thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=100, status_parameters=fx.StatusParameters()), ) ) @@ -373,10 +373,10 @@ def test_off(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler', thermal_efficiency=0.5, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=100, status_parameters=fx.StatusParameters(max_downtime=100), ), @@ -422,10 +422,10 @@ def test_startup_shutdown(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler', thermal_efficiency=0.5, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=100, status_parameters=fx.StatusParameters(force_startup_tracking=True), ), @@ -478,10 +478,10 @@ def test_on_total_max(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler', thermal_efficiency=0.5, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + 
fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=100, status_parameters=fx.StatusParameters(active_hours_max=1), ), @@ -489,8 +489,8 @@ def test_on_total_max(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler_backup', thermal_efficiency=0.2, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), - thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=100), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), + thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=100), ), ) @@ -526,10 +526,10 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler', thermal_efficiency=0.5, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=100, status_parameters=fx.StatusParameters(active_hours_max=2), ), @@ -537,10 +537,10 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler_backup', thermal_efficiency=0.2, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=100, status_parameters=fx.StatusParameters(active_hours_min=3), ), @@ -597,10 +597,10 @@ def test_consecutive_uptime_downtime(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler', thermal_efficiency=0.5, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=100, previous_flow_rate=0, # Required for initial uptime constraint status_parameters=fx.StatusParameters(max_uptime=2, min_uptime=2), @@ -609,8 +609,8 @@ def test_consecutive_uptime_downtime(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler_backup', thermal_efficiency=0.2, - fuel_flow=fx.Flow('Q_fu', 
bus='Gas'), - thermal_flow=fx.Flow('Q_th', bus='Fernwärme', size=100), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), + thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=100), ), ) flow_system['Wärmelast'].inputs[0].fixed_relative_profile = np.array([5, 10, 20, 18, 12]) @@ -656,16 +656,16 @@ def test_consecutive_off(solver_fixture, time_steps_fixture): fx.linear_converters.Boiler( 'Boiler', thermal_efficiency=0.5, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), - thermal_flow=fx.Flow('Q_th', bus='Fernwärme'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), + thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th'), ), fx.linear_converters.Boiler( 'Boiler_backup', thermal_efficiency=0.2, - fuel_flow=fx.Flow('Q_fu', bus='Gas'), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu'), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', size=100, previous_flow_rate=np.array([20]), # Otherwise its Off before the start status_parameters=fx.StatusParameters(max_downtime=2, min_downtime=2), diff --git a/tests/test_scenarios.py b/tests/test_scenarios.py index 278ceb44a..f4b07f9f5 100644 --- a/tests/test_scenarios.py +++ b/tests/test_scenarios.py @@ -59,13 +59,13 @@ def test_system(): electricity_bus = Bus('Electricity') # Create a demand sink with scenario-dependent profiles - demand = Flow(label='Demand', bus=electricity_bus.label_full, fixed_relative_profile=demand_profiles) + demand = Flow(electricity_bus.label_full, flow_id='Demand', fixed_relative_profile=demand_profiles) demand_sink = Sink('Demand', inputs=[demand]) # Create a power source with investment option power_gen = Flow( - label='Generation', - bus=electricity_bus.label_full, + electricity_bus.label_full, + flow_id='Generation', size=InvestParameters( minimum_size=0, maximum_size=20, @@ -76,10 +76,10 @@ def test_system(): generator = Source('Generator', outputs=[power_gen]) # Create a storage for electricity - storage_charge = Flow(label='Charge', bus=electricity_bus.label_full, size=10) - storage_discharge 
= Flow(label='Discharge', bus=electricity_bus.label_full, size=10) + storage_charge = Flow(electricity_bus.label_full, flow_id='Charge', size=10) + storage_discharge = Flow(electricity_bus.label_full, flow_id='Discharge', size=10) storage = Storage( - label='Battery', + 'Battery', charging=storage_charge, discharging=storage_discharge, capacity_in_flow_hours=InvestParameters( @@ -93,7 +93,7 @@ def test_system(): ) # Create effects and objective - cost_effect = Effect(label='costs', unit='€', description='Total costs', is_standard=True, is_objective=True) + cost_effect = Effect('costs', unit='€', description='Total costs', is_standard=True, is_objective=True) # Add all elements to the flow system flow_system.add_elements(electricity_bus, generator, demand_sink, storage, cost_effect) @@ -134,11 +134,14 @@ def flow_system_complex_scenarios() -> fx.FlowSystem: fx.Bus('Strom'), fx.Bus('Fernwärme'), fx.Bus('Gas'), - fx.Sink('Wärmelast', inputs=[fx.Flow('Q_th_Last', 'Fernwärme', size=1, fixed_relative_profile=thermal_load)]), + fx.Sink( + 'Wärmelast', inputs=[fx.Flow('Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_load)] + ), fx.Source( - 'Gastarif', outputs=[fx.Flow('Q_Gas', 'Gas', size=1000, effects_per_flow_hour={'costs': 0.04, 'CO2': 0.3})] + 'Gastarif', + outputs=[fx.Flow('Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': 0.04, 'CO2': 0.3})], ), - fx.Sink('Einspeisung', inputs=[fx.Flow('P_el', 'Strom', effects_per_flow_hour=-1 * electrical_load)]), + fx.Sink('Einspeisung', inputs=[fx.Flow('Strom', flow_id='P_el', effects_per_flow_hour=-1 * electrical_load)]), ) boiler = fx.linear_converters.Boiler( @@ -146,8 +149,8 @@ def flow_system_complex_scenarios() -> fx.FlowSystem: thermal_efficiency=0.5, status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 0, 'CO2': 1000}), thermal_flow=fx.Flow( - 'Q_th', - bus='Fernwärme', + 'Fernwärme', + flow_id='Q_th', load_factor_max=1.0, load_factor_min=0.1, relative_minimum=5 / 
50, @@ -170,7 +173,7 @@ def flow_system_complex_scenarios() -> fx.FlowSystem: ), flow_hours_max=1e6, ), - fuel_flow=fx.Flow('Q_fu', bus='Gas', size=200, relative_minimum=0, relative_maximum=1), + fuel_flow=fx.Flow('Gas', flow_id='Q_fu', size=200, relative_minimum=0, relative_maximum=1), ) invest_speicher = fx.InvestParameters( @@ -189,8 +192,8 @@ def flow_system_complex_scenarios() -> fx.FlowSystem: ) speicher = fx.Storage( 'Speicher', - charging=fx.Flow('Q_th_load', bus='Fernwärme', size=1e4), - discharging=fx.Flow('Q_th_unload', bus='Fernwärme', size=1e4), + charging=fx.Flow('Fernwärme', flow_id='Q_th_load', size=1e4), + discharging=fx.Flow('Fernwärme', flow_id='Q_th_unload', size=1e4), capacity_in_flow_hours=invest_speicher, initial_charge_state=0, maximal_final_charge_state=10, @@ -215,10 +218,10 @@ def flow_system_piecewise_conversion_scenarios(flow_system_complex_scenarios) -> flow_system.add_elements( fx.LinearConverter( 'KWK', - inputs=[fx.Flow('Q_fu', bus='Gas', size=200)], + inputs=[fx.Flow('Gas', flow_id='Q_fu', size=200)], outputs=[ - fx.Flow('P_el', bus='Strom', size=60, relative_maximum=55, previous_flow_rate=10), - fx.Flow('Q_th', bus='Fernwärme', size=100), + fx.Flow('Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10), + fx.Flow('Fernwärme', flow_id='Q_th', size=100), ], piecewise_conversion=fx.PiecewiseConversion( { @@ -506,11 +509,11 @@ def test_size_equality_constraints(): bus = fx.Bus('grid') source = fx.Source( - label='solar', + 'solar', outputs=[ fx.Flow( - label='out', - bus='grid', + 'grid', + flow_id='out', size=fx.InvestParameters( minimum_size=10, maximum_size=100, @@ -545,11 +548,11 @@ def test_flow_rate_equality_constraints(): bus = fx.Bus('grid') source = fx.Source( - label='solar', + 'solar', outputs=[ fx.Flow( - label='out', - bus='grid', + 'grid', + flow_id='out', size=fx.InvestParameters( minimum_size=10, maximum_size=100, @@ -584,11 +587,11 @@ def test_selective_scenario_independence(): bus = 
fx.Bus('grid') source = fx.Source( - label='solar', + 'solar', outputs=[ fx.Flow( - label='out', - bus='grid', + 'grid', + flow_id='out', size=fx.InvestParameters( minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100} ), @@ -596,8 +599,8 @@ def test_selective_scenario_independence(): ], ) sink = fx.Sink( - label='demand', - inputs=[fx.Flow(label='in', bus='grid', size=50)], + 'demand', + inputs=[fx.Flow('grid', flow_id='in', size=50)], ) fs.add_elements(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True)) @@ -643,11 +646,11 @@ def test_scenario_parameters_io_persistence(): bus = fx.Bus('grid') source = fx.Source( - label='solar', + 'solar', outputs=[ fx.Flow( - label='out', - bus='grid', + 'grid', + flow_id='out', size=fx.InvestParameters( minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100} ), @@ -683,11 +686,11 @@ def test_scenario_parameters_io_with_calculation(tmp_path): bus = fx.Bus('grid') source = fx.Source( - label='solar', + 'solar', outputs=[ fx.Flow( - label='out', - bus='grid', + 'grid', + flow_id='out', size=fx.InvestParameters( minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100} ), @@ -695,8 +698,8 @@ def test_scenario_parameters_io_with_calculation(tmp_path): ], ) sink = fx.Sink( - label='demand', - inputs=[fx.Flow(label='in', bus='grid', size=50)], + 'demand', + inputs=[fx.Flow('grid', flow_id='in', size=50)], ) fs.add_elements(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True)) @@ -741,11 +744,11 @@ def test_weights_io_persistence(): bus = fx.Bus('grid') source = fx.Source( - label='solar', + 'solar', outputs=[ fx.Flow( - label='out', - bus='grid', + 'grid', + flow_id='out', size=fx.InvestParameters( minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100} ), @@ -782,11 +785,11 @@ def test_weights_selection(): bus = fx.Bus('grid') source = fx.Source( - label='solar', + 'solar', outputs=[ fx.Flow( - 
label='out', - bus='grid', + 'grid', + flow_id='out', size=10, ) ],