diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9efaf699a..170200ab0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1090,7 +1090,7 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp
- **Penalty as first-class Effect**: Users can now add Penalty contributions anywhere effects are used:
```python
- fx.Flow('Q', 'Bus', effects_per_flow_hour={'Penalty': 2.5})
+ fx.Flow(bus='Bus', flow_id='Q', effects_per_flow_hour={'Penalty': 2.5})
fx.InvestParameters(..., effects_of_investment={'Penalty': 100})
```
- **User-definable Penalty**: Optionally define custom Penalty with constraints (auto-created if not defined):
diff --git a/benchmarks/benchmark_model_build.py b/benchmarks/benchmark_model_build.py
index 21695e80c..a3baffd47 100644
--- a/benchmarks/benchmark_model_build.py
+++ b/benchmarks/benchmark_model_build.py
@@ -246,7 +246,7 @@ def create_large_system(
fs.add_elements(
fx.Source(
'GasGrid',
- outputs=[fx.Flow('Gas', bus='Gas', size=5000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2})],
+ outputs=[fx.Flow(bus='Gas', size=5000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2})],
)
)
@@ -255,19 +255,25 @@ def create_large_system(
fx.Source(
'ElecBuy',
outputs=[
- fx.Flow('El', bus='Electricity', size=2000, effects_per_flow_hour={'costs': elec_price, 'CO2': 0.4})
+ fx.Flow(
+ bus='Electricity', flow_id='El', size=2000, effects_per_flow_hour={'costs': elec_price, 'CO2': 0.4}
+ )
],
),
fx.Sink(
'ElecSell',
- inputs=[fx.Flow('El', bus='Electricity', size=1000, effects_per_flow_hour={'costs': -elec_price * 0.8})],
+ inputs=[
+ fx.Flow(bus='Electricity', flow_id='El', size=1000, effects_per_flow_hour={'costs': -elec_price * 0.8})
+ ],
),
)
# Demands
fs.add_elements(
- fx.Sink('HeatDemand', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_profile)]),
- fx.Sink('ElecDemand', inputs=[fx.Flow('El', bus='Electricity', size=1, fixed_relative_profile=elec_profile)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_profile)]),
+ fx.Sink(
+ 'ElecDemand', inputs=[fx.Flow(bus='Electricity', flow_id='El', size=1, fixed_relative_profile=elec_profile)]
+ ),
)
# Converters (CHPs and Boilers)
@@ -294,10 +300,10 @@ def create_large_system(
fs.add_elements(
fx.LinearConverter(
f'CHP_{i}',
- inputs=[fx.Flow('Gas', bus='Gas', size=300)],
+ inputs=[fx.Flow(bus='Gas', size=300)],
outputs=[
- fx.Flow('El', bus='Electricity', size=100),
- fx.Flow('Heat', bus='Heat', size=size_param, status_parameters=status_param),
+ fx.Flow(bus='Electricity', flow_id='El', size=100),
+ fx.Flow(bus='Heat', size=size_param, status_parameters=status_param),
],
piecewise_conversion=fx.PiecewiseConversion(
{
@@ -314,9 +320,9 @@ def create_large_system(
f'CHP_{i}',
thermal_efficiency=0.50,
electrical_efficiency=0.35,
- thermal_flow=fx.Flow('Heat', bus='Heat', size=size_param, status_parameters=status_param),
- electrical_flow=fx.Flow('El', bus='Electricity', size=100),
- fuel_flow=fx.Flow('Gas', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Heat', size=size_param, status_parameters=status_param),
+ electrical_flow=fx.Flow(bus='Electricity', flow_id='El', size=100),
+ fuel_flow=fx.Flow(bus='Gas'),
)
)
else:
@@ -326,13 +332,12 @@ def create_large_system(
f'Boiler_{i}',
thermal_efficiency=0.90,
thermal_flow=fx.Flow(
- 'Heat',
bus='Heat',
size=size_param,
relative_minimum=0.2,
status_parameters=status_param,
),
- fuel_flow=fx.Flow('Gas', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas'),
)
)
@@ -356,8 +361,8 @@ def create_large_system(
eta_charge=0.95,
eta_discharge=0.95,
relative_loss_per_hour=0.001,
- charging=fx.Flow('Charge', bus='Heat', size=100),
- discharging=fx.Flow('Discharge', bus='Heat', size=100),
+ charging=fx.Flow(bus='Heat', size=100),
+ discharging=fx.Flow(bus='Heat', size=100),
)
)
diff --git a/docs/home/quick-start.md b/docs/home/quick-start.md
index 7bbc88172..27cf5a63e 100644
--- a/docs/home/quick-start.md
+++ b/docs/home/quick-start.md
@@ -54,7 +54,6 @@ solar_profile = np.array([0, 0, 0, 0, 0, 0, 0.2, 0.5, 0.8, 1.0,
solar = fx.Source(
'solar',
outputs=[fx.Flow(
- 'power',
bus='electricity',
size=100, # 100 kW capacity
relative_maximum=solar_profile
@@ -67,8 +66,7 @@ demand_profile = np.array([30, 25, 20, 20, 25, 35, 50, 70, 80, 75,
60, 50, 40, 35])
demand = fx.Sink('demand', inputs=[
- fx.Flow('consumption',
- bus='electricity',
+ fx.Flow(bus='electricity',
size=1,
fixed_relative_profile=demand_profile)
])
@@ -76,8 +74,8 @@ demand = fx.Sink('demand', inputs=[
# Battery storage
battery = fx.Storage(
'battery',
- charging=fx.Flow('charge', bus='electricity', size=50),
- discharging=fx.Flow('discharge', bus='electricity', size=50),
+ charging=fx.Flow(bus='electricity', size=50),
+ discharging=fx.Flow(bus='electricity', size=50),
capacity_in_flow_hours=100, # 100 kWh capacity
initial_charge_state=50, # Start at 50%
eta_charge=0.95,
diff --git a/docs/notebooks/01-quickstart.ipynb b/docs/notebooks/01-quickstart.ipynb
index 47d83d664..52fc7cb17 100644
--- a/docs/notebooks/01-quickstart.ipynb
+++ b/docs/notebooks/01-quickstart.ipynb
@@ -127,19 +127,19 @@
" # === Gas Supply: Unlimited gas at 0.08 €/kWh ===\n",
" fx.Source(\n",
" 'GasGrid',\n",
- " outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.08)],\n",
+ " outputs=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour=0.08)],\n",
" ),\n",
" # === Boiler: Converts gas to heat at 90% efficiency ===\n",
" fx.linear_converters.Boiler(\n",
" 'Boiler',\n",
" thermal_efficiency=0.9,\n",
- " thermal_flow=fx.Flow('Heat', bus='Heat', size=100), # 100 kW capacity\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=100), # 100 kW capacity\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Workshop: Heat demand that must be met ===\n",
" fx.Sink(\n",
" 'Workshop',\n",
- " inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand.values)],\n",
+ " inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand.values)],\n",
" ),\n",
")"
]
diff --git a/docs/notebooks/02-heat-system.ipynb b/docs/notebooks/02-heat-system.ipynb
index f36a1b7a9..8db6308d6 100644
--- a/docs/notebooks/02-heat-system.ipynb
+++ b/docs/notebooks/02-heat-system.ipynb
@@ -148,14 +148,14 @@
" # === Gas Supply with time-varying price ===\n",
" fx.Source(\n",
" 'GasGrid',\n",
- " outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_price)],\n",
+ " outputs=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=gas_price)],\n",
" ),\n",
" # === Gas Boiler: 150 kW, 92% efficiency ===\n",
" fx.linear_converters.Boiler(\n",
" 'Boiler',\n",
" thermal_efficiency=0.92,\n",
- " thermal_flow=fx.Flow('Heat', bus='Heat', size=150),\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=150),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Thermal Storage: 500 kWh tank ===\n",
" fx.Storage(\n",
@@ -166,13 +166,13 @@
" eta_charge=0.98, # 98% charging efficiency\n",
" eta_discharge=0.98, # 98% discharging efficiency\n",
" relative_loss_per_hour=0.005, # 0.5% heat loss per hour\n",
- " charging=fx.Flow('Charge', bus='Heat', size=100), # Max 100 kW charging\n",
- " discharging=fx.Flow('Discharge', bus='Heat', size=100), # Max 100 kW discharging\n",
+ " charging=fx.Flow(bus='Heat', size=100), # Max 100 kW charging\n",
+ " discharging=fx.Flow(bus='Heat', size=100), # Max 100 kW discharging\n",
" ),\n",
" # === Office Heat Demand ===\n",
" fx.Sink(\n",
" 'Office',\n",
- " inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n",
+ " inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n",
" ),\n",
")"
]
diff --git a/docs/notebooks/03-investment-optimization.ipynb b/docs/notebooks/03-investment-optimization.ipynb
index 9cfa0afee..9805bc7c0 100644
--- a/docs/notebooks/03-investment-optimization.ipynb
+++ b/docs/notebooks/03-investment-optimization.ipynb
@@ -141,21 +141,20 @@
" # === Gas Supply ===\n",
" fx.Source(\n",
" 'GasGrid',\n",
- " outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=GAS_PRICE)],\n",
+ " outputs=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=GAS_PRICE)],\n",
" ),\n",
" # === Gas Boiler (existing, fixed size) ===\n",
" fx.linear_converters.Boiler(\n",
" 'GasBoiler',\n",
" thermal_efficiency=0.92,\n",
- " thermal_flow=fx.Flow('Heat', bus='Heat', size=200), # 200 kW existing\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=200), # 200 kW existing\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Solar Collectors (size to be optimized) ===\n",
" fx.Source(\n",
" 'SolarCollectors',\n",
" outputs=[\n",
" fx.Flow(\n",
- " 'Heat',\n",
" bus='Heat',\n",
" # Investment optimization: find optimal size between 0-500 kW\n",
" size=fx.InvestParameters(\n",
@@ -181,13 +180,13 @@
" eta_charge=0.95,\n",
" eta_discharge=0.95,\n",
" relative_loss_per_hour=0.01, # 1% loss per hour\n",
- " charging=fx.Flow('Charge', bus='Heat', size=200),\n",
- " discharging=fx.Flow('Discharge', bus='Heat', size=200),\n",
+ " charging=fx.Flow(bus='Heat', size=200),\n",
+ " discharging=fx.Flow(bus='Heat', size=200),\n",
" ),\n",
" # === Pool Heat Demand ===\n",
" fx.Sink(\n",
" 'Pool',\n",
- " inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=pool_demand)],\n",
+ " inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=pool_demand)],\n",
" ),\n",
")"
]
diff --git a/docs/notebooks/04-operational-constraints.ipynb b/docs/notebooks/04-operational-constraints.ipynb
index 401f99393..626ebec5a 100644
--- a/docs/notebooks/04-operational-constraints.ipynb
+++ b/docs/notebooks/04-operational-constraints.ipynb
@@ -126,7 +126,7 @@
" # === Gas Supply ===\n",
" fx.Source(\n",
" 'GasGrid',\n",
- " outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)],\n",
+ " outputs=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour=0.06)],\n",
" ),\n",
" # === Main Industrial Boiler (with operational constraints) ===\n",
" fx.linear_converters.Boiler(\n",
@@ -144,20 +144,20 @@
" size=500,\n",
" relative_minimum=0.3, # Minimum load: 30% = 150 kW\n",
" ),\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas', size=600), # Size required for status_parameters\n",
+ " fuel_flow=fx.Flow(bus='Gas', size=600), # Size required for status_parameters\n",
" ),\n",
" # === Backup Boiler (flexible, but less efficient) ===\n",
" fx.linear_converters.Boiler(\n",
" 'BackupBoiler',\n",
" thermal_efficiency=0.85, # Lower efficiency\n",
" # No status parameters = can turn on/off freely\n",
- " thermal_flow=fx.Flow('Steam', bus='Steam', size=150),\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Steam', size=150),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Factory Steam Demand ===\n",
" fx.Sink(\n",
" 'Factory',\n",
- " inputs=[fx.Flow('Steam', bus='Steam', size=1, fixed_relative_profile=steam_demand)],\n",
+ " inputs=[fx.Flow(bus='Steam', size=1, fixed_relative_profile=steam_demand)],\n",
" ),\n",
")"
]
@@ -340,21 +340,21 @@
" fx.Bus('Gas', carrier='gas'),\n",
" fx.Bus('Steam', carrier='steam'),\n",
" fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
- " fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
+ " fx.Source('GasGrid', outputs=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
" # Main boiler WITHOUT status parameters\n",
" fx.linear_converters.Boiler(\n",
" 'MainBoiler',\n",
" thermal_efficiency=0.94,\n",
- " thermal_flow=fx.Flow('Steam', bus='Steam', size=500),\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Steam', size=500),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" fx.linear_converters.Boiler(\n",
" 'BackupBoiler',\n",
" thermal_efficiency=0.85,\n",
- " thermal_flow=fx.Flow('Steam', bus='Steam', size=150),\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Steam', size=150),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
- " fx.Sink('Factory', inputs=[fx.Flow('Steam', bus='Steam', size=1, fixed_relative_profile=steam_demand)]),\n",
+ " fx.Sink('Factory', inputs=[fx.Flow(bus='Steam', size=1, fixed_relative_profile=steam_demand)]),\n",
")\n",
"\n",
"fs_unconstrained.optimize(fx.solvers.HighsSolver())\n",
@@ -559,7 +559,7 @@
"\n",
"Set via `Flow.relative_minimum`:\n",
"```python\n",
- "fx.Flow('Steam', bus='Steam', size=500, relative_minimum=0.3) # Min 30% load\n",
+ "fx.Flow(bus='Steam', size=500, relative_minimum=0.3) # Min 30% load\n",
"```\n",
"\n",
"### When Status is Active\n",
diff --git a/docs/notebooks/05-multi-carrier-system.ipynb b/docs/notebooks/05-multi-carrier-system.ipynb
index 3727227f4..a0c00a054 100644
--- a/docs/notebooks/05-multi-carrier-system.ipynb
+++ b/docs/notebooks/05-multi-carrier-system.ipynb
@@ -146,7 +146,6 @@
" 'GasGrid',\n",
" outputs=[\n",
" fx.Flow(\n",
- " 'Gas',\n",
" bus='Gas',\n",
" size=1000,\n",
" effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2}, # Gas: 0.2 kg CO2/kWh\n",
@@ -158,7 +157,6 @@
" 'GridBuy',\n",
" outputs=[\n",
" fx.Flow(\n",
- " 'Electricity',\n",
" bus='Electricity',\n",
" size=500,\n",
" effects_per_flow_hour={'costs': elec_buy_price, 'CO2': 0.4}, # Grid: 0.4 kg CO2/kWh\n",
@@ -170,7 +168,6 @@
" 'GridSell',\n",
" inputs=[\n",
" fx.Flow(\n",
- " 'Electricity',\n",
" bus='Electricity',\n",
" size=200,\n",
" effects_per_flow_hour={'costs': -elec_sell_price}, # Negative = income\n",
@@ -186,10 +183,9 @@
" effects_per_startup={'costs': 30},\n",
" min_uptime=3,\n",
" ),\n",
- " electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),\n",
- " thermal_flow=fx.Flow('Q_th', bus='Heat', size=250),\n",
+ " electrical_flow=fx.Flow(bus='Electricity', size=200),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=250),\n",
" fuel_flow=fx.Flow(\n",
- " 'Q_fuel',\n",
" bus='Gas',\n",
" size=500,\n",
" relative_minimum=0.4, # Min 40% load\n",
@@ -199,17 +195,17 @@
" fx.linear_converters.Boiler(\n",
" 'Boiler',\n",
" thermal_efficiency=0.92,\n",
- " thermal_flow=fx.Flow('Q_th', bus='Heat', size=400),\n",
- " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=400),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Hospital Loads ===\n",
" fx.Sink(\n",
" 'HospitalElec',\n",
- " inputs=[fx.Flow('Load', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],\n",
+ " inputs=[fx.Flow(bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],\n",
" ),\n",
" fx.Sink(\n",
" 'HospitalHeat',\n",
- " inputs=[fx.Flow('Load', bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n",
+ " inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n",
" ),\n",
")"
]
@@ -303,7 +299,7 @@
"metadata": {},
"outputs": [],
"source": [
- "flow_system.stats.plot.heatmap('CHP(P_el)')"
+ "flow_system.stats.plot.heatmap('CHP(Electricity)')"
]
},
{
@@ -325,9 +321,9 @@
"flow_rates = flow_system.stats.flow_rates\n",
"grid_buy = flow_rates['GridBuy(Electricity)'].sum().item()\n",
"grid_sell = flow_rates['GridSell(Electricity)'].sum().item()\n",
- "chp_elec = flow_rates['CHP(P_el)'].sum().item()\n",
- "chp_heat = flow_rates['CHP(Q_th)'].sum().item()\n",
- "boiler_heat = flow_rates['Boiler(Q_th)'].sum().item()\n",
+ "chp_elec = flow_rates['CHP(Electricity)'].sum().item()\n",
+ "chp_heat = flow_rates['CHP(Heat)'].sum().item()\n",
+ "boiler_heat = flow_rates['Boiler(Heat)'].sum().item()\n",
"\n",
"total_elec = electricity_demand.sum()\n",
"total_heat = heat_demand.sum()\n",
@@ -380,27 +376,24 @@
" fx.Effect('CO2', 'kg', 'CO2 Emissions'),\n",
" fx.Source(\n",
" 'GasGrid',\n",
- " outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2})],\n",
+ " outputs=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2})],\n",
" ),\n",
" fx.Source(\n",
" 'GridBuy',\n",
- " outputs=[\n",
- " fx.Flow(\n",
- " 'Electricity', bus='Electricity', size=500, effects_per_flow_hour={'costs': elec_buy_price, 'CO2': 0.4}\n",
- " )\n",
- " ],\n",
+ " outputs=[fx.Flow(bus='Electricity', size=500, effects_per_flow_hour={'costs': elec_buy_price, 'CO2': 0.4})],\n",
" ),\n",
" # Only boiler for heat\n",
" fx.linear_converters.Boiler(\n",
" 'Boiler',\n",
" thermal_efficiency=0.92,\n",
- " thermal_flow=fx.Flow('Q_th', bus='Heat', size=500),\n",
- " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=500),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" fx.Sink(\n",
- " 'HospitalElec', inputs=[fx.Flow('Load', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)]\n",
+ " 'HospitalElec',\n",
+ " inputs=[fx.Flow(bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],\n",
" ),\n",
- " fx.Sink('HospitalHeat', inputs=[fx.Flow('Load', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n",
+ " fx.Sink('HospitalHeat', inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n",
")\n",
"\n",
"fs_no_chp.optimize(fx.solvers.HighsSolver())\n",
@@ -498,9 +491,9 @@
" electrical_efficiency=0.40, # Fuel → Electricity\n",
" thermal_efficiency=0.50, # Fuel → Heat\n",
" # Total efficiency = 0.40 + 0.50 = 0.90 (90%)\n",
- " electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),\n",
- " thermal_flow=fx.Flow('Q_th', bus='Heat', size=250),\n",
- " fuel_flow=fx.Flow('Q_fuel', bus='Gas', size=500),\n",
+ " electrical_flow=fx.Flow(bus='Electricity', size=200),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=250),\n",
+ " fuel_flow=fx.Flow(bus='Gas', size=500),\n",
")\n",
"```\n",
"\n",
diff --git a/docs/notebooks/06a-time-varying-parameters.ipynb b/docs/notebooks/06a-time-varying-parameters.ipynb
index 5e1efa331..4a9eebf21 100644
--- a/docs/notebooks/06a-time-varying-parameters.ipynb
+++ b/docs/notebooks/06a-time-varying-parameters.ipynb
@@ -167,16 +167,16 @@
" # Effect for cost tracking\n",
" fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
" # Grid electricity source\n",
- " fx.Source('Grid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=0.30)]),\n",
+ " fx.Source('Grid', outputs=[fx.Flow(bus='Electricity', size=500, effects_per_flow_hour=0.30)]),\n",
" # Heat pump with TIME-VARYING COP\n",
" fx.LinearConverter(\n",
" 'HeatPump',\n",
- " inputs=[fx.Flow('Elec', bus='Electricity', size=150)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat', size=500)],\n",
- " conversion_factors=[{'Elec': cop, 'Heat': 1}], # <-- Array for time-varying COP\n",
+ " inputs=[fx.Flow(bus='Electricity', size=150)],\n",
+ " outputs=[fx.Flow(bus='Heat', size=500)],\n",
+ " conversion_factors=[{'Electricity': cop, 'Heat': 1}], # <-- Array for time-varying COP\n",
" ),\n",
" # Heat demand\n",
- " fx.Sink('Building', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n",
+ " fx.Sink('Building', inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n",
")\n",
"\n",
"flow_system.optimize(fx.solvers.HighsSolver());"
@@ -221,7 +221,7 @@
"# Create dataset with solution and input data - xarray auto-aligns by time coordinate\n",
"comparison = xr.Dataset(\n",
" {\n",
- " 'elec_consumption': flow_system.solution['HeatPump(Elec)|flow_rate'],\n",
+ " 'elec_consumption': flow_system.solution['HeatPump(Electricity)|flow_rate'],\n",
" 'heat_output': flow_system.solution['HeatPump(Heat)|flow_rate'],\n",
" 'outdoor_temp': xr.DataArray(outdoor_temp, dims=['time'], coords={'time': timesteps}),\n",
" }\n",
@@ -251,15 +251,15 @@
"\n",
"The `conversion_factors` parameter accepts a list of dictionaries where values can be:\n",
"- **Scalars**: Constant efficiency (e.g., `{'Fuel': 1, 'Heat': 0.9}`)\n",
- "- **Arrays**: Time-varying efficiency (e.g., `{'Elec': cop_array, 'Heat': 1}`)\n",
+ "- **Arrays**: Time-varying efficiency (e.g., `{'Electricity': cop_array, 'Heat': 1}`)\n",
"- **TimeSeriesData**: For more complex data with metadata\n",
"\n",
"```python\n",
"fx.LinearConverter(\n",
" 'HeatPump',\n",
- " inputs=[fx.Flow('Elec', bus='Electricity', size=150)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat', size=500)],\n",
- " conversion_factors=[{'Elec': cop_array, 'Heat': 1}], # Time-varying\n",
+ " inputs=[fx.Flow(bus='Electricity', size=150)],\n",
+ " outputs=[fx.Flow(bus='Heat', size=500)],\n",
+ " conversion_factors=[{'Electricity': cop_array, 'Heat': 1}], # Time-varying\n",
")\n",
"```\n",
"\n",
diff --git a/docs/notebooks/06b-piecewise-conversion.ipynb b/docs/notebooks/06b-piecewise-conversion.ipynb
index 957e3ac34..d49a47d31 100644
--- a/docs/notebooks/06b-piecewise-conversion.ipynb
+++ b/docs/notebooks/06b-piecewise-conversion.ipynb
@@ -64,14 +64,14 @@
"source": [
"piecewise_efficiency = fx.PiecewiseConversion(\n",
" {\n",
- " 'Fuel': fx.Piecewise(\n",
+ " 'Gas': fx.Piecewise(\n",
" [\n",
" fx.Piece(start=78, end=132), # Part load\n",
" fx.Piece(start=132, end=179), # Mid load\n",
" fx.Piece(start=179, end=250), # Full load\n",
" ]\n",
" ),\n",
- " 'Elec': fx.Piecewise(\n",
+ " 'Electricity': fx.Piecewise(\n",
" [\n",
" fx.Piece(start=25, end=50), # 32% -> 38% efficiency\n",
" fx.Piece(start=50, end=75), # 38% -> 42% efficiency\n",
@@ -107,14 +107,14 @@
" fx.Bus('Gas'),\n",
" fx.Bus('Electricity'),\n",
" fx.Effect('costs', '€', is_standard=True, is_objective=True),\n",
- " fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=300, effects_per_flow_hour=0.05)]),\n",
+ " fx.Source('GasGrid', outputs=[fx.Flow(bus='Gas', size=300, effects_per_flow_hour=0.05)]),\n",
" fx.LinearConverter(\n",
" 'GasEngine',\n",
- " inputs=[fx.Flow('Fuel', bus='Gas')],\n",
- " outputs=[fx.Flow('Elec', bus='Electricity')],\n",
+ " inputs=[fx.Flow(bus='Gas')],\n",
+ " outputs=[fx.Flow(bus='Electricity')],\n",
" piecewise_conversion=piecewise_efficiency,\n",
" ),\n",
- " fx.Sink('Load', inputs=[fx.Flow('Elec', bus='Electricity', size=1, fixed_relative_profile=elec_demand)]),\n",
+ " fx.Sink('Load', inputs=[fx.Flow(bus='Electricity', size=1, fixed_relative_profile=elec_demand)]),\n",
")\n",
"\n",
"fs.optimize(fx.solvers.HighsSolver());"
@@ -135,7 +135,7 @@
"metadata": {},
"outputs": [],
"source": [
- "fs.components['GasEngine'].piecewise_conversion.plot(x_flow='Fuel')"
+ "fs.components['GasEngine'].piecewise_conversion.plot(x_flow='Gas')"
]
},
{
@@ -164,8 +164,8 @@
"outputs": [],
"source": [
"# Verify efficiency varies with load\n",
- "fuel = fs.solution['GasEngine(Fuel)|flow_rate']\n",
- "elec = fs.solution['GasEngine(Elec)|flow_rate']\n",
+ "fuel = fs.solution['GasEngine(Gas)|flow_rate']\n",
+ "elec = fs.solution['GasEngine(Electricity)|flow_rate']\n",
"efficiency = elec / fuel\n",
"\n",
"print(f'Efficiency range: {float(efficiency.min()):.1%} - {float(efficiency.max()):.1%}')\n",
diff --git a/docs/notebooks/06c-piecewise-effects.ipynb b/docs/notebooks/06c-piecewise-effects.ipynb
index dd373ab46..a72415197 100644
--- a/docs/notebooks/06c-piecewise-effects.ipynb
+++ b/docs/notebooks/06c-piecewise-effects.ipynb
@@ -167,12 +167,12 @@
" fx.Bus('Elec'),\n",
" fx.Effect('costs', '€', is_standard=True, is_objective=True),\n",
" # Grid with time-varying price\n",
- " fx.Source('Grid', outputs=[fx.Flow('Elec', bus='Elec', size=500, effects_per_flow_hour=elec_price)]),\n",
+ " fx.Source('Grid', outputs=[fx.Flow(bus='Elec', size=500, effects_per_flow_hour=elec_price)]),\n",
" # Battery with PIECEWISE investment cost (discrete tiers)\n",
" fx.Storage(\n",
" 'Battery',\n",
- " charging=fx.Flow('charge', bus='Elec', size=fx.InvestParameters(maximum_size=400)),\n",
- " discharging=fx.Flow('discharge', bus='Elec', size=fx.InvestParameters(maximum_size=400)),\n",
+ " charging=fx.Flow(bus='Elec', size=fx.InvestParameters(maximum_size=400)),\n",
+ " discharging=fx.Flow(bus='Elec', size=fx.InvestParameters(maximum_size=400)),\n",
" capacity_in_flow_hours=fx.InvestParameters(\n",
" piecewise_effects_of_investment=piecewise_costs,\n",
" minimum_size=0,\n",
@@ -182,7 +182,7 @@
" eta_discharge=0.95,\n",
" initial_charge_state=0,\n",
" ),\n",
- " fx.Sink('Demand', inputs=[fx.Flow('Elec', bus='Elec', size=1, fixed_relative_profile=demand)]),\n",
+ " fx.Sink('Demand', inputs=[fx.Flow(bus='Elec', size=1, fixed_relative_profile=demand)]),\n",
")\n",
"\n",
"fs.optimize(fx.solvers.HighsSolver());"
diff --git a/docs/notebooks/07-scenarios-and-periods.ipynb b/docs/notebooks/07-scenarios-and-periods.ipynb
index 1aae7660b..35be16e6c 100644
--- a/docs/notebooks/07-scenarios-and-periods.ipynb
+++ b/docs/notebooks/07-scenarios-and-periods.ipynb
@@ -174,7 +174,6 @@
" 'GasGrid',\n",
" outputs=[\n",
" fx.Flow(\n",
- " 'Gas',\n",
" bus='Gas',\n",
" size=1000,\n",
" effects_per_flow_hour=gas_prices, # Array = varies by period\n",
@@ -187,7 +186,6 @@
" electrical_efficiency=0.35,\n",
" thermal_efficiency=0.50,\n",
" electrical_flow=fx.Flow(\n",
- " 'P_el',\n",
" bus='Electricity',\n",
" # Investment optimization: find optimal CHP size\n",
" size=fx.InvestParameters(\n",
@@ -196,22 +194,21 @@
" effects_of_investment_per_size={'costs': 15}, # 15 €/kW/week annualized\n",
" ),\n",
" ),\n",
- " thermal_flow=fx.Flow('Q_th', bus='Heat'),\n",
- " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat'),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Gas Boiler (existing backup) ===\n",
" fx.linear_converters.Boiler(\n",
" 'Boiler',\n",
" thermal_efficiency=0.90,\n",
- " thermal_flow=fx.Flow('Q_th', bus='Heat', size=500),\n",
- " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=500),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Electricity Sales (revenue varies by period) ===\n",
" fx.Sink(\n",
" 'ElecSales',\n",
" inputs=[\n",
" fx.Flow(\n",
- " 'P_el',\n",
" bus='Electricity',\n",
" size=100,\n",
" effects_per_flow_hour=-elec_prices, # Negative = revenue\n",
@@ -223,7 +220,6 @@
" 'HeatDemand',\n",
" inputs=[\n",
" fx.Flow(\n",
- " 'Q_th',\n",
" bus='Heat',\n",
" size=1,\n",
" fixed_relative_profile=heat_demand, # DataFrame with scenario columns\n",
@@ -268,7 +264,7 @@
"metadata": {},
"outputs": [],
"source": [
- "chp_size = flow_system.stats.sizes['CHP(P_el)']\n",
+ "chp_size = flow_system.stats.sizes['CHP(Electricity)']\n",
"\n",
"pd.DataFrame(\n",
" {\n",
@@ -315,7 +311,7 @@
"metadata": {},
"outputs": [],
"source": [
- "flow_system.stats.plot.heatmap('CHP(Q_th)')"
+ "flow_system.stats.plot.heatmap('CHP(Heat)')"
]
},
{
@@ -349,7 +345,7 @@
"outputs": [],
"source": [
"# CHP operation summary by scenario\n",
- "chp_heat = flow_rates['CHP(Q_th)']\n",
+ "chp_heat = flow_rates['CHP(Heat)']\n",
"\n",
"pd.DataFrame(\n",
" {\n",
@@ -383,7 +379,7 @@
"fs_mild = flow_system.transform.sel(scenario='Mild Winter')\n",
"fs_mild.optimize(fx.solvers.HighsSolver(mip_gap=0.01))\n",
"\n",
- "chp_size_mild = float(fs_mild.stats.sizes['CHP(P_el)'].max())\n",
+ "chp_size_mild = float(fs_mild.stats.sizes['CHP(Electricity)'].max())\n",
"chp_size_both = float(chp_size.max())\n",
"\n",
"pd.DataFrame(\n",
diff --git a/docs/notebooks/09-plotting-and-data-access.ipynb b/docs/notebooks/09-plotting-and-data-access.ipynb
index a375fd641..bae32cf22 100644
--- a/docs/notebooks/09-plotting-and-data-access.ipynb
+++ b/docs/notebooks/09-plotting-and-data-access.ipynb
@@ -727,8 +727,8 @@
" 'Heat',\n",
" colors={\n",
" 'Boiler(Heat)': 'orangered',\n",
- " 'ThermalStorage(Charge)': 'steelblue',\n",
- " 'ThermalStorage(Discharge)': 'lightblue',\n",
+ " 'ThermalStorage(charging)': 'steelblue',\n",
+ " 'ThermalStorage(discharging)': 'lightblue',\n",
" 'Office(Heat)': 'forestgreen',\n",
" },\n",
")"
diff --git a/docs/notebooks/10-transmission.ipynb b/docs/notebooks/10-transmission.ipynb
index 065e7d14e..b9688bed0 100644
--- a/docs/notebooks/10-transmission.ipynb
+++ b/docs/notebooks/10-transmission.ipynb
@@ -151,32 +151,32 @@
" # === Effect ===\n",
" fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
" # === External supplies ===\n",
- " fx.Source('GasSupply', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
- " fx.Source('ElecGrid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=0.25)]),\n",
+ " fx.Source('GasSupply', outputs=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
+ " fx.Source('ElecGrid', outputs=[fx.Flow(bus='Electricity', size=500, effects_per_flow_hour=0.25)]),\n",
" # === Site A: Large gas boiler (cheap) ===\n",
" fx.LinearConverter(\n",
" 'GasBoiler_A',\n",
- " inputs=[fx.Flow('Gas', bus='Gas', size=500)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_A', size=400)],\n",
- " conversion_factors=[{'Gas': 1, 'Heat': 0.92}], # 92% efficiency\n",
+ " inputs=[fx.Flow(bus='Gas', size=500)],\n",
+ " outputs=[fx.Flow(bus='Heat_A', size=400)],\n",
+ " conversion_factors=[{'Gas': 1, 'Heat_A': 0.92}], # 92% efficiency\n",
" ),\n",
" # === Site B: Small electric boiler (expensive but flexible) ===\n",
" fx.LinearConverter(\n",
" 'ElecBoiler_B',\n",
- " inputs=[fx.Flow('Elec', bus='Electricity', size=250)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_B', size=250)],\n",
- " conversion_factors=[{'Elec': 1, 'Heat': 0.99}], # 99% efficiency\n",
+ " inputs=[fx.Flow(bus='Electricity', size=250)],\n",
+ " outputs=[fx.Flow(bus='Heat_B', size=250)],\n",
+ " conversion_factors=[{'Electricity': 1, 'Heat_B': 0.99}], # 99% efficiency\n",
" ),\n",
" # === Transmission: A → B (unidirectional) ===\n",
" fx.Transmission(\n",
" 'Pipe_A_to_B',\n",
- " in1=fx.Flow('from_A', bus='Heat_A', size=200), # Input from Site A\n",
- " out1=fx.Flow('to_B', bus='Heat_B', size=200), # Output to Site B\n",
+ " in1=fx.Flow(bus='Heat_A', size=200), # Input from Site A\n",
+ " out1=fx.Flow(bus='Heat_B', size=200), # Output to Site B\n",
" relative_losses=0.05, # 5% heat loss in pipe\n",
" ),\n",
" # === Demands ===\n",
- " fx.Sink('Demand_A', inputs=[fx.Flow('Heat', bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n",
- " fx.Sink('Demand_B', inputs=[fx.Flow('Heat', bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n",
+ " fx.Sink('Demand_A', inputs=[fx.Flow(bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n",
+ " fx.Sink('Demand_B', inputs=[fx.Flow(bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n",
")\n",
"\n",
"fs_unidirectional.optimize(fx.solvers.HighsSolver());"
@@ -289,37 +289,37 @@
" # === Effect ===\n",
" fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
" # === External supplies ===\n",
- " fx.Source('GasSupply', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
- " fx.Source('ElecGrid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=elec_price)]),\n",
+ " fx.Source('GasSupply', outputs=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
+ " fx.Source('ElecGrid', outputs=[fx.Flow(bus='Electricity', size=500, effects_per_flow_hour=elec_price)]),\n",
" # === Site A: Gas boiler ===\n",
" fx.LinearConverter(\n",
" 'GasBoiler_A',\n",
- " inputs=[fx.Flow('Gas', bus='Gas', size=500)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_A', size=400)],\n",
- " conversion_factors=[{'Gas': 1, 'Heat': 0.92}],\n",
+ " inputs=[fx.Flow(bus='Gas', size=500)],\n",
+ " outputs=[fx.Flow(bus='Heat_A', size=400)],\n",
+ " conversion_factors=[{'Gas': 1, 'Heat_A': 0.92}],\n",
" ),\n",
" # === Site B: Heat pump (efficient with variable electricity price) ===\n",
" fx.LinearConverter(\n",
" 'HeatPump_B',\n",
- " inputs=[fx.Flow('Elec', bus='Electricity', size=100)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_B', size=350)],\n",
- " conversion_factors=[{'Elec': 1, 'Heat': 3.5}], # COP = 3.5\n",
+ " inputs=[fx.Flow(bus='Electricity', size=100)],\n",
+ " outputs=[fx.Flow(bus='Heat_B', size=350)],\n",
+ " conversion_factors=[{'Electricity': 1, 'Heat_B': 3.5}], # COP = 3.5\n",
" ),\n",
" # === BIDIRECTIONAL Transmission ===\n",
" fx.Transmission(\n",
" 'Pipe_AB',\n",
" # Direction 1: A → B\n",
- " in1=fx.Flow('from_A', bus='Heat_A', size=200),\n",
- " out1=fx.Flow('to_B', bus='Heat_B', size=200),\n",
+ " in1=fx.Flow(bus='Heat_A', flow_id='from_A', size=200),\n",
+ " out1=fx.Flow(bus='Heat_B', flow_id='to_B', size=200),\n",
" # Direction 2: B → A\n",
- " in2=fx.Flow('from_B', bus='Heat_B', size=200),\n",
- " out2=fx.Flow('to_A', bus='Heat_A', size=200),\n",
+ " in2=fx.Flow(bus='Heat_B', flow_id='from_B', size=200),\n",
+ " out2=fx.Flow(bus='Heat_A', flow_id='to_A', size=200),\n",
" relative_losses=0.05,\n",
" prevent_simultaneous_flows_in_both_directions=True, # Can't flow both ways at once\n",
" ),\n",
" # === Demands ===\n",
- " fx.Sink('Demand_A', inputs=[fx.Flow('Heat', bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n",
- " fx.Sink('Demand_B', inputs=[fx.Flow('Heat', bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n",
+ " fx.Sink('Demand_A', inputs=[fx.Flow(bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n",
+ " fx.Sink('Demand_B', inputs=[fx.Flow(bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n",
")\n",
"\n",
"fs_bidirectional.optimize(fx.solvers.HighsSolver());"
@@ -433,28 +433,28 @@
" # === Effect ===\n",
" fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
" # === External supplies ===\n",
- " fx.Source('GasSupply', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
- " fx.Source('ElecGrid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=elec_price)]),\n",
+ " fx.Source('GasSupply', outputs=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
+ " fx.Source('ElecGrid', outputs=[fx.Flow(bus='Electricity', size=500, effects_per_flow_hour=elec_price)]),\n",
" # === Site A: Gas boiler ===\n",
" fx.LinearConverter(\n",
" 'GasBoiler_A',\n",
- " inputs=[fx.Flow('Gas', bus='Gas', size=500)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_A', size=400)],\n",
- " conversion_factors=[{'Gas': 1, 'Heat': 0.92}],\n",
+ " inputs=[fx.Flow(bus='Gas', size=500)],\n",
+ " outputs=[fx.Flow(bus='Heat_A', size=400)],\n",
+ " conversion_factors=[{'Gas': 1, 'Heat_A': 0.92}],\n",
" ),\n",
" # === Site B: Heat pump ===\n",
" fx.LinearConverter(\n",
" 'HeatPump_B',\n",
- " inputs=[fx.Flow('Elec', bus='Electricity', size=100)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_B', size=350)],\n",
- " conversion_factors=[{'Elec': 1, 'Heat': 3.5}],\n",
+ " inputs=[fx.Flow(bus='Electricity', size=100)],\n",
+ " outputs=[fx.Flow(bus='Heat_B', size=350)],\n",
+ " conversion_factors=[{'Electricity': 1, 'Heat_B': 3.5}],\n",
" ),\n",
" # === Site B: Backup electric boiler ===\n",
" fx.LinearConverter(\n",
" 'ElecBoiler_B',\n",
- " inputs=[fx.Flow('Elec', bus='Electricity', size=200)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_B', size=200)],\n",
- " conversion_factors=[{'Elec': 1, 'Heat': 0.99}],\n",
+ " inputs=[fx.Flow(bus='Electricity', size=200)],\n",
+ " outputs=[fx.Flow(bus='Heat_B', size=200)],\n",
+ " conversion_factors=[{'Electricity': 1, 'Heat_B': 0.99}],\n",
" ),\n",
" # === Transmission with INVESTMENT OPTIMIZATION ===\n",
" # Investment parameters are passed via 'size' parameter\n",
@@ -469,7 +469,7 @@
" maximum_size=300,\n",
" ),\n",
" ),\n",
- " out1=fx.Flow('to_B', bus='Heat_B'),\n",
+ " out1=fx.Flow(bus='Heat_B', flow_id='to_B'),\n",
" in2=fx.Flow(\n",
" 'from_B',\n",
" bus='Heat_B',\n",
@@ -479,14 +479,14 @@
" maximum_size=300,\n",
" ),\n",
" ),\n",
- " out2=fx.Flow('to_A', bus='Heat_A'),\n",
+ " out2=fx.Flow(bus='Heat_A', flow_id='to_A'),\n",
" relative_losses=0.05,\n",
" balanced=True, # Same capacity in both directions\n",
" prevent_simultaneous_flows_in_both_directions=True,\n",
" ),\n",
" # === Demands ===\n",
- " fx.Sink('Demand_A', inputs=[fx.Flow('Heat', bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n",
- " fx.Sink('Demand_B', inputs=[fx.Flow('Heat', bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n",
+ " fx.Sink('Demand_A', inputs=[fx.Flow(bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n",
+ " fx.Sink('Demand_B', inputs=[fx.Flow(bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n",
")\n",
"\n",
"fs_invest.optimize(fx.solvers.HighsSolver());"
@@ -542,11 +542,11 @@
"fx.Transmission(\n",
" label='pipe_name',\n",
" # Direction 1: A → B\n",
- " in1=fx.Flow('from_A', bus='Bus_A', size=100),\n",
- " out1=fx.Flow('to_B', bus='Bus_B', size=100),\n",
+ " in1=fx.Flow(bus='Bus_A', flow_id='from_A', size=100),\n",
+ " out1=fx.Flow(bus='Bus_B', flow_id='to_B', size=100),\n",
" # Direction 2: B → A (optional - omit for unidirectional)\n",
- " in2=fx.Flow('from_B', bus='Bus_B', size=100),\n",
- " out2=fx.Flow('to_A', bus='Bus_A', size=100),\n",
+ " in2=fx.Flow(bus='Bus_B', flow_id='from_B', size=100),\n",
+ " out2=fx.Flow(bus='Bus_A', flow_id='to_A', size=100),\n",
" # Loss parameters\n",
" relative_losses=0.05, # 5% proportional loss\n",
" absolute_losses=10, # 10 kW fixed loss when active (optional)\n",
diff --git a/docs/notebooks/data/generate_example_systems.py b/docs/notebooks/data/generate_example_systems.py
index 985628e1f..9a8fa66af 100644
--- a/docs/notebooks/data/generate_example_systems.py
+++ b/docs/notebooks/data/generate_example_systems.py
@@ -119,12 +119,12 @@ def create_simple_system() -> fx.FlowSystem:
fx.Bus('Gas', carrier='gas'),
fx.Bus('Heat', carrier='heat'),
fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),
- fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_price)]),
+ fx.Source('GasGrid', outputs=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=gas_price)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.92,
- thermal_flow=fx.Flow('Heat', bus='Heat', size=150),
- fuel_flow=fx.Flow('Gas', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Heat', size=150),
+ fuel_flow=fx.Flow(bus='Gas'),
),
fx.Storage(
'ThermalStorage',
@@ -134,10 +134,10 @@ def create_simple_system() -> fx.FlowSystem:
eta_charge=0.98,
eta_discharge=0.98,
relative_loss_per_hour=0.005,
- charging=fx.Flow('Charge', bus='Heat', size=100),
- discharging=fx.Flow('Discharge', bus='Heat', size=100),
+ charging=fx.Flow(bus='Heat', size=100),
+ discharging=fx.Flow(bus='Heat', size=100),
),
- fx.Sink('Office', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ fx.Sink('Office', inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
)
return fs
@@ -197,14 +197,13 @@ def create_complex_system() -> fx.FlowSystem:
# Gas supply
fx.Source(
'GasGrid',
- outputs=[fx.Flow('Gas', bus='Gas', size=300, effects_per_flow_hour={'costs': gas_price, 'CO2': gas_co2})],
+ outputs=[fx.Flow(bus='Gas', size=300, effects_per_flow_hour={'costs': gas_price, 'CO2': gas_co2})],
),
# Electricity grid (import and export)
fx.Source(
'ElectricityImport',
outputs=[
fx.Flow(
- 'El',
bus='Electricity',
size=100,
effects_per_flow_hour={'costs': electricity_price, 'CO2': electricity_co2},
@@ -213,15 +212,13 @@ def create_complex_system() -> fx.FlowSystem:
),
fx.Sink(
'ElectricityExport',
- inputs=[
- fx.Flow('El', bus='Electricity', size=50, effects_per_flow_hour={'costs': -electricity_price * 0.8})
- ],
+ inputs=[fx.Flow(bus='Electricity', size=50, effects_per_flow_hour={'costs': -electricity_price * 0.8})],
),
# CHP with piecewise efficiency (efficiency varies with load)
fx.LinearConverter(
'CHP',
- inputs=[fx.Flow('Gas', bus='Gas', size=200)],
- outputs=[fx.Flow('El', bus='Electricity', size=80), fx.Flow('Heat', bus='Heat', size=85)],
+ inputs=[fx.Flow(bus='Gas', size=200)],
+ outputs=[fx.Flow(bus='Electricity', size=80), fx.Flow(bus='Heat', size=85)],
piecewise_conversion=fx.PiecewiseConversion(
{
'Gas': fx.Piecewise(
@@ -230,7 +227,7 @@ def create_complex_system() -> fx.FlowSystem:
fx.Piece(start=160, end=200), # Full load
]
),
- 'El': fx.Piecewise(
+ 'Electricity': fx.Piecewise(
[
fx.Piece(start=25, end=60), # ~31-38% electrical efficiency
fx.Piece(start=60, end=80), # ~38-40% electrical efficiency
@@ -250,7 +247,6 @@ def create_complex_system() -> fx.FlowSystem:
fx.linear_converters.HeatPump(
'HeatPump',
thermal_flow=fx.Flow(
- 'Heat',
bus='Heat',
size=fx.InvestParameters(
effects_of_investment={'costs': 500},
@@ -258,14 +254,14 @@ def create_complex_system() -> fx.FlowSystem:
maximum_size=60,
),
),
- electrical_flow=fx.Flow('El', bus='Electricity'),
+ electrical_flow=fx.Flow(bus='Electricity'),
cop=3.5,
),
# Backup boiler
fx.linear_converters.Boiler(
'BackupBoiler',
- thermal_flow=fx.Flow('Heat', bus='Heat', size=80),
- fuel_flow=fx.Flow('Gas', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Heat', size=80),
+ fuel_flow=fx.Flow(bus='Gas'),
thermal_efficiency=0.90,
),
# Thermal storage (with investment)
@@ -278,13 +274,14 @@ def create_complex_system() -> fx.FlowSystem:
),
eta_charge=0.95,
eta_discharge=0.95,
- charging=fx.Flow('Charge', bus='Heat', size=50),
- discharging=fx.Flow('Discharge', bus='Heat', size=50),
+ charging=fx.Flow(bus='Heat', size=50),
+ discharging=fx.Flow(bus='Heat', size=50),
),
# Demands
- fx.Sink('HeatDemand', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
fx.Sink(
- 'ElDemand', inputs=[fx.Flow('El', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)]
+ 'ElDemand',
+ inputs=[fx.Flow(bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],
),
)
return fs
@@ -335,9 +332,8 @@ def create_district_heating_system() -> fx.FlowSystem:
'CHP',
thermal_efficiency=0.58,
electrical_efficiency=0.22,
- electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),
+ electrical_flow=fx.Flow(bus='Electricity', size=200),
thermal_flow=fx.Flow(
- 'Q_th',
bus='Heat',
size=fx.InvestParameters(
minimum_size=100,
@@ -347,14 +343,13 @@ def create_district_heating_system() -> fx.FlowSystem:
relative_minimum=0.3,
status_parameters=fx.StatusParameters(),
),
- fuel_flow=fx.Flow('Q_fu', bus='Coal'),
+ fuel_flow=fx.Flow(bus='Coal'),
),
# Gas Boiler with investment
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.85,
thermal_flow=fx.Flow(
- 'Q_th',
bus='Heat',
size=fx.InvestParameters(
minimum_size=0,
@@ -364,7 +359,7 @@ def create_district_heating_system() -> fx.FlowSystem:
relative_minimum=0.1,
status_parameters=fx.StatusParameters(),
),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas'),
),
# Thermal Storage with investment
fx.Storage(
@@ -378,24 +373,23 @@ def create_district_heating_system() -> fx.FlowSystem:
eta_charge=1,
eta_discharge=1,
relative_loss_per_hour=0.001,
- charging=fx.Flow('Charge', size=137, bus='Heat'),
- discharging=fx.Flow('Discharge', size=158, bus='Heat'),
+ charging=fx.Flow(bus='Heat', size=137),
+ discharging=fx.Flow(bus='Heat', size=158),
),
# Fuel sources
fx.Source(
'GasGrid',
- outputs=[fx.Flow('Q_Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})],
+ outputs=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})],
),
fx.Source(
'CoalSupply',
- outputs=[fx.Flow('Q_Coal', bus='Coal', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})],
+ outputs=[fx.Flow(bus='Coal', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})],
),
# Electricity grid
fx.Source(
'GridBuy',
outputs=[
fx.Flow(
- 'P_el',
bus='Electricity',
size=1000,
effects_per_flow_hour={'costs': electricity_price + 0.5, 'CO2': 0.3},
@@ -404,12 +398,13 @@ def create_district_heating_system() -> fx.FlowSystem:
),
fx.Sink(
'GridSell',
- inputs=[fx.Flow('P_el', bus='Electricity', size=1000, effects_per_flow_hour=-(electricity_price - 0.5))],
+ inputs=[fx.Flow(bus='Electricity', size=1000, effects_per_flow_hour=-(electricity_price - 0.5))],
),
# Demands
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q_th', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
fx.Sink(
- 'ElecDemand', inputs=[fx.Flow('P_el', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)]
+ 'ElecDemand',
+ inputs=[fx.Flow(bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],
),
)
return fs
@@ -460,17 +455,16 @@ def create_operational_system() -> fx.FlowSystem:
thermal_efficiency=0.58,
electrical_efficiency=0.22,
status_parameters=fx.StatusParameters(effects_per_startup=24000),
- electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),
- thermal_flow=fx.Flow('Q_th', bus='Heat', size=200),
- fuel_flow=fx.Flow('Q_fu', bus='Coal', size=288, relative_minimum=87 / 288, previous_flow_rate=100),
+ electrical_flow=fx.Flow(bus='Electricity', size=200),
+ thermal_flow=fx.Flow(bus='Heat', size=200),
+ fuel_flow=fx.Flow(bus='Coal', size=288, relative_minimum=87 / 288, previous_flow_rate=100),
),
# Boiler with startup costs
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.85,
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ thermal_flow=fx.Flow(bus='Heat'),
fuel_flow=fx.Flow(
- 'Q_fu',
bus='Gas',
size=95,
relative_minimum=12 / 95,
@@ -489,22 +483,21 @@ def create_operational_system() -> fx.FlowSystem:
eta_discharge=1,
relative_loss_per_hour=0.001,
prevent_simultaneous_charge_and_discharge=True,
- charging=fx.Flow('Charge', size=137, bus='Heat'),
- discharging=fx.Flow('Discharge', size=158, bus='Heat'),
+ charging=fx.Flow(bus='Heat', size=137),
+ discharging=fx.Flow(bus='Heat', size=158),
),
fx.Source(
'GasGrid',
- outputs=[fx.Flow('Q_Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})],
+ outputs=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})],
),
fx.Source(
'CoalSupply',
- outputs=[fx.Flow('Q_Coal', bus='Coal', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})],
+ outputs=[fx.Flow(bus='Coal', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})],
),
fx.Source(
'GridBuy',
outputs=[
fx.Flow(
- 'P_el',
bus='Electricity',
size=1000,
effects_per_flow_hour={'costs': electricity_price + 0.5, 'CO2': 0.3},
@@ -513,11 +506,12 @@ def create_operational_system() -> fx.FlowSystem:
),
fx.Sink(
'GridSell',
- inputs=[fx.Flow('P_el', bus='Electricity', size=1000, effects_per_flow_hour=-(electricity_price - 0.5))],
+ inputs=[fx.Flow(bus='Electricity', size=1000, effects_per_flow_hour=-(electricity_price - 0.5))],
),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q_th', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
fx.Sink(
- 'ElecDemand', inputs=[fx.Flow('P_el', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)]
+ 'ElecDemand',
+ inputs=[fx.Flow(bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],
),
)
return fs
@@ -580,7 +574,6 @@ def create_seasonal_storage_system() -> fx.FlowSystem:
'SolarThermal',
outputs=[
fx.Flow(
- 'Q_th',
bus='Heat',
size=fx.InvestParameters(
minimum_size=0,
@@ -596,7 +589,6 @@ def create_seasonal_storage_system() -> fx.FlowSystem:
'GasBoiler',
thermal_efficiency=0.90,
thermal_flow=fx.Flow(
- 'Q_th',
bus='Heat',
size=fx.InvestParameters(
minimum_size=0,
@@ -604,14 +596,13 @@ def create_seasonal_storage_system() -> fx.FlowSystem:
effects_of_investment_per_size={'costs': 20000}, # €/MW (annualized)
),
),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas'),
),
# Gas supply (higher price makes solar+storage more attractive)
fx.Source(
'GasGrid',
outputs=[
fx.Flow(
- 'Q_gas',
bus='Gas',
size=20,
effects_per_flow_hour={'costs': gas_price * 1.5, 'CO2': 0.2}, # €/MWh
@@ -631,12 +622,10 @@ def create_seasonal_storage_system() -> fx.FlowSystem:
eta_discharge=0.95,
relative_loss_per_hour=0.0001, # Very low losses for pit storage
charging=fx.Flow(
- 'Charge',
bus='Heat',
size=fx.InvestParameters(maximum_size=10, effects_of_investment_per_size={'costs': 5000}),
),
discharging=fx.Flow(
- 'Discharge',
bus='Heat',
size=fx.InvestParameters(maximum_size=10, effects_of_investment_per_size={'costs': 5000}),
),
@@ -644,7 +633,7 @@ def create_seasonal_storage_system() -> fx.FlowSystem:
# Heat demand
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q_th', bus='Heat', size=1, fixed_relative_profile=heat_demand)],
+ inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)],
),
)
return fs
@@ -709,12 +698,11 @@ def create_multiperiod_system() -> fx.FlowSystem:
fx.Bus('Gas', carrier='gas'),
fx.Bus('Heat', carrier='heat'),
fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),
- fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_prices)]),
+ fx.Source('GasGrid', outputs=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=gas_prices)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.92,
thermal_flow=fx.Flow(
- 'Heat',
bus='Heat',
size=fx.InvestParameters(
effects_of_investment={'costs': 1000},
@@ -722,7 +710,7 @@ def create_multiperiod_system() -> fx.FlowSystem:
maximum_size=250,
),
),
- fuel_flow=fx.Flow('Gas', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas'),
),
fx.Storage(
'ThermalStorage',
@@ -733,10 +721,10 @@ def create_multiperiod_system() -> fx.FlowSystem:
),
eta_charge=0.98,
eta_discharge=0.98,
- charging=fx.Flow('Charge', bus='Heat', size=80),
- discharging=fx.Flow('Discharge', bus='Heat', size=80),
+ charging=fx.Flow(bus='Heat', size=80),
+ discharging=fx.Flow(bus='Heat', size=80),
),
- fx.Sink('Building', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ fx.Sink('Building', inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
)
return fs
diff --git a/docs/user-guide/building-models/choosing-components.md b/docs/user-guide/building-models/choosing-components.md
index 5f07e82dc..2f19ee6f4 100644
--- a/docs/user-guide/building-models/choosing-components.md
+++ b/docs/user-guide/building-models/choosing-components.md
@@ -39,7 +39,7 @@ graph TD
```python
fx.Source(
'GridElectricity',
- outputs=[fx.Flow('Elec', bus='Electricity', size=1000, effects_per_flow_hour=0.25)]
+ outputs=[fx.Flow(bus='Electricity', size=1000, effects_per_flow_hour=0.25)]
)
```
@@ -67,13 +67,13 @@ fx.Source(
# Fixed demand (must be met)
fx.Sink(
'Building',
- inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=demand)]
+ inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=demand)]
)
# Optional export (can sell if profitable)
fx.Sink(
'Export',
- inputs=[fx.Flow('Elec', bus='Electricity', size=100, effects_per_flow_hour=-0.15)]
+ inputs=[fx.Flow(bus='Electricity', size=100, effects_per_flow_hour=-0.15)]
)
```
@@ -100,8 +100,8 @@ fx.Sink(
```python
fx.SourceAndSink(
'GridConnection',
- inputs=[fx.Flow('import', bus='Electricity', size=500, effects_per_flow_hour=0.25)],
- outputs=[fx.Flow('export', bus='Electricity', size=500, effects_per_flow_hour=-0.15)],
+ inputs=[fx.Flow(bus='Electricity', flow_id='import', size=500, effects_per_flow_hour=0.25)],
+ outputs=[fx.Flow(bus='Electricity', flow_id='export', size=500, effects_per_flow_hour=-0.15)],
prevent_simultaneous_flow_rates=True, # Can't buy and sell at same time
)
```
@@ -121,30 +121,30 @@ fx.SourceAndSink(
# Single input, single output
fx.LinearConverter(
'Boiler',
- inputs=[fx.Flow('Gas', bus='Gas', size=500)],
- outputs=[fx.Flow('Heat', bus='Heat', size=450)],
+ inputs=[fx.Flow(bus='Gas', size=500)],
+ outputs=[fx.Flow(bus='Heat', size=450)],
conversion_factors=[{'Gas': 1, 'Heat': 0.9}],
)
# Multiple outputs (CHP)
fx.LinearConverter(
'CHP',
- inputs=[fx.Flow('Gas', bus='Gas', size=300)],
+ inputs=[fx.Flow(bus='Gas', size=300)],
outputs=[
- fx.Flow('Elec', bus='Electricity', size=100),
- fx.Flow('Heat', bus='Heat', size=150),
+ fx.Flow(bus='Electricity', size=100),
+ fx.Flow(bus='Heat', size=150),
],
- conversion_factors=[{'Gas': 1, 'Elec': 0.35, 'Heat': 0.50}],
+ conversion_factors=[{'Gas': 1, 'Electricity': 0.35, 'Heat': 0.50}],
)
# Multiple inputs
fx.LinearConverter(
'CoFiringBoiler',
inputs=[
- fx.Flow('Gas', bus='Gas', size=200),
- fx.Flow('Biomass', bus='Biomass', size=100),
+ fx.Flow(bus='Gas', size=200),
+ fx.Flow(bus='Biomass', size=100),
],
- outputs=[fx.Flow('Heat', bus='Heat', size=270)],
+ outputs=[fx.Flow(bus='Heat', size=270)],
conversion_factors=[{'Gas': 1, 'Biomass': 1, 'Heat': 0.9}],
)
```
@@ -183,8 +183,8 @@ from flixopt.linear_converters import Boiler, HeatPump
boiler = Boiler(
'GasBoiler',
thermal_efficiency=0.92,
- fuel_flow=fx.Flow('gas', bus='Gas', size=500, effects_per_flow_hour=0.05),
- thermal_flow=fx.Flow('heat', bus='Heat', size=460),
+ fuel_flow=fx.Flow(bus='Gas', size=500, effects_per_flow_hour=0.05),
+ thermal_flow=fx.Flow(bus='Heat', size=460),
)
```
@@ -197,8 +197,8 @@ boiler = Boiler(
```python
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Electricity', size=100),
- discharging=fx.Flow('discharge', bus='Electricity', size=100),
+ charging=fx.Flow(bus='Electricity', size=100),
+ discharging=fx.Flow(bus='Electricity', size=100),
capacity_in_flow_hours=4, # 4 hours at full rate = 400 kWh
eta_charge=0.95,
eta_discharge=0.95,
@@ -233,18 +233,18 @@ fx.Storage(
# Unidirectional
fx.Transmission(
'HeatPipe',
- in1=fx.Flow('from_A', bus='Heat_A', size=200),
- out1=fx.Flow('to_B', bus='Heat_B', size=200),
+ in1=fx.Flow(bus='Heat_A', size=200),
+ out1=fx.Flow(bus='Heat_B', size=200),
relative_losses=0.05,
)
# Bidirectional
fx.Transmission(
'PowerLine',
- in1=fx.Flow('A_to_B', bus='Elec_A', size=100),
- out1=fx.Flow('at_B', bus='Elec_B', size=100),
- in2=fx.Flow('B_to_A', bus='Elec_B', size=100),
- out2=fx.Flow('at_A', bus='Elec_A', size=100),
+ in1=fx.Flow(bus='Elec_A', flow_id='A_to_B', size=100),
+ out1=fx.Flow(bus='Elec_B', flow_id='at_B', size=100),
+ in2=fx.Flow(bus='Elec_B', flow_id='B_to_A', size=100),
+ out2=fx.Flow(bus='Elec_A', flow_id='at_A', size=100),
relative_losses=0.03,
prevent_simultaneous_flows_in_both_directions=True,
)
@@ -274,7 +274,6 @@ Add `InvestParameters` to flows to let the optimizer choose sizes:
```python
fx.Flow(
- 'Heat',
bus='Heat',
invest_parameters=fx.InvestParameters(
effects_of_investment_per_size={'costs': 100}, # €/kW
@@ -292,7 +291,6 @@ Add `StatusParameters` to flows for on/off behavior:
```python
fx.Flow(
- 'Heat',
bus='Heat',
size=500,
status_parameters=fx.StatusParameters(
@@ -312,11 +310,11 @@ Use `PiecewiseConversion` for load-dependent efficiency:
```python
fx.LinearConverter(
'GasEngine',
- inputs=[fx.Flow('Fuel', bus='Gas')],
- outputs=[fx.Flow('Elec', bus='Electricity')],
+ inputs=[fx.Flow(bus='Gas')],
+ outputs=[fx.Flow(bus='Electricity')],
piecewise_conversion=fx.PiecewiseConversion({
- 'Fuel': fx.Piecewise([fx.Piece(100, 200), fx.Piece(200, 300)]),
- 'Elec': fx.Piecewise([fx.Piece(35, 80), fx.Piece(80, 110)]),
+ 'Gas': fx.Piecewise([fx.Piece(100, 200), fx.Piece(200, 300)]),
+ 'Electricity': fx.Piecewise([fx.Piece(35, 80), fx.Piece(80, 110)]),
}),
)
```
@@ -334,8 +332,8 @@ for i in range(3):
flow_system.add_elements(
fx.LinearConverter(
f'Boiler_{i}',
- inputs=[fx.Flow('Gas', bus='Gas', size=100)],
- outputs=[fx.Flow('Heat', bus='Heat', size=90)],
+ inputs=[fx.Flow(bus='Gas', size=100)],
+ outputs=[fx.Flow(bus='Heat', size=90)],
conversion_factors=[{'Gas': 1, 'Heat': 0.9}],
)
)
@@ -349,12 +347,12 @@ Model waste heat recovery from one process to another:
# Process that generates waste heat
process = fx.LinearConverter(
'Process',
- inputs=[fx.Flow('Elec', bus='Electricity', size=100)],
+ inputs=[fx.Flow(bus='Electricity', size=100)],
outputs=[
- fx.Flow('Product', bus='Products', size=80),
- fx.Flow('WasteHeat', bus='Heat', size=20), # Recovered heat
+ fx.Flow(bus='Products', size=80),
+ fx.Flow(bus='Heat', size=20), # Recovered heat
],
- conversion_factors=[{'Elec': 1, 'Product': 0.8, 'WasteHeat': 0.2}],
+ conversion_factors=[{'Electricity': 1, 'Products': 0.8, 'Heat': 0.2}],
)
```
@@ -366,10 +364,10 @@ Model a component that can use multiple fuels:
flex_boiler = fx.LinearConverter(
'FlexBoiler',
inputs=[
- fx.Flow('Gas', bus='Gas', size=200, effects_per_flow_hour=0.05),
- fx.Flow('Oil', bus='Oil', size=200, effects_per_flow_hour=0.08),
+ fx.Flow(bus='Gas', size=200, effects_per_flow_hour=0.05),
+ fx.Flow(bus='Oil', size=200, effects_per_flow_hour=0.08),
],
- outputs=[fx.Flow('Heat', bus='Heat', size=180)],
+ outputs=[fx.Flow(bus='Heat', size=180)],
conversion_factors=[{'Gas': 1, 'Oil': 1, 'Heat': 0.9}],
)
```
diff --git a/docs/user-guide/building-models/index.md b/docs/user-guide/building-models/index.md
index 248c7ada5..72ad944ca 100644
--- a/docs/user-guide/building-models/index.md
+++ b/docs/user-guide/building-models/index.md
@@ -90,13 +90,13 @@ Use for **purchasing** energy or materials from outside:
# Grid electricity with time-varying price
grid = fx.Source(
'Grid',
- outputs=[fx.Flow('Elec', bus='Electricity', size=1000, effects_per_flow_hour=price_profile)]
+ outputs=[fx.Flow(bus='Electricity', size=1000, effects_per_flow_hour=price_profile)]
)
# Natural gas with fixed price
gas_supply = fx.Source(
'GasSupply',
- outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=0.05)]
+ outputs=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=0.05)]
)
```
@@ -108,13 +108,13 @@ Use for **consuming** energy or materials (demands, exports):
# Heat demand (must be met exactly)
building = fx.Sink(
'Building',
- inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=demand_profile)]
+ inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=demand_profile)]
)
# Optional export (can sell but not required)
export = fx.Sink(
'Export',
- inputs=[fx.Flow('Elec', bus='Electricity', size=100, effects_per_flow_hour=-0.15)] # Negative = revenue
+ inputs=[fx.Flow(bus='Electricity', size=100, effects_per_flow_hour=-0.15)] # Negative = revenue
)
```
@@ -126,28 +126,28 @@ Use for **converting** one form of energy to another:
# Gas boiler: Gas → Heat
boiler = fx.LinearConverter(
'Boiler',
- inputs=[fx.Flow('Gas', bus='Gas', size=500)],
- outputs=[fx.Flow('Heat', bus='Heat', size=450)],
+ inputs=[fx.Flow(bus='Gas', size=500)],
+ outputs=[fx.Flow(bus='Heat', size=450)],
conversion_factors=[{'Gas': 1, 'Heat': 0.9}], # 90% efficiency
)
# Heat pump: Electricity → Heat
heat_pump = fx.LinearConverter(
'HeatPump',
- inputs=[fx.Flow('Elec', bus='Electricity', size=100)],
- outputs=[fx.Flow('Heat', bus='Heat', size=350)],
- conversion_factors=[{'Elec': 1, 'Heat': 3.5}], # COP = 3.5
+ inputs=[fx.Flow(bus='Electricity', size=100)],
+ outputs=[fx.Flow(bus='Heat', size=350)],
+ conversion_factors=[{'Electricity': 1, 'Heat': 3.5}], # COP = 3.5
)
# CHP: Gas → Electricity + Heat (multiple outputs)
chp = fx.LinearConverter(
'CHP',
- inputs=[fx.Flow('Gas', bus='Gas', size=300)],
+ inputs=[fx.Flow(bus='Gas', size=300)],
outputs=[
- fx.Flow('Elec', bus='Electricity', size=100),
- fx.Flow('Heat', bus='Heat', size=150),
+ fx.Flow(bus='Electricity', size=100),
+ fx.Flow(bus='Heat', size=150),
],
- conversion_factors=[{'Gas': 1, 'Elec': 0.35, 'Heat': 0.50}],
+ conversion_factors=[{'Gas': 1, 'Electricity': 0.35, 'Heat': 0.50}],
)
```
@@ -159,8 +159,8 @@ Use for **storing** energy or materials:
# Thermal storage
tank = fx.Storage(
'ThermalTank',
- charging=fx.Flow('charge', bus='Heat', size=200),
- discharging=fx.Flow('discharge', bus='Heat', size=200),
+ charging=fx.Flow(bus='Heat', size=200),
+ discharging=fx.Flow(bus='Heat', size=200),
capacity_in_flow_hours=10, # 10 hours at full charge/discharge rate
eta_charge=0.95,
eta_discharge=0.95,
@@ -177,8 +177,8 @@ Use for **connecting** different locations:
# District heating pipe
pipe = fx.Transmission(
'HeatPipe',
- in1=fx.Flow('from_A', bus='Heat_A', size=200),
- out1=fx.Flow('to_B', bus='Heat_B', size=200),
+ in1=fx.Flow(bus='Heat_A', size=200),
+ out1=fx.Flow(bus='Heat_B', size=200),
relative_losses=0.05, # 5% loss
)
```
@@ -212,10 +212,10 @@ Effects are typically assigned per flow hour:
```python
# Gas costs 0.05 €/kWh
-fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})
+fx.Flow(bus='Gas', size=500, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})
# Shorthand when only one effect (the standard one)
-fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=0.05)
+fx.Flow(bus='Gas', size=500, effects_per_flow_hour=0.05)
```
## Step 5: Add Everything to FlowSystem
@@ -234,14 +234,14 @@ flow_system.add_elements(
fx.Effect('costs', '€', is_standard=True, is_objective=True),
# Components
- fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=0.05)]),
+ fx.Source('GasGrid', outputs=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=0.05)]),
fx.LinearConverter(
'Boiler',
- inputs=[fx.Flow('Gas', bus='Gas', size=500)],
- outputs=[fx.Flow('Heat', bus='Heat', size=450)],
+ inputs=[fx.Flow(bus='Gas', size=500)],
+ outputs=[fx.Flow(bus='Heat', size=450)],
conversion_factors=[{'Gas': 1, 'Heat': 0.9}],
),
- fx.Sink('Building', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=demand)]),
+ fx.Sink('Building', inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=demand)]),
)
```
@@ -255,14 +255,14 @@ Gas → Boiler → Heat
flow_system.add_elements(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Gas', outputs=[fx.Flow('gas', bus=None, size=500, effects_per_flow_hour=0.05)]),
+ fx.Source('Gas', outputs=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=0.05)]),
fx.LinearConverter(
'Boiler',
- inputs=[fx.Flow('gas', bus=None, size=500)], # Inline source
- outputs=[fx.Flow('heat', bus='Heat', size=450)],
- conversion_factors=[{'gas': 1, 'heat': 0.9}],
+ inputs=[fx.Flow(bus='Gas', size=500)],
+ outputs=[fx.Flow(bus='Heat', size=450)],
+ conversion_factors=[{'Gas': 1, 'Heat': 0.9}],
),
- fx.Sink('Demand', inputs=[fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=demand)]),
+ fx.Sink('Demand', inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=demand)]),
)
```
diff --git a/docs/user-guide/mathematical-notation/elements/LinearConverter.md b/docs/user-guide/mathematical-notation/elements/LinearConverter.md
index 915537d60..ecb340e27 100644
--- a/docs/user-guide/mathematical-notation/elements/LinearConverter.md
+++ b/docs/user-guide/mathematical-notation/elements/LinearConverter.md
@@ -121,15 +121,15 @@ chp = fx.linear_converters.CHP(
```python
chp = fx.LinearConverter(
label='CHP',
- inputs=[fx.Flow('fuel', bus=gas_bus)],
+ inputs=[fx.Flow(bus='Gas')],
outputs=[
- fx.Flow('el', bus=elec_bus, size=60),
- fx.Flow('heat', bus=heat_bus),
+ fx.Flow(bus='Electricity', size=60),
+ fx.Flow(bus='Heat'),
],
piecewise_conversion=fx.PiecewiseConversion({
- 'el': fx.Piecewise([fx.Piece(5, 30), fx.Piece(40, 60)]),
- 'heat': fx.Piecewise([fx.Piece(6, 35), fx.Piece(45, 100)]),
- 'fuel': fx.Piecewise([fx.Piece(12, 70), fx.Piece(90, 200)]),
+ 'Electricity': fx.Piecewise([fx.Piece(5, 30), fx.Piece(40, 60)]),
+ 'Heat': fx.Piecewise([fx.Piece(6, 35), fx.Piece(45, 100)]),
+ 'Gas': fx.Piecewise([fx.Piece(12, 70), fx.Piece(90, 200)]),
}),
)
```
diff --git a/flixopt/batched.py b/flixopt/batched.py
index 4bc732b62..50265ed17 100644
--- a/flixopt/batched.py
+++ b/flixopt/batched.py
@@ -13,13 +13,13 @@
import logging
from functools import cached_property
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
import numpy as np
import pandas as pd
import xarray as xr
-from .core import PlausibilityError
+from .core import PlausibilityError, align_effects_to_coords, align_to_coords
from .features import fast_isnull, fast_notnull, stack_along_dim
from .id_list import IdList, element_id_list
from .interface import InvestParameters, StatusParameters
@@ -107,6 +107,8 @@ def __init__(
effect_ids: list[str] | None = None,
timestep_duration: xr.DataArray | float | None = None,
previous_states: dict[str, xr.DataArray] | None = None,
+ coords: dict[str, pd.Index] | None = None,
+ normalize_effects: Any = None,
):
self._params = params
self._dim = dim_name
@@ -114,6 +116,8 @@ def __init__(
self._effect_ids = effect_ids or []
self._timestep_duration = timestep_duration
self._previous_states = previous_states or {}
+ self._coords = coords
+ self._normalize_effects = normalize_effects
@property
def ids(self) -> list[str]:
@@ -264,7 +268,23 @@ def previous_downtime(self) -> xr.DataArray | None:
def _build_effects(self, attr: str) -> xr.DataArray | None:
"""Build effect factors array for a status effect attribute."""
ids = self._categorize(lambda p: getattr(p, attr))
- dicts = {eid: getattr(self._params[eid], attr) for eid in ids}
+ if not ids:
+ return None
+ norm = self._normalize_effects or (lambda x: x)
+ dicts = {}
+ for eid in ids:
+ raw = getattr(self._params[eid], attr)
+ normalized = norm(raw) or {}
+ if self._coords is not None:
+ aligned = align_effects_to_coords(
+ normalized,
+ self._coords,
+ prefix=eid,
+ suffix=attr,
+ )
+ dicts[eid] = aligned or {}
+ else:
+ dicts[eid] = normalized
return build_effects_array(dicts, self._effect_ids, self._dim)
@cached_property
@@ -295,11 +315,25 @@ def __init__(
params: dict[str, InvestParameters],
dim_name: str,
effect_ids: list[str] | None = None,
+ coords: dict[str, pd.Index] | None = None,
+ normalize_effects: Any = None,
):
self._params = params
self._dim = dim_name
self._ids = list(params.keys())
self._effect_ids = effect_ids or []
+ self._coords = coords
+ self._normalize_effects = normalize_effects
+ self._validate()
+
+ def _validate(self) -> None:
+ """Validate investment parameters."""
+ for eid, p in self._params.items():
+ if p.fixed_size is None and p.maximum_size is None:
+ raise PlausibilityError(
+ f'InvestParameters for "{eid}" requires either fixed_size or maximum_size to be set. '
+ f'An upper bound is needed to properly scale the optimization model.'
+ )
@property
def ids(self) -> list[str]:
@@ -398,7 +432,22 @@ def _build_effects(self, attr: str, ids: list[str] | None = None) -> xr.DataArra
"""Build effect factors array for an investment effect attribute."""
if ids is None:
ids = self._categorize(lambda p: getattr(p, attr))
- dicts = {eid: getattr(self._params[eid], attr) for eid in ids}
+ norm = self._normalize_effects or (lambda x: x)
+ dicts = {}
+ for eid in ids:
+ raw = getattr(self._params[eid], attr)
+ normalized = norm(raw) or {}
+ if self._coords is not None:
+ aligned = align_effects_to_coords(
+ normalized,
+ self._coords,
+ prefix=eid,
+ suffix=attr,
+ dims=['period', 'scenario'],
+ )
+ dicts[eid] = aligned or {}
+ else:
+ dicts[eid] = normalized
return build_effects_array(dicts, self._effect_ids, self._dim)
@cached_property
@@ -526,7 +575,13 @@ class StoragesData:
"""
def __init__(
- self, storages: list, dim_name: str, effect_ids: list[str], timesteps_extra: pd.DatetimeIndex | None = None
+ self,
+ storages: list,
+ dim_name: str,
+ effect_ids: list[str],
+ timesteps_extra: pd.DatetimeIndex | None = None,
+ coords: dict[str, pd.Index] | None = None,
+ normalize_effects: Any = None,
):
"""Initialize StoragesData.
@@ -536,11 +591,15 @@ def __init__(
effect_ids: List of effect IDs for building effect arrays.
timesteps_extra: Extended timesteps (time + 1 final step) for charge state bounds.
Required for StoragesModel, None for InterclusterStoragesModel.
+ coords: Coordinate indexes for alignment (time, period, scenario).
+ normalize_effects: Callable to normalize raw effect values.
"""
self._storages = storages
self._dim_name = dim_name
self._effect_ids = effect_ids
self._timesteps_extra = timesteps_extra
+ self._coords = coords
+ self._normalize_effects = normalize_effects
self._by_id = {s.id: s for s in storages}
@cached_property
@@ -570,6 +629,11 @@ def __getitem__(self, label: str):
def __len__(self) -> int:
return len(self._storages)
+ def _align(self, storage_id: str, attr: str, dims: list[str] | None = None) -> xr.DataArray | None:
+ """Align a single storage attribute value to model coords."""
+ raw = getattr(self._by_id[storage_id], attr)
+ return align_to_coords(raw, self._coords, name=f'{storage_id}|{attr}', dims=dims)
+
# === Categorization ===
@cached_property
@@ -608,6 +672,8 @@ def investment_data(self) -> InvestmentData | None:
params=self.invest_params,
dim_name=self._dim_name,
effect_ids=self._effect_ids,
+ coords=self._coords,
+ normalize_effects=self._normalize_effects,
)
# === Stacked Storage Parameters ===
@@ -615,27 +681,33 @@ def investment_data(self) -> InvestmentData | None:
@cached_property
def eta_charge(self) -> xr.DataArray:
"""(element, [time]) - charging efficiency."""
- return stack_along_dim([s.eta_charge for s in self._storages], self._dim_name, self.ids)
+ return stack_along_dim([self._align(s.id, 'eta_charge') for s in self._storages], self._dim_name, self.ids)
@cached_property
def eta_discharge(self) -> xr.DataArray:
"""(element, [time]) - discharging efficiency."""
- return stack_along_dim([s.eta_discharge for s in self._storages], self._dim_name, self.ids)
+ return stack_along_dim([self._align(s.id, 'eta_discharge') for s in self._storages], self._dim_name, self.ids)
@cached_property
def relative_loss_per_hour(self) -> xr.DataArray:
"""(element, [time]) - relative loss per hour."""
- return stack_along_dim([s.relative_loss_per_hour for s in self._storages], self._dim_name, self.ids)
+ return stack_along_dim(
+ [self._align(s.id, 'relative_loss_per_hour') for s in self._storages], self._dim_name, self.ids
+ )
@cached_property
def relative_minimum_charge_state(self) -> xr.DataArray:
"""(element, [time]) - relative minimum charge state."""
- return stack_along_dim([s.relative_minimum_charge_state for s in self._storages], self._dim_name, self.ids)
+ return stack_along_dim(
+ [self._align(s.id, 'relative_minimum_charge_state') for s in self._storages], self._dim_name, self.ids
+ )
@cached_property
def relative_maximum_charge_state(self) -> xr.DataArray:
"""(element, [time]) - relative maximum charge state."""
- return stack_along_dim([s.relative_maximum_charge_state for s in self._storages], self._dim_name, self.ids)
+ return stack_along_dim(
+ [self._align(s.id, 'relative_maximum_charge_state') for s in self._storages], self._dim_name, self.ids
+ )
@cached_property
def charging_flow_ids(self) -> list[str]:
@@ -647,6 +719,20 @@ def discharging_flow_ids(self) -> list[str]:
"""Flow IDs for discharging flows, aligned with self.ids."""
return [s.discharging.id for s in self._storages]
+ def aligned_initial_charge_state(self, storage) -> xr.DataArray | None:
+ """Get aligned initial_charge_state for a storage (None if string or None)."""
+ if storage.initial_charge_state is None or isinstance(storage.initial_charge_state, str):
+ return None
+ return self._align(storage.id, 'initial_charge_state', dims=['period', 'scenario'])
+
+ def aligned_minimal_final_charge_state(self, storage) -> xr.DataArray | None:
+ """Get aligned minimal_final_charge_state for a storage."""
+ return self._align(storage.id, 'minimal_final_charge_state', dims=['period', 'scenario'])
+
+ def aligned_maximal_final_charge_state(self, storage) -> xr.DataArray | None:
+ """Get aligned maximal_final_charge_state for a storage."""
+ return self._align(storage.id, 'maximal_final_charge_state', dims=['period', 'scenario'])
+
# === Capacity and Charge State Bounds ===
@cached_property
@@ -659,7 +745,7 @@ def capacity_lower(self) -> xr.DataArray:
elif isinstance(s.capacity_in_flow_hours, InvestParameters):
values.append(s.capacity_in_flow_hours.minimum_or_fixed_size)
else:
- values.append(s.capacity_in_flow_hours)
+ values.append(self._align(s.id, 'capacity_in_flow_hours', dims=['period', 'scenario']))
return stack_along_dim(values, self._dim_name, self.ids)
@cached_property
@@ -672,7 +758,7 @@ def capacity_upper(self) -> xr.DataArray:
elif isinstance(s.capacity_in_flow_hours, InvestParameters):
values.append(s.capacity_in_flow_hours.maximum_or_fixed_size)
else:
- values.append(s.capacity_in_flow_hours)
+ values.append(self._align(s.id, 'capacity_in_flow_hours', dims=['period', 'scenario']))
return stack_along_dim(values, self._dim_name, self.ids)
def _relative_bounds_extra(self) -> tuple[xr.DataArray, xr.DataArray]:
@@ -685,19 +771,21 @@ def _relative_bounds_extra(self) -> tuple[xr.DataArray, xr.DataArray]:
rel_mins = []
rel_maxs = []
for s in self._storages:
- rel_min = s.relative_minimum_charge_state
- rel_max = s.relative_maximum_charge_state
+ rel_min = self._align(s.id, 'relative_minimum_charge_state')
+ rel_max = self._align(s.id, 'relative_maximum_charge_state')
# Get final values
- if s.relative_minimum_final_charge_state is None:
+ rel_min_final = self._align(s.id, 'relative_minimum_final_charge_state', dims=['period', 'scenario'])
+ rel_max_final = self._align(s.id, 'relative_maximum_final_charge_state', dims=['period', 'scenario'])
+ if rel_min_final is None:
min_final_value = _scalar_safe_isel_drop(rel_min, 'time', -1)
else:
- min_final_value = s.relative_minimum_final_charge_state
+ min_final_value = rel_min_final
- if s.relative_maximum_final_charge_state is None:
+ if rel_max_final is None:
max_final_value = _scalar_safe_isel_drop(rel_max, 'time', -1)
else:
- max_final_value = s.relative_maximum_final_charge_state
+ max_final_value = rel_max_final
# Build bounds arrays for timesteps_extra
if 'time' in rel_min.dims:
@@ -762,10 +850,6 @@ def charge_state_upper_bounds(self) -> xr.DataArray:
def validate(self) -> None:
"""Validate all storages (config + DataArray checks).
- Performs both:
- - Config validation via Storage.validate_config()
- - DataArray validation (post-transformation checks)
-
Raises:
PlausibilityError: If any validation check fails.
"""
@@ -774,59 +858,92 @@ def validate(self) -> None:
errors: list[str] = []
for storage in self._storages:
- storage.validate_config()
sid = storage.id
- # Capacity required for non-default relative bounds (DataArray checks)
+ # Config checks (moved from Storage.validate_config / Component.validate_config)
+ storage._check_unique_flow_ids()
+ if storage.status_parameters:
+ for flow in storage.flows.values():
+ if flow.size is None:
+ raise PlausibilityError(
+ f'"{storage.id}": Flow "{flow.flow_id}" must have a defined size '
+ f'because {storage.id} has status_parameters. '
+ f'A size is required for big-M constraints.'
+ )
+
+ if isinstance(storage.initial_charge_state, str):
+ if storage.initial_charge_state != 'equals_final':
+ raise PlausibilityError(f'initial_charge_state has undefined value: {storage.initial_charge_state}')
+
+ if storage.capacity_in_flow_hours is None:
+ if storage.relative_minimum_final_charge_state is not None:
+ raise PlausibilityError(
+ f'Storage "{sid}" has relative_minimum_final_charge_state but no capacity_in_flow_hours. '
+ f'A capacity is required for relative final charge state constraints.'
+ )
+ if storage.relative_maximum_final_charge_state is not None:
+ raise PlausibilityError(
+ f'Storage "{sid}" has relative_maximum_final_charge_state but no capacity_in_flow_hours. '
+ f'A capacity is required for relative final charge state constraints.'
+ )
+
+ if storage.balanced:
+ if not isinstance(storage.charging.size, InvestParameters) or not isinstance(
+ storage.discharging.size, InvestParameters
+ ):
+ raise PlausibilityError(
+ f'Balancing charging and discharging Flows in {sid} is only possible with Investments.'
+ )
+
+ # DataArray checks (use aligned values)
+ rel_min = self._align(sid, 'relative_minimum_charge_state')
+ rel_max = self._align(sid, 'relative_maximum_charge_state')
+
if storage.capacity_in_flow_hours is None:
- if np.any(storage.relative_minimum_charge_state > 0):
+ if np.any(rel_min > 0):
errors.append(
f'Storage "{sid}" has relative_minimum_charge_state > 0 but no capacity_in_flow_hours. '
f'A capacity is required because the lower bound is capacity * relative_minimum_charge_state.'
)
- if np.any(storage.relative_maximum_charge_state < 1):
+ if np.any(rel_max < 1):
errors.append(
f'Storage "{sid}" has relative_maximum_charge_state < 1 but no capacity_in_flow_hours. '
f'A capacity is required because the upper bound is capacity * relative_maximum_charge_state.'
)
- # Initial charge state vs capacity bounds (DataArray checks)
if storage.capacity_in_flow_hours is not None:
if isinstance(storage.capacity_in_flow_hours, InvestParameters):
minimum_capacity = storage.capacity_in_flow_hours.minimum_or_fixed_size
maximum_capacity = storage.capacity_in_flow_hours.maximum_or_fixed_size
else:
- maximum_capacity = storage.capacity_in_flow_hours
- minimum_capacity = storage.capacity_in_flow_hours
+ aligned_cap = self._align(sid, 'capacity_in_flow_hours', dims=['period', 'scenario'])
+ maximum_capacity = aligned_cap
+ minimum_capacity = aligned_cap
- min_initial_at_max_capacity = maximum_capacity * _scalar_safe_isel(
- storage.relative_minimum_charge_state, {'time': 0}
- )
- max_initial_at_min_capacity = minimum_capacity * _scalar_safe_isel(
- storage.relative_maximum_charge_state, {'time': 0}
- )
+ min_initial_at_max_capacity = maximum_capacity * _scalar_safe_isel(rel_min, {'time': 0})
+ max_initial_at_min_capacity = minimum_capacity * _scalar_safe_isel(rel_max, {'time': 0})
initial_equals_final = isinstance(storage.initial_charge_state, str)
if not initial_equals_final and storage.initial_charge_state is not None:
- if (storage.initial_charge_state > max_initial_at_min_capacity).any():
+ initial = self._align(sid, 'initial_charge_state', dims=['period', 'scenario'])
+ if (initial > max_initial_at_min_capacity).any():
errors.append(
f'{sid}: initial_charge_state={storage.initial_charge_state} '
f'is constraining the investment decision. Choose a value <= {max_initial_at_min_capacity}.'
)
- if (storage.initial_charge_state < min_initial_at_max_capacity).any():
+ if (initial < min_initial_at_max_capacity).any():
errors.append(
f'{sid}: initial_charge_state={storage.initial_charge_state} '
f'is constraining the investment decision. Choose a value >= {min_initial_at_max_capacity}.'
)
- # Balanced charging/discharging size compatibility (DataArray checks)
if storage.balanced:
charging_min = storage.charging.size.minimum_or_fixed_size
charging_max = storage.charging.size.maximum_or_fixed_size
discharging_min = storage.discharging.size.minimum_or_fixed_size
discharging_max = storage.discharging.size.maximum_or_fixed_size
- if (charging_min > discharging_max).any() or (charging_max < discharging_min).any():
+ if np.any(charging_min > discharging_max) or np.any(charging_max < discharging_min):
errors.append(
f'Balancing charging and discharging Flows in {sid} need compatible minimum and maximum sizes. '
f'Got: charging.size.minimum={charging_min}, charging.size.maximum={charging_max} and '
@@ -944,7 +1061,7 @@ def has_size(self) -> xr.DataArray:
@cached_property
def has_effects(self) -> xr.DataArray:
"""(flow,) - boolean mask for flows with effects_per_flow_hour."""
- return self._mask(lambda f: bool(f.effects_per_flow_hour))
+ return self._mask(lambda f: f.effects_per_flow_hour is not None)
@cached_property
def has_flow_hours_min(self) -> xr.DataArray:
@@ -1090,7 +1207,7 @@ def with_load_factor_max(self) -> list[str]:
@cached_property
def with_effects(self) -> list[str]:
"""IDs of flows with effects_per_flow_hour defined."""
- return self._categorize(lambda f: f.effects_per_flow_hour)
+ return self._categorize(lambda f: f.effects_per_flow_hour is not None)
@cached_property
def with_previous_flow_rate(self) -> list[str]:
@@ -1120,6 +1237,8 @@ def _status_data(self) -> StatusData | None:
effect_ids=list(self._fs.effects.keys()),
timestep_duration=self._fs.timestep_duration,
previous_states=self.previous_states,
+ coords=self._fs.indexes,
+ normalize_effects=self._fs.effects.create_effect_values_dict,
)
@cached_property
@@ -1131,6 +1250,8 @@ def _investment_data(self) -> InvestmentData | None:
params=self.invest_params,
dim_name='flow',
effect_ids=list(self._fs.effects.keys()),
+ coords=self._fs.indexes,
+ normalize_effects=self._fs.effects.create_effect_values_dict,
)
# === Batched Parameters ===
@@ -1173,14 +1294,14 @@ def load_factor_maximum(self) -> xr.DataArray | None:
@cached_property
def relative_minimum(self) -> xr.DataArray:
"""(flow, time, period, scenario) - relative lower bound on flow rate."""
- values = [f.relative_minimum for f in self.elements.values()]
+ values = [self._align(fid, 'relative_minimum') for fid in self.ids]
arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(None))
return self._ensure_canonical_order(arr)
@cached_property
def relative_maximum(self) -> xr.DataArray:
"""(flow, time, period, scenario) - relative upper bound on flow rate."""
- values = [f.relative_maximum for f in self.elements.values()]
+ values = [self._align(fid, 'relative_maximum') for fid in self.ids]
arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(None))
return self._ensure_canonical_order(arr)
@@ -1188,7 +1309,8 @@ def relative_maximum(self) -> xr.DataArray:
def fixed_relative_profile(self) -> xr.DataArray:
"""(flow, time, period, scenario) - fixed profile. NaN = not fixed."""
values = [
- f.fixed_relative_profile if f.fixed_relative_profile is not None else np.nan for f in self.elements.values()
+ self._align(fid, 'fixed_relative_profile') if self[fid].fixed_relative_profile is not None else np.nan
+ for fid in self.ids
]
arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(None))
return self._ensure_canonical_order(arr)
@@ -1213,11 +1335,12 @@ def effective_relative_maximum(self) -> xr.DataArray:
def fixed_size(self) -> xr.DataArray:
"""(flow, period, scenario) - fixed size for non-investment flows. NaN for investment/no-size flows."""
values = []
- for f in self.elements.values():
+ for fid in self.ids:
+ f = self[fid]
if f.size is None or isinstance(f.size, InvestParameters):
values.append(np.nan)
else:
- values.append(f.size)
+ values.append(self._align(fid, 'size', ['period', 'scenario']))
arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(['period', 'scenario']))
return self._ensure_canonical_order(arr)
@@ -1230,13 +1353,14 @@ def effective_size_lower(self) -> xr.DataArray:
- No size: NaN
"""
values = []
- for f in self.elements.values():
+ for fid in self.ids:
+ f = self[fid]
if f.size is None:
values.append(np.nan)
elif isinstance(f.size, InvestParameters):
values.append(f.size.minimum_or_fixed_size)
else:
- values.append(f.size)
+ values.append(self._align(fid, 'size', ['period', 'scenario']))
arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(['period', 'scenario']))
return self._ensure_canonical_order(arr)
@@ -1249,13 +1373,14 @@ def effective_size_upper(self) -> xr.DataArray:
- No size: NaN
"""
values = []
- for f in self.elements.values():
+ for fid in self.ids:
+ f = self[fid]
if f.size is None:
values.append(np.nan)
elif isinstance(f.size, InvestParameters):
values.append(f.size.maximum_or_fixed_size)
else:
- values.append(f.size)
+ values.append(self._align(fid, 'size', ['period', 'scenario']))
arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(['period', 'scenario']))
return self._ensure_canonical_order(arr)
@@ -1376,7 +1501,18 @@ def effects_per_flow_hour(self) -> xr.DataArray | None:
if not effect_ids:
return None
- dicts = {fid: self[fid].effects_per_flow_hour for fid in self.with_effects}
+ norm = self._fs.effects.create_effect_values_dict
+ dicts = {}
+ for fid in self.with_effects:
+ raw = self[fid].effects_per_flow_hour
+ normalized = norm(raw) or {}
+ aligned = align_effects_to_coords(
+ normalized,
+ self._fs.indexes,
+ prefix=fid,
+ suffix='per_flow_hour',
+ )
+ dicts[fid] = aligned or {}
return build_effects_array(dicts, effect_ids, 'flow')
# --- Investment Parameters ---
@@ -1476,6 +1612,11 @@ def previous_downtime(self) -> xr.DataArray | None:
# === Helper Methods ===
+ def _align(self, flow_id: str, attr: str, dims: list[str] | None = None) -> xr.DataArray | None:
+ """Align a single flow attribute value to model coords."""
+ raw = getattr(self[flow_id], attr)
+ return align_to_coords(raw, self._fs.indexes, name=f'{flow_id}|{attr}', dims=dims)
+
def _batched_parameter(
self,
ids: list[str],
@@ -1494,7 +1635,7 @@ def _batched_parameter(
"""
if not ids:
return None
- values = [getattr(self[fid], attr) for fid in ids]
+ values = [self._align(fid, attr, dims) for fid in ids]
arr = stack_along_dim(values, 'flow', ids, self._model_coords(dims))
return self._ensure_canonical_order(arr)
@@ -1579,10 +1720,6 @@ def _flagged_ids(self, mask: xr.DataArray) -> list[str]:
def validate(self) -> None:
"""Validate all flows (config + DataArray checks).
- Performs both:
- - Config validation via Flow.validate_config()
- - DataArray validation (post-transformation checks)
-
Raises:
PlausibilityError: If any validation check fails.
"""
@@ -1590,7 +1727,53 @@ def validate(self) -> None:
return
for flow in self.elements.values():
- flow.validate_config()
+ # Size is required when using StatusParameters (for big-M constraints)
+ if flow.status_parameters is not None and flow.size is None:
+ raise PlausibilityError(
+ f'Flow "{flow.id}" has status_parameters but no size defined. '
+ f'A size is required when using status_parameters to bound the flow rate.'
+ )
+
+ if flow.size is None and flow.fixed_relative_profile is not None:
+ raise PlausibilityError(
+ f'Flow "{flow.id}" has a fixed_relative_profile but no size defined. '
+ f'A size is required because flow_rate = size * fixed_relative_profile.'
+ )
+
+ # Size is required for load factor constraints (total_flow_hours / size)
+ if flow.size is None and flow.load_factor_min is not None:
+ raise PlausibilityError(
+ f'Flow "{flow.id}" has load_factor_min but no size defined. '
+ f'A size is required because the constraint is total_flow_hours >= size * load_factor_min * hours.'
+ )
+
+ if flow.size is None and flow.load_factor_max is not None:
+ raise PlausibilityError(
+ f'Flow "{flow.id}" has load_factor_max but no size defined. '
+ f'A size is required because the constraint is total_flow_hours <= size * load_factor_max * hours.'
+ )
+
+ # Validate previous_flow_rate type
+ if flow.previous_flow_rate is not None:
+ if not any(
+ [
+ isinstance(flow.previous_flow_rate, np.ndarray) and flow.previous_flow_rate.ndim == 1,
+ isinstance(flow.previous_flow_rate, (int, float, list)),
+ ]
+ ):
+ raise TypeError(
+ f'previous_flow_rate must be None, a scalar, a list of scalars or a 1D-numpy-array. '
+ f'Got {type(flow.previous_flow_rate)}. '
+ f'Different values in different periods or scenarios are not yet supported.'
+ )
+
+ # Warning: fixed_relative_profile + status_parameters is unusual
+ if flow.fixed_relative_profile is not None and flow.status_parameters is not None:
+ logger.warning(
+ f'Flow {flow.id} has both a fixed_relative_profile and status_parameters. '
+ f'This will allow the flow to be switched active and inactive, '
+ f'effectively differing from the fixed_flow_rate.'
+ )
errors: list[str] = []
@@ -1647,9 +1830,11 @@ class EffectsData:
modeling (EffectsModel).
"""
- def __init__(self, effect_collection: EffectCollection):
+ def __init__(self, effect_collection: EffectCollection, coords: dict[str, pd.Index], default_period_weights):
self._collection = effect_collection
self._effects: list[Effect] = list(effect_collection.values())
+ self._coords = coords
+ self._default_period_weights = default_period_weights
@cached_property
def effect_ids(self) -> list[str]:
@@ -1685,45 +1870,97 @@ def _effect_values(self, attr_name: str, default: float) -> list:
values.append(default if val is None else val)
return values
+ def _align(self, effect_id: str, attr: str, dims: list[str] | None = None) -> xr.DataArray | None:
+ """Align a single effect attribute value to model coords."""
+ raw = getattr(self._collection[effect_id], attr)
+ return align_to_coords(raw, self._coords, name=f'{effect_id}|{attr}', dims=dims)
+
+ def _aligned_values(self, attr_name: str, default: float, dims: list[str] | None = None) -> list:
+ """Extract per-effect attribute values, aligned to model coords."""
+ values = []
+ for effect in self._effects:
+ aligned = self._align(effect.id, attr_name, dims=dims)
+ values.append(default if aligned is None else aligned)
+ return values
+
+ def aligned_share_from_temporal(self, effect: Effect) -> dict[str, xr.DataArray]:
+ """Get aligned share_from_temporal for a specific effect."""
+ return (
+ align_effects_to_coords(
+ effect.share_from_temporal,
+ self._coords,
+ suffix=f'(temporal)->{effect.id}(temporal)',
+ )
+ or {}
+ )
+
+ def aligned_share_from_periodic(self, effect: Effect) -> dict[str, xr.DataArray]:
+ """Get aligned share_from_periodic for a specific effect."""
+ return (
+ align_effects_to_coords(
+ effect.share_from_periodic,
+ self._coords,
+ suffix=f'(periodic)->{effect.id}(periodic)',
+ dims=['period', 'scenario'],
+ )
+ or {}
+ )
+
@cached_property
def minimum_periodic(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('minimum_periodic', -np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('minimum_periodic', -np.inf, dims=['period', 'scenario']), 'effect', self.effect_ids
+ )
@cached_property
def maximum_periodic(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('maximum_periodic', np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('maximum_periodic', np.inf, dims=['period', 'scenario']), 'effect', self.effect_ids
+ )
@cached_property
def minimum_temporal(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('minimum_temporal', -np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('minimum_temporal', -np.inf, dims=['period', 'scenario']), 'effect', self.effect_ids
+ )
@cached_property
def maximum_temporal(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('maximum_temporal', np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('maximum_temporal', np.inf, dims=['period', 'scenario']), 'effect', self.effect_ids
+ )
@cached_property
def minimum_per_hour(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('minimum_per_hour', -np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(self._aligned_values('minimum_per_hour', -np.inf), 'effect', self.effect_ids)
@cached_property
def maximum_per_hour(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('maximum_per_hour', np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(self._aligned_values('maximum_per_hour', np.inf), 'effect', self.effect_ids)
@cached_property
def minimum_total(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('minimum_total', -np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('minimum_total', -np.inf, dims=['period', 'scenario']), 'effect', self.effect_ids
+ )
@cached_property
def maximum_total(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('maximum_total', np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('maximum_total', np.inf, dims=['period', 'scenario']), 'effect', self.effect_ids
+ )
@cached_property
def minimum_over_periods(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('minimum_over_periods', -np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('minimum_over_periods', -np.inf, dims=['scenario']), 'effect', self.effect_ids
+ )
@cached_property
def maximum_over_periods(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('maximum_over_periods', np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('maximum_over_periods', np.inf, dims=['scenario']), 'effect', self.effect_ids
+ )
@cached_property
def effects_with_over_periods(self) -> list[Effect]:
@@ -1734,14 +1971,13 @@ def period_weights(self) -> dict[str, xr.DataArray]:
"""Get period weights for each effect, keyed by effect id."""
result = {}
for effect in self._effects:
- effect_weights = effect.period_weights
- default_weights = effect._flow_system.period_weights
- if effect_weights is not None:
- result[effect.id] = effect_weights
- elif default_weights is not None:
- result[effect.id] = default_weights
+ aligned = self._align(effect.id, 'period_weights', dims=['period', 'scenario'])
+ if aligned is not None:
+ result[effect.id] = aligned
+ elif self._default_period_weights is not None:
+ result[effect.id] = self._default_period_weights
else:
- result[effect.id] = effect._fit_coords(name='period_weights', data=1, dims=['period'])
+ result[effect.id] = align_to_coords(1, self._coords, name='period_weights', dims=['period'])
return result
def effects(self) -> list[Effect]:
@@ -1763,8 +1999,16 @@ def validate(self) -> None:
- Individual effect config validation
- Collection-level validation (circular loops in share mappings, unknown effect refs)
"""
+ has_periods = 'period' in self._coords
+
for effect in self._effects:
- effect.validate_config()
+ # Check that minimum_over_periods and maximum_over_periods require a period dimension
+ if (effect.minimum_over_periods is not None or effect.maximum_over_periods is not None) and not has_periods:
+ raise PlausibilityError(
+ f"Effect '{effect.id}': minimum_over_periods and maximum_over_periods require "
+ f"the FlowSystem to have a 'period' dimension. Please define periods when creating "
+ f'the FlowSystem, or remove these constraints.'
+ )
# Collection-level validation (share structure)
self._validate_share_structure()
@@ -1799,9 +2043,10 @@ def _validate_share_structure(self) -> None:
class BusesData:
"""Batched data container for buses."""
- def __init__(self, buses: list[Bus]):
+ def __init__(self, buses: list[Bus], coords: dict[str, pd.Index]):
self._buses = buses
self.elements: IdList = element_id_list(buses)
+ self._coords = coords
@property
def element_ids(self) -> list[str]:
@@ -1821,6 +2066,14 @@ def imbalance_elements(self) -> list[Bus]:
"""Bus objects that allow imbalance."""
return [b for b in self._buses if b.allows_imbalance]
+ def aligned_imbalance_penalty(self, bus: Bus) -> xr.DataArray | None:
+ """Get aligned imbalance penalty for a specific bus."""
+ return align_to_coords(
+ bus.imbalance_penalty_per_flow_hour,
+ self._coords,
+ name=f'{bus.id}|imbalance_penalty_per_flow_hour',
+ )
+
@cached_property
def balance_coefficients(self) -> dict[tuple[str, str], float]:
"""Sparse (bus_id, flow_id) -> +1/-1 coefficients for bus balance."""
@@ -1833,17 +2086,16 @@ def balance_coefficients(self) -> dict[tuple[str, str], float]:
return coefficients
def validate(self) -> None:
- """Validate all buses (config + DataArray checks).
-
- Performs both:
- - Config validation via Bus.validate_config()
- - DataArray validation (post-transformation checks)
- """
+ """Validate all buses (config + DataArray checks)."""
for bus in self._buses:
- bus.validate_config()
+ # Config validation (moved from Bus.validate_config)
+ if len(bus.inputs) == 0 and len(bus.outputs) == 0:
+ raise ValueError(f'Bus "{bus.id}" has no Flows connected to it. Please remove it from the FlowSystem')
+
# Warning: imbalance_penalty == 0 (DataArray check)
if bus.imbalance_penalty_per_flow_hour is not None:
- zero_penalty = np.all(np.equal(bus.imbalance_penalty_per_flow_hour, 0))
+ aligned = self.aligned_imbalance_penalty(bus)
+ zero_penalty = np.all(np.equal(aligned, 0))
if zero_penalty:
logger.warning(
f'In Bus {bus.id}, the imbalance_penalty_per_flow_hour is 0. Use "None" or a value > 0.'
@@ -1860,12 +2112,16 @@ def __init__(
flows_data: FlowsData,
effect_ids: list[str],
timestep_duration: xr.DataArray | float,
+ coords: dict[str, pd.Index] | None = None,
+ normalize_effects: Any = None,
):
self._components_with_status = components_with_status
self._all_components = all_components
self._flows_data = flows_data
self._effect_ids = effect_ids
self._timestep_duration = timestep_duration
+ self._coords = coords
+ self._normalize_effects = normalize_effects
self.elements: IdList = element_id_list(components_with_status)
@property
@@ -1955,6 +2211,8 @@ def status_data(self) -> StatusData:
effect_ids=self._effect_ids,
timestep_duration=self._timestep_duration,
previous_states=self.previous_status_dict,
+ coords=self._coords,
+ normalize_effects=self._normalize_effects,
)
@cached_property
@@ -1993,17 +2251,35 @@ def validate(self) -> None:
from .components import LinearConverter, Storage, Transmission
for component in self._all_components:
- if not isinstance(component, (Storage, LinearConverter, Transmission)):
- component.validate_config()
+ if isinstance(component, (Storage, LinearConverter, Transmission)):
+ continue
+
+ component._check_unique_flow_ids()
+
+ if component.status_parameters is not None:
+ flows_without_size = [flow.flow_id for flow in component.flows.values() if flow.size is None]
+ if flows_without_size:
+ raise PlausibilityError(
+ f'Component "{component.id}" has status_parameters, but the following flows '
+ f'have no size: {flows_without_size}. All flows need explicit sizes when the '
+ f'component uses status_parameters (required for big-M constraints).'
+ )
class ConvertersData:
"""Batched data container for converters."""
- def __init__(self, converters: list[LinearConverter], flow_ids: list[str], timesteps: pd.DatetimeIndex):
+ def __init__(
+ self,
+ converters: list[LinearConverter],
+ flow_ids: list[str],
+ timesteps: pd.DatetimeIndex,
+ coords: dict[str, pd.Index],
+ ):
self._converters = converters
self._flow_ids = flow_ids
self._timesteps = timesteps
+ self._coords = coords
self.elements: IdList = element_id_list(converters)
@property
@@ -2024,6 +2300,22 @@ def with_piecewise(self) -> list[LinearConverter]:
"""Converters with piecewise_conversion."""
return [c for c in self._converters if c.piecewise_conversion]
+ def aligned_conversion_factors(self, converter: LinearConverter) -> list[dict[str, xr.DataArray]]:
+ """Align all conversion factors for a converter to model coords."""
+ result = []
+ for idx, conv_factor in enumerate(converter.conversion_factors):
+ aligned_dict = {}
+ for flow_label, values in conv_factor.items():
+ flow_id = converter.flows[flow_label].id
+ aligned = align_to_coords(values, self._coords, name=f'{flow_id}|conversion_factor{idx}')
+ if aligned is None:
+ raise PlausibilityError(
+ f'{converter.id}: conversion factor for flow "{flow_label}" must not be None'
+ )
+ aligned_dict[flow_label] = aligned
+ result.append(aligned_dict)
+ return result
+
# === Linear Conversion Properties ===
@cached_property
@@ -2078,7 +2370,8 @@ def signed_coefficients(self) -> dict[tuple[str, str], float | xr.DataArray]:
flow_signs = {f.id: 1.0 for f in conv.inputs.values() if f.id in all_flow_ids_set}
flow_signs.update({f.id: -1.0 for f in conv.outputs.values() if f.id in all_flow_ids_set})
- for eq_idx, conv_factors in enumerate(conv.conversion_factors):
+ aligned_factors = self.aligned_conversion_factors(conv)
+ for eq_idx, conv_factors in enumerate(aligned_factors):
for flow_label, coeff in conv_factors.items():
flow_id = flow_map.get(flow_label)
sign = flow_signs.get(flow_id, 0.0) if flow_id else 0.0
@@ -2227,17 +2520,58 @@ def piecewise_breakpoints(self) -> xr.Dataset | None:
return xr.Dataset({'starts': starts_combined, 'ends': ends_combined})
def validate(self) -> None:
- """Validate all converters (config checks, no DataArray operations needed)."""
- for converter in self._converters:
- converter.validate_config()
+ """Validate all converters."""
+ for conv in self._converters:
+ # Checks from LinearConverter.validate_config
+ conv._check_unique_flow_ids()
+ # Validate flow sizes for status_parameters
+ if conv.status_parameters:
+ for flow in conv.flows.values():
+ if flow.size is None:
+ raise PlausibilityError(
+ f'"{conv.id}": Flow "{flow.flow_id}" must have a defined size '
+ f'because {conv.id} has status_parameters. '
+ f'A size is required for big-M constraints.'
+ )
+
+ if not conv.conversion_factors and not conv.piecewise_conversion:
+ raise PlausibilityError('Either conversion_factors or piecewise_conversion must be defined!')
+ if conv.conversion_factors and conv.piecewise_conversion:
+ raise PlausibilityError(
+ 'Only one of conversion_factors or piecewise_conversion can be defined, not both!'
+ )
+
+ if conv.conversion_factors:
+ if conv.degrees_of_freedom <= 0:
+ raise PlausibilityError(
+                        f'Too many conversion_factors specified. Use fewer conversion_factors '
+                        f'than inputs + outputs! With {len(conv.inputs + conv.outputs)} inputs and outputs, '
+                        f'use at most {len(conv.inputs + conv.outputs) - 1} conversion_factors!'
+ )
+
+ for conversion_factor in conv.conversion_factors:
+ for flow in conversion_factor:
+ if flow not in conv.flows:
+ raise PlausibilityError(
+ f'{conv.id}: Flow {flow} in conversion_factors is not in inputs/outputs'
+ )
+ if conv.piecewise_conversion:
+ for flow in conv.flows.values():
+ if isinstance(flow.size, InvestParameters) and flow.size.fixed_size is None:
+ logger.warning(
+ f'Using a Flow with variable size (InvestParameters without fixed_size) '
+ f'and a piecewise_conversion in {conv.id} is uncommon. Please verify intent '
+ f'({flow.id}).'
+ )
class TransmissionsData:
"""Batched data container for transmissions."""
- def __init__(self, transmissions: list[Transmission], flow_ids: list[str]):
+ def __init__(self, transmissions: list[Transmission], flow_ids: list[str], coords: dict[str, pd.Index]):
self._transmissions = transmissions
self._flow_ids = flow_ids
+ self._coords = coords
self.elements: IdList = element_id_list(transmissions)
@property
@@ -2325,6 +2659,11 @@ def balanced_in2_mask(self) -> xr.DataArray:
# === Loss Properties ===
+ def _align(self, transmission_id: str, attr: str) -> xr.DataArray | None:
+ """Align a single transmission attribute value to model coords."""
+ raw = getattr(self.elements[transmission_id], attr)
+ return align_to_coords(raw, self._coords, name=f'{transmission_id}|{attr}')
+
@cached_property
def relative_losses(self) -> xr.DataArray:
"""(transmission, [time, ...]) relative losses. 0 if None."""
@@ -2332,8 +2671,8 @@ def relative_losses(self) -> xr.DataArray:
return xr.DataArray()
values = []
for t in self._transmissions:
- loss = t.relative_losses if t.relative_losses is not None else 0
- values.append(loss)
+ aligned = self._align(t.id, 'relative_losses')
+ values.append(aligned if aligned is not None else 0)
return stack_along_dim(values, self.dim_name, self.element_ids)
@cached_property
@@ -2343,8 +2682,8 @@ def absolute_losses(self) -> xr.DataArray:
return xr.DataArray()
values = []
for t in self._transmissions:
- loss = t.absolute_losses if t.absolute_losses is not None else 0
- values.append(loss)
+ aligned = self._align(t.id, 'absolute_losses')
+ values.append(aligned if aligned is not None else 0)
return stack_along_dim(values, self.dim_name, self.element_ids)
@cached_property
@@ -2367,19 +2706,45 @@ def transmissions_with_abs_losses(self) -> list[str]:
def validate(self) -> None:
"""Validate all transmissions (config + DataArray checks).
- Performs both:
- - Config validation via Transmission.validate_config()
- - DataArray validation (post-transformation checks)
-
Raises:
PlausibilityError: If any validation check fails.
"""
- for transmission in self._transmissions:
- transmission.validate_config()
-
errors: list[str] = []
for transmission in self._transmissions:
+ # Config checks (moved from Transmission.validate_config / Component.validate_config)
+ transmission._check_unique_flow_ids()
+ if transmission.status_parameters:
+ for flow in transmission.flows.values():
+ if flow.size is None:
+ raise PlausibilityError(
+ f'"{transmission.id}": Flow "{flow.flow_id}" must have a defined size '
+ f'because {transmission.id} has status_parameters. '
+ f'A size is required for big-M constraints.'
+ )
+
+ # Bus consistency checks
+ if transmission.in2 is not None:
+ if transmission.in2.bus != transmission.out1.bus:
+ raise ValueError(
+ f'Output 1 and Input 2 do not start/end at the same Bus: '
+ f'{transmission.out1.bus=}, {transmission.in2.bus=}'
+ )
+ if transmission.out2 is not None:
+ if transmission.out2.bus != transmission.in1.bus:
+ raise ValueError(
+ f'Input 1 and Output 2 do not start/end at the same Bus: '
+ f'{transmission.in1.bus=}, {transmission.out2.bus=}'
+ )
+
+ # Balanced requires InvestParameters on both in-Flows
+ if transmission.balanced:
+ if transmission.in2 is None:
+ raise ValueError('Balanced Transmission needs InvestParameters in both in-Flows')
+ if not isinstance(transmission.in1.size, InvestParameters) or not isinstance(
+ transmission.in2.size, InvestParameters
+ ):
+ raise ValueError('Balanced Transmission needs InvestParameters in both in-Flows')
tid = transmission.id
# Balanced size compatibility (DataArray check)
@@ -2389,7 +2754,7 @@ def validate(self) -> None:
in2_min = transmission.in2.size.minimum_or_fixed_size
in2_max = transmission.in2.size.maximum_or_fixed_size
- if (in1_min > in2_max).any() or (in1_max < in2_min).any():
+ if np.any(in1_min > in2_max) or np.any(in1_max < in2_min):
errors.append(
f'Balanced Transmission {tid} needs compatible minimum and maximum sizes. '
f'Got: in1.size.minimum={in1_min}, in1.size.maximum={in1_max} and '
@@ -2447,7 +2812,12 @@ def storages(self) -> StoragesData:
]
effect_ids = list(self._fs.effects.keys())
self._storages = StoragesData(
- basic_storages, 'storage', effect_ids, timesteps_extra=self._fs.timesteps_extra
+ basic_storages,
+ 'storage',
+ effect_ids,
+ timesteps_extra=self._fs.timesteps_extra,
+ coords=self._fs.indexes,
+ normalize_effects=self._fs.effects.create_effect_values_dict,
)
return self._storages
@@ -2466,21 +2836,29 @@ def intercluster_storages(self) -> StoragesData:
and c.cluster_mode in ('intercluster', 'intercluster_cyclic')
]
effect_ids = list(self._fs.effects.keys())
- self._intercluster_storages = StoragesData(intercluster, 'intercluster_storage', effect_ids)
+ self._intercluster_storages = StoragesData(
+ intercluster,
+ 'intercluster_storage',
+ effect_ids,
+ coords=self._fs.indexes,
+ normalize_effects=self._fs.effects.create_effect_values_dict,
+ )
return self._intercluster_storages
@property
def buses(self) -> BusesData:
"""Get or create BusesData for all buses."""
if self._buses is None:
- self._buses = BusesData(list(self._fs.buses.values()))
+ self._buses = BusesData(list(self._fs.buses.values()), coords=self._fs.indexes)
return self._buses
@property
def effects(self) -> EffectsData:
"""Get or create EffectsData for all effects."""
if self._effects is None:
- self._effects = EffectsData(self._fs.effects)
+ self._effects = EffectsData(
+ self._fs.effects, coords=self._fs.indexes, default_period_weights=self._fs.period_weights
+ )
return self._effects
@property
@@ -2495,6 +2873,8 @@ def components(self) -> ComponentsData:
flows_data=self.flows,
effect_ids=list(self._fs.effects.keys()),
timestep_duration=self._fs.timestep_duration,
+ coords=self._fs.indexes,
+ normalize_effects=self._fs.effects.create_effect_values_dict,
)
return self._components
@@ -2505,7 +2885,12 @@ def converters(self) -> ConvertersData:
from .components import LinearConverter
converters = [c for c in self._fs.components.values() if isinstance(c, LinearConverter)]
- self._converters = ConvertersData(converters, flow_ids=self.flows.element_ids, timesteps=self._fs.timesteps)
+ self._converters = ConvertersData(
+ converters,
+ flow_ids=self.flows.element_ids,
+ timesteps=self._fs.timesteps,
+ coords=self._fs.indexes,
+ )
return self._converters
@property
@@ -2515,7 +2900,11 @@ def transmissions(self) -> TransmissionsData:
from .components import Transmission
transmissions = [c for c in self._fs.components.values() if isinstance(c, Transmission)]
- self._transmissions = TransmissionsData(transmissions, flow_ids=self.flows.element_ids)
+ self._transmissions = TransmissionsData(
+ transmissions,
+ flow_ids=self.flows.element_ids,
+ coords=self._fs.indexes,
+ )
return self._transmissions
def _reset(self) -> None:
diff --git a/flixopt/carrier.py b/flixopt/carrier.py
index ca4ac0de0..13f4edf0f 100644
--- a/flixopt/carrier.py
+++ b/flixopt/carrier.py
@@ -8,19 +8,17 @@
from __future__ import annotations
from .id_list import IdList
-from .structure import Interface, register_class_for_io
+from .structure import register_class_for_io
@register_class_for_io
-class Carrier(Interface):
+class Carrier:
"""Definition of an energy or material carrier type.
Carriers represent the type of energy or material flowing through a Bus.
They provide consistent color, unit, and description across all visualizations
and can be shared between multiple buses of the same type.
- Inherits from Interface to provide serialization capabilities.
-
Args:
name: Identifier for the carrier (e.g., 'electricity', 'heat', 'gas').
color: Hex color string for visualizations (e.g., '#FFD700').
@@ -94,16 +92,6 @@ def __init__(
self.unit = unit
self.description = description
- def transform_data(self, name_prefix: str = '') -> None:
- """Transform data to match FlowSystem dimensions.
-
- Carriers don't have time-series data, so this is a no-op.
-
- Args:
- name_prefix: Ignored for Carrier.
- """
- pass # Carriers have no data to transform
-
@property
def label(self) -> str:
"""Label for container keying (alias for name)."""
diff --git a/flixopt/components.py b/flixopt/components.py
index 9837bc7bf..a181e5b82 100644
--- a/flixopt/components.py
+++ b/flixopt/components.py
@@ -7,16 +7,16 @@
import functools
import logging
import warnings
-from typing import TYPE_CHECKING, Literal
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, ClassVar, Literal
import numpy as np
import xarray as xr
from . import io as fx_io
-from .core import PlausibilityError
from .elements import Component, Flow
from .features import MaskHelpers, stack_along_dim
-from .interface import InvestParameters, PiecewiseConversion, StatusParameters
+from .interface import InvestParameters, PiecewiseConversion
from .modeling import _scalar_safe_reduce
from .structure import (
FlowSystemModel,
@@ -37,6 +37,7 @@
@register_class_for_io
+@dataclass(eq=False, repr=False)
class LinearConverter(Component):
"""
Converts input-Flows into output-Flows via linear conversion factors.
@@ -169,88 +170,10 @@ class LinearConverter(Component):
"""
- def __init__(
- self,
- id: str | None = None,
- inputs: list[Flow] | None = None,
- outputs: list[Flow] | None = None,
- status_parameters: StatusParameters | None = None,
- conversion_factors: list[dict[str, Numeric_TPS]] | None = None,
- piecewise_conversion: PiecewiseConversion | None = None,
- meta_data: dict | None = None,
- color: str | None = None,
- **kwargs,
- ):
- super().__init__(id, inputs, outputs, status_parameters, meta_data=meta_data, color=color, **kwargs)
- self.conversion_factors = conversion_factors or []
- self.piecewise_conversion = piecewise_conversion
-
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to parent Component and piecewise_conversion."""
- super().link_to_flow_system(flow_system, prefix)
- if self.piecewise_conversion is not None:
- self.piecewise_conversion.link_to_flow_system(flow_system, self._sub_prefix('PiecewiseConversion'))
-
- def validate_config(self) -> None:
- """Validate configuration consistency.
+ _io_exclude: ClassVar[set[str]] = {'prevent_simultaneous_flows'}
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- super().validate_config()
- if not self.conversion_factors and not self.piecewise_conversion:
- raise PlausibilityError('Either conversion_factors or piecewise_conversion must be defined!')
- if self.conversion_factors and self.piecewise_conversion:
- raise PlausibilityError('Only one of conversion_factors or piecewise_conversion can be defined, not both!')
-
- if self.conversion_factors:
- if self.degrees_of_freedom <= 0:
- raise PlausibilityError(
- f'Too Many conversion_factors_specified. Care that you use less conversion_factors '
- f'then inputs + outputs!! With {len(self.inputs + self.outputs)} inputs and outputs, '
- f'use not more than {len(self.inputs + self.outputs) - 1} conversion_factors!'
- )
-
- for conversion_factor in self.conversion_factors:
- for flow in conversion_factor:
- if flow not in self.flows:
- raise PlausibilityError(
- f'{self.id}: Flow {flow} in conversion_factors is not in inputs/outputs'
- )
- if self.piecewise_conversion:
- for flow in self.flows.values():
- if isinstance(flow.size, InvestParameters) and flow.size.fixed_size is None:
- logger.warning(
- f'Using a Flow with variable size (InvestParameters without fixed_size) '
- f'and a piecewise_conversion in {self.id} is uncommon. Please verify intent '
- f'({flow.id}).'
- )
-
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config()."""
- self.validate_config()
-
- def transform_data(self) -> None:
- super().transform_data()
- if self.conversion_factors:
- self.conversion_factors = self._transform_conversion_factors()
- if self.piecewise_conversion:
- self.piecewise_conversion.has_time_dim = True
- self.piecewise_conversion.transform_data()
-
- def _transform_conversion_factors(self) -> list[dict[str, xr.DataArray]]:
- """Converts all conversion factors to internal datatypes"""
- list_of_conversion_factors = []
- for idx, conversion_factor in enumerate(self.conversion_factors):
- transformed_dict = {}
- for flow, values in conversion_factor.items():
- # TODO: Might be better to use the label of the component instead of the flow
- ts = self._fit_coords(f'{self.flows[flow].id}|conversion_factor{idx}', values)
- if ts is None:
- raise PlausibilityError(f'{self.id}: conversion factor for flow "{flow}" must not be None')
- transformed_dict[flow] = ts
- list_of_conversion_factors.append(transformed_dict)
- return list_of_conversion_factors
+ conversion_factors: list[dict[str, Numeric_TPS]] = field(default_factory=list)
+ piecewise_conversion: PiecewiseConversion | None = None
@property
def degrees_of_freedom(self):
@@ -407,11 +330,13 @@ class Storage(Component):
With flow rates in m3/h, the charge state is therefore in m3.
"""
+ _io_exclude: ClassVar[set[str]] = {'inputs', 'outputs', 'prevent_simultaneous_flows'}
+
def __init__(
self,
- id: str | None = None,
- charging: Flow | None = None,
- discharging: Flow | None = None,
+ id: str,
+ charging: Flow,
+ discharging: Flow,
capacity_in_flow_hours: Numeric_PS | InvestParameters | None = None,
relative_minimum_charge_state: Numeric_TPS = 0,
relative_maximum_charge_state: Numeric_TPS = 1,
@@ -426,128 +351,41 @@ def __init__(
prevent_simultaneous_charge_and_discharge: bool = True,
balanced: bool = False,
cluster_mode: Literal['independent', 'cyclic', 'intercluster', 'intercluster_cyclic'] = 'intercluster_cyclic',
- meta_data: dict | None = None,
- color: str | None = None,
**kwargs,
):
- # TODO: fixed_relative_chargeState implementieren
- super().__init__(
- id,
- inputs=[charging],
- outputs=[discharging],
- prevent_simultaneous_flows=[charging, discharging] if prevent_simultaneous_charge_and_discharge else None,
- meta_data=meta_data,
- color=color,
- **kwargs,
- )
-
+ # Store all params as attributes
self.charging = charging
self.discharging = discharging
self.capacity_in_flow_hours = capacity_in_flow_hours
- self.relative_minimum_charge_state: Numeric_TPS = relative_minimum_charge_state
- self.relative_maximum_charge_state: Numeric_TPS = relative_maximum_charge_state
-
- self.relative_minimum_final_charge_state = relative_minimum_final_charge_state
- self.relative_maximum_final_charge_state = relative_maximum_final_charge_state
-
+ self.relative_minimum_charge_state = relative_minimum_charge_state
+ self.relative_maximum_charge_state = relative_maximum_charge_state
self.initial_charge_state = initial_charge_state
self.minimal_final_charge_state = minimal_final_charge_state
self.maximal_final_charge_state = maximal_final_charge_state
-
- self.eta_charge: Numeric_TPS = eta_charge
- self.eta_discharge: Numeric_TPS = eta_discharge
- self.relative_loss_per_hour: Numeric_TPS = relative_loss_per_hour
+ self.relative_minimum_final_charge_state = relative_minimum_final_charge_state
+ self.relative_maximum_final_charge_state = relative_maximum_final_charge_state
+ self.eta_charge = eta_charge
+ self.eta_discharge = eta_discharge
+ self.relative_loss_per_hour = relative_loss_per_hour
self.prevent_simultaneous_charge_and_discharge = prevent_simultaneous_charge_and_discharge
self.balanced = balanced
self.cluster_mode = cluster_mode
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to parent Component and capacity_in_flow_hours if it's InvestParameters."""
- super().link_to_flow_system(flow_system, prefix)
- if isinstance(self.capacity_in_flow_hours, InvestParameters):
- self.capacity_in_flow_hours.link_to_flow_system(flow_system, self._sub_prefix('InvestParameters'))
+ # Default flow_ids to 'charging'/'discharging' when not explicitly set
+ self.charging.flow_id = self.charging.flow_id or 'charging'
+ self.discharging.flow_id = self.discharging.flow_id or 'discharging'
- def transform_data(self) -> None:
- super().transform_data()
- self.relative_minimum_charge_state = self._fit_coords(
- f'{self.prefix}|relative_minimum_charge_state', self.relative_minimum_charge_state
- )
- self.relative_maximum_charge_state = self._fit_coords(
- f'{self.prefix}|relative_maximum_charge_state', self.relative_maximum_charge_state
+ # Build Component fields from Storage-specific fields
+ prevent_simultaneous_flows = (
+ [self.charging, self.discharging] if prevent_simultaneous_charge_and_discharge else []
)
- self.eta_charge = self._fit_coords(f'{self.prefix}|eta_charge', self.eta_charge)
- self.eta_discharge = self._fit_coords(f'{self.prefix}|eta_discharge', self.eta_discharge)
- self.relative_loss_per_hour = self._fit_coords(
- f'{self.prefix}|relative_loss_per_hour', self.relative_loss_per_hour
- )
- if self.initial_charge_state is not None and not isinstance(self.initial_charge_state, str):
- self.initial_charge_state = self._fit_coords(
- f'{self.prefix}|initial_charge_state', self.initial_charge_state, dims=['period', 'scenario']
- )
- self.minimal_final_charge_state = self._fit_coords(
- f'{self.prefix}|minimal_final_charge_state', self.minimal_final_charge_state, dims=['period', 'scenario']
- )
- self.maximal_final_charge_state = self._fit_coords(
- f'{self.prefix}|maximal_final_charge_state', self.maximal_final_charge_state, dims=['period', 'scenario']
- )
- self.relative_minimum_final_charge_state = self._fit_coords(
- f'{self.prefix}|relative_minimum_final_charge_state',
- self.relative_minimum_final_charge_state,
- dims=['period', 'scenario'],
- )
- self.relative_maximum_final_charge_state = self._fit_coords(
- f'{self.prefix}|relative_maximum_final_charge_state',
- self.relative_maximum_final_charge_state,
- dims=['period', 'scenario'],
+ super().__init__(
+ id=id,
+ inputs=[self.charging],
+ outputs=[self.discharging],
+ prevent_simultaneous_flows=prevent_simultaneous_flows,
+ **kwargs,
)
- if isinstance(self.capacity_in_flow_hours, InvestParameters):
- self.capacity_in_flow_hours.transform_data()
- else:
- self.capacity_in_flow_hours = self._fit_coords(
- f'{self.prefix}|capacity_in_flow_hours', self.capacity_in_flow_hours, dims=['period', 'scenario']
- )
-
- def validate_config(self) -> None:
- """Validate configuration consistency.
-
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- super().validate_config()
-
- # Validate string values for initial_charge_state
- if isinstance(self.initial_charge_state, str):
- if self.initial_charge_state != 'equals_final':
- raise PlausibilityError(f'initial_charge_state has undefined value: {self.initial_charge_state}')
-
- # Capacity is required for final charge state constraints (simple None checks)
- if self.capacity_in_flow_hours is None:
- if self.relative_minimum_final_charge_state is not None:
- raise PlausibilityError(
- f'Storage "{self.id}" has relative_minimum_final_charge_state but no capacity_in_flow_hours. '
- f'A capacity is required for relative final charge state constraints.'
- )
- if self.relative_maximum_final_charge_state is not None:
- raise PlausibilityError(
- f'Storage "{self.id}" has relative_maximum_final_charge_state but no capacity_in_flow_hours. '
- f'A capacity is required for relative final charge state constraints.'
- )
-
- # Balanced requires InvestParameters on charging/discharging flows
- if self.balanced:
- if not isinstance(self.charging.size, InvestParameters) or not isinstance(
- self.discharging.size, InvestParameters
- ):
- raise PlausibilityError(
- f'Balancing charging and discharging Flows in {self.id} is only possible with Investments.'
- )
-
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config().
-
- DataArray-based checks moved to StoragesData.validate().
- """
- self.validate_config()
def __repr__(self) -> str:
"""Return string representation."""
@@ -670,75 +508,42 @@ class Transmission(Component):
"""
+ _io_exclude: ClassVar[set[str]] = {'inputs', 'outputs', 'prevent_simultaneous_flows'}
+
def __init__(
self,
- id: str | None = None,
- in1: Flow | None = None,
- out1: Flow | None = None,
+ id: str,
+ in1: Flow,
+ out1: Flow,
in2: Flow | None = None,
out2: Flow | None = None,
relative_losses: Numeric_TPS | None = None,
absolute_losses: Numeric_TPS | None = None,
- status_parameters: StatusParameters | None = None,
prevent_simultaneous_flows_in_both_directions: bool = True,
balanced: bool = False,
- meta_data: dict | None = None,
- color: str | None = None,
**kwargs,
):
- super().__init__(
- id,
- inputs=[flow for flow in (in1, in2) if flow is not None],
- outputs=[flow for flow in (out1, out2) if flow is not None],
- status_parameters=status_parameters,
- prevent_simultaneous_flows=None
- if in2 is None or prevent_simultaneous_flows_in_both_directions is False
- else [in1, in2],
- meta_data=meta_data,
- color=color,
- **kwargs,
- )
self.in1 = in1
self.out1 = out1
self.in2 = in2
self.out2 = out2
-
self.relative_losses = relative_losses
self.absolute_losses = absolute_losses
+ self.prevent_simultaneous_flows_in_both_directions = prevent_simultaneous_flows_in_both_directions
self.balanced = balanced
- def validate_config(self) -> None:
- """Validate configuration consistency.
-
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- super().validate_config()
- # Check buses consistency
- if self.in2 is not None:
- if self.in2.bus != self.out1.bus:
- raise ValueError(
- f'Output 1 and Input 2 do not start/end at the same Bus: {self.out1.bus=}, {self.in2.bus=}'
- )
- if self.out2 is not None:
- if self.out2.bus != self.in1.bus:
- raise ValueError(
- f'Input 1 and Output 2 do not start/end at the same Bus: {self.in1.bus=}, {self.out2.bus=}'
- )
-
- # Balanced requires InvestParameters on both in-Flows
- if self.balanced:
- if self.in2 is None:
- raise ValueError('Balanced Transmission needs InvestParameters in both in-Flows')
- if not isinstance(self.in1.size, InvestParameters) or not isinstance(self.in2.size, InvestParameters):
- raise ValueError('Balanced Transmission needs InvestParameters in both in-Flows')
-
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config().
-
- DataArray-based checks moved to TransmissionsData.validate().
- """
- self.validate_config()
+ inputs = [f for f in (self.in1, self.in2) if f is not None]
+ outputs = [f for f in (self.out1, self.out2) if f is not None]
+ prevent_simultaneous_flows = (
+ [self.in1, self.in2] if self.in2 is not None and prevent_simultaneous_flows_in_both_directions else []
+ )
+ super().__init__(
+ id=id,
+ inputs=inputs,
+ outputs=outputs,
+ prevent_simultaneous_flows=prevent_simultaneous_flows,
+ **kwargs,
+ )
def _propagate_status_parameters(self) -> None:
super()._propagate_status_parameters()
@@ -754,7 +559,6 @@ def _propagate_status_parameters(self) -> None:
for flow in input_flows:
if flow.status_parameters is None:
flow.status_parameters = StatusParameters()
- flow.status_parameters.link_to_flow_system(self._flow_system, f'{flow.id}|status_parameters')
rel_min = flow.relative_minimum
needs_update = (
rel_min is None
@@ -764,11 +568,6 @@ def _propagate_status_parameters(self) -> None:
if needs_update:
flow.relative_minimum = CONFIG.Modeling.epsilon
- def transform_data(self) -> None:
- super().transform_data()
- self.relative_losses = self._fit_coords(f'{self.prefix}|relative_losses', self.relative_losses)
- self.absolute_losses = self._fit_coords(f'{self.prefix}|absolute_losses', self.absolute_losses)
-
class StoragesModel(TypeModel):
"""Type-level model for ALL basic (non-intercluster) storages in a FlowSystem.
@@ -808,10 +607,6 @@ def __init__(
super().__init__(model, data)
self._flows_model = flows_model
- # Set reference on each storage element
- for storage in self.elements.values():
- storage._storages_model = self
-
self.create_variables()
self.create_constraints()
self.create_investment_model()
@@ -1109,13 +904,15 @@ def _add_batched_initial_final_constraints(self, charge_state) -> None:
if isinstance(storage.initial_charge_state, str): # 'equals_final'
storages_equals_final.append(storage)
else:
- storages_numeric_initial.append((storage, storage.initial_charge_state))
+ storages_numeric_initial.append((storage, self.data.aligned_initial_charge_state(storage)))
- if storage.maximal_final_charge_state is not None:
- storages_max_final.append((storage, storage.maximal_final_charge_state))
+ aligned_max_final = self.data.aligned_maximal_final_charge_state(storage)
+ if aligned_max_final is not None:
+ storages_max_final.append((storage, aligned_max_final))
- if storage.minimal_final_charge_state is not None:
- storages_min_final.append((storage, storage.minimal_final_charge_state))
+ aligned_min_final = self.data.aligned_minimal_final_charge_state(storage)
+ if aligned_min_final is not None:
+ storages_min_final.append((storage, aligned_min_final))
dim = self.dim_name
@@ -1328,14 +1125,16 @@ def _add_initial_final_constraints_legacy(self, storage, cs) -> None:
name=f'storage|{storage.id}|initial_charge_state',
)
else:
+ aligned_initial = self.data.aligned_initial_charge_state(storage)
self.model.add_constraints(
- cs.isel(time=0) == storage.initial_charge_state,
+ cs.isel(time=0) == aligned_initial,
name=f'storage|{storage.id}|initial_charge_state',
)
- if storage.maximal_final_charge_state is not None:
+ aligned_min_final = self.data.aligned_minimal_final_charge_state(storage)
+ if aligned_min_final is not None:
self.model.add_constraints(
- cs.isel(time=-1) >= storage.minimal_final_charge_state,
+ cs.isel(time=-1) >= aligned_min_final,
name=f'storage|{storage.id}|final_charge_min',
)
@@ -1717,7 +1516,7 @@ def _add_cyclic_or_initial_constraints(self) -> None:
cyclic_ids.append(storage.id)
else:
initial_fixed_ids.append(storage.id)
- initial_values.append(initial)
+ initial_values.append(self.data.aligned_initial_charge_state(storage))
# Add cyclic constraints
if cyclic_ids:
@@ -1948,6 +1747,7 @@ def create_effect_shares(self) -> None:
@register_class_for_io
+@dataclass(eq=False, repr=False)
class SourceAndSink(Component):
"""
A SourceAndSink combines both supply and demand capabilities in a single component.
@@ -2033,32 +1833,18 @@ class SourceAndSink(Component):
The deprecated `sink` and `source` kwargs are accepted for compatibility but will be removed in future releases.
"""
- def __init__(
- self,
- id: str | None = None,
- inputs: list[Flow] | None = None,
- outputs: list[Flow] | None = None,
- prevent_simultaneous_flow_rates: bool = True,
- meta_data: dict | None = None,
- color: str | None = None,
- **kwargs,
- ):
- # Convert dict to list for deserialization compatibility (IdLists serialize as dicts)
- _inputs_list = list(inputs.values()) if isinstance(inputs, dict) else (inputs or [])
- _outputs_list = list(outputs.values()) if isinstance(outputs, dict) else (outputs or [])
- super().__init__(
- id,
- inputs=_inputs_list,
- outputs=_outputs_list,
- prevent_simultaneous_flows=_inputs_list + _outputs_list if prevent_simultaneous_flow_rates else None,
- meta_data=meta_data,
- color=color,
- **kwargs,
- )
- self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates
+ _io_exclude: ClassVar[set[str]] = {'prevent_simultaneous_flows'}
+
+ prevent_simultaneous_flow_rates: bool = True
+
+ def __post_init__(self):
+ if self.prevent_simultaneous_flow_rates:
+ self.prevent_simultaneous_flows = (self.inputs or []) + (self.outputs or [])
+ super().__post_init__()
@register_class_for_io
+@dataclass(eq=False, repr=False)
class Source(Component):
"""
A Source generates or provides energy or material flows into the system.
@@ -2134,27 +1920,18 @@ class Source(Component):
The deprecated `source` kwarg is accepted for compatibility but will be removed in future releases.
"""
- def __init__(
- self,
- id: str | None = None,
- outputs: list[Flow] | None = None,
- meta_data: dict | None = None,
- prevent_simultaneous_flow_rates: bool = False,
- color: str | None = None,
- **kwargs,
- ):
- self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates
- super().__init__(
- id,
- outputs=outputs,
- meta_data=meta_data,
- prevent_simultaneous_flows=outputs if prevent_simultaneous_flow_rates else None,
- color=color,
- **kwargs,
- )
+ _io_exclude: ClassVar[set[str]] = {'inputs', 'prevent_simultaneous_flows'}
+
+ prevent_simultaneous_flow_rates: bool = False
+
+ def __post_init__(self):
+ if self.prevent_simultaneous_flow_rates:
+ self.prevent_simultaneous_flows = self.outputs or []
+ super().__post_init__()
@register_class_for_io
+@dataclass(eq=False, repr=False)
class Sink(Component):
"""
A Sink consumes energy or material flows from the system.
@@ -2231,32 +2008,11 @@ class Sink(Component):
The deprecated `sink` kwarg is accepted for compatibility but will be removed in future releases.
"""
- def __init__(
- self,
- id: str | None = None,
- inputs: list[Flow] | None = None,
- meta_data: dict | None = None,
- prevent_simultaneous_flow_rates: bool = False,
- color: str | None = None,
- **kwargs,
- ):
- """Initialize a Sink (consumes flow from the system).
+ _io_exclude: ClassVar[set[str]] = {'outputs', 'prevent_simultaneous_flows'}
- Args:
- id: Unique element id.
- inputs: Input flows for the sink.
- meta_data: Arbitrary metadata attached to the element.
- prevent_simultaneous_flow_rates: If True, prevents simultaneous nonzero flow rates
- across the element's inputs by wiring that restriction into the base Component setup.
- color: Optional color for visualizations.
- """
+ prevent_simultaneous_flow_rates: bool = False
- self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates
- super().__init__(
- id,
- inputs=inputs,
- meta_data=meta_data,
- prevent_simultaneous_flows=inputs if prevent_simultaneous_flow_rates else None,
- color=color,
- **kwargs,
- )
+ def __post_init__(self):
+ if self.prevent_simultaneous_flow_rates:
+ self.prevent_simultaneous_flows = self.inputs or []
+ super().__post_init__()
diff --git a/flixopt/core.py b/flixopt/core.py
index aca380f5e..6fd4af3d5 100644
--- a/flixopt/core.py
+++ b/flixopt/core.py
@@ -474,6 +474,16 @@ def to_dataarray(
# Scalar values - create scalar DataArray
intermediate = xr.DataArray(data.item() if hasattr(data, 'item') else data)
+ elif isinstance(data, list):
+ # Plain Python list (e.g. from IO roundtrip) — convert to ndarray
+ data = np.asarray(data)
+ if data.ndim == 0:
+ intermediate = xr.DataArray(data.item())
+ elif data.ndim == 1:
+ intermediate = cls._match_1d_array_by_length(data, validated_coords, target_dims)
+ else:
+ intermediate = cls._match_multidim_array_by_shape_permutation(data, validated_coords, target_dims)
+
elif isinstance(data, np.ndarray):
# NumPy arrays - dispatch based on dimensionality
if data.ndim == 0:
@@ -522,6 +532,7 @@ def to_dataarray(
'np.integer',
'np.floating',
'np.bool_',
+ 'list',
'np.ndarray',
'pd.Series',
'pd.DataFrame',
@@ -588,6 +599,104 @@ def _validate_and_prepare_target_coordinates(
return validated_coords, tuple(dimension_names)
+def align_to_coords(
+ data: NumericOrBool | None,
+ coords: dict[str, pd.Index],
+ name: str = '',
+ dims: list[str] | None = None,
+) -> xr.DataArray | None:
+ """Convert any raw input to a DataArray aligned with model coordinates.
+
+ Standalone replacement for the ``FlowSystem.fit_to_model_coords`` →
+ ``DataConverter.to_dataarray`` chain. Handles every type users may pass:
+
+ * **scalar** (int / float / bool / np.number) → 0-d DataArray
+ * **1-D array** (np.ndarray / list) → matched to a dim by length
+ * **pd.Series** → matched by index
+ * **TimeSeriesData** → aligned via its own ``fit_to_coords``
+ * **xr.DataArray** (e.g. from IO roundtrip) → validated, returned as-is
+ * **None** → returns None (pass-through)
+
+ Args:
+ data: Raw input value. ``None`` is a legal no-op.
+ coords: Model coordinate mapping, e.g.
+ ``{'time': DatetimeIndex, 'period': Index, 'scenario': Index}``.
+ name: Optional name assigned to the resulting DataArray.
+ dims: If given, only these coordinate keys are considered for
+ alignment (subset of *coords*).
+
+ Returns:
+ DataArray aligned to *coords*, or ``None`` when *data* is ``None``.
+
+ Raises:
+ ConversionError: If the input cannot be mapped to the target
+ coordinates (length mismatch, incompatible dims, …).
+ """
+ if data is None:
+ return None
+
+ # Restrict coords to requested dims
+ if dims is not None:
+ coords = {k: v for k, v in coords.items() if k in dims}
+
+ # TimeSeriesData carries clustering metadata — delegate to its own method
+ if isinstance(data, TimeSeriesData):
+ try:
+ return data.fit_to_coords(coords, name=name or None)
+ except ConversionError as e:
+ raise ConversionError(
+ f'Could not align TimeSeriesData "{name}" to model coords:\n{data}\nOriginal error: {e}'
+ ) from e
+
+ # Everything else goes through DataConverter
+ try:
+ da = DataConverter.to_dataarray(data, coords=coords)
+ except ConversionError as e:
+ raise ConversionError(f'Could not align data "{name}" to model coords:\n{data}\nOriginal error: {e}') from e
+
+ if name:
+ da = da.rename(name)
+ return da
+
+
+def align_effects_to_coords(
+ effect_values: dict | None,
+ coords: dict[str, pd.Index],
+ prefix: str = '',
+ suffix: str = '',
+ dims: list[str] | None = None,
+ delimiter: str = '|',
+) -> dict[str, xr.DataArray] | None:
+ """Align a dict of effect values to model coordinates.
+
+ Convenience wrapper around :func:`align_to_coords` for
+ ``effects_per_flow_hour`` and similar effect dicts.
+
+ Args:
+ effect_values: ``{effect_id: numeric_value}`` mapping, or ``None``.
+ coords: Model coordinate mapping.
+ prefix: Label prefix for DataArray names.
+ suffix: Label suffix for DataArray names.
+ dims: Passed through to :func:`align_to_coords`.
+ delimiter: Separator between prefix, effect id, and suffix.
+
+ Returns:
+ ``{effect_id: DataArray}`` or ``None``.
+ """
+ if effect_values is None:
+ return None
+
+ return {
+ effect_id: align_to_coords(
+ value,
+ coords,
+ name=delimiter.join(filter(None, [prefix, effect_id, suffix])),
+ dims=dims,
+ )
+ for effect_id, value in effect_values.items()
+ }
+
+
def get_dataarray_stats(arr: xr.DataArray) -> dict:
"""Generate statistical summary of a DataArray."""
stats = {}
diff --git a/flixopt/effects.py b/flixopt/effects.py
index 4ad339eeb..2c3cfaccb 100644
--- a/flixopt/effects.py
+++ b/flixopt/effects.py
@@ -9,18 +9,20 @@
import logging
from collections import deque
+from dataclasses import dataclass, field
from typing import TYPE_CHECKING
import linopy
import numpy as np
import xarray as xr
-from .core import PlausibilityError
+from . import io as fx_io
from .id_list import IdList
from .structure import (
Element,
FlowSystemModel,
register_class_for_io,
+ valid_id,
)
if TYPE_CHECKING:
@@ -38,6 +40,7 @@
@register_class_for_io
+@dataclass(eq=False, repr=False)
class Effect(Element):
"""Represents system-wide impacts like costs, emissions, or resource consumption.
@@ -188,131 +191,43 @@ class Effect(Element):
"""
- def __init__(
- self,
- id: str | None = None,
- unit: str = '',
- description: str = '',
- meta_data: dict | None = None,
- is_standard: bool = False,
- is_objective: bool = False,
- period_weights: Numeric_PS | None = None,
- share_from_temporal: Effect_TPS | Numeric_TPS | None = None,
- share_from_periodic: Effect_PS | Numeric_PS | None = None,
- minimum_temporal: Numeric_PS | None = None,
- maximum_temporal: Numeric_PS | None = None,
- minimum_periodic: Numeric_PS | None = None,
- maximum_periodic: Numeric_PS | None = None,
- minimum_per_hour: Numeric_TPS | None = None,
- maximum_per_hour: Numeric_TPS | None = None,
- minimum_total: Numeric_PS | None = None,
- maximum_total: Numeric_PS | None = None,
- minimum_over_periods: Numeric_S | None = None,
- maximum_over_periods: Numeric_S | None = None,
- **kwargs,
- ):
- super().__init__(id, meta_data=meta_data, **kwargs)
- self.unit = unit
- self.description = description
- self.is_standard = is_standard
-
+ id: str
+ unit: str = ''
+ description: str = ''
+ is_standard: bool = False
+ is_objective: bool = False
+ period_weights: Numeric_PS | None = None
+ share_from_temporal: Effect_TPS | Numeric_TPS | None = None
+ share_from_periodic: Effect_PS | Numeric_PS | None = None
+ minimum_temporal: Numeric_PS | None = None
+ maximum_temporal: Numeric_PS | None = None
+ minimum_periodic: Numeric_PS | None = None
+ maximum_periodic: Numeric_PS | None = None
+ minimum_per_hour: Numeric_TPS | None = None
+ maximum_per_hour: Numeric_TPS | None = None
+ minimum_total: Numeric_PS | None = None
+ maximum_total: Numeric_PS | None = None
+ minimum_over_periods: Numeric_S | None = None
+ maximum_over_periods: Numeric_S | None = None
+ meta_data: dict = field(default_factory=dict)
+ color: str | None = None
+
+ def __post_init__(self):
+ self.id = valid_id(self.id)
# Validate that Penalty cannot be set as objective
- if is_objective and id == PENALTY_EFFECT_ID:
+ if self.is_objective and self.id == PENALTY_EFFECT_ID:
raise ValueError(
f'The Penalty effect ("{PENALTY_EFFECT_ID}") cannot be set as the objective effect. '
f'Please use a different effect as the optimization objective.'
)
+ # Default to {} when None (no shares defined)
+ if self.share_from_temporal is None:
+ self.share_from_temporal = {}
+ if self.share_from_periodic is None:
+ self.share_from_periodic = {}
- self.is_objective = is_objective
- self.period_weights = period_weights
- # Share parameters accept Effect_* | Numeric_* unions (dict or single value).
- # Store as-is here; transform_data() will normalize via fit_effects_to_model_coords().
- # Default to {} when None (no shares defined).
- self.share_from_temporal = share_from_temporal if share_from_temporal is not None else {}
- self.share_from_periodic = share_from_periodic if share_from_periodic is not None else {}
-
- # Set attributes directly
- self.minimum_temporal = minimum_temporal
- self.maximum_temporal = maximum_temporal
- self.minimum_periodic = minimum_periodic
- self.maximum_periodic = maximum_periodic
- self.minimum_per_hour = minimum_per_hour
- self.maximum_per_hour = maximum_per_hour
- self.minimum_total = minimum_total
- self.maximum_total = maximum_total
- self.minimum_over_periods = minimum_over_periods
- self.maximum_over_periods = maximum_over_periods
-
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Link this effect to a FlowSystem.
-
- Elements use their id as prefix by default, ignoring the passed prefix.
- """
- super().link_to_flow_system(flow_system, self.id)
-
- def transform_data(self) -> None:
- self.minimum_per_hour = self._fit_coords(f'{self.prefix}|minimum_per_hour', self.minimum_per_hour)
- self.maximum_per_hour = self._fit_coords(f'{self.prefix}|maximum_per_hour', self.maximum_per_hour)
-
- self.share_from_temporal = self._fit_effect_coords(
- prefix=None,
- effect_values=self.share_from_temporal,
- suffix=f'(temporal)->{self.prefix}(temporal)',
- )
- self.share_from_periodic = self._fit_effect_coords(
- prefix=None,
- effect_values=self.share_from_periodic,
- suffix=f'(periodic)->{self.prefix}(periodic)',
- dims=['period', 'scenario'],
- )
-
- self.minimum_temporal = self._fit_coords(
- f'{self.prefix}|minimum_temporal', self.minimum_temporal, dims=['period', 'scenario']
- )
- self.maximum_temporal = self._fit_coords(
- f'{self.prefix}|maximum_temporal', self.maximum_temporal, dims=['period', 'scenario']
- )
- self.minimum_periodic = self._fit_coords(
- f'{self.prefix}|minimum_periodic', self.minimum_periodic, dims=['period', 'scenario']
- )
- self.maximum_periodic = self._fit_coords(
- f'{self.prefix}|maximum_periodic', self.maximum_periodic, dims=['period', 'scenario']
- )
- self.minimum_total = self._fit_coords(
- f'{self.prefix}|minimum_total', self.minimum_total, dims=['period', 'scenario']
- )
- self.maximum_total = self._fit_coords(
- f'{self.prefix}|maximum_total', self.maximum_total, dims=['period', 'scenario']
- )
- self.minimum_over_periods = self._fit_coords(
- f'{self.prefix}|minimum_over_periods', self.minimum_over_periods, dims=['scenario']
- )
- self.maximum_over_periods = self._fit_coords(
- f'{self.prefix}|maximum_over_periods', self.maximum_over_periods, dims=['scenario']
- )
- self.period_weights = self._fit_coords(
- f'{self.prefix}|period_weights', self.period_weights, dims=['period', 'scenario']
- )
-
- def validate_config(self) -> None:
- """Validate configuration consistency.
-
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- # Check that minimum_over_periods and maximum_over_periods require a period dimension
- if (
- self.minimum_over_periods is not None or self.maximum_over_periods is not None
- ) and self.flow_system.periods is None:
- raise PlausibilityError(
- f"Effect '{self.id}': minimum_over_periods and maximum_over_periods require "
- f"the FlowSystem to have a 'period' dimension. Please define periods when creating "
- f'the FlowSystem, or remove these constraints.'
- )
-
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config()."""
- self.validate_config()
+ def __repr__(self) -> str:
+ return fx_io.build_repr_from_init(self, excluded_params={'self', 'id', 'kwargs'}, skip_default_size=True)
class EffectsModel:
@@ -723,13 +638,13 @@ def _add_share_between_effects(self):
for target_effect in self.data.values():
target_id = target_effect.id
# 1. temporal: <- receiving temporal shares from other effects
- for source_effect, time_series in target_effect.share_from_temporal.items():
+ for source_effect, time_series in self.data.aligned_share_from_temporal(target_effect).items():
source_id = self.data[source_effect].id
source_per_timestep = self.get_per_timestep(source_id)
expr = (source_per_timestep * time_series).expand_dims(effect=[target_id], contributor=[source_id])
self.add_temporal_contribution(expr)
# 2. periodic: <- receiving periodic shares from other effects
- for source_effect, factor in target_effect.share_from_periodic.items():
+ for source_effect, factor in self.data.aligned_share_from_periodic(target_effect).items():
source_id = self.data[source_effect].id
source_periodic = self.get_periodic(source_id)
expr = (source_periodic * factor).expand_dims(effect=[target_id], contributor=[source_id])
@@ -832,22 +747,6 @@ def get_effect_id(eff: str | None) -> str:
return {get_effect_id(effect): value for effect, value in effect_values_user.items()}
return {self.standard_effect.id: effect_values_user}
- def validate_config(self) -> None:
- """Deprecated: Validation is now handled by EffectsData.validate().
-
- This method is kept for backwards compatibility but does nothing.
- Collection-level validation (cycles, unknown refs) is now in EffectsData._validate_share_structure().
- """
- pass
-
- def _plausibility_checks(self) -> None:
- """Deprecated: Legacy validation method.
-
- Kept for backwards compatibility but does nothing.
- Validation is now handled by EffectsData.validate().
- """
- pass
-
def __getitem__(self, effect: str | Effect | None) -> Effect:
"""
Get an effect by id, or return the standard effect if None is passed
diff --git a/flixopt/elements.py b/flixopt/elements.py
index 7513eb8f9..89fc82c37 100644
--- a/flixopt/elements.py
+++ b/flixopt/elements.py
@@ -5,8 +5,10 @@
from __future__ import annotations
import logging
+import warnings
+from dataclasses import dataclass, field
from functools import cached_property
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, ClassVar
import numpy as np
import pandas as pd
@@ -14,7 +16,6 @@
from . import io as fx_io
from .config import CONFIG
-from .core import PlausibilityError
from .features import (
MaskHelpers,
StatusBuilder,
@@ -35,6 +36,7 @@
TransmissionVarName,
TypeModel,
register_class_for_io,
+ valid_id,
)
if TYPE_CHECKING:
@@ -42,7 +44,6 @@
from .batched import BusesData, ComponentsData, ConvertersData, FlowsData, TransmissionsData
from .types import (
- Effect_TPS,
Numeric_PS,
Numeric_S,
Numeric_TPS,
@@ -93,6 +94,7 @@ def _add_prevent_simultaneous_constraints(
@register_class_for_io
+@dataclass(eq=False, repr=False)
class Component(Element):
"""
Base class for all system components that transform, convert, or process flows.
@@ -139,41 +141,29 @@ class Component(Element):
"""
- def __init__(
- self,
- id: str | None = None,
- inputs: list[Flow] | dict[str, Flow] | None = None,
- outputs: list[Flow] | dict[str, Flow] | None = None,
- status_parameters: StatusParameters | None = None,
- prevent_simultaneous_flows: list[Flow] | None = None,
- meta_data: dict | None = None,
- color: str | None = None,
- **kwargs,
- ):
- super().__init__(id, meta_data=meta_data, color=color, **kwargs)
- self.status_parameters = status_parameters
- if isinstance(prevent_simultaneous_flows, dict):
- prevent_simultaneous_flows = list(prevent_simultaneous_flows.values())
- self.prevent_simultaneous_flows: list[Flow] = prevent_simultaneous_flows or []
-
- # IdLists serialize as dicts, but constructor expects lists
- if isinstance(inputs, dict):
- inputs = list(inputs.values())
- if isinstance(outputs, dict):
- outputs = list(outputs.values())
-
- _inputs = inputs or []
- _outputs = outputs or []
-
- # Check uniqueness on raw lists (before connecting)
+ id: str
+ inputs: list[Flow] = field(default_factory=list)
+ outputs: list[Flow] = field(default_factory=list)
+ status_parameters: StatusParameters | None = None
+ prevent_simultaneous_flows: list[Flow] = field(default_factory=list)
+ meta_data: dict = field(default_factory=dict)
+ color: str | None = None
+
+ def __post_init__(self):
+ self.id = valid_id(self.id)
+
+ _inputs = self.inputs or []
+ _outputs = self.outputs or []
+
+ # Connect flows (sets component name, defaults flow_id to bus name)
+ self._connect_flows(_inputs, _outputs)
+
+ # Check uniqueness after flow_ids are resolved
all_flow_ids = [flow.flow_id for flow in _inputs + _outputs]
if len(set(all_flow_ids)) != len(all_flow_ids):
duplicates = {fid for fid in all_flow_ids if all_flow_ids.count(fid) > 1}
raise ValueError(f'Flow names must be unique! "{self.id}" got 2 or more of: {duplicates}')
- # Connect flows (sets component name) before creating IdLists
- self._connect_flows(_inputs, _outputs)
-
# Now flow.id is qualified, so IdList can key by it
self.inputs: IdList = flow_id_list(_inputs, display_name='inputs')
self.outputs: IdList = flow_id_list(_outputs, display_name='outputs')
@@ -183,26 +173,6 @@ def flows(self) -> IdList:
"""All flows (inputs and outputs) as an IdList."""
return self.inputs + self.outputs
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested Interface objects and flows.
-
- Elements use their id_full as prefix by default, ignoring the passed prefix.
- """
- super().link_to_flow_system(flow_system, self.id)
- if self.status_parameters is not None:
- self.status_parameters.link_to_flow_system(flow_system, self._sub_prefix('status_parameters'))
- for flow in self.flows.values():
- flow.link_to_flow_system(flow_system)
-
- def transform_data(self) -> None:
- self._propagate_status_parameters()
-
- if self.status_parameters is not None:
- self.status_parameters.transform_data()
-
- for flow in self.flows.values():
- flow.transform_data()
-
def _propagate_status_parameters(self) -> None:
"""Propagate status parameters from this component to flows that need them.
@@ -216,12 +186,10 @@ def _propagate_status_parameters(self) -> None:
for flow in self.flows.values():
if flow.status_parameters is None:
flow.status_parameters = StatusParameters()
- flow.status_parameters.link_to_flow_system(self._flow_system, f'{flow.id}|status_parameters')
if self.prevent_simultaneous_flows:
for flow in self.prevent_simultaneous_flows:
if flow.status_parameters is None:
flow.status_parameters = StatusParameters()
- flow.status_parameters.link_to_flow_system(self._flow_system, f'{flow.id}|status_parameters')
def _check_unique_flow_ids(self, inputs: list = None, outputs: list = None):
if inputs is None:
@@ -234,34 +202,15 @@ def _check_unique_flow_ids(self, inputs: list = None, outputs: list = None):
duplicates = {fid for fid in all_flow_ids if all_flow_ids.count(fid) > 1}
raise ValueError(f'Flow names must be unique! "{self.id}" got 2 or more of: {duplicates}')
- def validate_config(self) -> None:
- """Validate configuration consistency.
-
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- self._check_unique_flow_ids()
-
- # Component with status_parameters requires all flows to have sizes set
- # (status_parameters are propagated to flows in _do_modeling, which need sizes for big-M constraints)
- if self.status_parameters is not None:
- flows_without_size = [flow.flow_id for flow in self.flows.values() if flow.size is None]
- if flows_without_size:
- raise PlausibilityError(
- f'Component "{self.id}" has status_parameters, but the following flows have no size: '
- f'{flows_without_size}. All flows need explicit sizes when the component uses status_parameters '
- f'(required for big-M constraints).'
- )
-
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config()."""
- self.validate_config()
-
def _connect_flows(self, inputs=None, outputs=None):
if inputs is None:
inputs = list(self.inputs.values())
if outputs is None:
outputs = list(self.outputs.values())
+ # Default flow_id to bus name if not explicitly set
+ for flow in inputs + outputs:
+ if flow.flow_id is None:
+ flow.flow_id = valid_id(flow.bus if isinstance(flow.bus, str) else str(flow.bus))
# Inputs
for flow in inputs:
if flow.component not in ('UnknownComponent', self.id):
@@ -303,6 +252,7 @@ def __repr__(self) -> str:
@register_class_for_io
+@dataclass(eq=False, repr=False)
class Bus(Element):
"""
Buses represent nodal balances between flow rates, serving as connection points.
@@ -316,7 +266,7 @@ class Bus(Element):
See
Args:
- label: The label of the Element. Used to identify it in the FlowSystem.
+ id: The id of the Element. Used to identify it in the FlowSystem.
carrier: Name of the energy/material carrier type (e.g., 'electricity', 'heat', 'gas').
Carriers are registered via ``flow_system.add_carrier()`` or available as
predefined defaults in CONFIG.Carriers. Used for automatic color assignment in plots.
@@ -330,8 +280,8 @@ class Bus(Element):
Using predefined carrier names:
```python
- electricity_bus = Bus(label='main_grid', carrier='electricity')
- heat_bus = Bus(label='district_heating', carrier='heat')
+ electricity_bus = Bus(id='main_grid', carrier='electricity')
+ heat_bus = Bus(id='district_heating', carrier='heat')
```
Registering custom carriers on FlowSystem:
@@ -341,14 +291,14 @@ class Bus(Element):
fs = fx.FlowSystem(timesteps)
fs.add_carrier(fx.Carrier('biogas', '#228B22', 'kW'))
- biogas_bus = fx.Bus(label='biogas_network', carrier='biogas')
+ biogas_bus = fx.Bus(id='biogas_network', carrier='biogas')
```
Heat network with penalty for imbalances:
```python
heat_bus = Bus(
- label='district_heating',
+ id='district_heating',
carrier='heat',
imbalance_penalty_per_flow_hour=1000,
)
@@ -366,71 +316,54 @@ class Bus(Element):
by the FlowSystem during system setup.
"""
- def __init__(
- self,
- id: str | None = None,
- carrier: str | None = None,
- imbalance_penalty_per_flow_hour: Numeric_TPS | None = None,
- meta_data: dict | None = None,
- **kwargs,
- ):
- # Handle Bus-specific deprecated kwarg before passing kwargs to super
- old_penalty = kwargs.pop('excess_penalty_per_flow_hour', None)
- super().__init__(id, meta_data=meta_data, **kwargs)
- if old_penalty is not None:
- imbalance_penalty_per_flow_hour = self._handle_deprecated_kwarg(
- {'excess_penalty_per_flow_hour': old_penalty},
- 'excess_penalty_per_flow_hour',
- 'imbalance_penalty_per_flow_hour',
- imbalance_penalty_per_flow_hour,
+ _io_exclude: ClassVar[set[str]] = {'excess_penalty_per_flow_hour'}
+
+ id: str
+ carrier: str | None = None
+ imbalance_penalty_per_flow_hour: Numeric_TPS | None = None
+ excess_penalty_per_flow_hour: Numeric_TPS | None = field(default=None, repr=False)
+ meta_data: dict = field(default_factory=dict)
+ color: str | None = None
+ # Internal state (populated by FlowSystem._connect_network)
+ inputs: IdList = field(default_factory=lambda: flow_id_list(display_name='inputs'), init=False, repr=False)
+ outputs: IdList = field(default_factory=lambda: flow_id_list(display_name='outputs'), init=False, repr=False)
+
+ def __post_init__(self):
+ self.id = valid_id(self.id)
+ # Handle deprecated excess_penalty_per_flow_hour
+ if self.excess_penalty_per_flow_hour is not None:
+ from .config import DEPRECATION_REMOVAL_VERSION
+
+ warnings.warn(
+ f'The use of the "excess_penalty_per_flow_hour" argument is deprecated. '
+ f'Use the "imbalance_penalty_per_flow_hour" argument instead. '
+ f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
+ DeprecationWarning,
+ stacklevel=2,
)
- self.carrier = carrier.lower() if carrier else None # Store as lowercase string
- self.imbalance_penalty_per_flow_hour = imbalance_penalty_per_flow_hour
- self.inputs: IdList = flow_id_list(display_name='inputs')
- self.outputs: IdList = flow_id_list(display_name='outputs')
+ if self.imbalance_penalty_per_flow_hour is not None:
+ raise ValueError(
+ 'Either excess_penalty_per_flow_hour or imbalance_penalty_per_flow_hour can be specified, but not both.'
+ )
+ self.imbalance_penalty_per_flow_hour = self.excess_penalty_per_flow_hour
+ self.excess_penalty_per_flow_hour = None
+ if self.carrier:
+ self.carrier = self.carrier.lower()
@property
def flows(self) -> IdList:
"""All flows (inputs and outputs) as an IdList."""
return self.inputs + self.outputs
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested flows.
-
- Elements use their id_full as prefix by default, ignoring the passed prefix.
- """
- super().link_to_flow_system(flow_system, self.id)
- for flow in self.flows.values():
- flow.link_to_flow_system(flow_system)
-
- def transform_data(self) -> None:
- self.imbalance_penalty_per_flow_hour = self._fit_coords(
- f'{self.prefix}|imbalance_penalty_per_flow_hour', self.imbalance_penalty_per_flow_hour
- )
-
- def validate_config(self) -> None:
- """Validate configuration consistency.
-
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- if len(self.inputs) == 0 and len(self.outputs) == 0:
- raise ValueError(f'Bus "{self.id}" has no Flows connected to it. Please remove it from the FlowSystem')
-
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config().
-
- DataArray-based checks (imbalance_penalty warning) moved to BusesData.validate().
- """
- self.validate_config()
-
@property
def allows_imbalance(self) -> bool:
return self.imbalance_penalty_per_flow_hour is not None
def __repr__(self) -> str:
"""Return string representation."""
- return super().__repr__() + fx_io.format_flow_details(self)
+ return fx_io.build_repr_from_init(
+ self, excluded_params={'self', 'id', 'kwargs'}, skip_default_size=True
+ ) + fx_io.format_flow_details(self)
@register_class_for_io
@@ -447,6 +380,7 @@ def __init__(self):
@register_class_for_io
+@dataclass(eq=False, repr=False)
class Flow(Element):
"""Define a directed flow of energy or material between bus and component.
@@ -470,7 +404,7 @@ class Flow(Element):
See
Args:
- bus: Bus this flow connects to (string id). First positional argument.
+ bus: Bus this flow connects to (string id).
flow_id: Unique flow identifier within its component. Defaults to the bus name.
size: Flow capacity. Scalar, InvestParameters, or None (unbounded).
relative_minimum: Minimum flow rate as fraction of size (0-1). Default: 0.
@@ -496,8 +430,7 @@ class Flow(Element):
```python
generator_output = Flow(
- 'electricity_grid',
- flow_id='electricity_out',
+ bus='electricity_grid',
size=100, # 100 MW capacity
relative_minimum=0.4, # Cannot operate below 40 MW
effects_per_flow_hour={'fuel_cost': 45, 'co2_emissions': 0.8},
@@ -508,7 +441,7 @@ class Flow(Element):
```python
battery_flow = Flow(
- 'electricity_grid',
+ bus='electricity_grid',
size=InvestParameters(
minimum_size=10, # Minimum 10 MWh
maximum_size=100, # Maximum 100 MWh
@@ -521,8 +454,7 @@ class Flow(Element):
```python
heat_pump = Flow(
- 'heating_network',
- flow_id='heat_output',
+ bus='heating_network',
size=50, # 50 kW thermal
relative_minimum=0.3, # Minimum 15 kW output when active
effects_per_flow_hour={'electricity_cost': 25, 'maintenance': 2},
@@ -539,8 +471,7 @@ class Flow(Element):
```python
solar_generation = Flow(
- 'electricity_grid',
- flow_id='solar_power',
+ bus='electricity_grid',
size=25, # 25 MW installed capacity
fixed_relative_profile=np.array([0, 0.1, 0.4, 0.8, 0.9, 0.7, 0.3, 0.1, 0]),
effects_per_flow_hour={'maintenance_costs': 5}, # €5/MWh maintenance
@@ -551,8 +482,7 @@ class Flow(Element):
```python
production_line = Flow(
- 'product_market',
- flow_id='product_output',
+ bus='product_market',
size=1000, # 1000 units/hour capacity
load_factor_min=0.6, # Must achieve 60% annual utilization
load_factor_max=0.85, # Cannot exceed 85% for maintenance
@@ -582,286 +512,66 @@ class Flow(Element):
"""
- def __init__(
- self,
- *args,
- bus: str | None = None,
- flow_id: str | None = None,
- size: Numeric_PS | InvestParameters | None = None,
- fixed_relative_profile: Numeric_TPS | None = None,
- relative_minimum: Numeric_TPS = 0,
- relative_maximum: Numeric_TPS = 1,
- effects_per_flow_hour: Effect_TPS | Numeric_TPS | None = None,
- status_parameters: StatusParameters | None = None,
- flow_hours_max: Numeric_PS | None = None,
- flow_hours_min: Numeric_PS | None = None,
- flow_hours_max_over_periods: Numeric_S | None = None,
- flow_hours_min_over_periods: Numeric_S | None = None,
- load_factor_min: Numeric_PS | None = None,
- load_factor_max: Numeric_PS | None = None,
- previous_flow_rate: Scalar | list[Scalar] | None = None,
- meta_data: dict | None = None,
- label: str | None = None,
- id: str | None = None,
- **kwargs,
- ):
- # --- Resolve positional args + deprecation bridge ---
- import warnings
-
- from .config import DEPRECATION_REMOVAL_VERSION
-
- # Handle deprecated 'id' kwarg (use flow_id instead)
- if id is not None:
- warnings.warn(
- f'Flow(id=...) is deprecated. Use Flow(flow_id=...) instead. '
- f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
- DeprecationWarning,
- stacklevel=2,
- )
- if flow_id is not None:
- raise ValueError('Either id or flow_id can be specified, but not both.')
- flow_id = id
-
- if len(args) == 2:
- # Old API: Flow(label, bus)
- warnings.warn(
- f'Flow(label, bus) positional form is deprecated. '
- f'Use Flow(bus, flow_id=...) instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
- DeprecationWarning,
- stacklevel=2,
- )
- if flow_id is None and label is None:
- flow_id = args[0]
- if bus is None:
- bus = args[1]
- elif len(args) == 1:
- if bus is not None:
- # Old API: Flow(label, bus=...)
- warnings.warn(
- f'Flow(label, bus=...) positional form is deprecated. '
- f'Use Flow(bus, flow_id=...) instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
- DeprecationWarning,
- stacklevel=2,
- )
- if flow_id is None and label is None:
- flow_id = args[0]
- else:
- # New API: Flow(bus) — bus is the positional arg
- bus = args[0]
- elif len(args) > 2:
- raise TypeError(f'Flow() takes at most 2 positional arguments ({len(args)} given)')
-
- # Handle deprecated label kwarg
- if label is not None:
- warnings.warn(
- f'The "label" argument is deprecated. Use "flow_id" instead. '
- f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
- DeprecationWarning,
- stacklevel=2,
- )
- if flow_id is not None:
- raise ValueError('Either label or flow_id can be specified, but not both.')
- flow_id = label
-
- # Default flow_id to bus name
- if flow_id is None:
- if bus is None:
- raise TypeError('Flow() requires a bus argument.')
- flow_id = bus if isinstance(bus, str) else str(bus)
-
- if bus is None:
- raise TypeError('Flow() requires a bus argument.')
-
- super().__init__(flow_id, meta_data=meta_data, **kwargs)
- self.size = size
- self.relative_minimum = relative_minimum
- self.relative_maximum = relative_maximum
- self.fixed_relative_profile = fixed_relative_profile
-
- self.load_factor_min = load_factor_min
- self.load_factor_max = load_factor_max
-
- self.effects_per_flow_hour = effects_per_flow_hour if effects_per_flow_hour is not None else {}
- self.flow_hours_max = flow_hours_max
- self.flow_hours_min = flow_hours_min
- self.flow_hours_max_over_periods = flow_hours_max_over_periods
- self.flow_hours_min_over_periods = flow_hours_min_over_periods
- self.status_parameters = status_parameters
-
- self.previous_flow_rate = previous_flow_rate
-
- self.component: str = 'UnknownComponent'
- self.is_input_in_component: bool | None = None
- if isinstance(bus, Bus):
+ bus: str
+ flow_id: str | None = None
+ size: Numeric_PS | InvestParameters | None = None
+ relative_minimum: Numeric_TPS = 0
+ relative_maximum: Numeric_TPS = 1
+ fixed_relative_profile: Numeric_TPS | None = None
+ effects_per_flow_hour: Numeric_TPS | dict | None = None
+ status_parameters: StatusParameters | None = None
+ flow_hours_max: Numeric_PS | None = None
+ flow_hours_min: Numeric_PS | None = None
+ flow_hours_max_over_periods: Numeric_S | None = None
+ flow_hours_min_over_periods: Numeric_S | None = None
+ load_factor_min: Numeric_PS | None = None
+ load_factor_max: Numeric_PS | None = None
+ previous_flow_rate: Scalar | list[Scalar] | None = None
+ meta_data: dict = field(default_factory=dict)
+ color: str | None = None
+ # Internal state (not user-facing)
+ component: str = field(default='UnknownComponent', init=False)
+ is_input_in_component: bool | None = field(default=None, init=False)
+
+ def __post_init__(self):
+ if isinstance(self.bus, Bus):
raise TypeError(
- f'Bus {bus.id} is passed as a Bus object to Flow {self.id}. '
+ f'Bus {self.bus.id} is passed as a Bus object to Flow {self.flow_id or self.bus}. '
f'This is no longer supported. Add the Bus to the FlowSystem and pass its id (string) to the Flow.'
)
- self.bus = bus
-
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested Interface objects.
-
- Elements use their id_full as prefix by default, ignoring the passed prefix.
- """
- super().link_to_flow_system(flow_system, self.id)
- if self.status_parameters is not None:
- self.status_parameters.link_to_flow_system(flow_system, self._sub_prefix('status_parameters'))
- if isinstance(self.size, InvestParameters):
- self.size.link_to_flow_system(flow_system, self._sub_prefix('InvestParameters'))
-
- def transform_data(self) -> None:
- self.relative_minimum = self._fit_coords(f'{self.prefix}|relative_minimum', self.relative_minimum)
- self.relative_maximum = self._fit_coords(f'{self.prefix}|relative_maximum', self.relative_maximum)
- self.fixed_relative_profile = self._fit_coords(
- f'{self.prefix}|fixed_relative_profile', self.fixed_relative_profile
- )
- self.effects_per_flow_hour = self._fit_effect_coords(self.prefix, self.effects_per_flow_hour, 'per_flow_hour')
- self.flow_hours_max = self._fit_coords(
- f'{self.prefix}|flow_hours_max', self.flow_hours_max, dims=['period', 'scenario']
- )
- self.flow_hours_min = self._fit_coords(
- f'{self.prefix}|flow_hours_min', self.flow_hours_min, dims=['period', 'scenario']
- )
- self.flow_hours_max_over_periods = self._fit_coords(
- f'{self.prefix}|flow_hours_max_over_periods', self.flow_hours_max_over_periods, dims=['scenario']
- )
- self.flow_hours_min_over_periods = self._fit_coords(
- f'{self.prefix}|flow_hours_min_over_periods', self.flow_hours_min_over_periods, dims=['scenario']
- )
- self.load_factor_max = self._fit_coords(
- f'{self.prefix}|load_factor_max', self.load_factor_max, dims=['period', 'scenario']
- )
- self.load_factor_min = self._fit_coords(
- f'{self.prefix}|load_factor_min', self.load_factor_min, dims=['period', 'scenario']
- )
-
- if self.status_parameters is not None:
- self.status_parameters.transform_data()
- if isinstance(self.size, InvestParameters):
- self.size.transform_data()
- elif self.size is not None:
- self.size = self._fit_coords(f'{self.prefix}|size', self.size, dims=['period', 'scenario'])
-
- def validate_config(self) -> None:
- """Validate configuration consistency.
-
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- # Size is required when using StatusParameters (for big-M constraints)
- if self.status_parameters is not None and self.size is None:
- raise PlausibilityError(
- f'Flow "{self.id}" has status_parameters but no size defined. '
- f'A size is required when using status_parameters to bound the flow rate.'
- )
-
- if self.size is None and self.fixed_relative_profile is not None:
- raise PlausibilityError(
- f'Flow "{self.id}" has a fixed_relative_profile but no size defined. '
- f'A size is required because flow_rate = size * fixed_relative_profile.'
- )
-
- # Size is required for load factor constraints (total_flow_hours / size)
- if self.size is None and self.load_factor_min is not None:
- raise PlausibilityError(
- f'Flow "{self.id}" has load_factor_min but no size defined. '
- f'A size is required because the constraint is total_flow_hours >= size * load_factor_min * hours.'
- )
-
- if self.size is None and self.load_factor_max is not None:
- raise PlausibilityError(
- f'Flow "{self.id}" has load_factor_max but no size defined. '
- f'A size is required because the constraint is total_flow_hours <= size * load_factor_max * hours.'
- )
-
- # Validate previous_flow_rate type
- if self.previous_flow_rate is not None:
- if not any(
- [
- isinstance(self.previous_flow_rate, np.ndarray) and self.previous_flow_rate.ndim == 1,
- isinstance(self.previous_flow_rate, (int, float, list)),
- ]
- ):
- raise TypeError(
- f'previous_flow_rate must be None, a scalar, a list of scalars or a 1D-numpy-array. '
- f'Got {type(self.previous_flow_rate)}. '
- f'Different values in different periods or scenarios are not yet supported.'
- )
-
- # Warning: fixed_relative_profile + status_parameters is unusual
- if self.fixed_relative_profile is not None and self.status_parameters is not None:
- logger.warning(
- f'Flow {self.id} has both a fixed_relative_profile and status_parameters. '
- f'This will allow the flow to be switched active and inactive, effectively differing from the fixed_flow_rate.'
- )
-
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config().
-
- DataArray-based validation is now done in FlowsData.validate().
- """
- self.validate_config()
-
- @property
- def flow_id(self) -> str:
- """The short flow identifier (e.g. ``'Heat'``).
-
- This is the user-facing name. Defaults to the bus name if not set explicitly.
- """
- return self._short_id
-
- @flow_id.setter
- def flow_id(self, value: str) -> None:
- self._short_id = value
+ if self.flow_id is not None:
+ self.flow_id = valid_id(self.flow_id)
@property
def id(self) -> str:
"""The qualified identifier: ``component(flow_id)``."""
- return f'{self.component}({self._short_id})'
-
- @id.setter
- def id(self, value: str) -> None:
- self._short_id = value
-
- # =========================================================================
- # Type-Level Model Access (for FlowsModel integration)
- # =========================================================================
-
- _flows_model: FlowsModel | None = None # Set by FlowsModel during creation
-
- def set_flows_model(self, flows_model: FlowsModel) -> None:
- """Set reference to the type-level FlowsModel.
-
- Called by FlowsModel during initialization to enable element access.
- """
- self._flows_model = flows_model
+ return f'{self.component}({self.flow_id})'
@property
- def flow_rate_from_type_model(self) -> linopy.Variable | None:
- """Get flow_rate from FlowsModel (if using type-level modeling).
+ def label(self) -> str:
+ """Deprecated: Use ``flow_id`` instead."""
+ from .config import DEPRECATION_REMOVAL_VERSION
- Returns the slice of the batched variable for this specific flow.
- """
- if self._flows_model is None:
- return None
- return self._flows_model.get_variable(FlowVarName.RATE, self.id)
+ warnings.warn(
+ f'Accessing ".label" is deprecated. Use ".flow_id" instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.flow_id
- @property
- def total_flow_hours_from_type_model(self) -> linopy.Variable | None:
- """Get total_flow_hours from FlowsModel (if using type-level modeling)."""
- if self._flows_model is None:
- return None
- return self._flows_model.get_variable(FlowVarName.TOTAL_FLOW_HOURS, self.id)
+ @label.setter
+ def label(self, value: str) -> None:
+ from .config import DEPRECATION_REMOVAL_VERSION
- @property
- def status_from_type_model(self) -> linopy.Variable | None:
- """Get status from FlowsModel (if using type-level modeling)."""
- if self._flows_model is None or FlowVarName.STATUS not in self._flows_model:
- return None
- if self.id not in self._flows_model.status_ids:
- return None
- return self._flows_model.get_variable(FlowVarName.STATUS, self.id)
+ warnings.warn(
+ f'Setting ".label" is deprecated. Use ".flow_id" instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.flow_id = value
+
+ def __repr__(self) -> str:
+ return fx_io.build_repr_from_init(self, excluded_params={'self', 'id'}, skip_default_size=True)
@property
def size_is_fixed(self) -> bool:
@@ -1092,10 +802,6 @@ def __init__(self, model: FlowSystemModel, data: FlowsData):
"""
super().__init__(model, data)
- # Set reference on each flow element for element access pattern
- for flow in self.elements.values():
- flow.set_flows_model(self)
-
self.create_variables()
self.create_status_model()
self.create_constraints()
@@ -1712,10 +1418,6 @@ def __init__(self, model: FlowSystemModel, data: BusesData, flows_model: FlowsMo
# Element ID lists for subsets
self.imbalance_ids: list[str] = data.with_imbalance
- # Set reference on each bus element
- for bus in self.elements.values():
- bus._buses_model = self
-
self.create_variables()
self.create_constraints()
self.create_effect_shares()
@@ -1800,7 +1502,7 @@ def collect_penalty_share_specs(self) -> list[tuple[str, xr.DataArray]]:
penalty_specs = []
for bus in self.buses_with_imbalance:
bus_label = bus.id
- imbalance_penalty = bus.imbalance_penalty_per_flow_hour * self.model.timestep_duration
+ imbalance_penalty = self.data.aligned_imbalance_penalty(bus) * self.model.timestep_duration
virtual_supply = self[BusVarName.VIRTUAL_SUPPLY].sel({dim: bus_label})
virtual_demand = self[BusVarName.VIRTUAL_DEMAND].sel({dim: bus_label})
diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py
index beb314148..0bd80587d 100644
--- a/flixopt/flow_system.py
+++ b/flixopt/flow_system.py
@@ -8,7 +8,6 @@
import logging
import pathlib
import warnings
-from itertools import chain
from typing import TYPE_CHECKING, Any, Literal
import pandas as pd
@@ -35,7 +34,8 @@
CompositeContainerMixin,
Element,
FlowSystemModel,
- Interface,
+ create_reference_structure,
+ replace_references_with_stats,
)
from .topology_accessor import TopologyAccessor
from .transform_accessor import TransformAccessor
@@ -204,7 +204,7 @@ def __contains__(self, key):
return key in object.__getattribute__(self, '_dataset')
-class FlowSystem(Interface, CompositeContainerMixin[Element]):
+class FlowSystem(CompositeContainerMixin[Element]):
"""
A FlowSystem organizes the high level Elements (Components, Buses, Effects & Flows).
@@ -362,6 +362,11 @@ def __init__(
self._connected_and_transformed = False
self._used_in_optimization = False
+ # Registry for runtime state (populated during model building, not stored on elements)
+ self._element_variable_names: dict[str, list[str]] = {}
+ self._element_constraint_names: dict[str, list[str]] = {}
+ self._registered_elements: set[int] = set() # Python id() for ownership check
+
self._network_app = None
self._flows_cache: IdList[Flow] | None = None
self._storages_cache: IdList[Storage] | None = None
@@ -397,37 +402,41 @@ def __init__(
def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]:
"""
Override Interface method to handle FlowSystem-specific serialization.
- Combines custom FlowSystem logic with Interface pattern for nested objects.
+
+ Uses path-based DataArray keys via standalone ``create_reference_structure``:
+ ``components.{id}.param``, ``buses.{id}.param``, ``effects.{id}.param``.
Returns:
Tuple of (reference_structure, extracted_arrays_dict)
"""
- # Start with Interface base functionality for constructor parameters
- reference_structure, all_extracted_arrays = super()._create_reference_structure()
+ coords = self.indexes
+
+ # Start with standalone function for FlowSystem's own constructor params
+ reference_structure, all_extracted_arrays = create_reference_structure(self, coords=coords)
# Remove timesteps, as it's directly stored in dataset index
reference_structure.pop('timesteps', None)
- # Extract from components
+ # Extract from components with path prefix
components_structure = {}
for comp_id, component in self.components.items():
- comp_structure, comp_arrays = component._create_reference_structure()
+ comp_structure, comp_arrays = create_reference_structure(component, f'components|{comp_id}', coords=coords)
all_extracted_arrays.update(comp_arrays)
components_structure[comp_id] = comp_structure
reference_structure['components'] = components_structure
- # Extract from buses
+ # Extract from buses with path prefix
buses_structure = {}
for bus_id, bus in self.buses.items():
- bus_structure, bus_arrays = bus._create_reference_structure()
+ bus_structure, bus_arrays = create_reference_structure(bus, f'buses|{bus_id}', coords=coords)
all_extracted_arrays.update(bus_arrays)
buses_structure[bus_id] = bus_structure
reference_structure['buses'] = buses_structure
- # Extract from effects
+ # Extract from effects with path prefix
effects_structure = {}
for effect in self.effects.values():
- effect_structure, effect_arrays = effect._create_reference_structure()
+ effect_structure, effect_arrays = create_reference_structure(effect, f'effects|{effect.id}', coords=coords)
all_extracted_arrays.update(effect_arrays)
effects_structure[effect.id] = effect_structure
reference_structure['effects'] = effects_structure
@@ -468,8 +477,9 @@ def to_dataset(self, include_solution: bool = True, include_original_data: bool
logger.info('FlowSystem is not connected_and_transformed. Connecting and transforming data now.')
self.connect_and_transform()
- # Get base dataset from parent class
- base_ds = super().to_dataset()
+ # Build base dataset from FlowSystem's own _create_reference_structure
+ reference_structure, extracted_arrays = self._create_reference_structure()
+ base_ds = xr.Dataset(extracted_arrays, attrs=reference_structure)
# Add FlowSystem-specific data (solution, clustering, metadata)
return fx_io.flow_system_to_dataset(self, base_ds, include_solution, include_original_data)
@@ -560,7 +570,11 @@ def from_netcdf(cls, path: str | pathlib.Path) -> FlowSystem:
FlowSystem instance with name set from filename
"""
path = pathlib.Path(path)
- flow_system = super().from_netcdf(path)
+ try:
+ ds = fx_io.load_dataset_from_netcdf(path)
+ flow_system = cls.from_dataset(ds)
+ except Exception as e:
+ raise OSError(f'Failed to load FlowSystem from NetCDF file {path}: {e}') from e
# Derive name from filename (without extension)
flow_system.name = path.stem
return flow_system
@@ -760,7 +774,14 @@ def get_structure(self, clean: bool = False, stats: bool = False) -> dict:
logger.warning('FlowSystem is not connected. Calling connect_and_transform() now.')
self.connect_and_transform()
- return super().get_structure(clean, stats)
+ reference_structure, extracted_arrays = self._create_reference_structure()
+
+ if stats:
+ reference_structure = replace_references_with_stats(reference_structure, extracted_arrays)
+
+ if clean:
+ return fx_io.remove_none_and_empty(reference_structure)
+ return reference_structure
def to_json(self, path: str | pathlib.Path):
"""
@@ -776,7 +797,11 @@ def to_json(self, path: str | pathlib.Path):
)
self.connect_and_transform()
- super().to_json(path)
+ try:
+ data = self.get_structure(clean=True, stats=True)
+ fx_io.save_json(data, path)
+ except Exception as e:
+ raise OSError(f'Failed to save FlowSystem to JSON file {path}: {e}') from e
def fit_to_model_coords(
self,
@@ -877,13 +902,11 @@ def connect_and_transform(self):
self._register_missing_carriers()
self._assign_element_colors()
- # Prepare effects BEFORE transform_data,
- # so the penalty Effect gets transformed too.
- # Note: status parameter propagation happens inside Component.transform_data()
+ # Create penalty effect if needed (must happen before validation)
self._prepare_effects()
- for element in chain(self.components.values(), self.effects.values(), self.buses.values()):
- element.transform_data()
+ # Propagate status parameters from components to flows
+ self._propagate_all_status_parameters()
# Validate cross-element references after transformation
self._validate_system_integrity()
@@ -1425,11 +1448,10 @@ def optimize(self) -> OptimizeAccessor:
>>> flow_system.optimize(HighsSolver())
>>> print(flow_system.solution['Boiler(Q_th)|flow_rate'])
- Access element solutions directly:
+ Access solution data:
>>> flow_system.optimize(solver)
- >>> boiler = flow_system.components['Boiler']
- >>> print(boiler.solution)
+ >>> print(flow_system.solution['flow|rate'])
Future specialized modes:
@@ -1669,24 +1691,29 @@ def _check_if_element_already_assigned(self, element: Element) -> None:
Raises:
ValueError: If element is already assigned to a different FlowSystem
"""
- if element._flow_system is not None and element._flow_system is not self:
- raise ValueError(
- f'Element "{element.id}" is already assigned to another FlowSystem. '
- f'Each element can only belong to one FlowSystem at a time. '
- f'To use this element in multiple systems, create a copy: '
- f'flow_system.add_elements(element.copy())'
- )
+ if id(element) in self._registered_elements:
+ return # Already registered to this FlowSystem
+ # Check if any other FlowSystem has claimed this element — not possible to detect
+ # with id()-based tracking alone, but duplicates are caught by _check_if_element_is_unique
+
+ def _propagate_all_status_parameters(self) -> None:
+ """Propagate status parameters from components to their flows.
+
+ Components with status_parameters or prevent_simultaneous_flows require
+ certain flows to have StatusParameters. Transmissions with absolute_losses
+ additionally need status variables on input flows.
+ """
+ for component in self.components.values():
+ component._propagate_status_parameters()
def _prepare_effects(self) -> None:
"""Create the penalty effect if needed.
- Called before transform_data() so the penalty effect gets transformed.
Validation is done after transformation via _run_validation().
"""
if self.effects._penalty_effect is None:
penalty = self.effects._create_penalty_effect()
- if penalty._flow_system is None:
- penalty.link_to_flow_system(self)
+ self._registered_elements.add(id(penalty))
def _run_validation(self) -> None:
"""Run all validation through batched *Data classes.
@@ -1695,7 +1722,7 @@ def _run_validation(self) -> None:
- Config validation (simple checks)
- DataArray validation (post-transformation checks)
- Called after transform_data(). The cached *Data instances are
+ Called during connect_and_transform(). The cached *Data instances are
reused during model building.
"""
batched = self.batched
@@ -1734,12 +1761,14 @@ def _validate_system_integrity(self) -> None:
def _add_effects(self, *args: Effect) -> None:
for effect in args:
- effect.link_to_flow_system(self) # Link element to FlowSystem
+ self._registered_elements.add(id(effect))
self.effects.add_effects(*args)
def _add_components(self, *components: Component) -> None:
for new_component in list(components):
- new_component.link_to_flow_system(self) # Link element to FlowSystem
+ self._registered_elements.add(id(new_component))
+ for flow in new_component.flows.values():
+ self._registered_elements.add(id(flow))
self.components.add(new_component) # Add to existing components
# Invalidate cache once after all additions
if components:
@@ -1748,7 +1777,7 @@ def _add_components(self, *components: Component) -> None:
def _add_buses(self, *buses: Bus):
for new_bus in list(buses):
- new_bus.link_to_flow_system(self) # Link element to FlowSystem
+ self._registered_elements.add(id(new_bus))
self.buses.add(new_bus) # Add to existing buses
# Invalidate cache once after all additions
if buses:
diff --git a/flixopt/flow_system_status.py b/flixopt/flow_system_status.py
index aef8c0957..47e39cf43 100644
--- a/flixopt/flow_system_status.py
+++ b/flixopt/flow_system_status.py
@@ -111,10 +111,9 @@ def _clear_solved(fs: FlowSystem) -> None:
def _clear_model_built(fs: FlowSystem) -> None:
"""Clear artifacts from MODEL_BUILT status."""
- # Clear element variable/constraint name mappings
- for element in fs.values():
- element._variable_names = []
- element._constraint_names = []
+ # Clear element variable/constraint name registries
+ fs._element_variable_names.clear()
+ fs._element_constraint_names.clear()
# Reset the model-built flag so status downgrades to MODEL_CREATED
if fs.model is not None:
fs.model._is_built = False
diff --git a/flixopt/interface.py b/flixopt/interface.py
index 227a63c7a..363b74379 100644
--- a/flixopt/interface.py
+++ b/flixopt/interface.py
@@ -6,6 +6,7 @@
from __future__ import annotations
import logging
+from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Literal
import numpy as np
@@ -14,8 +15,9 @@
import xarray as xr
from .config import CONFIG
+from .io import build_repr_from_init
from .plot_result import PlotResult
-from .structure import Interface, register_class_for_io
+from .structure import register_class_for_io
if TYPE_CHECKING: # for type checking and preventing circular imports
from collections.abc import Iterator
@@ -25,8 +27,21 @@
logger = logging.getLogger('flixopt')
+def _has_value(param: Any) -> bool:
+ """Check if a parameter has a meaningful value.
+
+ Returns False for None and empty collections, True for everything else.
+ """
+ if param is None:
+ return False
+ if isinstance(param, (dict, list, tuple, set, frozenset)) and len(param) == 0:
+ return False
+ return True
+
+
@register_class_for_io
-class Piece(Interface):
+@dataclass(eq=False)
+class Piece:
"""Define a single linear segment with specified domain boundaries.
This class represents one linear segment that will be combined with other
@@ -71,19 +86,16 @@ class Piece(Interface):
"""
- def __init__(self, start: Numeric_TPS, end: Numeric_TPS):
- self.start = start
- self.end = end
- self.has_time_dim = False
+ start: Numeric_TPS
+ end: Numeric_TPS
- def transform_data(self) -> None:
- dims = None if self.has_time_dim else ['period', 'scenario']
- self.start = self._fit_coords(f'{self.prefix}|start', self.start, dims=dims)
- self.end = self._fit_coords(f'{self.prefix}|end', self.end, dims=dims)
+ def __repr__(self) -> str:
+ return build_repr_from_init(self)
@register_class_for_io
-class Piecewise(Interface):
+@dataclass(eq=False)
+class Piecewise:
"""Define piecewise linear approximations for modeling non-linear relationships.
Enables modeling of non-linear relationships through piecewise linear segments
@@ -199,19 +211,7 @@ class Piecewise(Interface):
"""
- def __init__(self, pieces: list[Piece]):
- self.pieces = pieces
- self._has_time_dim = False
-
- @property
- def has_time_dim(self):
- return self._has_time_dim
-
- @has_time_dim.setter
- def has_time_dim(self, value):
- self._has_time_dim = value
- for piece in self.pieces:
- piece.has_time_dim = value
+ pieces: list[Piece]
def __len__(self):
"""
@@ -228,19 +228,13 @@ def __getitem__(self, index) -> Piece:
def __iter__(self) -> Iterator[Piece]:
return iter(self.pieces) # Enables iteration like for piece in piecewise: ...
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested Piece objects."""
- super().link_to_flow_system(flow_system, prefix)
- for i, piece in enumerate(self.pieces):
- piece.link_to_flow_system(flow_system, self._sub_prefix(f'Piece{i}'))
-
- def transform_data(self) -> None:
- for piece in self.pieces:
- piece.transform_data()
+ def __repr__(self) -> str:
+ return build_repr_from_init(self)
@register_class_for_io
-class PiecewiseConversion(Interface):
+@dataclass(eq=False)
+class PiecewiseConversion:
"""Define coordinated piecewise linear relationships between multiple flows.
This class models conversion processes where multiple flows (inputs, outputs,
@@ -436,20 +430,7 @@ class PiecewiseConversion(Interface):
"""
- def __init__(self, piecewises: dict[str, Piecewise]):
- self.piecewises = piecewises
- self._has_time_dim = True
- self.has_time_dim = True # Initial propagation
-
- @property
- def has_time_dim(self):
- return self._has_time_dim
-
- @has_time_dim.setter
- def has_time_dim(self, value):
- self._has_time_dim = value
- for piecewise in self.piecewises.values():
- piecewise.has_time_dim = value
+ piecewises: dict[str, Piecewise]
def items(self):
"""
@@ -460,16 +441,6 @@ def items(self):
"""
return self.piecewises.items()
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested Piecewise objects."""
- super().link_to_flow_system(flow_system, prefix)
- for name, piecewise in self.piecewises.items():
- piecewise.link_to_flow_system(flow_system, self._sub_prefix(name))
-
- def transform_data(self) -> None:
- for piecewise in self.piecewises.values():
- piecewise.transform_data()
-
def plot(
self,
x_flow: str | None = None,
@@ -484,10 +455,6 @@ def plot(
is shown in a separate subplot (faceted by flow). Pieces are distinguished
by line dash style. If boundaries vary over time, color shows time progression.
- Note:
- Requires FlowSystem to be connected and transformed (call
- flow_system.connect_and_transform() first).
-
Args:
x_flow: Flow label to use for X-axis. Defaults to first flow in dict.
title: Plot title.
@@ -502,15 +469,10 @@ def plot(
PlotResult containing the figure and underlying piecewise data.
Examples:
- >>> flow_system.connect_and_transform()
>>> chp.piecewise_conversion.plot(x_flow='Gas', title='CHP Curves')
>>> # Select specific time range
>>> chp.piecewise_conversion.plot(select={'time': slice(0, 12)})
"""
- if not self.flow_system.connected_and_transformed:
- logger.debug('Connecting flow_system for plotting PiecewiseConversion')
- self.flow_system.connect_and_transform()
-
colorscale = colorscale or CONFIG.Plotting.default_sequential_colorscale
flow_labels = list(self.piecewises.keys())
@@ -524,6 +486,9 @@ def plot(
x_piecewise = self.piecewises[x_label]
+ def _ensure_da(v):
+ return v if isinstance(v, xr.DataArray) else xr.DataArray(v)
+
# Build Dataset with all piece data
datasets = []
for y_label in y_flows:
@@ -531,8 +496,8 @@ def plot(
for i, (x_piece, y_piece) in enumerate(zip(x_piecewise, y_piecewise, strict=False)):
ds = xr.Dataset(
{
- x_label: xr.concat([x_piece.start, x_piece.end], dim='point'),
- 'output': xr.concat([y_piece.start, y_piece.end], dim='point'),
+ x_label: xr.concat([_ensure_da(x_piece.start), _ensure_da(x_piece.end)], dim='point'),
+ 'output': xr.concat([_ensure_da(y_piece.start), _ensure_da(y_piece.end)], dim='point'),
}
)
ds = ds.assign_coords(point=['start', 'end'])
@@ -606,9 +571,13 @@ def plot(
return result
+ def __repr__(self) -> str:
+ return build_repr_from_init(self)
+
@register_class_for_io
-class PiecewiseEffects(Interface):
+@dataclass(eq=False)
+class PiecewiseEffects:
"""Define how a single decision variable contributes to system effects with piecewise rates.
This class models situations where a decision variable (the origin) generates
@@ -797,34 +766,8 @@ class PiecewiseEffects(Interface):
"""
- def __init__(self, piecewise_origin: Piecewise, piecewise_shares: dict[str, Piecewise]):
- self.piecewise_origin = piecewise_origin
- self.piecewise_shares = piecewise_shares
- self._has_time_dim = False
- self.has_time_dim = False # Initial propagation
-
- @property
- def has_time_dim(self):
- return self._has_time_dim
-
- @has_time_dim.setter
- def has_time_dim(self, value):
- self._has_time_dim = value
- self.piecewise_origin.has_time_dim = value
- for piecewise in self.piecewise_shares.values():
- piecewise.has_time_dim = value
-
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested Piecewise objects."""
- super().link_to_flow_system(flow_system, prefix)
- self.piecewise_origin.link_to_flow_system(flow_system, self._sub_prefix('origin'))
- for effect, piecewise in self.piecewise_shares.items():
- piecewise.link_to_flow_system(flow_system, self._sub_prefix(effect))
-
- def transform_data(self) -> None:
- self.piecewise_origin.transform_data()
- for piecewise in self.piecewise_shares.values():
- piecewise.transform_data()
+ piecewise_origin: Piecewise
+ piecewise_shares: dict[str, Piecewise]
def plot(
self,
@@ -839,10 +782,6 @@ def plot(
and its effect shares. Each effect is shown in a separate subplot (faceted
by effect). Pieces are distinguished by line dash style.
- Note:
- Requires FlowSystem to be connected and transformed (call
- flow_system.connect_and_transform() first).
-
Args:
title: Plot title.
select: xarray-style selection dict to filter data,
@@ -856,19 +795,17 @@ def plot(
PlotResult containing the figure and underlying piecewise data.
Examples:
- >>> flow_system.connect_and_transform()
>>> invest_params.piecewise_effects_of_investment.plot(title='Investment Effects')
"""
- if not self.flow_system.connected_and_transformed:
- logger.debug('Connecting flow_system for plotting PiecewiseEffects')
- self.flow_system.connect_and_transform()
-
colorscale = colorscale or CONFIG.Plotting.default_sequential_colorscale
effect_labels = list(self.piecewise_shares.keys())
if not effect_labels:
raise ValueError('Need at least one effect share to plot')
+ def _ensure_da(v):
+ return v if isinstance(v, xr.DataArray) else xr.DataArray(v)
+
# Build Dataset with all piece data
datasets = []
for effect_label in effect_labels:
@@ -876,8 +813,8 @@ def plot(
for i, (x_piece, y_piece) in enumerate(zip(self.piecewise_origin, y_piecewise, strict=False)):
ds = xr.Dataset(
{
- 'origin': xr.concat([x_piece.start, x_piece.end], dim='point'),
- 'share': xr.concat([y_piece.start, y_piece.end], dim='point'),
+ 'origin': xr.concat([_ensure_da(x_piece.start), _ensure_da(x_piece.end)], dim='point'),
+ 'share': xr.concat([_ensure_da(y_piece.start), _ensure_da(y_piece.end)], dim='point'),
}
)
ds = ds.assign_coords(point=['start', 'end'])
@@ -951,9 +888,13 @@ def plot(
return result
+ def __repr__(self) -> str:
+ return build_repr_from_init(self)
+
@register_class_for_io
-class InvestParameters(Interface):
+@dataclass(eq=False)
+class InvestParameters:
"""Define investment decision parameters with flexible sizing and effect modeling.
This class models investment decisions in optimization problems, supporting
@@ -1143,100 +1084,25 @@ class InvestParameters(Interface):
"""
- def __init__(
- self,
- fixed_size: Numeric_PS | None = None,
- minimum_size: Numeric_PS | None = None,
- maximum_size: Numeric_PS | None = None,
- mandatory: bool = False,
- effects_of_investment: Effect_PS | Numeric_PS | None = None,
- effects_of_investment_per_size: Effect_PS | Numeric_PS | None = None,
- effects_of_retirement: Effect_PS | Numeric_PS | None = None,
- piecewise_effects_of_investment: PiecewiseEffects | None = None,
- linked_periods: Numeric_PS | tuple[int, int] | None = None,
- ):
- self.effects_of_investment = effects_of_investment if effects_of_investment is not None else {}
- self.effects_of_retirement = effects_of_retirement if effects_of_retirement is not None else {}
- self.fixed_size = fixed_size
- self.mandatory = mandatory
- self.effects_of_investment_per_size = (
- effects_of_investment_per_size if effects_of_investment_per_size is not None else {}
- )
- self.piecewise_effects_of_investment = piecewise_effects_of_investment
- self.minimum_size = minimum_size if minimum_size is not None else CONFIG.Modeling.epsilon
- self.maximum_size = maximum_size
- self.linked_periods = linked_periods
-
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested PiecewiseEffects object if present."""
- super().link_to_flow_system(flow_system, prefix)
- if self.piecewise_effects_of_investment is not None:
- self.piecewise_effects_of_investment.link_to_flow_system(flow_system, self._sub_prefix('PiecewiseEffects'))
-
- def transform_data(self) -> None:
- # Validate that either fixed_size or maximum_size is set
- if self.fixed_size is None and self.maximum_size is None:
- raise ValueError(
- f'InvestParameters in "{self.prefix}" requires either fixed_size or maximum_size to be set. '
- f'An upper bound is needed to properly scale the optimization model.'
- )
- self.effects_of_investment = self._fit_effect_coords(
- prefix=self.prefix,
- effect_values=self.effects_of_investment,
- suffix='effects_of_investment',
- dims=['period', 'scenario'],
- )
- self.effects_of_retirement = self._fit_effect_coords(
- prefix=self.prefix,
- effect_values=self.effects_of_retirement,
- suffix='effects_of_retirement',
- dims=['period', 'scenario'],
- )
- self.effects_of_investment_per_size = self._fit_effect_coords(
- prefix=self.prefix,
- effect_values=self.effects_of_investment_per_size,
- suffix='effects_of_investment_per_size',
- dims=['period', 'scenario'],
- )
-
- if self.piecewise_effects_of_investment is not None:
- self.piecewise_effects_of_investment.has_time_dim = False
- self.piecewise_effects_of_investment.transform_data()
-
- self.minimum_size = self._fit_coords(
- f'{self.prefix}|minimum_size', self.minimum_size, dims=['period', 'scenario']
- )
- self.maximum_size = self._fit_coords(
- f'{self.prefix}|maximum_size', self.maximum_size, dims=['period', 'scenario']
- )
- # Convert tuple (first_period, last_period) to DataArray if needed
- if isinstance(self.linked_periods, (tuple, list)):
- if len(self.linked_periods) != 2:
- raise TypeError(
- f'If you provide a tuple to "linked_periods", it needs to be len=2. Got {len(self.linked_periods)=}'
- )
- if self.flow_system.periods is None:
- raise ValueError(
- f'Cannot use linked_periods={self.linked_periods} when FlowSystem has no periods defined. '
- f'Please define periods in FlowSystem or use linked_periods=None.'
- )
- logger.debug(f'Computing linked_periods from {self.linked_periods}')
- start, end = self.linked_periods
- if start not in self.flow_system.periods.values:
- logger.warning(
- f'Start of linked periods ({start} not found in periods directly: {self.flow_system.periods.values}'
- )
- if end not in self.flow_system.periods.values:
- logger.warning(
- f'End of linked periods ({end} not found in periods directly: {self.flow_system.periods.values}'
- )
- self.linked_periods = self.compute_linked_periods(start, end, self.flow_system.periods)
- logger.debug(f'Computed {self.linked_periods=}')
-
- self.linked_periods = self._fit_coords(
- f'{self.prefix}|linked_periods', self.linked_periods, dims=['period', 'scenario']
- )
- self.fixed_size = self._fit_coords(f'{self.prefix}|fixed_size', self.fixed_size, dims=['period', 'scenario'])
+ fixed_size: Numeric_PS | None = None
+ minimum_size: Numeric_PS | None = None
+ maximum_size: Numeric_PS | None = None
+ mandatory: bool = False
+ effects_of_investment: Effect_PS | Numeric_PS | None = None
+ effects_of_investment_per_size: Effect_PS | Numeric_PS | None = None
+ effects_of_retirement: Effect_PS | Numeric_PS | None = None
+ piecewise_effects_of_investment: PiecewiseEffects | None = None
+ linked_periods: Numeric_PS | tuple[int, int] | None = None
+
+ def __post_init__(self):
+ if self.effects_of_investment is None:
+ self.effects_of_investment = {}
+ if self.effects_of_retirement is None:
+ self.effects_of_retirement = {}
+ if self.effects_of_investment_per_size is None:
+ self.effects_of_investment_per_size = {}
+ if self.minimum_size is None:
+ self.minimum_size = CONFIG.Modeling.epsilon
@property
def minimum_or_fixed_size(self) -> Numeric_PS:
@@ -1278,9 +1144,13 @@ def compute_linked_periods(first_period: int, last_period: int, periods: pd.Inde
coords=(pd.Index(periods, name='period'),),
).rename('linked_periods')
+ def __repr__(self) -> str:
+ return build_repr_from_init(self)
+
@register_class_for_io
-class StatusParameters(Interface):
+@dataclass(eq=False)
+class StatusParameters:
"""Define operational constraints and effects for binary status equipment behavior.
This class models equipment that operates in discrete states (active/inactive) rather than
@@ -1468,56 +1338,23 @@ class StatusParameters(Interface):
"""
- def __init__(
- self,
- effects_per_startup: Effect_TPS | Numeric_TPS | None = None,
- effects_per_active_hour: Effect_TPS | Numeric_TPS | None = None,
- active_hours_min: Numeric_PS | None = None,
- active_hours_max: Numeric_PS | None = None,
- min_uptime: Numeric_TPS | None = None,
- max_uptime: Numeric_TPS | None = None,
- min_downtime: Numeric_TPS | None = None,
- max_downtime: Numeric_TPS | None = None,
- startup_limit: Numeric_PS | None = None,
- force_startup_tracking: bool = False,
- cluster_mode: Literal['relaxed', 'cyclic'] = 'relaxed',
- ):
- self.effects_per_startup = effects_per_startup if effects_per_startup is not None else {}
- self.effects_per_active_hour = effects_per_active_hour if effects_per_active_hour is not None else {}
- self.active_hours_min = active_hours_min
- self.active_hours_max = active_hours_max
- self.min_uptime = min_uptime
- self.max_uptime = max_uptime
- self.min_downtime = min_downtime
- self.max_downtime = max_downtime
- self.startup_limit = startup_limit
- self.force_startup_tracking: bool = force_startup_tracking
- self.cluster_mode = cluster_mode
-
- def transform_data(self) -> None:
- self.effects_per_startup = self._fit_effect_coords(
- prefix=self.prefix,
- effect_values=self.effects_per_startup,
- suffix='per_startup',
- )
- self.effects_per_active_hour = self._fit_effect_coords(
- prefix=self.prefix,
- effect_values=self.effects_per_active_hour,
- suffix='per_active_hour',
- )
- self.min_uptime = self._fit_coords(f'{self.prefix}|min_uptime', self.min_uptime)
- self.max_uptime = self._fit_coords(f'{self.prefix}|max_uptime', self.max_uptime)
- self.min_downtime = self._fit_coords(f'{self.prefix}|min_downtime', self.min_downtime)
- self.max_downtime = self._fit_coords(f'{self.prefix}|max_downtime', self.max_downtime)
- self.active_hours_max = self._fit_coords(
- f'{self.prefix}|active_hours_max', self.active_hours_max, dims=['period', 'scenario']
- )
- self.active_hours_min = self._fit_coords(
- f'{self.prefix}|active_hours_min', self.active_hours_min, dims=['period', 'scenario']
- )
- self.startup_limit = self._fit_coords(
- f'{self.prefix}|startup_limit', self.startup_limit, dims=['period', 'scenario']
- )
+ effects_per_startup: Effect_TPS | Numeric_TPS | None = None
+ effects_per_active_hour: Effect_TPS | Numeric_TPS | None = None
+ active_hours_min: Numeric_PS | None = None
+ active_hours_max: Numeric_PS | None = None
+ min_uptime: Numeric_TPS | None = None
+ max_uptime: Numeric_TPS | None = None
+ min_downtime: Numeric_TPS | None = None
+ max_downtime: Numeric_TPS | None = None
+ startup_limit: Numeric_PS | None = None
+ force_startup_tracking: bool = False
+ cluster_mode: Literal['relaxed', 'cyclic'] = 'relaxed'
+
+ def __post_init__(self):
+ if self.effects_per_startup is None:
+ self.effects_per_startup = {}
+ if self.effects_per_active_hour is None:
+ self.effects_per_active_hour = {}
@property
def use_uptime_tracking(self) -> bool:
@@ -1536,9 +1373,12 @@ def use_startup_tracking(self) -> bool:
return True
return any(
- self._has_value(param)
+ _has_value(param)
for param in [
self.effects_per_startup,
self.startup_limit,
]
)
+
+ def __repr__(self) -> str:
+ return build_repr_from_init(self)
diff --git a/flixopt/io.py b/flixopt/io.py
index 514e22665..2e77c0e68 100644
--- a/flixopt/io.py
+++ b/flixopt/io.py
@@ -27,6 +27,40 @@
from .flow_system import FlowSystem
from .types import Numeric_TPS
+# Lazy imports to avoid circular dependency (structure.py imports io.py)
+# These are used at call time, not at import time.
+_resolve_ref = None
+_resolve_da_ref = None
+_create_ref = None
+
+
+def _get_resolve_reference_structure():
+ global _resolve_ref
+ if _resolve_ref is None:
+ from .structure import resolve_reference_structure
+
+ _resolve_ref = resolve_reference_structure
+ return _resolve_ref
+
+
+def _get_resolve_dataarray_reference():
+ global _resolve_da_ref
+ if _resolve_da_ref is None:
+ from .structure import _resolve_dataarray_reference
+
+ _resolve_da_ref = _resolve_dataarray_reference
+ return _resolve_da_ref
+
+
+def _get_create_reference_structure():
+ global _create_ref
+ if _create_ref is None:
+ from .structure import create_reference_structure
+
+ _create_ref = create_reference_structure
+ return _create_ref
+
+
logger = logging.getLogger('flixopt')
@@ -1707,12 +1741,21 @@ def _create_flow_system(
cls: type[FlowSystem],
) -> FlowSystem:
"""Create FlowSystem instance with constructor parameters."""
+ _resolve_da = _get_resolve_dataarray_reference()
+
+ def _resolve(key, default=None):
+ """Resolve a reference_structure value, unwrapping ::: refs via _resolve_da."""
+ val = reference_structure.get(key, default)
+ if isinstance(val, str) and val.startswith(':::'):
+ val = _resolve_da(val, arrays_dict)
+ return val
+
# Extract cluster index if present (clustered FlowSystem)
clusters = ds.indexes.get('cluster')
# Resolve cluster_weight if present in reference structure
cluster_weight_for_constructor = (
- cls._resolve_dataarray_reference(reference_structure['cluster_weight'], arrays_dict)
+ _resolve_da(reference_structure['cluster_weight'], arrays_dict)
if 'cluster_weight' in reference_structure
else None
)
@@ -1720,14 +1763,7 @@ def _create_flow_system(
# Resolve scenario_weights only if scenario dimension exists
scenario_weights = None
if ds.indexes.get('scenario') is not None and 'scenario_weights' in reference_structure:
- scenario_weights = cls._resolve_dataarray_reference(reference_structure['scenario_weights'], arrays_dict)
-
- # Resolve timestep_duration if present as DataArray reference
- timestep_duration = None
- if 'timestep_duration' in reference_structure:
- ref_value = reference_structure['timestep_duration']
- if isinstance(ref_value, str) and ref_value.startswith(':::'):
- timestep_duration = cls._resolve_dataarray_reference(ref_value, arrays_dict)
+ scenario_weights = _resolve_da(reference_structure['scenario_weights'], arrays_dict)
# Get timesteps - convert integer index to RangeIndex for segmented systems
time_index = ds.indexes['time']
@@ -1739,15 +1775,15 @@ def _create_flow_system(
periods=ds.indexes.get('period'),
scenarios=ds.indexes.get('scenario'),
clusters=clusters,
- hours_of_last_timestep=reference_structure.get('hours_of_last_timestep'),
- hours_of_previous_timesteps=reference_structure.get('hours_of_previous_timesteps'),
- weight_of_last_period=reference_structure.get('weight_of_last_period'),
+ hours_of_last_timestep=_resolve('hours_of_last_timestep'),
+ hours_of_previous_timesteps=_resolve('hours_of_previous_timesteps'),
+ weight_of_last_period=_resolve('weight_of_last_period'),
scenario_weights=scenario_weights,
cluster_weight=cluster_weight_for_constructor,
- scenario_independent_sizes=reference_structure.get('scenario_independent_sizes', True),
- scenario_independent_flow_rates=reference_structure.get('scenario_independent_flow_rates', False),
- name=reference_structure.get('name'),
- timestep_duration=timestep_duration,
+ scenario_independent_sizes=_resolve('scenario_independent_sizes', True),
+ scenario_independent_flow_rates=_resolve('scenario_independent_flow_rates', False),
+ name=_resolve('name'),
+ timestep_duration=_resolve('timestep_duration'),
)
@staticmethod
@@ -1761,23 +1797,25 @@ def _restore_elements(
from .effects import Effect
from .elements import Bus, Component
+ _resolve = _get_resolve_reference_structure()
+
# Restore components
for comp_label, comp_data in reference_structure.get('components', {}).items():
- component = cls._resolve_reference_structure(comp_data, arrays_dict)
+ component = _resolve(comp_data, arrays_dict)
if not isinstance(component, Component):
logger.critical(f'Restoring component {comp_label} failed.')
flow_system._add_components(component)
# Restore buses
for bus_label, bus_data in reference_structure.get('buses', {}).items():
- bus = cls._resolve_reference_structure(bus_data, arrays_dict)
+ bus = _resolve(bus_data, arrays_dict)
if not isinstance(bus, Bus):
logger.critical(f'Restoring bus {bus_label} failed.')
flow_system._add_buses(bus)
# Restore effects
for effect_label, effect_data in reference_structure.get('effects', {}).items():
- effect = cls._resolve_reference_structure(effect_data, arrays_dict)
+ effect = _resolve(effect_data, arrays_dict)
if not isinstance(effect, Effect):
logger.critical(f'Restoring effect {effect_label} failed.')
flow_system._add_effects(effect)
@@ -1845,7 +1883,7 @@ def _restore_clustering(
else:
main_var_names.append(name)
- clustering = fs_cls._resolve_reference_structure(clustering_structure, clustering_arrays)
+ clustering = _get_resolve_reference_structure()(clustering_structure, clustering_arrays)
flow_system.clustering = clustering
# Reconstruct aggregated_data from FlowSystem's main data arrays
@@ -1866,11 +1904,12 @@ def _restore_metadata(
cls: type[FlowSystem],
) -> None:
"""Restore carriers from reference structure."""
+ _resolve = _get_resolve_reference_structure()
# Restore carriers if present
if 'carriers' in reference_structure:
carriers_structure = json.loads(reference_structure['carriers'])
for carrier_data in carriers_structure.values():
- carrier = cls._resolve_reference_structure(carrier_data, {})
+ carrier = _resolve(carrier_data, {})
flow_system.carriers.add(carrier)
# --- Serialization (FlowSystem -> Dataset) ---
@@ -1962,9 +2001,10 @@ def _add_solution_to_dataset(
def _add_carriers_to_dataset(ds: xr.Dataset, carriers: Any) -> xr.Dataset:
"""Add carrier definitions to dataset attributes."""
if carriers:
+ _create_ref_fn = _get_create_reference_structure()
carriers_structure = {}
for name, carrier in carriers.items():
- carrier_ref, _ = carrier._create_reference_structure()
+ carrier_ref, _ = _create_ref_fn(carrier)
carriers_structure[name] = carrier_ref
ds.attrs['carriers'] = json.dumps(carriers_structure, ensure_ascii=False)
diff --git a/flixopt/structure.py b/flixopt/structure.py
index 8f26746d1..16f361ffa 100644
--- a/flixopt/structure.py
+++ b/flixopt/structure.py
@@ -5,10 +5,10 @@
from __future__ import annotations
+import dataclasses
import inspect
import json
import logging
-import pathlib
import re
import warnings
from abc import ABC, abstractmethod
@@ -17,7 +17,6 @@
from typing import (
TYPE_CHECKING,
Any,
- ClassVar,
Generic,
Literal,
TypeVar,
@@ -30,7 +29,7 @@
from . import io as fx_io
from .config import DEPRECATION_REMOVAL_VERSION
-from .core import FlowSystemDimensions, TimeSeriesData, get_dataarray_stats
+from .core import TimeSeriesData, align_to_coords, get_dataarray_stats
from .id_list import IdList
if TYPE_CHECKING: # for type checking and preventing circular imports
@@ -38,7 +37,6 @@
from .effects import EffectsModel
from .flow_system import FlowSystem
- from .types import Effect_TPS, Numeric_TPS, NumericOrBool
logger = logging.getLogger('flixopt')
@@ -677,6 +675,339 @@ def register_class_for_io(cls):
return cls
+# =============================================================================
+# Standalone Serialization Functions (path-based DataArray naming)
+# =============================================================================
+
+
+def _is_numeric(obj: Any) -> bool:
+ """Check if an object is a numeric value that should be stored as a DataArray.
+
+ Matches arrays (np.ndarray, pd.Series, pd.DataFrame) and scalars
+ (int, float, np.integer, np.floating). Excludes bool (subclass of int).
+
+ Storing numerics as DataArrays enables:
+ - Dataset operations (resampling, selection, etc.)
+ - Efficient binary storage in NetCDF
+ - Dtype preservation
+ """
+ if isinstance(obj, bool):
+ return False
+ return isinstance(obj, (np.ndarray, pd.Series, pd.DataFrame, int, float, np.integer, np.floating))
+
+
+def create_reference_structure(
+ obj, path_prefix: str = '', coords: dict[str, pd.Index] | None = None
+) -> tuple[dict, dict[str, xr.DataArray]]:
+ """Extract DataArrays from any registered object, using path-based keys.
+
+    Works with any object whose class is in CLASS_REGISTRY, any
+    dataclass, or any object
+ with an inspectable ``__init__``.
+
+ DataArray keys are deterministic paths built from the object hierarchy:
+    ``element_id|param_name`` for top-level, ``element_id|param|sub_param`` for nested.
+
+ Args:
+ obj: Object to serialize.
+ path_prefix: Path prefix for DataArray keys (e.g., ``'components.Boiler'``).
+ coords: Model coordinates for aligning numeric arrays. When provided,
+ numpy arrays / pandas objects are converted to properly-dimensioned
+ DataArrays via ``align_to_coords``, ensuring they participate in
+ dataset operations (resampling, selection) and avoid dimension conflicts.
+
+ Returns:
+ Tuple of (reference_structure dict, extracted_arrays dict).
+ """
+ structure: dict[str, Any] = {'__class__': obj.__class__.__name__}
+ all_arrays: dict[str, xr.DataArray] = {}
+
+ params = _get_serializable_params(obj)
+
+ for name, value in params.items():
+ if value is None:
+ continue
+ if isinstance(value, pd.Index):
+ logger.debug(f'Skipping {name=} because it is an Index')
+ continue
+
+ param_path = f'{path_prefix}|{name}' if path_prefix else name
+ processed, arrays = _extract_recursive(value, param_path, coords)
+ all_arrays.update(arrays)
+ if processed is not None and not _is_empty(processed):
+ structure[name] = processed
+
+ return structure, all_arrays
+
+
+def _extract_recursive(
+ obj: Any, path: str, coords: dict[str, pd.Index] | None = None
+) -> tuple[Any, dict[str, xr.DataArray]]:
+ """Recursively extract DataArrays, using *path* as the array key.
+
+ Handles DataArrays, numeric arrays (np.ndarray, pd.Series, pd.DataFrame),
+ registered classes, plain dataclasses, dicts, lists, tuples, sets, IdList,
+ and scalar/basic types.
+
+ When *coords* is provided, numeric arrays are aligned to model dimensions
+ via ``align_to_coords`` to get proper dimension names.
+ """
+ arrays: dict[str, xr.DataArray] = {}
+
+ if isinstance(obj, xr.DataArray):
+ arrays[path] = obj.rename(path)
+ return f':::{path}', arrays
+
+ # Numeric values → DataArray for dataset operations and binary NetCDF storage.
+ if coords is not None and _is_numeric(obj):
+ da = align_to_coords(obj, coords, name=path)
+ arrays[path] = da.rename(path)
+ return f':::{path}', arrays
+
+ if obj.__class__.__name__ in CLASS_REGISTRY:
+ return create_reference_structure(obj, path_prefix=path, coords=coords)
+
+ if dataclasses.is_dataclass(obj) and not isinstance(obj, type):
+ structure: dict[str, Any] = {'__class__': obj.__class__.__name__}
+ for field in dataclasses.fields(obj):
+ value = getattr(obj, field.name)
+ if value is None:
+ continue
+ processed, field_arrays = _extract_recursive(value, f'{path}|{field.name}', coords)
+ arrays.update(field_arrays)
+ if processed is not None and not _is_empty(processed):
+ structure[field.name] = processed
+ return structure, arrays
+
+ if isinstance(obj, IdList):
+ processed_list: list[Any] = []
+ for key, item in obj.items():
+ p, a = _extract_recursive(item, f'{path}|{key}', coords)
+ arrays.update(a)
+ processed_list.append(p)
+ return processed_list, arrays
+
+ if isinstance(obj, dict):
+ processed_dict = {}
+ for key, value in obj.items():
+ p, a = _extract_recursive(value, f'{path}|{key}', coords)
+ arrays.update(a)
+ processed_dict[key] = p
+ return processed_dict, arrays
+
+ if isinstance(obj, (list, tuple)):
+ processed_list: list[Any] = []
+ for i, item in enumerate(obj):
+ p, a = _extract_recursive(item, f'{path}|{i}', coords)
+ arrays.update(a)
+ processed_list.append(p)
+ return processed_list, arrays
+
+ if isinstance(obj, set):
+ processed_list = []
+ for i, item in enumerate(obj):
+ p, a = _extract_recursive(item, f'{path}|{i}', coords)
+ arrays.update(a)
+ processed_list.append(p)
+ return processed_list, arrays
+
+ # Scalar / basic type
+ return _to_basic_type(obj), arrays
+
+
+def _has_dataclass_init(cls: type) -> bool:
+ """Check if a class uses a dataclass-generated __init__ (not a custom override).
+
+ Returns True only when @dataclass was applied directly to ``cls`` with init=True.
+ Classes that merely inherit from a dataclass (e.g. Boiler(LinearConverter))
+ but define their own __init__ return False.
+ """
+ params = cls.__dict__.get('__dataclass_params__')
+ return params is not None and params.init
+
+
+def _get_serializable_params(obj) -> dict[str, Any]:
+ """Get name->value pairs for serialization from ``__init__`` parameters."""
+ _skip = {'self', 'label', 'label_as_positional', 'args', 'kwargs'}
+
+ # Class-level exclusion set for IO serialization
+ io_exclude = getattr(obj.__class__, '_io_exclude', set())
+ _skip |= io_exclude
+
+ # Prefer dataclass fields when class uses dataclass-generated __init__
+ if _has_dataclass_init(obj.__class__):
+ return {f.name: getattr(obj, f.name, None) for f in dataclasses.fields(obj) if f.name not in _skip and f.init}
+
+ # Fallback for non-dataclass or custom-__init__ classes
+ sig = inspect.signature(obj.__init__)
+ return {name: getattr(obj, name, None) for name in sig.parameters if name not in _skip}
+
+
+def _to_basic_type(obj: Any) -> Any:
+ """Convert a single value to a JSON-compatible basic Python type."""
+ if obj is None or isinstance(obj, (str, int, float, bool)):
+ return obj
+ if isinstance(obj, np.integer):
+ return int(obj)
+ if isinstance(obj, np.floating):
+ return float(obj)
+ if isinstance(obj, np.bool_):
+ return bool(obj)
+ if isinstance(obj, (np.ndarray, pd.Series, pd.DataFrame)):
+ return obj.tolist() if hasattr(obj, 'tolist') else list(obj)
+ if isinstance(obj, dict):
+ return {k: _to_basic_type(v) for k, v in obj.items()}
+ if isinstance(obj, (list, tuple)):
+ return [_to_basic_type(item) for item in obj]
+ if isinstance(obj, set):
+ return [_to_basic_type(item) for item in obj]
+ if hasattr(obj, 'isoformat'):
+ return obj.isoformat()
+ if hasattr(obj, '__dict__'):
+ logger.warning(f'Converting custom object {type(obj)} to dict representation: {obj}')
+ return {str(k): _to_basic_type(v) for k, v in obj.__dict__.items()}
+ logger.error(f'Converting unknown type {type(obj)} to string: {obj}')
+ return str(obj)
+
+
+def _is_empty(obj: Any) -> bool:
+ """Check if object is an empty container (dict, list, tuple, set)."""
+ return isinstance(obj, (dict, list, tuple, set)) and len(obj) == 0
+
+
+def resolve_reference_structure(structure: Any, arrays_dict: dict[str, xr.DataArray]) -> Any:
+ """Resolve a reference structure back to actual objects.
+
+ Resolves ``:::path`` DataArray references and ``__class__`` markers back to objects.
+    Recurses through dicts and lists; ``__class__``-tagged dicts are reinstantiated.
+
+ Args:
+ structure: Structure containing ``:::path`` references or ``__class__`` markers.
+ arrays_dict: Dictionary mapping path keys to DataArrays.
+
+ Returns:
+ Resolved structure with DataArrays and reconstructed objects.
+ """
+ # Handle DataArray references
+ if isinstance(structure, str) and structure.startswith(':::'):
+ return _resolve_dataarray_reference(structure, arrays_dict)
+
+ if isinstance(structure, list):
+ resolved_list = []
+ for item in structure:
+ resolved_item = resolve_reference_structure(item, arrays_dict)
+ if resolved_item is not None:
+ resolved_list.append(resolved_item)
+ return resolved_list
+
+ if isinstance(structure, dict):
+ if structure.get('__class__'):
+ class_name = structure['__class__']
+ if class_name not in CLASS_REGISTRY:
+ raise ValueError(
+ f"Class '{class_name}' not found in CLASS_REGISTRY. "
+ f'Available classes: {list(CLASS_REGISTRY.keys())}'
+ )
+
+ nested_class = CLASS_REGISTRY[class_name]
+ nested_data = {k: v for k, v in structure.items() if k != '__class__'}
+ resolved_nested_data = resolve_reference_structure(nested_data, arrays_dict)
+
+ try:
+ # Discover init parameters — prefer dataclass fields
+ if _has_dataclass_init(nested_class):
+ init_params = {f.name for f in dataclasses.fields(nested_class) if f.init} | {'self'}
+ else:
+ init_params = set(inspect.signature(nested_class.__init__).parameters.keys())
+
+ # Filter out legacy runtime attrs from old serialized files
+ _legacy_deferred = {'_variable_names', '_constraint_names'}
+ constructor_data = {k: v for k, v in resolved_nested_data.items() if k not in _legacy_deferred}
+
+ # Handle renamed parameters from old serialized data
+ if 'label' in constructor_data and 'label' not in init_params:
+ new_key = 'flow_id' if 'flow_id' in init_params else 'id'
+ constructor_data[new_key] = constructor_data.pop('label')
+ if 'id' in constructor_data and 'id' not in init_params and 'flow_id' in init_params:
+ constructor_data['flow_id'] = constructor_data.pop('id')
+
+ # Check for unknown parameters
+ unknown_params = set(constructor_data.keys()) - init_params
+ if unknown_params:
+ raise TypeError(
+ f'{class_name}.__init__() got unexpected keyword arguments: {unknown_params}. '
+ f'This may indicate renamed parameters that need conversion. '
+ f'Valid parameters are: {init_params - {"self"}}'
+ )
+
+ instance = nested_class(**constructor_data)
+
+ return instance
+ except TypeError as e:
+ raise ValueError(f'Failed to create instance of {class_name}: {e}') from e
+ except Exception as e:
+ raise ValueError(f'Failed to create instance of {class_name}: {e}') from e
+ else:
+ # Regular dictionary
+ resolved_dict = {}
+ for key, value in structure.items():
+ resolved_value = resolve_reference_structure(value, arrays_dict)
+ if resolved_value is not None or value is None:
+ resolved_dict[key] = resolved_value
+ return resolved_dict
+
+ return structure
+
+
+def _resolve_dataarray_reference(reference: str, arrays_dict: dict[str, xr.DataArray]) -> xr.DataArray | TimeSeriesData:
+ """Resolve a single ``:::path`` DataArray reference.
+
+ Args:
+ reference: Reference string starting with ``:::``.
+ arrays_dict: Dictionary of available DataArrays.
+
+ Returns:
+ Resolved DataArray or TimeSeriesData object.
+ """
+ array_name = reference[3:]
+ if array_name not in arrays_dict:
+ raise ValueError(f"Referenced DataArray '{array_name}' not found in dataset")
+
+ array = arrays_dict[array_name]
+
+    # Handle null values: log an error, then drop all-null timesteps (if a time dim exists)
+ has_nulls = (np.issubdtype(array.dtype, np.floating) and np.any(np.isnan(array.values))) or (
+ array.dtype == object and pd.isna(array.values).any()
+ )
+ if has_nulls:
+ logger.error(f"DataArray '{array_name}' contains null values. Dropping all-null along present dims.")
+ if 'time' in array.dims:
+ array = array.dropna(dim='time', how='all')
+
+ if TimeSeriesData.is_timeseries_data(array):
+ return TimeSeriesData.from_dataarray(array)
+
+ # Unwrap 0-d DataArrays back to Python scalars
+ if array.ndim == 0:
+ return array.item()
+
+ return array
+
+
+def replace_references_with_stats(structure, arrays_dict: dict[str, xr.DataArray]):
+ """Replace ``:::path`` DataArray references with statistical summaries."""
+ if isinstance(structure, str) and structure.startswith(':::'):
+ array_name = structure[3:]
+ if array_name in arrays_dict:
+ return get_dataarray_stats(arrays_dict[array_name])
+ return structure
+ elif isinstance(structure, dict):
+ return {k: replace_references_with_stats(v, arrays_dict) for k, v in structure.items()}
+ elif isinstance(structure, list):
+ return [replace_references_with_stats(item, arrays_dict) for item in structure]
+ return structure
+
+
class _BuildTimer:
"""Simple timing helper for build_model profiling."""
@@ -755,7 +1086,9 @@ def _populate_element_variable_names(self):
self._populate_names_from_type_level_models()
def _populate_names_from_type_level_models(self):
- """Populate element variable/constraint names from type-level models."""
+ """Populate element variable/constraint names in FlowSystem registry."""
+ var_names = self.flow_system._element_variable_names
+ con_names = self.flow_system._element_constraint_names
# Helper to find batched variables that contain a specific element ID in a dimension
def _find_vars_for_element(element_id: str, dim_name: str) -> list[str]:
@@ -763,73 +1096,79 @@ def _find_vars_for_element(element_id: str, dim_name: str) -> list[str]:
Returns the batched variable names (e.g., 'flow|rate', 'storage|charge').
"""
- var_names = []
+ result = []
for var_name in self.variables:
var = self.variables[var_name]
if dim_name in var.dims:
try:
if element_id in var.coords[dim_name].values:
- var_names.append(var_name)
+ result.append(var_name)
except (KeyError, AttributeError):
pass
- return var_names
+ return result
def _find_constraints_for_element(element_id: str, dim_name: str) -> list[str]:
"""Find all constraint names that have this element in their dimension."""
- con_names = []
+ result = []
for con_name in self.constraints:
con = self.constraints[con_name]
if dim_name in con.dims:
try:
if element_id in con.coords[dim_name].values:
- con_names.append(con_name)
+ result.append(con_name)
except (KeyError, AttributeError):
pass
# Also check for element-specific constraints (e.g., bus|BusLabel|balance)
elif element_id in con_name.split('|'):
- con_names.append(con_name)
- return con_names
+ result.append(con_name)
+ return result
# Populate flows
for flow in self.flow_system.flows.values():
- flow._variable_names = _find_vars_for_element(flow.id, 'flow')
- flow._constraint_names = _find_constraints_for_element(flow.id, 'flow')
+ var_names[flow.id] = _find_vars_for_element(flow.id, 'flow')
+ con_names[flow.id] = _find_constraints_for_element(flow.id, 'flow')
# Populate buses
for bus in self.flow_system.buses.values():
- bus._variable_names = _find_vars_for_element(bus.id, 'bus')
- bus._constraint_names = _find_constraints_for_element(bus.id, 'bus')
+ var_names[bus.id] = _find_vars_for_element(bus.id, 'bus')
+ con_names[bus.id] = _find_constraints_for_element(bus.id, 'bus')
# Populate storages
from .components import Storage
for comp in self.flow_system.components.values():
if isinstance(comp, Storage):
- comp._variable_names = _find_vars_for_element(comp.id, 'storage')
- comp._constraint_names = _find_constraints_for_element(comp.id, 'storage')
+ comp_vars = _find_vars_for_element(comp.id, 'storage')
+ comp_cons = _find_constraints_for_element(comp.id, 'storage')
# Also add flow variables (storages have charging/discharging flows)
for flow in comp.flows.values():
- comp._variable_names.extend(flow._variable_names)
- comp._constraint_names.extend(flow._constraint_names)
+ comp_vars.extend(var_names[flow.id])
+ comp_cons.extend(con_names[flow.id])
+ var_names[comp.id] = comp_vars
+ con_names[comp.id] = comp_cons
else:
# Generic component - collect from child flows
- comp._variable_names = []
- comp._constraint_names = []
+ comp_vars = []
+ comp_cons = []
# Add component-level variables (status, etc.)
- comp._variable_names.extend(_find_vars_for_element(comp.id, 'component'))
- comp._constraint_names.extend(_find_constraints_for_element(comp.id, 'component'))
+ comp_vars.extend(_find_vars_for_element(comp.id, 'component'))
+ comp_cons.extend(_find_constraints_for_element(comp.id, 'component'))
# Add flow variables
for flow in comp.flows.values():
- comp._variable_names.extend(flow._variable_names)
- comp._constraint_names.extend(flow._constraint_names)
+ comp_vars.extend(var_names[flow.id])
+ comp_cons.extend(con_names[flow.id])
+ var_names[comp.id] = comp_vars
+ con_names[comp.id] = comp_cons
# Populate effects
for effect in self.flow_system.effects.values():
- effect._variable_names = _find_vars_for_element(effect.id, 'effect')
- effect._constraint_names = _find_constraints_for_element(effect.id, 'effect')
+ var_names[effect.id] = _find_vars_for_element(effect.id, 'effect')
+ con_names[effect.id] = _find_constraints_for_element(effect.id, 'effect')
def _build_results_structure(self) -> dict[str, dict]:
"""Build results structure for all elements using type-level models."""
+ var_names = self.flow_system._element_variable_names
+ con_names = self.flow_system._element_constraint_names
results = {
'Components': {},
@@ -843,8 +1182,8 @@ def _build_results_structure(self) -> dict[str, dict]:
flow_ids = [f.id for f in comp.flows.values()]
results['Components'][comp.id] = {
'id': comp.id,
- 'variables': comp._variable_names,
- 'constraints': comp._constraint_names,
+ 'variables': var_names.get(comp.id, []),
+ 'constraints': con_names.get(comp.id, []),
'inputs': ['flow|rate'] * len(comp.inputs),
'outputs': ['flow|rate'] * len(comp.outputs),
'flows': flow_ids,
@@ -859,8 +1198,8 @@ def _build_results_structure(self) -> dict[str, dict]:
output_vars.append('bus|virtual_demand')
results['Buses'][bus.id] = {
'id': bus.id,
- 'variables': bus._variable_names,
- 'constraints': bus._constraint_names,
+ 'variables': var_names.get(bus.id, []),
+ 'constraints': con_names.get(bus.id, []),
'inputs': input_vars,
'outputs': output_vars,
'flows': [f.id for f in bus.flows.values()],
@@ -870,16 +1209,16 @@ def _build_results_structure(self) -> dict[str, dict]:
for effect in sorted(self.flow_system.effects.values(), key=lambda e: e.id.upper()):
results['Effects'][effect.id] = {
'id': effect.id,
- 'variables': effect._variable_names,
- 'constraints': effect._constraint_names,
+ 'variables': var_names.get(effect.id, []),
+ 'constraints': con_names.get(effect.id, []),
}
# Flows
for flow in sorted(self.flow_system.flows.values(), key=lambda f: f.id.upper()):
results['Flows'][flow.id] = {
'id': flow.id,
- 'variables': flow._variable_names,
- 'constraints': flow._constraint_names,
+ 'variables': var_names.get(flow.id, []),
+ 'constraints': con_names.get(flow.id, []),
'start': flow.bus if flow.is_input_in_component else flow.component,
'end': flow.component if flow.is_input_in_component else flow.bus,
'component': flow.component,
@@ -1127,7 +1466,7 @@ def objective_weights(self) -> xr.DataArray:
elif default_weights is not None:
period_weights = default_weights
else:
- period_weights = obj_effect._fit_coords(name='period_weights', data=1, dims=['period'])
+ period_weights = align_to_coords(1, self.flow_system.indexes, name='period_weights', dims=['period'])
scenario_weights = self.scenario_weights
return period_weights * scenario_weights
@@ -1189,857 +1528,33 @@ def __repr__(self) -> str:
return f'{title}\n{"=" * len(title)}\n\n{all_sections}'
-class Interface:
- """
- Base class for all Elements and Models in flixopt that provides serialization capabilities.
-
- This class enables automatic serialization/deserialization of objects containing xarray DataArrays
- and nested Interface objects to/from xarray Datasets and NetCDF files. It uses introspection
- of constructor parameters to automatically handle most serialization scenarios.
+def valid_id(id: str) -> str:
+ """Check if the id is valid and return it (possibly stripped).
- Key Features:
- - Automatic extraction and restoration of xarray DataArrays
- - Support for nested Interface objects
- - NetCDF and JSON export/import
- - Recursive handling of complex nested structures
-
- Subclasses must implement:
- transform_data(): Transform data to match FlowSystem dimensions
+ Raises:
+ ValueError: If the id contains forbidden characters.
"""
-
- # Class-level defaults for attributes set by link_to_flow_system()
- # These provide type hints and default values without requiring __init__ in subclasses
- _flow_system: FlowSystem | None = None
- _prefix: str = ''
-
- def transform_data(self) -> None:
- """Transform the data of the interface to match the FlowSystem's dimensions.
-
- Uses `self._prefix` (set during `link_to_flow_system()`) to name transformed data.
-
- Raises:
- NotImplementedError: Must be implemented by subclasses
-
- Note:
- The FlowSystem reference is available via self._flow_system (for Interface objects)
- or self.flow_system property (for Element objects). Elements must be registered
- to a FlowSystem before calling this method.
- """
- raise NotImplementedError('Every Interface subclass needs a transform_data() method')
-
- @property
- def prefix(self) -> str:
- """The prefix used for naming transformed data (e.g., 'Boiler(Q_th)|status_parameters')."""
- return self._prefix
-
- def _sub_prefix(self, name: str) -> str:
- """Build a prefix for a nested interface by appending name to current prefix."""
- return f'{self._prefix}|{name}' if self._prefix else name
-
- def link_to_flow_system(self, flow_system: FlowSystem, prefix: str = '') -> None:
- """Link this interface and all nested interfaces to a FlowSystem.
-
- This method is called automatically during element registration to enable
- elements to access FlowSystem properties without passing the reference
- through every method call. It also sets the prefix used for naming
- transformed data.
-
- Subclasses with nested Interface objects should override this method
- to propagate the link to their nested interfaces by calling
- `super().link_to_flow_system(flow_system, prefix)` first, then linking
- nested objects with appropriate prefixes.
-
- Args:
- flow_system: The FlowSystem to link to
- prefix: The prefix for naming transformed data (e.g., 'Boiler(Q_th)')
-
- Examples:
- Override in a subclass with nested interfaces:
-
- ```python
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- super().link_to_flow_system(flow_system, prefix)
- if self.nested_interface is not None:
- self.nested_interface.link_to_flow_system(flow_system, f'{prefix}|nested' if prefix else 'nested')
- ```
-
- Creating an Interface dynamically during modeling:
-
- ```python
- # In a Model class
- if flow.status_parameters is None:
- flow.status_parameters = StatusParameters()
- flow.status_parameters.link_to_flow_system(self._model.flow_system, f'{flow.id}')
- ```
- """
- self._flow_system = flow_system
- self._prefix = prefix
-
- @property
- def flow_system(self) -> FlowSystem:
- """Access the FlowSystem this interface is linked to.
-
- Returns:
- The FlowSystem instance this interface belongs to.
-
- Raises:
- RuntimeError: If interface has not been linked to a FlowSystem yet.
-
- Note:
- For Elements, this is set during add_elements().
- For parameter classes, this is set recursively when the parent Element is registered.
- """
- if self._flow_system is None:
- raise RuntimeError(
- f'{self.__class__.__name__} is not linked to a FlowSystem. '
- f'Ensure the parent element is registered via flow_system.add_elements() first.'
- )
- return self._flow_system
-
- def _fit_coords(
- self, name: str, data: NumericOrBool | None, dims: Collection[FlowSystemDimensions] | None = None
- ) -> xr.DataArray | None:
- """Convenience wrapper for FlowSystem.fit_to_model_coords().
-
- Args:
- name: The name for the data variable
- data: The data to transform
- dims: Optional dimension names
-
- Returns:
- Transformed data aligned to FlowSystem coordinates
- """
- return self.flow_system.fit_to_model_coords(name, data, dims=dims)
-
- def _fit_effect_coords(
- self,
- prefix: str | None,
- effect_values: Effect_TPS | Numeric_TPS | None,
- suffix: str | None = None,
- dims: Collection[FlowSystemDimensions] | None = None,
- ) -> Effect_TPS | None:
- """Convenience wrapper for FlowSystem.fit_effects_to_model_coords().
-
- Args:
- prefix: Label prefix for effect names
- effect_values: The effect values to transform
- suffix: Optional label suffix
- dims: Optional dimension names
-
- Returns:
- Transformed effect values aligned to FlowSystem coordinates
- """
- return self.flow_system.fit_effects_to_model_coords(prefix, effect_values, suffix, dims=dims)
-
- def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]:
- """
- Convert all DataArrays to references and extract them.
- This is the core method that both to_dict() and to_dataset() build upon.
-
- Returns:
- Tuple of (reference_structure, extracted_arrays_dict)
-
- Raises:
- ValueError: If DataArrays don't have unique names or are duplicated
- """
- # Get constructor parameters using caching for performance
- if not hasattr(self, '_cached_init_params'):
- self._cached_init_params = list(inspect.signature(self.__init__).parameters.keys())
-
- # Process all constructor parameters
- reference_structure = {'__class__': self.__class__.__name__}
- all_extracted_arrays = {}
-
- # Deprecated init params that should not be serialized (they alias other params)
- _deprecated_init_params = {'label', 'label_as_positional'}
- # On Flow, 'id' is deprecated in favor of 'flow_id'
- if 'flow_id' in self._cached_init_params:
- _deprecated_init_params.add('id')
-
- for name in self._cached_init_params:
- if name == 'self' or name in _deprecated_init_params:
- continue
-
- # For 'id' or 'flow_id' param, use _short_id to get the raw constructor value
- # (Flow.id property returns qualified name, but constructor expects short name)
- if name in ('id', 'flow_id') and hasattr(self, '_short_id'):
- value = self._short_id
- else:
- value = getattr(self, name, None)
-
- if value is None:
- continue
- if isinstance(value, pd.Index):
- logger.debug(f'Skipping {name=} because it is an Index')
- continue
-
- # Extract arrays and get reference structure
- processed_value, extracted_arrays = self._extract_dataarrays_recursive(value, name)
-
- # Check for array name conflicts
- conflicts = set(all_extracted_arrays.keys()) & set(extracted_arrays.keys())
- if conflicts:
- raise ValueError(
- f'DataArray name conflicts detected: {conflicts}. '
- f'Each DataArray must have a unique name for serialization.'
- )
-
- # Add extracted arrays to the collection
- all_extracted_arrays.update(extracted_arrays)
-
- # Only store in structure if it's not None/empty after processing
- if processed_value is not None and not self._is_empty_container(processed_value):
- reference_structure[name] = processed_value
-
- return reference_structure, all_extracted_arrays
-
- @staticmethod
- def _is_empty_container(obj) -> bool:
- """Check if object is an empty container (dict, list, tuple, set)."""
- return isinstance(obj, (dict, list, tuple, set)) and len(obj) == 0
-
- def _extract_dataarrays_recursive(self, obj, context_name: str = '') -> tuple[Any, dict[str, xr.DataArray]]:
- """
- Recursively extract DataArrays from nested structures.
-
- Args:
- obj: Object to process
- context_name: Name context for better error messages
-
- Returns:
- Tuple of (processed_object_with_references, extracted_arrays_dict)
-
- Raises:
- ValueError: If DataArrays don't have unique names
- """
- extracted_arrays = {}
-
- # Handle DataArrays directly - use their unique name
- if isinstance(obj, xr.DataArray):
- if not obj.name:
- raise ValueError(
- f'DataArrays must have a unique name for serialization. '
- f'Unnamed DataArray found in {context_name}. Please set array.name = "unique_name"'
- )
-
- array_name = str(obj.name) # Ensure string type
- if array_name in extracted_arrays:
- raise ValueError(
- f'DataArray name "{array_name}" is duplicated in {context_name}. '
- f'Each DataArray must have a unique name for serialization.'
- )
-
- extracted_arrays[array_name] = obj
- return f':::{array_name}', extracted_arrays
-
- # Handle Interface objects - extract their DataArrays too
- elif isinstance(obj, Interface):
- try:
- interface_structure, interface_arrays = obj._create_reference_structure()
- extracted_arrays.update(interface_arrays)
- return interface_structure, extracted_arrays
- except Exception as e:
- raise ValueError(f'Failed to process nested Interface object in {context_name}: {e}') from e
-
- # Handle sequences (lists, tuples)
- elif isinstance(obj, (list, tuple)):
- processed_items = []
- for i, item in enumerate(obj):
- item_context = f'{context_name}[{i}]' if context_name else f'item[{i}]'
- processed_item, nested_arrays = self._extract_dataarrays_recursive(item, item_context)
- extracted_arrays.update(nested_arrays)
- processed_items.append(processed_item)
- return processed_items, extracted_arrays
-
- # Handle IdList containers (treat as dict for serialization)
- elif isinstance(obj, IdList):
- processed_dict = {}
- for key, value in obj.items():
- key_context = f'{context_name}.{key}' if context_name else str(key)
- processed_value, nested_arrays = self._extract_dataarrays_recursive(value, key_context)
- extracted_arrays.update(nested_arrays)
- processed_dict[key] = processed_value
- return processed_dict, extracted_arrays
-
- # Handle dictionaries
- elif isinstance(obj, dict):
- processed_dict = {}
- for key, value in obj.items():
- key_context = f'{context_name}.{key}' if context_name else str(key)
- processed_value, nested_arrays = self._extract_dataarrays_recursive(value, key_context)
- extracted_arrays.update(nested_arrays)
- processed_dict[key] = processed_value
- return processed_dict, extracted_arrays
-
- # Handle sets (convert to list for JSON compatibility)
- elif isinstance(obj, set):
- processed_items = []
- for i, item in enumerate(obj):
- item_context = f'{context_name}.set_item[{i}]' if context_name else f'set_item[{i}]'
- processed_item, nested_arrays = self._extract_dataarrays_recursive(item, item_context)
- extracted_arrays.update(nested_arrays)
- processed_items.append(processed_item)
- return processed_items, extracted_arrays
-
- # For all other types, serialize to basic types
- else:
- return self._serialize_to_basic_types(obj), extracted_arrays
-
- def _handle_deprecated_kwarg(
- self,
- kwargs: dict,
- old_name: str,
- new_name: str,
- current_value: Any = None,
- transform: callable = None,
- check_conflict: bool = True,
- additional_warning_message: str = '',
- ) -> Any:
- """
- Handle a deprecated keyword argument by issuing a warning and returning the appropriate value.
-
- This centralizes the deprecation pattern used across multiple classes (Source, Sink, InvestParameters, etc.).
-
- Args:
- kwargs: Dictionary of keyword arguments to check and modify
- old_name: Name of the deprecated parameter
- new_name: Name of the replacement parameter
- current_value: Current value of the new parameter (if already set)
- transform: Optional callable to transform the old value before returning (e.g., lambda x: [x] to wrap in list)
- check_conflict: Whether to check if both old and new parameters are specified (default: True).
- Note: For parameters with non-None default values (e.g., bool parameters with default=False),
- set check_conflict=False since we cannot distinguish between an explicit value and the default.
- additional_warning_message: Add a custom message which gets appended with a line break to the default warning.
-
- Returns:
- The value to use (either from old parameter or current_value)
-
- Raises:
- ValueError: If both old and new parameters are specified and check_conflict is True
-
- Example:
- # For parameters where None is the default (conflict checking works):
- value = self._handle_deprecated_kwarg(kwargs, 'old_param', 'new_param', current_value)
-
- # For parameters with non-None defaults (disable conflict checking):
- mandatory = self._handle_deprecated_kwarg(
- kwargs, 'optional', 'mandatory', mandatory,
- transform=lambda x: not x,
- check_conflict=False # Cannot detect if mandatory was explicitly passed
- )
- """
- import warnings
-
- old_value = kwargs.pop(old_name, None)
- if old_value is not None:
- # Build base warning message
- base_warning = f'The use of the "{old_name}" argument is deprecated. Use the "{new_name}" argument instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.'
-
- # Append additional message on a new line if provided
- if additional_warning_message:
- # Normalize whitespace: strip leading/trailing whitespace
- extra_msg = additional_warning_message.strip()
- if extra_msg:
- base_warning += '\n' + extra_msg
-
- warnings.warn(
- base_warning,
- DeprecationWarning,
- stacklevel=3, # Stack: this method -> __init__ -> caller
- )
- # Check for conflicts: only raise error if both were explicitly provided
- if check_conflict and current_value is not None:
- raise ValueError(f'Either {old_name} or {new_name} can be specified, but not both.')
-
- # Apply transformation if provided
- if transform is not None:
- return transform(old_value)
- return old_value
-
- return current_value
-
- def _validate_kwargs(self, kwargs: dict, class_name: str = None) -> None:
- """
- Validate that no unexpected keyword arguments are present in kwargs.
-
- This method uses inspect to get the actual function signature and filters out
- any parameters that are not defined in the __init__ method, while also
- handling the special case of 'kwargs' itself which can appear during deserialization.
-
- Args:
- kwargs: Dictionary of keyword arguments to validate
- class_name: Optional class name for error messages. If None, uses self.__class__.__name__
-
- Raises:
- TypeError: If unexpected keyword arguments are found
- """
- if not kwargs:
- return
-
- import inspect
-
- sig = inspect.signature(self.__init__)
- known_params = set(sig.parameters.keys()) - {'self', 'kwargs'}
- # Also filter out 'kwargs' itself which can appear during deserialization
- extra_kwargs = {k: v for k, v in kwargs.items() if k not in known_params and k != 'kwargs'}
-
- if extra_kwargs:
- class_name = class_name or self.__class__.__name__
- unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys())
- raise TypeError(f'{class_name}.__init__() got unexpected keyword argument(s): {unexpected_params}')
-
- @staticmethod
- def _has_value(param: Any) -> bool:
- """Check if a parameter has a meaningful value.
-
- Args:
- param: The parameter to check.
-
- Returns:
- False for:
- - None
- - Empty collections (dict, list, tuple, set, frozenset)
-
- True for all other values, including:
- - Non-empty collections
- - xarray DataArrays (even if they contain NaN/empty data)
- - Scalar values (0, False, empty strings, etc.)
- - NumPy arrays (even if empty - use .size to check those explicitly)
- """
- if param is None:
- return False
-
- # Check for empty collections (but not strings, arrays, or DataArrays)
- if isinstance(param, (dict, list, tuple, set, frozenset)) and len(param) == 0:
- return False
-
- return True
-
- @classmethod
- def _resolve_dataarray_reference(
- cls, reference: str, arrays_dict: dict[str, xr.DataArray]
- ) -> xr.DataArray | TimeSeriesData:
- """
- Resolve a single DataArray reference (:::name) to actual DataArray or TimeSeriesData.
-
- Args:
- reference: Reference string starting with ":::"
- arrays_dict: Dictionary of available DataArrays
-
- Returns:
- Resolved DataArray or TimeSeriesData object
-
- Raises:
- ValueError: If referenced array is not found
- """
- array_name = reference[3:] # Remove ":::" prefix
- if array_name not in arrays_dict:
- raise ValueError(f"Referenced DataArray '{array_name}' not found in dataset")
-
- array = arrays_dict[array_name]
-
- # Handle null values with warning (use numpy for performance - 200x faster than xarray)
- has_nulls = (np.issubdtype(array.dtype, np.floating) and np.any(np.isnan(array.values))) or (
- array.dtype == object and pd.isna(array.values).any()
+ not_allowed = ['(', ')', '|', '->', '\\', '-slash-'] # \\ is needed to check for \
+ if any([sign in id for sign in not_allowed]):
+ raise ValueError(
+ f'Id "{id}" is not valid. Ids cannot contain the following characters: {not_allowed}. '
+ f'Use any other symbol instead'
)
- if has_nulls:
- logger.error(f"DataArray '{array_name}' contains null values. Dropping all-null along present dims.")
- if 'time' in array.dims:
- array = array.dropna(dim='time', how='all')
-
- # Check if this should be restored as TimeSeriesData
- if TimeSeriesData.is_timeseries_data(array):
- return TimeSeriesData.from_dataarray(array)
-
- return array
-
- @classmethod
- def _resolve_reference_structure(cls, structure, arrays_dict: dict[str, xr.DataArray]):
- """
- Convert reference structure back to actual objects using provided arrays.
-
- Args:
- structure: Structure containing references (:::name) or special type markers
- arrays_dict: Dictionary of available DataArrays
-
- Returns:
- Structure with references resolved to actual DataArrays or objects
-
- Raises:
- ValueError: If referenced arrays are not found or class is not registered
- """
- # Handle DataArray references
- if isinstance(structure, str) and structure.startswith(':::'):
- return cls._resolve_dataarray_reference(structure, arrays_dict)
-
- elif isinstance(structure, list):
- resolved_list = []
- for item in structure:
- resolved_item = cls._resolve_reference_structure(item, arrays_dict)
- if resolved_item is not None: # Filter out None values from missing references
- resolved_list.append(resolved_item)
- return resolved_list
-
- elif isinstance(structure, dict):
- if structure.get('__class__'):
- class_name = structure['__class__']
- if class_name not in CLASS_REGISTRY:
- raise ValueError(
- f"Class '{class_name}' not found in CLASS_REGISTRY. "
- f'Available classes: {list(CLASS_REGISTRY.keys())}'
- )
-
- # This is a nested Interface object - restore it recursively
- nested_class = CLASS_REGISTRY[class_name]
- # Remove the __class__ key and process the rest
- nested_data = {k: v for k, v in structure.items() if k != '__class__'}
- # Resolve references in the nested data
- resolved_nested_data = cls._resolve_reference_structure(nested_data, arrays_dict)
-
- try:
- # Get valid constructor parameters for this class
- init_params = set(inspect.signature(nested_class.__init__).parameters.keys())
-
- # Check for deferred init attributes (defined as class attribute on Element subclasses)
- # These are serialized but set after construction, not passed to child __init__
- deferred_attr_names = getattr(nested_class, '_deferred_init_attrs', set())
- deferred_attrs = {k: v for k, v in resolved_nested_data.items() if k in deferred_attr_names}
- constructor_data = {k: v for k, v in resolved_nested_data.items() if k not in deferred_attr_names}
-
- # Handle renamed parameters from old serialized data
- if 'label' in constructor_data and 'label' not in init_params:
- # label → id for most elements, label → flow_id for Flow
- new_key = 'flow_id' if 'flow_id' in init_params else 'id'
- constructor_data[new_key] = constructor_data.pop('label')
- if 'id' in constructor_data and 'id' not in init_params and 'flow_id' in init_params:
- # id → flow_id for Flow (from recently serialized data)
- constructor_data['flow_id'] = constructor_data.pop('id')
-
- # Check for unknown parameters - these could be typos or renamed params
- unknown_params = set(constructor_data.keys()) - init_params
- if unknown_params:
- raise TypeError(
- f'{class_name}.__init__() got unexpected keyword arguments: {unknown_params}. '
- f'This may indicate renamed parameters that need conversion. '
- f'Valid parameters are: {init_params - {"self"}}'
- )
-
- # Create instance with constructor parameters
- instance = nested_class(**constructor_data)
-
- # Set internal attributes after construction
- for attr_name, attr_value in deferred_attrs.items():
- setattr(instance, attr_name, attr_value)
-
- return instance
- except TypeError as e:
- raise ValueError(f'Failed to create instance of {class_name}: {e}') from e
- except Exception as e:
- raise ValueError(f'Failed to create instance of {class_name}: {e}') from e
- else:
- # Regular dictionary - resolve references in values
- resolved_dict = {}
- for key, value in structure.items():
- resolved_value = cls._resolve_reference_structure(value, arrays_dict)
- if resolved_value is not None or value is None: # Keep None values if they were originally None
- resolved_dict[key] = resolved_value
- return resolved_dict
-
- else:
- return structure
-
- def _serialize_to_basic_types(self, obj):
- """
- Convert object to basic Python types only (no DataArrays, no custom objects).
-
- Args:
- obj: Object to serialize
-
- Returns:
- Object converted to basic Python types (str, int, float, bool, list, dict)
- """
- if obj is None or isinstance(obj, (str, int, float, bool)):
- return obj
- elif isinstance(obj, np.integer):
- return int(obj)
- elif isinstance(obj, np.floating):
- return float(obj)
- elif isinstance(obj, np.bool_):
- return bool(obj)
- elif isinstance(obj, (np.ndarray, pd.Series, pd.DataFrame)):
- return obj.tolist() if hasattr(obj, 'tolist') else list(obj)
- elif isinstance(obj, dict):
- return {k: self._serialize_to_basic_types(v) for k, v in obj.items()}
- elif isinstance(obj, (list, tuple)):
- return [self._serialize_to_basic_types(item) for item in obj]
- elif isinstance(obj, set):
- return [self._serialize_to_basic_types(item) for item in obj]
- elif hasattr(obj, 'isoformat'): # datetime objects
- return obj.isoformat()
- elif hasattr(obj, '__dict__'): # Custom objects with attributes
- logger.warning(f'Converting custom object {type(obj)} to dict representation: {obj}')
- return {str(k): self._serialize_to_basic_types(v) for k, v in obj.__dict__.items()}
- else:
- # For any other object, try to convert to string as fallback
- logger.error(f'Converting unknown type {type(obj)} to string: {obj}')
- return str(obj)
-
- def to_dataset(self) -> xr.Dataset:
- """
- Convert the object to an xarray Dataset representation.
- All DataArrays become dataset variables, everything else goes to attrs.
-
- Its recommended to only call this method on Interfaces with all numeric data stored as xr.DataArrays.
- Interfaces inside a FlowSystem are automatically converted this form after connecting and transforming the FlowSystem.
-
- Returns:
- xr.Dataset: Dataset containing all DataArrays with basic objects only in attributes
-
- Raises:
- ValueError: If serialization fails due to naming conflicts or invalid data
- """
- try:
- reference_structure, extracted_arrays = self._create_reference_structure()
- # Create the dataset with extracted arrays as variables and structure as attrs
- return xr.Dataset(extracted_arrays, attrs=reference_structure)
- except Exception as e:
- raise ValueError(
- f'Failed to convert {self.__class__.__name__} to dataset. Its recommended to only call this method on '
- f'a fully connected and transformed FlowSystem, or Interfaces inside such a FlowSystem.'
- f'Original Error: {e}'
- ) from e
-
- def to_netcdf(self, path: str | pathlib.Path, compression: int = 5, overwrite: bool = False):
- """
- Save the object to a NetCDF file.
-
- Args:
- path: Path to save the NetCDF file. Parent directories are created if they don't exist.
- compression: Compression level (0-9)
- overwrite: If True, overwrite existing file. If False, raise error if file exists.
-
- Raises:
- FileExistsError: If overwrite=False and file already exists.
- ValueError: If serialization fails
- IOError: If file cannot be written
- """
- path = pathlib.Path(path)
-
- # Check if file exists (unless overwrite is True)
- if not overwrite and path.exists():
- raise FileExistsError(f'File already exists: {path}. Use overwrite=True to overwrite existing file.')
-
- # Create parent directories if they don't exist
- path.parent.mkdir(parents=True, exist_ok=True)
-
- try:
- ds = self.to_dataset()
- fx_io.save_dataset_to_netcdf(ds, path, compression=compression)
- except Exception as e:
- raise OSError(f'Failed to save {self.__class__.__name__} to NetCDF file {path}: {e}') from e
-
- @classmethod
- def from_dataset(cls, ds: xr.Dataset) -> Interface:
- """
- Create an instance from an xarray Dataset.
-
- Args:
- ds: Dataset containing the object data
-
- Returns:
- Interface instance
-
- Raises:
- ValueError: If dataset format is invalid or class mismatch
- """
- try:
- # Get class name and verify it matches
- class_name = ds.attrs.get('__class__')
- if class_name and class_name != cls.__name__:
- logger.warning(f"Dataset class '{class_name}' doesn't match target class '{cls.__name__}'")
-
- # Get the reference structure from attrs
- reference_structure = dict(ds.attrs)
-
- # Remove the class name since it's not a constructor parameter
- reference_structure.pop('__class__', None)
-
- # Create arrays dictionary from dataset variables
- # Use ds.variables with coord_cache for faster DataArray construction
- variables = ds.variables
- coord_cache = {k: ds.coords[k] for k in ds.coords}
- arrays_dict = {
- name: xr.DataArray(
- variables[name],
- coords={k: coord_cache[k] for k in variables[name].dims if k in coord_cache},
- name=name,
- )
- for name in ds.data_vars
- }
-
- # Resolve all references using the centralized method
- resolved_params = cls._resolve_reference_structure(reference_structure, arrays_dict)
-
- return cls(**resolved_params)
- except Exception as e:
- raise ValueError(f'Failed to create {cls.__name__} from dataset: {e}') from e
-
- @classmethod
- def from_netcdf(cls, path: str | pathlib.Path) -> Interface:
- """
- Load an instance from a NetCDF file.
-
- Args:
- path: Path to the NetCDF file
-
- Returns:
- Interface instance
-
- Raises:
- IOError: If file cannot be read
- ValueError: If file format is invalid
- """
- try:
- ds = fx_io.load_dataset_from_netcdf(path)
- return cls.from_dataset(ds)
- except Exception as e:
- raise OSError(f'Failed to load {cls.__name__} from NetCDF file {path}: {e}') from e
-
- def get_structure(self, clean: bool = False, stats: bool = False) -> dict:
- """
- Get object structure as a dictionary.
-
- Args:
- clean: If True, remove None and empty dicts and lists.
- stats: If True, replace DataArray references with statistics
-
- Returns:
- Dictionary representation of the object structure
- """
- reference_structure, extracted_arrays = self._create_reference_structure()
+ if id.endswith(' '):
+ logger.error(f'Id "{id}" ends with a space. This will be removed.')
+ return id.rstrip()
+ return id
- if stats:
- # Replace references with statistics
- reference_structure = self._replace_references_with_stats(reference_structure, extracted_arrays)
-
- if clean:
- return fx_io.remove_none_and_empty(reference_structure)
- return reference_structure
-
- def _replace_references_with_stats(self, structure, arrays_dict: dict[str, xr.DataArray]):
- """Replace DataArray references with statistical summaries."""
- if isinstance(structure, str) and structure.startswith(':::'):
- array_name = structure[3:]
- if array_name in arrays_dict:
- return get_dataarray_stats(arrays_dict[array_name])
- return structure
-
- elif isinstance(structure, dict):
- return {k: self._replace_references_with_stats(v, arrays_dict) for k, v in structure.items()}
-
- elif isinstance(structure, list):
- return [self._replace_references_with_stats(item, arrays_dict) for item in structure]
-
- return structure
-
- def to_json(self, path: str | pathlib.Path):
- """
- Save the object to a JSON file.
- This is meant for documentation and comparison, not for reloading.
-
- Args:
- path: The path to the JSON file.
-
- Raises:
- IOError: If file cannot be written
- """
- try:
- # Use the stats mode for JSON export (cleaner output)
- data = self.get_structure(clean=True, stats=True)
- fx_io.save_json(data, path)
- except Exception as e:
- raise OSError(f'Failed to save {self.__class__.__name__} to JSON file {path}: {e}') from e
-
- def __repr__(self):
- """Return a detailed string representation for debugging."""
- return fx_io.build_repr_from_init(self, excluded_params={'self', 'id', 'label', 'kwargs'})
-
- def copy(self) -> Interface:
- """
- Create a copy of the Interface object.
-
- Uses the existing serialization infrastructure to ensure proper copying
- of all DataArrays and nested objects.
-
- Returns:
- A new instance of the same class with copied data.
- """
- # Convert to dataset, copy it, and convert back
- dataset = self.to_dataset().copy(deep=True)
- return self.__class__.from_dataset(dataset)
-
- def __copy__(self):
- """Support for copy.copy()."""
- return self.copy()
-
- def __deepcopy__(self, memo):
- """Support for copy.deepcopy()."""
- return self.copy()
-
-
-class Element(Interface):
- """This class is the basic Element of flixopt. Every Element has an id."""
-
- # Attributes that are serialized but set after construction (not passed to child __init__)
- # These are internal state populated during modeling, not user-facing parameters
- _deferred_init_attrs: ClassVar[set[str]] = {'_variable_names', '_constraint_names'}
-
- def __init__(
- self,
- id: str | None = None,
- meta_data: dict | None = None,
- color: str | None = None,
- _variable_names: list[str] | None = None,
- _constraint_names: list[str] | None = None,
- **kwargs,
- ):
- """
- Args:
- id: The id of the element
- meta_data: used to store more information about the Element. Is not used internally, but saved in the results. Only use python native types.
- color: Optional color for visualizations (e.g., '#FF6B6B'). If not provided, a color will be automatically assigned during FlowSystem.connect_and_transform().
- _variable_names: Internal. Variable names for this element (populated after modeling).
- _constraint_names: Internal. Constraint names for this element (populated after modeling).
- """
- id = self._handle_deprecated_kwarg(kwargs, 'label', 'id', id)
- if id is None:
- raise TypeError(f'{self.__class__.__name__}.__init__() requires an "id" argument.')
- self._validate_kwargs(kwargs)
- self._short_id: str = Element._valid_id(id)
- self.meta_data = meta_data if meta_data is not None else {}
- self.color = color
- self._flow_system: FlowSystem | None = None
- # Variable/constraint names - populated after modeling, serialized for results
- self._variable_names: list[str] = _variable_names if _variable_names is not None else []
- self._constraint_names: list[str] = _constraint_names if _constraint_names is not None else []
-
- def _plausibility_checks(self) -> None:
- """This function is used to do some basic plausibility checks for each Element during initialization.
- This is run after all data is transformed to the correct format/type"""
- raise NotImplementedError('Every Element needs a _plausibility_checks() method')
- @property
- def id(self) -> str:
- """The unique identifier of this element.
+class Element:
+ """Mixin for all elements in flixopt. Provides deprecated label properties.
- For most elements this is the name passed to the constructor.
- For flows this returns the qualified form: ``component(short_id)``.
- """
- return self._short_id
+ Subclasses (Effect, Bus, Flow, Component) are @dataclass classes that declare
+ their own ``id`` field. Element does NOT define ``id`` — each subclass owns it.
- @id.setter
- def id(self, value: str) -> None:
- self._short_id = value
+ Runtime state (variable names, constraint names) is stored in FlowSystem registries,
+ not on the element objects themselves.
+ """
@property
def label(self) -> str:
@@ -2049,7 +1564,7 @@ def label(self) -> str:
DeprecationWarning,
stacklevel=2,
)
- return self._short_id
+ return self.id
@label.setter
def label(self, value: str) -> None:
@@ -2058,7 +1573,7 @@ def label(self, value: str) -> None:
DeprecationWarning,
stacklevel=2,
)
- self._short_id = value
+ self.id = value
@property
def label_full(self) -> str:
@@ -2080,87 +1595,6 @@ def id_full(self) -> str:
)
return self.id
- @property
- def solution(self) -> xr.Dataset:
- """Solution data for this element's variables.
-
- Returns a Dataset built by selecting this element from batched variables
- in FlowSystem.solution.
-
- Raises:
- ValueError: If no solution is available (optimization not run or not solved).
- """
- if self._flow_system is None:
- raise ValueError(f'Element "{self.id}" is not linked to a FlowSystem.')
- if self._flow_system.solution is None:
- raise ValueError(f'No solution available for "{self.id}". Run optimization first or load results.')
- if not self._variable_names:
- raise ValueError(f'No variable names available for "{self.id}". Element may not have been modeled yet.')
- full_solution = self._flow_system.solution
- data_vars = {}
- for var_name in self._variable_names:
- if var_name not in full_solution:
- continue
- var = full_solution[var_name]
- # Select this element from the appropriate dimension
- for dim in var.dims:
- if dim in ('time', 'period', 'scenario', 'cluster'):
- continue
- if self.id in var.coords[dim].values:
- var = var.sel({dim: self.id}, drop=True)
- break
- data_vars[var_name] = var
- return xr.Dataset(data_vars)
-
- def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]:
- """
- Override to include _variable_names and _constraint_names in serialization.
-
- These attributes are defined in Element but may not be in subclass constructors,
- so we need to add them explicitly.
- """
- reference_structure, all_extracted_arrays = super()._create_reference_structure()
-
- # Always include variable/constraint names for solution access after loading
- if self._variable_names:
- reference_structure['_variable_names'] = self._variable_names
- if self._constraint_names:
- reference_structure['_constraint_names'] = self._constraint_names
-
- return reference_structure, all_extracted_arrays
-
- def __repr__(self) -> str:
- """Return string representation."""
- return fx_io.build_repr_from_init(self, excluded_params={'self', 'id', 'kwargs'}, skip_default_size=True)
-
- @staticmethod
- def _valid_id(id: str) -> str:
- """Checks if the id is valid.
-
- Raises:
- ValueError: If the id is not valid.
- """
- not_allowed = ['(', ')', '|', '->', '\\', '-slash-'] # \\ is needed to check for \
- if any([sign in id for sign in not_allowed]):
- raise ValueError(
- f'Id "{id}" is not valid. Ids cannot contain the following characters: {not_allowed}. '
- f'Use any other symbol instead'
- )
- if id.endswith(' '):
- logger.error(f'Id "{id}" ends with a space. This will be removed.')
- return id.rstrip()
- return id
-
- @staticmethod
- def _valid_label(label: str) -> str:
- """Deprecated: Use ``_valid_id`` instead."""
- warnings.warn(
- f'_valid_label is deprecated. Use _valid_id instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
- DeprecationWarning,
- stacklevel=2,
- )
- return Element._valid_id(label)
-
# Precompiled regex pattern for natural sorting
_NATURAL_SPLIT = re.compile(r'(\d+)')
@@ -2171,257 +1605,6 @@ def _natural_sort_key(text):
return [int(c) if c.isdigit() else c.lower() for c in _NATURAL_SPLIT.split(text)]
-# Type variable for containers
-T = TypeVar('T')
-
-
-class ContainerMixin(dict[str, T]):
- """
- Mixin providing shared container functionality with nice repr and error messages.
-
- Subclasses must implement _get_label() to extract the label from elements.
- """
-
- def __init__(
- self,
- elements: list[T] | dict[str, T] | None = None,
- element_type_name: str = 'elements',
- truncate_repr: int | None = None,
- item_name: str | None = None,
- ):
- """
- Args:
- elements: Initial elements to add (list or dict)
- element_type_name: Name for display (e.g., 'components', 'buses')
- truncate_repr: Maximum number of items to show in repr. If None, show all items. Default: None
- item_name: Singular name for error messages (e.g., 'Component', 'Carrier').
- If None, inferred from first added item's class name.
- """
- super().__init__()
- self._element_type_name = element_type_name
- self._truncate_repr = truncate_repr
- self._item_name = item_name
-
- if elements is not None:
- if isinstance(elements, dict):
- for element in elements.values():
- self.add(element)
- else:
- for element in elements:
- self.add(element)
-
- def _get_label(self, element: T) -> str:
- """
- Extract label from element. Must be implemented by subclasses.
-
- Args:
- element: Element to get label from
-
- Returns:
- Label string
- """
- raise NotImplementedError('Subclasses must implement _get_label()')
-
- def _get_item_name(self) -> str:
- """Get the singular item name for error messages.
-
- Returns the explicitly set item_name, or infers from the first item's class name.
- Falls back to 'Item' if container is empty and no name was set.
- """
- if self._item_name is not None:
- return self._item_name
- # Infer from first item's class name
- if self:
- first_item = next(iter(self.values()))
- return first_item.__class__.__name__
- return 'Item'
-
- def add(self, element: T) -> None:
- """Add an element to the container."""
- label = self._get_label(element)
- if label in self:
- item_name = element.__class__.__name__
- raise ValueError(
- f'{item_name} with label "{label}" already exists in {self._element_type_name}. '
- f'Each {item_name.lower()} must have a unique label.'
- )
- self[label] = element
-
- def __setitem__(self, label: str, element: T) -> None:
- """Set element with validation."""
- element_label = self._get_label(element)
- if label != element_label:
- raise ValueError(
- f'Key "{label}" does not match element label "{element_label}". '
- f'Use the correct label as key or use .add() method.'
- )
- super().__setitem__(label, element)
-
- def __getitem__(self, label: str) -> T:
- """
- Get element by label with helpful error messages.
-
- Args:
- label: Label of the element to retrieve
-
- Returns:
- The element with the given label
-
- Raises:
- KeyError: If element is not found, with suggestions for similar labels
- """
- try:
- return super().__getitem__(label)
- except KeyError:
- # Provide helpful error with close matches suggestions
- item_name = self._get_item_name()
- suggestions = get_close_matches(label, self.keys(), n=3, cutoff=0.6)
- error_msg = f'{item_name} "{label}" not found in {self._element_type_name}.'
- if suggestions:
- error_msg += f' Did you mean: {", ".join(suggestions)}?'
- else:
- available = list(self.keys())
- if len(available) <= 5:
- error_msg += f' Available: {", ".join(available)}'
- else:
- error_msg += f' Available: {", ".join(available[:5])} ... (+{len(available) - 5} more)'
- raise KeyError(error_msg) from None
-
- def _get_repr(self, max_items: int | None = None) -> str:
- """
- Get string representation with optional truncation.
-
- Args:
- max_items: Maximum number of items to show. If None, uses instance default (self._truncate_repr).
- If still None, shows all items.
-
- Returns:
- Formatted string representation
- """
- # Use provided max_items, or fall back to instance default
- limit = max_items if max_items is not None else self._truncate_repr
-
- count = len(self)
- title = f'{self._element_type_name.capitalize()} ({count} item{"s" if count != 1 else ""})'
-
- if not self:
- r = fx_io.format_title_with_underline(title)
- r += '\n'
- else:
- r = fx_io.format_title_with_underline(title)
- sorted_names = sorted(self.keys(), key=_natural_sort_key)
-
- if limit is not None and limit > 0 and len(sorted_names) > limit:
- # Show truncated list
- for name in sorted_names[:limit]:
- r += f' * {name}\n'
- r += f' ... (+{len(sorted_names) - limit} more)\n'
- else:
- # Show all items
- for name in sorted_names:
- r += f' * {name}\n'
-
- return r
-
- def __add__(self, other: ContainerMixin[T]) -> ContainerMixin[T]:
- """Concatenate two containers."""
- result = self.__class__(element_type_name=self._element_type_name)
- for element in self.values():
- result.add(element)
- for element in other.values():
- result.add(element)
- return result
-
- def __repr__(self) -> str:
- """Return a string representation using the instance's truncate_repr setting."""
- return self._get_repr()
-
-
-class FlowContainer(ContainerMixin[T]):
- """Container for Flow objects with dual access: by index or by id.
-
- Supports:
- - container['Boiler(Q_th)'] # id-based access
- - container['Q_th'] # short-id access (when all flows share same component)
- - container[0] # index-based access
- - container.add(flow)
- - for flow in container.values()
- - container1 + container2 # concatenation
-
- Examples:
- >>> boiler = Boiler(id='Boiler', inputs=[Flow('heat_bus')])
- >>> boiler.inputs[0] # Index access
- >>> boiler.inputs['Boiler(heat_bus)'] # Full id access
- >>> boiler.inputs['heat_bus'] # Short id access (same component)
- >>> for flow in boiler.inputs.values():
- ... print(flow.id)
- """
-
- def _get_label(self, flow: T) -> str:
- """Extract id from Flow."""
- return flow.id
-
- def __getitem__(self, key: str | int) -> T:
- """Get flow by id, short id, or index."""
- if isinstance(key, int):
- try:
- return list(self.values())[key]
- except IndexError:
- raise IndexError(f'Flow index {key} out of range (container has {len(self)} flows)') from None
-
- if dict.__contains__(self, key):
- return super().__getitem__(key)
-
- # Try short-id match if all flows share the same component
- if len(self) > 0:
- components = {flow.component for flow in self.values()}
- if len(components) == 1:
- component = next(iter(components))
- full_key = f'{component}({key})'
- if dict.__contains__(self, full_key):
- return super().__getitem__(full_key)
-
- raise KeyError(f"'{key}' not found in {self._element_type_name}")
-
- def __contains__(self, key: object) -> bool:
- """Check if key exists (supports id or short id)."""
- if not isinstance(key, str):
- return False
- if dict.__contains__(self, key):
- return True
- if len(self) > 0:
- components = {flow.component for flow in self.values()}
- if len(components) == 1:
- component = next(iter(components))
- full_key = f'{component}({key})'
- return dict.__contains__(self, full_key)
- return False
-
-
-class ElementContainer(ContainerMixin[T]):
- """
- Container for Element objects (Component, Bus, Flow, Effect).
-
- Uses element.id for keying.
- """
-
- def _get_label(self, element: T) -> str:
- """Extract id from Element."""
- return element.id
-
-
-class ResultsContainer(ContainerMixin[T]):
- """
- Container for Results objects (ComponentResults, BusResults, etc).
-
- Uses element.id for keying.
- """
-
- def _get_label(self, element: T) -> str:
- """Extract id from Results object."""
- return element.id
-
-
T_element = TypeVar('T_element')
diff --git a/tests/conftest.py b/tests/conftest.py
index 970b8f285..20862bb25 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -151,14 +151,14 @@ def simple():
'Boiler',
thermal_efficiency=0.5,
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=50,
relative_minimum=5 / 50,
relative_maximum=1,
status_parameters=fx.StatusParameters(),
),
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
@staticmethod
@@ -169,7 +169,7 @@ def complex():
thermal_efficiency=0.5,
status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 0, 'CO2': 1000}),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
load_factor_max=1.0,
load_factor_min=0.1,
@@ -193,7 +193,7 @@ def complex():
),
flow_hours_max=1e6,
),
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu', size=200, relative_minimum=0, relative_maximum=1),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu', size=200, relative_minimum=0, relative_maximum=1),
)
class CHPs:
@@ -205,10 +205,14 @@ def simple():
thermal_efficiency=0.5,
electrical_efficiency=0.4,
electrical_flow=fx.Flow(
- 'Strom', flow_id='P_el', size=60, relative_minimum=5 / 60, status_parameters=fx.StatusParameters()
+ bus='Strom',
+ flow_id='P_el',
+ size=60,
+ relative_minimum=5 / 60,
+ status_parameters=fx.StatusParameters(),
),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th'),
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
@staticmethod
@@ -220,10 +224,10 @@ def base():
electrical_efficiency=0.4,
status_parameters=fx.StatusParameters(effects_per_startup=0.01),
electrical_flow=fx.Flow(
- 'Strom', flow_id='P_el', size=60, relative_minimum=5 / 60, previous_flow_rate=10
+ bus='Strom', flow_id='P_el', size=60, relative_minimum=5 / 60, previous_flow_rate=10
),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=1e3),
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu', size=1e3),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th', size=1e3),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu', size=1e3),
)
class LinearConverters:
@@ -232,10 +236,10 @@ def piecewise():
"""Piecewise converter from flow_system_piecewise_conversion"""
return fx.LinearConverter(
'KWK',
- inputs=[fx.Flow('Gas', flow_id='Q_fu', size=200)],
+ inputs=[fx.Flow(bus='Gas', flow_id='Q_fu', size=200)],
outputs=[
- fx.Flow('Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10),
- fx.Flow('Fernwärme', flow_id='Q_th', size=100),
+ fx.Flow(bus='Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10),
+ fx.Flow(bus='Fernwärme', flow_id='Q_th', size=100),
],
piecewise_conversion=fx.PiecewiseConversion(
{
@@ -252,10 +256,10 @@ def segments(timesteps_length):
"""Segments converter with time-varying piecewise conversion"""
return fx.LinearConverter(
'KWK',
- inputs=[fx.Flow('Gas', flow_id='Q_fu', size=200)],
+ inputs=[fx.Flow(bus='Gas', flow_id='Q_fu', size=200)],
outputs=[
- fx.Flow('Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10),
- fx.Flow('Fernwärme', flow_id='Q_th', size=100),
+ fx.Flow(bus='Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10),
+ fx.Flow(bus='Fernwärme', flow_id='Q_th', size=100),
],
piecewise_conversion=fx.PiecewiseConversion(
{
@@ -286,11 +290,11 @@ def simple(timesteps_length=9):
return fx.Storage(
'Speicher',
charging=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th_load',
size=fx.InvestParameters(fixed_size=1e4, mandatory=True), # Investment for testing sizes
),
- discharging=fx.Flow('Fernwärme', flow_id='Q_th_unload', size=1e4),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_unload', size=1e4),
capacity_in_flow_hours=fx.InvestParameters(effects_of_investment=20, fixed_size=30, mandatory=True),
initial_charge_state=0,
relative_maximum_charge_state=1 / 100 * np.array(charge_state_values),
@@ -320,8 +324,8 @@ def complex():
)
return fx.Storage(
'Speicher',
- charging=fx.Flow('Fernwärme', flow_id='Q_th_load', size=1e4),
- discharging=fx.Flow('Fernwärme', flow_id='Q_th_unload', size=1e4),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_load', size=1e4),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_unload', size=1e4),
capacity_in_flow_hours=invest_speicher,
initial_charge_state=0,
maximal_final_charge_state=10,
@@ -379,7 +383,7 @@ def heat_load(thermal_profile):
"""Create thermal heat load sink"""
return fx.Sink(
'Wärmelast',
- inputs=[fx.Flow('Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_profile)],
+ inputs=[fx.Flow(bus='Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_profile)],
)
@staticmethod
@@ -387,7 +391,7 @@ def electricity_feed_in(electrical_price_profile):
"""Create electricity feed-in sink"""
return fx.Sink(
'Einspeisung',
- inputs=[fx.Flow('Strom', flow_id='P_el', effects_per_flow_hour=-1 * electrical_price_profile)],
+ inputs=[fx.Flow(bus='Strom', flow_id='P_el', effects_per_flow_hour=-1 * electrical_price_profile)],
)
@staticmethod
@@ -395,7 +399,7 @@ def electricity_load(electrical_profile):
"""Create electrical load sink (for flow_system_long)"""
return fx.Sink(
'Stromlast',
- inputs=[fx.Flow('Strom', flow_id='P_el_Last', size=1, fixed_relative_profile=electrical_profile)],
+ inputs=[fx.Flow(bus='Strom', flow_id='P_el_Last', size=1, fixed_relative_profile=electrical_profile)],
)
@@ -413,7 +417,7 @@ def gas_with_costs_and_co2():
def gas_with_costs():
"""Simple gas tariff without CO2"""
return fx.Source(
- 'Gastarif', outputs=[fx.Flow('Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': 0.04})]
+ 'Gastarif', outputs=[fx.Flow(bus='Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': 0.04})]
)
@@ -604,26 +608,32 @@ def flow_system_long():
Effects.primary_energy(),
fx.Sink(
'Wärmelast',
- inputs=[fx.Flow('Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_load_ts)],
+ inputs=[fx.Flow(bus='Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_load_ts)],
),
fx.Sink(
'Stromlast',
- inputs=[fx.Flow('Strom', flow_id='P_el_Last', size=1, fixed_relative_profile=electrical_load_ts)],
+ inputs=[fx.Flow(bus='Strom', flow_id='P_el_Last', size=1, fixed_relative_profile=electrical_load_ts)],
),
fx.Source(
'Kohletarif',
- outputs=[fx.Flow('Kohle', flow_id='Q_Kohle', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})],
+ outputs=[
+ fx.Flow(bus='Kohle', flow_id='Q_Kohle', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})
+ ],
),
fx.Source(
'Gastarif',
outputs=[
- fx.Flow('Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})
+ fx.Flow(bus='Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})
],
),
- fx.Sink('Einspeisung', inputs=[fx.Flow('Strom', flow_id='P_el', size=1000, effects_per_flow_hour=p_feed_in)]),
+ fx.Sink(
+ 'Einspeisung', inputs=[fx.Flow(bus='Strom', flow_id='P_el', size=1000, effects_per_flow_hour=p_feed_in)]
+ ),
fx.Source(
'Stromtarif',
- outputs=[fx.Flow('Strom', flow_id='P_el', size=1000, effects_per_flow_hour={'costs': p_sell, 'CO2': 0.3})],
+ outputs=[
+ fx.Flow(bus='Strom', flow_id='P_el', size=1000, effects_per_flow_hour={'costs': p_sell, 'CO2': 0.3})
+ ],
),
)
@@ -631,9 +641,9 @@ def flow_system_long():
fx.linear_converters.Boiler(
'Kessel',
thermal_efficiency=0.85,
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th'),
fuel_flow=fx.Flow(
- 'Gas',
+ bus='Gas',
flow_id='Q_fu',
size=95,
relative_minimum=12 / 95,
@@ -646,14 +656,14 @@ def flow_system_long():
thermal_efficiency=(eta_th := 0.58),
electrical_efficiency=(eta_el := 0.22),
status_parameters=fx.StatusParameters(effects_per_startup=24000),
- fuel_flow=fx.Flow('Kohle', flow_id='Q_fu', size=(fuel_size := 288), relative_minimum=87 / fuel_size),
- electrical_flow=fx.Flow('Strom', flow_id='P_el', size=fuel_size * eta_el),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=fuel_size * eta_th),
+ fuel_flow=fx.Flow(bus='Kohle', flow_id='Q_fu', size=(fuel_size := 288), relative_minimum=87 / fuel_size),
+ electrical_flow=fx.Flow(bus='Strom', flow_id='P_el', size=fuel_size * eta_el),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th', size=fuel_size * eta_th),
),
fx.Storage(
'Speicher',
- charging=fx.Flow('Fernwärme', flow_id='Q_th_load', size=137),
- discharging=fx.Flow('Fernwärme', flow_id='Q_th_unload', size=158),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_load', size=137),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_unload', size=158),
capacity_in_flow_hours=684,
initial_charge_state=137,
minimal_final_charge_state=137,
diff --git a/tests/flow_system/test_flow_system_locking.py b/tests/flow_system/test_flow_system_locking.py
index 83d931c87..47bb514f3 100644
--- a/tests/flow_system/test_flow_system_locking.py
+++ b/tests/flow_system/test_flow_system_locking.py
@@ -143,17 +143,17 @@ def test_reset_clears_model(self, simple_flow_system, highs_solver):
assert simple_flow_system.model is None
def test_reset_clears_element_variable_names(self, simple_flow_system, highs_solver):
- """Reset should clear element variable names."""
+ """Reset should clear element variable name registries."""
simple_flow_system.optimize(highs_solver)
- # Check that elements have variable names after optimization
+ # Check that registry has variable names after optimization
boiler = simple_flow_system.components['Boiler']
- assert len(boiler._variable_names) > 0
+ assert len(simple_flow_system._element_variable_names.get(boiler.id, [])) > 0
simple_flow_system.reset()
- # Check that variable names are cleared
- assert len(boiler._variable_names) == 0
+ # Check that variable name registry is cleared
+ assert len(simple_flow_system._element_variable_names) == 0
def test_reset_returns_self(self, simple_flow_system, highs_solver):
"""Reset should return self for method chaining."""
diff --git a/tests/flow_system/test_flow_system_resample.py b/tests/flow_system/test_flow_system_resample.py
index 360b1bfc1..156479d3c 100644
--- a/tests/flow_system/test_flow_system_resample.py
+++ b/tests/flow_system/test_flow_system_resample.py
@@ -19,9 +19,11 @@ def simple_fs():
fs.add_elements(
fx.Sink(
'demand',
- inputs=[fx.Flow('heat', flow_id='in', fixed_relative_profile=np.linspace(10, 20, 24), size=1)],
+ inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=np.linspace(10, 20, 24), size=1)],
+ ),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
),
- fx.Source('source', outputs=[fx.Flow('heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]),
)
return fs
@@ -42,15 +44,15 @@ def complex_fs():
fs.add_elements(
fx.Storage(
'battery',
- charging=fx.Flow('elec', flow_id='charge', size=10),
- discharging=fx.Flow('elec', flow_id='discharge', size=10),
+ charging=fx.Flow(bus='elec', size=10),
+ discharging=fx.Flow(bus='elec', size=10),
capacity_in_flow_hours=fx.InvestParameters(fixed_size=100),
)
)
# Piecewise converter
converter = fx.linear_converters.Boiler(
- 'boiler', thermal_efficiency=0.9, fuel_flow=fx.Flow('elec', flow_id='gas'), thermal_flow=fx.Flow('heat')
+ 'boiler', thermal_efficiency=0.9, fuel_flow=fx.Flow(bus='elec', flow_id='gas'), thermal_flow=fx.Flow(bus='heat')
)
converter.thermal_flow.size = 100
fs.add_elements(converter)
@@ -61,7 +63,7 @@ def complex_fs():
'pv',
outputs=[
fx.Flow(
- 'elec',
+ bus='elec',
flow_id='gen',
size=fx.InvestParameters(maximum_size=1000, effects_of_investment_per_size={'costs': 100}),
)
@@ -101,7 +103,7 @@ def test_resample_methods(method, expected):
fs.add_elements(
fx.Sink(
's',
- inputs=[fx.Flow('b', flow_id='in', fixed_relative_profile=np.array([10.0, 20.0, 30.0, 40.0]), size=1)],
+ inputs=[fx.Flow(bus='b', flow_id='in', fixed_relative_profile=np.array([10.0, 20.0, 30.0, 40.0]), size=1)],
)
)
@@ -144,7 +146,7 @@ def test_with_dimensions(simple_fs, dim_name, dim_value):
"""Test resampling preserves period/scenario dimensions."""
fs = fx.FlowSystem(simple_fs.timesteps, **{dim_name: dim_value})
fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
- fs.add_elements(fx.Sink('d', inputs=[fx.Flow('h', flow_id='in', fixed_relative_profile=np.ones(24), size=1)]))
+ fs.add_elements(fx.Sink('d', inputs=[fx.Flow(bus='h', flow_id='in', fixed_relative_profile=np.ones(24), size=1)]))
fs_r = fs.resample('2h', method='mean')
assert getattr(fs_r, dim_name) is not None
@@ -159,8 +161,8 @@ def test_storage_resample(complex_fs):
fs_r = complex_fs.resample('4h', method='mean')
assert 'battery' in fs_r.components
storage = fs_r.components['battery']
- assert storage.charging.label == 'charge'
- assert storage.discharging.label == 'discharge'
+ assert storage.charging.flow_id == 'charging'
+ assert storage.discharging.flow_id == 'discharging'
def test_converter_resample(complex_fs):
@@ -195,8 +197,8 @@ def test_modeling(with_dim):
fs = fx.FlowSystem(ts, **kwargs)
fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
fs.add_elements(
- fx.Sink('d', inputs=[fx.Flow('h', flow_id='in', fixed_relative_profile=np.linspace(10, 30, 48), size=1)]),
- fx.Source('s', outputs=[fx.Flow('h', flow_id='out', size=100, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink('d', inputs=[fx.Flow(bus='h', flow_id='in', fixed_relative_profile=np.linspace(10, 30, 48), size=1)]),
+ fx.Source('s', outputs=[fx.Flow(bus='h', flow_id='out', size=100, effects_per_flow_hour={'costs': 0.05})]),
)
fs_r = fs.resample('4h', method='mean')
@@ -212,8 +214,8 @@ def test_model_structure_preserved():
fs = fx.FlowSystem(ts)
fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
fs.add_elements(
- fx.Sink('d', inputs=[fx.Flow('h', flow_id='in', fixed_relative_profile=np.linspace(10, 30, 48), size=1)]),
- fx.Source('s', outputs=[fx.Flow('h', flow_id='out', size=100, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink('d', inputs=[fx.Flow(bus='h', flow_id='in', fixed_relative_profile=np.linspace(10, 30, 48), size=1)]),
+ fx.Source('s', outputs=[fx.Flow(bus='h', flow_id='out', size=100, effects_per_flow_hour={'costs': 0.05})]),
)
fs.build_model()
@@ -256,7 +258,7 @@ def test_frequencies(freq, exp_len):
ts = pd.date_range('2023-01-01', periods=168, freq='h')
fs = fx.FlowSystem(ts)
fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
- fs.add_elements(fx.Sink('s', inputs=[fx.Flow('b', flow_id='in', fixed_relative_profile=np.ones(168), size=1)]))
+ fs.add_elements(fx.Sink('s', inputs=[fx.Flow(bus='b', flow_id='in', fixed_relative_profile=np.ones(168), size=1)]))
assert len(fs.resample(freq, method='mean').timesteps) == exp_len
@@ -266,7 +268,7 @@ def test_irregular_timesteps_error():
ts = pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time')
fs = fx.FlowSystem(ts)
fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
- fs.add_elements(fx.Sink('s', inputs=[fx.Flow('b', flow_id='in', fixed_relative_profile=np.ones(3), size=1)]))
+ fs.add_elements(fx.Sink('s', inputs=[fx.Flow(bus='b', flow_id='in', fixed_relative_profile=np.ones(3), size=1)]))
with pytest.raises(ValueError, match='Resampling created gaps'):
fs.transform.resample('1h', method='mean')
@@ -278,7 +280,7 @@ def test_irregular_timesteps_with_fill_gaps():
fs = fx.FlowSystem(ts)
fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
fs.add_elements(
- fx.Sink('s', inputs=[fx.Flow('b', flow_id='in', fixed_relative_profile=np.array([1.0, 2.0, 4.0]), size=1)])
+ fx.Sink('s', inputs=[fx.Flow(bus='b', flow_id='in', fixed_relative_profile=np.array([1.0, 2.0, 4.0]), size=1)])
)
# Test with ffill
diff --git a/tests/flow_system/test_sel_isel_single_selection.py b/tests/flow_system/test_sel_isel_single_selection.py
index 4d84ced51..bb049e590 100644
--- a/tests/flow_system/test_sel_isel_single_selection.py
+++ b/tests/flow_system/test_sel_isel_single_selection.py
@@ -20,8 +20,10 @@ def fs_with_scenarios():
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=np.ones(24), size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink('demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=np.ones(24), size=10)]),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
@@ -38,8 +40,10 @@ def fs_with_periods():
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=np.ones(24), size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink('demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=np.ones(24), size=10)]),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
@@ -57,8 +61,10 @@ def fs_with_periods_and_scenarios():
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=np.ones(24), size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink('demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=np.ones(24), size=10)]),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
diff --git a/tests/io/test_io.py b/tests/io/test_io.py
index 404f514ec..172b7aa37 100644
--- a/tests/io/test_io.py
+++ b/tests/io/test_io.py
@@ -247,8 +247,8 @@ def test_netcdf_roundtrip_preserves_periods(self, tmp_path):
fx.Effect('costs', unit='EUR', is_objective=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50)]),
+ fx.Sink('demand', inputs=[fx.Flow(bus='heat', flow_id='in', size=10)]),
+ fx.Source('source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50)]),
)
path = tmp_path / 'test_periods.nc'
@@ -271,8 +271,8 @@ def test_netcdf_roundtrip_preserves_scenarios(self, tmp_path):
fx.Effect('costs', unit='EUR', is_objective=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50)]),
+ fx.Sink('demand', inputs=[fx.Flow(bus='heat', flow_id='in', size=10)]),
+ fx.Source('source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50)]),
)
path = tmp_path / 'test_scenarios.nc'
@@ -300,8 +300,12 @@ def test_netcdf_roundtrip_with_clustering(self, tmp_path):
fx.Effect('costs', unit='EUR', is_objective=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=demand_profile, size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink(
+ 'demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=demand_profile, size=10)]
+ ),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
fs_clustered = fs.transform.cluster(n_clusters=2, cluster_duration='1D')
diff --git a/tests/plotting/test_solution_and_plotting.py b/tests/plotting/test_solution_and_plotting.py
index 50cc2ec59..4fcb0d070 100644
--- a/tests/plotting/test_solution_and_plotting.py
+++ b/tests/plotting/test_solution_and_plotting.py
@@ -14,7 +14,6 @@
import pytest
import xarray as xr
-import flixopt as fx
from flixopt import plotting
# ============================================================================
@@ -114,53 +113,6 @@ def test_solution_none_before_optimization(self, simple_flow_system):
assert simple_flow_system.solution is None
-class TestElementSolution:
- """Tests for element.solution API (filtered view of flow_system.solution)."""
-
- def test_element_solution_is_filtered_dataset(self, simple_flow_system, highs_solver):
- """Verify element.solution returns filtered Dataset."""
- simple_flow_system.optimize(highs_solver)
-
- boiler = simple_flow_system.components['Boiler']
- element_solution = boiler.solution
-
- assert isinstance(element_solution, xr.Dataset)
-
- def test_element_solution_contains_only_element_variables(self, simple_flow_system, highs_solver):
- """Verify element.solution only contains variables for that element."""
- simple_flow_system.optimize(highs_solver)
-
- boiler = simple_flow_system.components['Boiler']
- element_solution = boiler.solution
-
- # Variables should be batched names from _variable_names
- assert len(list(element_solution.data_vars)) > 0
- # Element solution should contain flow|rate (Boiler has flows)
- assert 'flow|rate' in element_solution
-
- def test_storage_element_solution(self, simple_flow_system, highs_solver):
- """Verify storage element solution contains charge state."""
- simple_flow_system.optimize(highs_solver)
-
- storage = simple_flow_system.components['Speicher']
- element_solution = storage.solution
-
- # Should contain storage charge variable
- charge_vars = [v for v in element_solution.data_vars if 'charge' in v]
- assert len(charge_vars) > 0
-
- def test_element_solution_raises_for_unlinked_element(self):
- """Verify accessing solution for unlinked element raises error."""
- boiler = fx.linear_converters.Boiler(
- 'TestBoiler',
- thermal_efficiency=0.9,
- thermal_flow=fx.Flow('Heat', flow_id='Q_th'),
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
- )
- with pytest.raises(ValueError, match='not linked to a FlowSystem'):
- _ = boiler.solution
-
-
# ============================================================================
# STATISTICS ACCESSOR TESTS
# ============================================================================
diff --git a/tests/superseded/math/test_bus.py b/tests/superseded/math/test_bus.py
index 62bce1cb2..4c71e99a6 100644
--- a/tests/superseded/math/test_bus.py
+++ b/tests/superseded/math/test_bus.py
@@ -14,8 +14,8 @@ def test_bus(self, basic_flow_system_linopy_coords, coords_config):
bus = fx.Bus('TestBus', imbalance_penalty_per_flow_hour=None)
flow_system.add_elements(
bus,
- fx.Sink('WärmelastTest', inputs=[fx.Flow('Q_th_Last', 'TestBus')]),
- fx.Source('GastarifTest', outputs=[fx.Flow('Q_Gas', 'TestBus')]),
+ fx.Sink('WärmelastTest', inputs=[fx.Flow(bus='TestBus', flow_id='Q_th_Last')]),
+ fx.Source('GastarifTest', outputs=[fx.Flow(bus='TestBus', flow_id='Q_Gas')]),
)
model = create_linopy_model(flow_system)
@@ -39,8 +39,8 @@ def test_bus_penalty(self, basic_flow_system_linopy_coords, coords_config):
bus = fx.Bus('TestBus', imbalance_penalty_per_flow_hour=1e5)
flow_system.add_elements(
bus,
- fx.Sink('WärmelastTest', inputs=[fx.Flow('Q_th_Last', 'TestBus')]),
- fx.Source('GastarifTest', outputs=[fx.Flow('Q_Gas', 'TestBus')]),
+ fx.Sink('WärmelastTest', inputs=[fx.Flow(bus='TestBus', flow_id='Q_th_Last')]),
+ fx.Source('GastarifTest', outputs=[fx.Flow(bus='TestBus', flow_id='Q_Gas')]),
)
model = create_linopy_model(flow_system)
@@ -70,8 +70,8 @@ def test_bus_with_coords(self, basic_flow_system_linopy_coords, coords_config):
bus = fx.Bus('TestBus', imbalance_penalty_per_flow_hour=None)
flow_system.add_elements(
bus,
- fx.Sink('WärmelastTest', inputs=[fx.Flow('Q_th_Last', 'TestBus')]),
- fx.Source('GastarifTest', outputs=[fx.Flow('Q_Gas', 'TestBus')]),
+ fx.Sink('WärmelastTest', inputs=[fx.Flow(bus='TestBus', flow_id='Q_th_Last')]),
+ fx.Source('GastarifTest', outputs=[fx.Flow(bus='TestBus', flow_id='Q_Gas')]),
)
model = create_linopy_model(flow_system)
diff --git a/tests/superseded/math/test_component.py b/tests/superseded/math/test_component.py
index 41d2bcf5e..54151732b 100644
--- a/tests/superseded/math/test_component.py
+++ b/tests/superseded/math/test_component.py
@@ -14,12 +14,12 @@ class TestComponentModel:
def test_flow_label_check(self):
"""Test that flow model constraints are correctly generated."""
inputs = [
- fx.Flow('Q_th_Last', 'Fernwärme', relative_minimum=np.ones(10) * 0.1),
- fx.Flow('Q_Gas', 'Fernwärme', relative_minimum=np.ones(10) * 0.1),
+ fx.Flow(bus='Fernwärme', flow_id='Q_th_Last', relative_minimum=np.ones(10) * 0.1),
+ fx.Flow(bus='Fernwärme', flow_id='Q_Gas', relative_minimum=np.ones(10) * 0.1),
]
outputs = [
- fx.Flow('Q_th_Last', 'Gas', relative_minimum=np.ones(10) * 0.01),
- fx.Flow('Q_Gas', 'Gas', relative_minimum=np.ones(10) * 0.01),
+ fx.Flow(bus='Gas', flow_id='Q_th_Last', relative_minimum=np.ones(10) * 0.01),
+ fx.Flow(bus='Gas', flow_id='Q_Gas', relative_minimum=np.ones(10) * 0.01),
]
with pytest.raises(ValueError, match='Flow names must be unique!'):
_ = flixopt.elements.Component('TestComponent', inputs=inputs, outputs=outputs)
@@ -28,12 +28,12 @@ def test_component(self, basic_flow_system_linopy_coords, coords_config):
"""Test that flow model constraints are correctly generated."""
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
inputs = [
- fx.Flow('In1', 'Fernwärme', size=100, relative_minimum=np.ones(10) * 0.1),
- fx.Flow('In2', 'Fernwärme', size=100, relative_minimum=np.ones(10) * 0.1),
+ fx.Flow(bus='Fernwärme', flow_id='In1', size=100, relative_minimum=np.ones(10) * 0.1),
+ fx.Flow(bus='Fernwärme', flow_id='In2', size=100, relative_minimum=np.ones(10) * 0.1),
]
outputs = [
- fx.Flow('Out1', 'Gas', size=100, relative_minimum=np.ones(10) * 0.01),
- fx.Flow('Out2', 'Gas', size=100, relative_minimum=np.ones(10) * 0.01),
+ fx.Flow(bus='Gas', flow_id='Out1', size=100, relative_minimum=np.ones(10) * 0.01),
+ fx.Flow(bus='Gas', flow_id='Out2', size=100, relative_minimum=np.ones(10) * 0.01),
]
comp = flixopt.elements.Component('TestComponent', inputs=inputs, outputs=outputs)
flow_system.add_elements(comp)
@@ -55,11 +55,11 @@ def test_on_with_multiple_flows(self, basic_flow_system_linopy_coords, coords_co
ub_out2 = np.linspace(1, 1.5, 10).round(2)
inputs = [
- fx.Flow('In1', 'Fernwärme', relative_minimum=np.ones(10) * 0.1, size=100),
+ fx.Flow(bus='Fernwärme', flow_id='In1', relative_minimum=np.ones(10) * 0.1, size=100),
]
outputs = [
- fx.Flow('Out1', 'Gas', relative_minimum=np.ones(10) * 0.2, size=200),
- fx.Flow('Out2', 'Gas', relative_minimum=np.ones(10) * 0.3, relative_maximum=ub_out2, size=300),
+ fx.Flow(bus='Gas', flow_id='Out1', relative_minimum=np.ones(10) * 0.2, size=200),
+ fx.Flow(bus='Gas', flow_id='Out2', relative_minimum=np.ones(10) * 0.3, relative_maximum=ub_out2, size=300),
]
comp = flixopt.elements.Component(
'TestComponent', inputs=inputs, outputs=outputs, status_parameters=fx.StatusParameters()
@@ -102,7 +102,7 @@ def test_on_with_single_flow(self, basic_flow_system_linopy_coords, coords_confi
"""Test that component with status and single flow is correctly generated."""
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
inputs = [
- fx.Flow('In1', 'Fernwärme', relative_minimum=np.ones(10) * 0.1, size=100),
+ fx.Flow(bus='Fernwärme', flow_id='In1', relative_minimum=np.ones(10) * 0.1, size=100),
]
outputs = []
comp = flixopt.elements.Component(
@@ -137,18 +137,20 @@ def test_previous_states_with_multiple_flows(self, basic_flow_system_linopy_coor
ub_out2 = np.linspace(1, 1.5, 10).round(2)
inputs = [
fx.Flow(
- 'In1',
- 'Fernwärme',
+ bus='Fernwärme',
+ flow_id='In1',
relative_minimum=np.ones(10) * 0.1,
size=100,
previous_flow_rate=np.array([0, 0, 1e-6, 1e-5, 1e-4, 3, 4]),
),
]
outputs = [
- fx.Flow('Out1', 'Gas', relative_minimum=np.ones(10) * 0.2, size=200, previous_flow_rate=[3, 4, 5]),
fx.Flow(
- 'Out2',
- 'Gas',
+ bus='Gas', flow_id='Out1', relative_minimum=np.ones(10) * 0.2, size=200, previous_flow_rate=[3, 4, 5]
+ ),
+ fx.Flow(
+ bus='Gas',
+ flow_id='Out2',
relative_minimum=np.ones(10) * 0.3,
relative_maximum=ub_out2,
size=300,
@@ -200,8 +202,8 @@ def test_previous_states_with_multiple_flows_parameterized(
ub_out2 = np.linspace(1, 1.5, 10).round(2)
inputs = [
fx.Flow(
- 'In1',
- 'Fernwärme',
+ bus='Fernwärme',
+ flow_id='In1',
relative_minimum=np.ones(10) * 0.1,
size=100,
previous_flow_rate=in1_previous_flow_rate,
@@ -210,11 +212,15 @@ def test_previous_states_with_multiple_flows_parameterized(
]
outputs = [
fx.Flow(
- 'Out1', 'Gas', relative_minimum=np.ones(10) * 0.2, size=200, previous_flow_rate=out1_previous_flow_rate
+ bus='Gas',
+ flow_id='Out1',
+ relative_minimum=np.ones(10) * 0.2,
+ size=200,
+ previous_flow_rate=out1_previous_flow_rate,
),
fx.Flow(
- 'Out2',
- 'Gas',
+ bus='Gas',
+ flow_id='Out2',
relative_minimum=np.ones(10) * 0.3,
relative_maximum=ub_out2,
size=300,
@@ -260,8 +266,8 @@ def test_transmission_basic(self, basic_flow_system, highs_solver):
boiler = fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- thermal_flow=fx.Flow('Q_th', bus='Wärme lokal'),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Wärme lokal', flow_id='Q_th'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
transmission = fx.Transmission(
@@ -269,9 +275,11 @@ def test_transmission_basic(self, basic_flow_system, highs_solver):
relative_losses=0.2,
absolute_losses=20,
in1=fx.Flow(
- 'Rohr1', 'Wärme lokal', size=fx.InvestParameters(effects_of_investment_per_size=5, maximum_size=1e6)
+ bus='Wärme lokal',
+ flow_id='Rohr1',
+ size=fx.InvestParameters(effects_of_investment_per_size=5, maximum_size=1e6),
),
- out1=fx.Flow('Rohr2', 'Fernwärme', size=1000),
+ out1=fx.Flow(bus='Fernwärme', flow_id='Rohr2', size=1000),
)
flow_system.add_elements(transmission, boiler)
@@ -300,24 +308,24 @@ def test_transmission_balanced(self, basic_flow_system, highs_solver):
'Boiler_Standard',
thermal_efficiency=0.9,
thermal_flow=fx.Flow(
- 'Q_th', bus='Fernwärme', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
+ bus='Fernwärme', flow_id='Q_th', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
boiler2 = fx.linear_converters.Boiler(
'Boiler_backup',
thermal_efficiency=0.4,
- thermal_flow=fx.Flow('Q_th', bus='Wärme lokal'),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Wärme lokal', flow_id='Q_th'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
last2 = fx.Sink(
'Wärmelast2',
inputs=[
fx.Flow(
- 'Q_th_Last',
bus='Wärme lokal',
+ flow_id='Q_th_Last',
size=1,
fixed_relative_profile=flow_system.components['Wärmelast'].inputs[0].fixed_relative_profile
* np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1]),
@@ -330,13 +338,13 @@ def test_transmission_balanced(self, basic_flow_system, highs_solver):
relative_losses=0.2,
absolute_losses=20,
in1=fx.Flow(
- 'Rohr1a',
bus='Wärme lokal',
+ flow_id='Rohr1a',
size=fx.InvestParameters(effects_of_investment_per_size=5, maximum_size=1000),
),
- out1=fx.Flow('Rohr1b', 'Fernwärme', size=1000),
- in2=fx.Flow('Rohr2a', 'Fernwärme', size=fx.InvestParameters(maximum_size=1000)),
- out2=fx.Flow('Rohr2b', bus='Wärme lokal', size=1000),
+ out1=fx.Flow(bus='Fernwärme', flow_id='Rohr1b', size=1000),
+ in2=fx.Flow(bus='Fernwärme', flow_id='Rohr2a', size=fx.InvestParameters(maximum_size=1000)),
+ out2=fx.Flow(bus='Wärme lokal', flow_id='Rohr2b', size=1000),
balanced=True,
)
@@ -375,24 +383,24 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver):
'Boiler_Standard',
thermal_efficiency=0.9,
thermal_flow=fx.Flow(
- 'Q_th', bus='Fernwärme', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
+ bus='Fernwärme', flow_id='Q_th', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
boiler2 = fx.linear_converters.Boiler(
'Boiler_backup',
thermal_efficiency=0.4,
- thermal_flow=fx.Flow('Q_th', bus='Wärme lokal'),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Wärme lokal', flow_id='Q_th'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
last2 = fx.Sink(
'Wärmelast2',
inputs=[
fx.Flow(
- 'Q_th_Last',
bus='Wärme lokal',
+ flow_id='Q_th_Last',
size=1,
fixed_relative_profile=flow_system.components['Wärmelast'].inputs[0].fixed_relative_profile
* np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1]),
@@ -405,19 +413,19 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver):
relative_losses=0.2,
absolute_losses=20,
in1=fx.Flow(
- 'Rohr1a',
bus='Wärme lokal',
+ flow_id='Rohr1a',
size=fx.InvestParameters(effects_of_investment_per_size=50, maximum_size=1000),
),
- out1=fx.Flow('Rohr1b', 'Fernwärme', size=1000),
+ out1=fx.Flow(bus='Fernwärme', flow_id='Rohr1b', size=1000),
in2=fx.Flow(
- 'Rohr2a',
- 'Fernwärme',
+ bus='Fernwärme',
+ flow_id='Rohr2a',
size=fx.InvestParameters(
effects_of_investment_per_size=100, minimum_size=10, maximum_size=1000, mandatory=True
),
),
- out2=fx.Flow('Rohr2b', bus='Wärme lokal', size=1000),
+ out2=fx.Flow(bus='Wärme lokal', flow_id='Rohr2b', size=1000),
balanced=False,
)
diff --git a/tests/superseded/math/test_effect.py b/tests/superseded/math/test_effect.py
index 103eb385a..102e1abee 100644
--- a/tests/superseded/math/test_effect.py
+++ b/tests/superseded/math/test_effect.py
@@ -143,13 +143,13 @@ def test_shares(self, basic_flow_system_linopy_coords, coords_config, highs_solv
'Boiler',
thermal_efficiency=0.5,
thermal_flow=fx.Flow(
- 'Q_th',
bus='Fernwärme',
+ flow_id='Q_th',
size=fx.InvestParameters(
effects_of_investment_per_size=10, minimum_size=20, maximum_size=200, mandatory=True
),
),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
),
)
diff --git a/tests/superseded/math/test_flow.py b/tests/superseded/math/test_flow.py
index fa9d558cb..a4294b2d8 100644
--- a/tests/superseded/math/test_flow.py
+++ b/tests/superseded/math/test_flow.py
@@ -13,7 +13,7 @@ def test_flow_minimal(self, basic_flow_system_linopy_coords, coords_config):
"""Test that flow model constraints are correctly generated."""
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
- flow = fx.Flow('Fernwärme', flow_id='Wärme', size=100)
+ flow = fx.Flow(bus='Fernwärme', flow_id='Wärme', size=100)
flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
@@ -34,7 +34,7 @@ def test_flow(self, basic_flow_system_linopy_coords, coords_config):
timesteps = flow_system.timesteps
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
relative_minimum=np.linspace(0, 0.5, timesteps.size),
@@ -69,7 +69,9 @@ def test_effects_per_flow_hour(self, basic_flow_system_linopy_coords, coords_con
co2_per_flow_hour = np.linspace(4, 5, timesteps.size)
flow = fx.Flow(
- 'Fernwärme', flow_id='Wärme', effects_per_flow_hour={'costs': costs_per_flow_hour, 'CO2': co2_per_flow_hour}
+ bus='Fernwärme',
+ flow_id='Wärme',
+ effects_per_flow_hour={'costs': costs_per_flow_hour, 'CO2': co2_per_flow_hour},
)
flow_system.add_elements(fx.Sink('Sink', inputs=[flow]), fx.Effect('CO2', 't', ''))
model = create_linopy_model(flow_system)
@@ -93,7 +95,7 @@ def test_flow_invest(self, basic_flow_system_linopy_coords, coords_config):
timesteps = flow_system.timesteps
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(minimum_size=20, maximum_size=100, mandatory=True),
relative_minimum=np.linspace(0.1, 0.5, timesteps.size),
@@ -127,7 +129,7 @@ def test_flow_invest_optional(self, basic_flow_system_linopy_coords, coords_conf
timesteps = flow_system.timesteps
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(minimum_size=20, maximum_size=100, mandatory=False),
relative_minimum=np.linspace(0.1, 0.5, timesteps.size),
@@ -159,7 +161,7 @@ def test_flow_invest_optional_wo_min_size(self, basic_flow_system_linopy_coords,
timesteps = flow_system.timesteps
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(maximum_size=100, mandatory=False),
relative_minimum=np.linspace(0.1, 0.5, timesteps.size),
@@ -182,7 +184,7 @@ def test_flow_invest_wo_min_size_non_optional(self, basic_flow_system_linopy_coo
timesteps = flow_system.timesteps
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(maximum_size=100, mandatory=True),
relative_minimum=np.linspace(0.1, 0.5, timesteps.size),
@@ -207,7 +209,7 @@ def test_flow_invest_fixed_size(self, basic_flow_system_linopy_coords, coords_co
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(fixed_size=75, mandatory=True),
relative_minimum=0.2,
@@ -240,7 +242,7 @@ def test_flow_invest_with_effects(self, basic_flow_system_linopy_coords, coords_
co2 = fx.Effect('CO2', unit='ton', description='CO2 emissions')
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(
minimum_size=20,
@@ -264,7 +266,7 @@ def test_flow_invest_divest_effects(self, basic_flow_system_linopy_coords, coord
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(
minimum_size=20,
@@ -289,7 +291,7 @@ def test_flow_on(self, basic_flow_system_linopy_coords, coords_config):
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
relative_minimum=0.2,
@@ -329,7 +331,7 @@ def test_effects_per_active_hour(self, basic_flow_system_linopy_coords, coords_c
co2_per_running_hour = np.linspace(4, 5, timesteps.size)
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
status_parameters=fx.StatusParameters(
@@ -353,7 +355,7 @@ def test_consecutive_on_hours(self, basic_flow_system_linopy_coords, coords_conf
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
previous_flow_rate=0, # Required to get initial constraint
@@ -387,7 +389,7 @@ def test_consecutive_on_hours_previous(self, basic_flow_system_linopy_coords, co
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
status_parameters=fx.StatusParameters(
@@ -414,7 +416,7 @@ def test_consecutive_off_hours(self, basic_flow_system_linopy_coords, coords_con
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
previous_flow_rate=0, # Required to get initial constraint (was OFF for 1h, so previous_downtime=1)
@@ -448,7 +450,7 @@ def test_consecutive_off_hours_previous(self, basic_flow_system_linopy_coords, c
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
status_parameters=fx.StatusParameters(
@@ -475,7 +477,7 @@ def test_switch_on_constraints(self, basic_flow_system_linopy_coords, coords_con
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
previous_flow_rate=0, # Required to get initial constraint
@@ -513,7 +515,7 @@ def test_on_hours_limits(self, basic_flow_system_linopy_coords, coords_config):
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
status_parameters=fx.StatusParameters(
@@ -544,7 +546,7 @@ class TestFlowOnInvestModel:
def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_config):
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(minimum_size=20, maximum_size=200, mandatory=False),
relative_minimum=0.2,
@@ -574,7 +576,7 @@ def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_c
def test_flow_on_invest_non_optional(self, basic_flow_system_linopy_coords, coords_config):
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(minimum_size=20, maximum_size=200, mandatory=True),
relative_minimum=0.2,
@@ -613,7 +615,7 @@ def test_fixed_relative_profile(self, basic_flow_system_linopy_coords, coords_co
profile = np.sin(np.linspace(0, 2 * np.pi, len(timesteps))) * 0.5 + 0.5 # Values between 0 and 1
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
fixed_relative_profile=profile,
@@ -638,7 +640,7 @@ def test_fixed_profile_with_investment(self, basic_flow_system_linopy_coords, co
profile = np.sin(np.linspace(0, 2 * np.pi, len(timesteps))) * 0.5 + 0.5
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(minimum_size=50, maximum_size=200, mandatory=False),
fixed_relative_profile=profile,
diff --git a/tests/superseded/math/test_linear_converter.py b/tests/superseded/math/test_linear_converter.py
index 2057581e4..69cb905cf 100644
--- a/tests/superseded/math/test_linear_converter.py
+++ b/tests/superseded/math/test_linear_converter.py
@@ -14,8 +14,8 @@ def test_basic_linear_converter(self, basic_flow_system_linopy_coords, coords_co
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
# Create input and output flows
- input_flow = fx.Flow('input_bus', flow_id='input', size=100)
- output_flow = fx.Flow('output_bus', flow_id='output', size=100)
+ input_flow = fx.Flow(bus='input_bus', flow_id='input', size=100)
+ output_flow = fx.Flow(bus='output_bus', flow_id='output', size=100)
# Create a simple linear converter with constant conversion factor
converter = fx.LinearConverter(
@@ -48,8 +48,8 @@ def test_linear_converter_time_varying(self, basic_flow_system_linopy_coords, co
varying_efficiency = np.linspace(0.7, 0.9, len(timesteps))
# Create input and output flows
- input_flow = fx.Flow('input_bus', flow_id='input', size=100)
- output_flow = fx.Flow('output_bus', flow_id='output', size=100)
+ input_flow = fx.Flow(bus='input_bus', flow_id='input', size=100)
+ output_flow = fx.Flow(bus='output_bus', flow_id='output', size=100)
# Create a linear converter with time-varying conversion factor
converter = fx.LinearConverter(
@@ -78,10 +78,10 @@ def test_linear_converter_multiple_factors(self, basic_flow_system_linopy_coords
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
# Create flows
- input_flow1 = fx.Flow('input_bus1', flow_id='input1', size=100)
- input_flow2 = fx.Flow('input_bus2', flow_id='input2', size=100)
- output_flow1 = fx.Flow('output_bus1', flow_id='output1', size=100)
- output_flow2 = fx.Flow('output_bus2', flow_id='output2', size=100)
+ input_flow1 = fx.Flow(bus='input_bus1', flow_id='input1', size=100)
+ input_flow2 = fx.Flow(bus='input_bus2', flow_id='input2', size=100)
+ output_flow1 = fx.Flow(bus='output_bus1', flow_id='output1', size=100)
+ output_flow2 = fx.Flow(bus='output_bus2', flow_id='output2', size=100)
# Create a linear converter with multiple inputs/outputs and conversion factors
converter = fx.LinearConverter(
@@ -111,8 +111,8 @@ def test_linear_converter_with_status(self, basic_flow_system_linopy_coords, coo
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
# Create input and output flows
- input_flow = fx.Flow('input_bus', flow_id='input', size=100)
- output_flow = fx.Flow('output_bus', flow_id='output', size=100)
+ input_flow = fx.Flow(bus='input_bus', flow_id='input', size=100)
+ output_flow = fx.Flow(bus='output_bus', flow_id='output', size=100)
# Create StatusParameters
status_params = fx.StatusParameters(
@@ -158,10 +158,10 @@ def test_linear_converter_multidimensional(self, basic_flow_system_linopy_coords
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
# Create a more complex setup with multiple flows
- input_flow1 = fx.Flow('fuel_bus', flow_id='fuel', size=100)
- input_flow2 = fx.Flow('electricity_bus', flow_id='electricity', size=50)
- output_flow1 = fx.Flow('heat_bus', flow_id='heat', size=70)
- output_flow2 = fx.Flow('cooling_bus', flow_id='cooling', size=30)
+ input_flow1 = fx.Flow(bus='fuel_bus', flow_id='fuel', size=100)
+ input_flow2 = fx.Flow(bus='electricity_bus', flow_id='electricity', size=50)
+ output_flow1 = fx.Flow(bus='heat_bus', flow_id='heat', size=70)
+ output_flow2 = fx.Flow(bus='cooling_bus', flow_id='cooling', size=30)
# Create a CHP-like converter with more complex connections
converter = fx.LinearConverter(
@@ -205,8 +205,8 @@ def test_edge_case_time_varying_conversion(self, basic_flow_system_linopy_coords
)
# Create input and output flows
- input_flow = fx.Flow('electricity_bus', flow_id='electricity', size=100)
- output_flow = fx.Flow('heat_bus', flow_id='heat', size=500) # Higher maximum to allow for COP of 5
+ input_flow = fx.Flow(bus='electricity_bus', flow_id='electricity', size=100)
+ output_flow = fx.Flow(bus='heat_bus', flow_id='heat', size=500) # Higher maximum to allow for COP of 5
conversion_factors = [{input_flow.label: fluctuating_cop, output_flow.label: np.ones(len(timesteps))}]
@@ -229,8 +229,8 @@ def test_piecewise_conversion(self, basic_flow_system_linopy_coords, coords_conf
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
# Create input and output flows
- input_flow = fx.Flow('input_bus', flow_id='input', size=100)
- output_flow = fx.Flow('output_bus', flow_id='output', size=100)
+ input_flow = fx.Flow(bus='input_bus', flow_id='input', size=100)
+ output_flow = fx.Flow(bus='output_bus', flow_id='output', size=100)
# Create pieces for piecewise conversion
# For input flow: two pieces from 0-50 and 50-100
@@ -269,8 +269,8 @@ def test_piecewise_conversion_with_status(self, basic_flow_system_linopy_coords,
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
# Create input and output flows
- input_flow = fx.Flow('input_bus', flow_id='input', size=100)
- output_flow = fx.Flow('output_bus', flow_id='output', size=100)
+ input_flow = fx.Flow(bus='input_bus', flow_id='input', size=100)
+ output_flow = fx.Flow(bus='output_bus', flow_id='output', size=100)
# Create pieces for piecewise conversion
input_pieces = [fx.Piece(start=0, end=50), fx.Piece(start=50, end=100)]
diff --git a/tests/superseded/math/test_storage.py b/tests/superseded/math/test_storage.py
index 3e3e23f15..efcb19694 100644
--- a/tests/superseded/math/test_storage.py
+++ b/tests/superseded/math/test_storage.py
@@ -16,8 +16,8 @@ def test_basic_storage(self, basic_flow_system_linopy_coords, coords_config):
# Create a simple storage
storage = fx.Storage(
'TestStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=30, # 30 kWh storage capacity
initial_charge_state=0, # Start empty
prevent_simultaneous_charge_and_discharge=True,
@@ -64,8 +64,8 @@ def test_lossy_storage(self, basic_flow_system_linopy_coords, coords_config):
# Create a simple storage
storage = fx.Storage(
'TestStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=30, # 30 kWh storage capacity
initial_charge_state=0, # Start empty
eta_charge=0.9, # Charging efficiency
@@ -111,8 +111,8 @@ def test_charge_state_bounds(self, basic_flow_system_linopy_coords, coords_confi
# Create a simple storage with time-varying bounds
storage = fx.Storage(
'TestStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=30, # 30 kWh storage capacity
initial_charge_state=3,
prevent_simultaneous_charge_and_discharge=True,
@@ -159,8 +159,8 @@ def test_storage_with_investment(self, basic_flow_system_linopy_coords, coords_c
# Create storage with investment parameters
storage = fx.Storage(
'InvestStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=fx.InvestParameters(
effects_of_investment={'costs': 100},
effects_of_investment_per_size={'costs': 10},
@@ -207,8 +207,8 @@ def test_storage_with_final_state_constraints(self, basic_flow_system_linopy_coo
# Create storage with final state constraints
storage = fx.Storage(
'FinalStateStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=30,
initial_charge_state=10, # Start with 10 kWh
minimal_final_charge_state=15, # End with at least 15 kWh
@@ -235,8 +235,8 @@ def test_storage_cyclic_initialization(self, basic_flow_system_linopy_coords, co
# Create storage with cyclic initialization
storage = fx.Storage(
'CyclicStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=30,
initial_charge_state='equals_final', # Cyclic initialization
eta_charge=0.9,
@@ -261,8 +261,8 @@ def test_simultaneous_charge_discharge(self, basic_flow_system_linopy_coords, co
# Create storage with or without simultaneous charge/discharge prevention
storage = fx.Storage(
'SimultaneousStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=30,
initial_charge_state=0,
eta_charge=0.9,
@@ -317,8 +317,8 @@ def test_investment_parameters(
# Create storage with specified investment parameters
storage = fx.Storage(
'InvestStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=fx.InvestParameters(**invest_params),
initial_charge_state=0,
eta_charge=0.9,
diff --git a/tests/superseded/test_functional.py b/tests/superseded/test_functional.py
index 2826379b5..d9cfa9d54 100644
--- a/tests/superseded/test_functional.py
+++ b/tests/superseded/test_functional.py
@@ -71,9 +71,9 @@ def flow_system_base(timesteps: pd.DatetimeIndex) -> fx.FlowSystem:
flow_system.add_elements(
fx.Sink(
'Wärmelast',
- inputs=[fx.Flow('Fernwärme', flow_id='Wärme', fixed_relative_profile=data.thermal_demand, size=1)],
+ inputs=[fx.Flow(bus='Fernwärme', flow_id='Wärme', fixed_relative_profile=data.thermal_demand, size=1)],
),
- fx.Source('Gastarif', outputs=[fx.Flow('Gas', flow_id='Gas', effects_per_flow_hour=1)]),
+ fx.Source('Gastarif', outputs=[fx.Flow(bus='Gas', flow_id='Gas', effects_per_flow_hour=1)]),
)
return flow_system
@@ -84,8 +84,8 @@ def flow_system_minimal(timesteps) -> fx.FlowSystem:
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th'),
)
)
return flow_system
@@ -140,9 +140,9 @@ def test_fixed_size(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=fx.InvestParameters(fixed_size=1000, effects_of_investment=10, effects_of_investment_per_size=1),
),
@@ -179,9 +179,9 @@ def test_optimize_size(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=fx.InvestParameters(effects_of_investment=10, effects_of_investment_per_size=1, maximum_size=100),
),
@@ -218,9 +218,9 @@ def test_size_bounds(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=fx.InvestParameters(
minimum_size=40, maximum_size=100, effects_of_investment=10, effects_of_investment_per_size=1
@@ -259,9 +259,9 @@ def test_optional_invest(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=fx.InvestParameters(
mandatory=False,
@@ -275,9 +275,9 @@ def test_optional_invest(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler_optional',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=fx.InvestParameters(
mandatory=False,
@@ -336,8 +336,8 @@ def test_on(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=100, status_parameters=fx.StatusParameters()),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th', size=100, status_parameters=fx.StatusParameters()),
)
)
@@ -373,9 +373,9 @@ def test_off(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
status_parameters=fx.StatusParameters(max_downtime=100),
@@ -422,9 +422,9 @@ def test_startup_shutdown(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
status_parameters=fx.StatusParameters(force_startup_tracking=True),
@@ -478,9 +478,9 @@ def test_on_total_max(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
status_parameters=fx.StatusParameters(active_hours_max=1),
@@ -489,8 +489,8 @@ def test_on_total_max(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler_backup',
thermal_efficiency=0.2,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th', size=100),
),
)
@@ -526,9 +526,9 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
status_parameters=fx.StatusParameters(active_hours_max=2),
@@ -537,9 +537,9 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler_backup',
thermal_efficiency=0.2,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
status_parameters=fx.StatusParameters(active_hours_min=3),
@@ -597,9 +597,9 @@ def test_consecutive_uptime_downtime(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
previous_flow_rate=0, # Required for initial uptime constraint
@@ -609,8 +609,8 @@ def test_consecutive_uptime_downtime(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler_backup',
thermal_efficiency=0.2,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th', size=100),
),
)
flow_system['Wärmelast'].inputs[0].fixed_relative_profile = np.array([5, 10, 20, 18, 12])
@@ -656,15 +656,15 @@ def test_consecutive_off(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th'),
),
fx.linear_converters.Boiler(
'Boiler_backup',
thermal_efficiency=0.2,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
previous_flow_rate=np.array([20]), # Otherwise its Off before the start
diff --git a/tests/test_align_to_coords.py b/tests/test_align_to_coords.py
new file mode 100644
index 000000000..88a9a0dc6
--- /dev/null
+++ b/tests/test_align_to_coords.py
@@ -0,0 +1,179 @@
+"""Tests for align_to_coords() and align_effects_to_coords()."""
+
+import numpy as np
+import pandas as pd
+import pytest
+import xarray as xr
+
+from flixopt.core import ConversionError, TimeSeriesData, align_effects_to_coords, align_to_coords
+
+
+@pytest.fixture
+def time_coords():
+ """Standard time-only coordinates."""
+ return {'time': pd.date_range('2020-01-01', periods=5, freq='h', name='time')}
+
+
+@pytest.fixture
+def full_coords():
+ """Time + period + scenario coordinates."""
+ return {
+ 'time': pd.date_range('2020-01-01', periods=5, freq='h', name='time'),
+ 'period': pd.Index([2020, 2030], name='period'),
+ 'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),
+ }
+
+
+class TestAlignNone:
+ def test_none_returns_none(self, time_coords):
+ assert align_to_coords(None, time_coords) is None
+
+ def test_none_with_name(self, time_coords):
+ assert align_to_coords(None, time_coords, name='test') is None
+
+
+class TestAlignScalar:
+ def test_int(self, time_coords):
+ result = align_to_coords(42, time_coords, name='val')
+ assert isinstance(result, xr.DataArray)
+ assert result.ndim == 0
+ assert float(result) == 42.0
+
+ def test_float(self, time_coords):
+ result = align_to_coords(0.5, time_coords)
+ assert result.ndim == 0
+ assert float(result) == 0.5
+
+ def test_bool(self, time_coords):
+ result = align_to_coords(True, time_coords)
+ assert result.ndim == 0
+
+ def test_np_float(self, time_coords):
+ result = align_to_coords(np.float64(3.14), time_coords)
+ assert result.ndim == 0
+ assert float(result) == pytest.approx(3.14)
+
+
+class TestAlign1DArray:
+ def test_numpy_array_matches_time(self, time_coords):
+ data = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
+ result = align_to_coords(data, time_coords, name='profile')
+ assert result.dims == ('time',)
+ assert len(result) == 5
+ np.testing.assert_array_equal(result.values, data)
+
+ def test_wrong_length_raises(self, time_coords):
+ data = np.array([1.0, 2.0, 3.0]) # length 3, time has 5
+ with pytest.raises(ConversionError):
+ align_to_coords(data, time_coords)
+
+ def test_matches_period_dim(self, full_coords):
+ data = np.array([10.0, 20.0]) # length 2 matches period
+ result = align_to_coords(data, full_coords, dims=['period', 'scenario'])
+ assert result.dims == ('period',)
+
+ def test_matches_scenario_dim(self, full_coords):
+ data = np.array([1.0, 2.0, 3.0]) # length 3 matches scenario
+ result = align_to_coords(data, full_coords, dims=['period', 'scenario'])
+ assert result.dims == ('scenario',)
+
+
+class TestAlignSeries:
+ def test_series_with_datetime_index(self, time_coords):
+ idx = time_coords['time']
+ data = pd.Series([10, 20, 30, 40, 50], index=idx)
+ result = align_to_coords(data, time_coords)
+ assert result.dims == ('time',)
+ np.testing.assert_array_equal(result.values, [10, 20, 30, 40, 50])
+
+ def test_series_wrong_index_raises(self, time_coords):
+ wrong_idx = pd.date_range('2021-01-01', periods=5, freq='h')
+ data = pd.Series([1, 2, 3, 4, 5], index=wrong_idx)
+ with pytest.raises(ConversionError):
+ align_to_coords(data, time_coords)
+
+
+class TestAlignTimeSeriesData:
+ def test_basic_timeseries(self, time_coords):
+ data = TimeSeriesData([1, 2, 3, 4, 5])
+ result = align_to_coords(data, time_coords, name='ts')
+ assert isinstance(result, TimeSeriesData)
+ assert result.dims == ('time',)
+
+ def test_clustering_metadata_preserved(self, time_coords):
+ data = TimeSeriesData([1, 2, 3, 4, 5], clustering_group='heat')
+ result = align_to_coords(data, time_coords, name='ts')
+ assert result.clustering_group == 'heat'
+
+ def test_clustering_weight_preserved(self, time_coords):
+ data = TimeSeriesData([1, 2, 3, 4, 5], clustering_weight=0.7)
+ result = align_to_coords(data, time_coords, name='ts')
+ assert result.clustering_weight == 0.7
+
+
+class TestAlignDataArray:
+ def test_already_aligned_passthrough(self, time_coords):
+ idx = time_coords['time']
+ da = xr.DataArray([1, 2, 3, 4, 5], dims=['time'], coords={'time': idx})
+ result = align_to_coords(da, time_coords)
+ xr.testing.assert_equal(result, da)
+
+ def test_scalar_dataarray(self, time_coords):
+ da = xr.DataArray(42.0)
+ result = align_to_coords(da, time_coords)
+ assert result.ndim == 0
+ assert float(result) == 42.0
+
+ def test_incompatible_dims_raises(self, time_coords):
+ da = xr.DataArray([1, 2, 3], dims=['foo'])
+ with pytest.raises(ConversionError):
+ align_to_coords(da, time_coords)
+
+
+class TestAlignDimsFilter:
+ def test_dims_restricts_alignment(self, full_coords):
+ data = np.array([10.0, 20.0]) # length 2 matches period
+ result = align_to_coords(data, full_coords, dims=['period'])
+ assert result.dims == ('period',)
+
+ def test_dims_none_uses_all(self, time_coords):
+ data = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
+ result = align_to_coords(data, time_coords, dims=None)
+ assert result.dims == ('time',)
+
+
+class TestAlignName:
+ def test_name_assigned(self, time_coords):
+ result = align_to_coords(42, time_coords, name='my_param')
+ assert result.name == 'my_param'
+
+ def test_no_name(self, time_coords):
+ result = align_to_coords(42, time_coords)
+ # Should not error, name may be None
+ assert result is not None
+
+
+class TestAlignEffects:
+ def test_none_returns_none(self, time_coords):
+ assert align_effects_to_coords(None, time_coords) is None
+
+ def test_scalar_effects(self, time_coords):
+ effects = {'costs': 0.04, 'CO2': 0.3}
+ result = align_effects_to_coords(effects, time_coords, prefix='flow')
+ assert set(result.keys()) == {'costs', 'CO2'}
+ assert float(result['costs']) == pytest.approx(0.04)
+ assert result['costs'].name == 'flow|costs'
+
+ def test_array_effects(self, time_coords):
+ effects = {'costs': np.array([1, 2, 3, 4, 5])}
+ result = align_effects_to_coords(effects, time_coords)
+ assert result['costs'].dims == ('time',)
+
+ def test_prefix_suffix(self, time_coords):
+ effects = {'costs': 42}
+ result = align_effects_to_coords(effects, time_coords, prefix='Boiler', suffix='per_hour')
+ assert result['costs'].name == 'Boiler|costs|per_hour'
+
+ def test_empty_dict(self, time_coords):
+ result = align_effects_to_coords({}, time_coords)
+ assert result == {}
diff --git a/tests/test_clustering/test_cluster_reduce_expand.py b/tests/test_clustering/test_cluster_reduce_expand.py
index fe61144ea..06426e3c5 100644
--- a/tests/test_clustering/test_cluster_reduce_expand.py
+++ b/tests/test_clustering/test_cluster_reduce_expand.py
@@ -20,13 +20,13 @@ def create_simple_system(timesteps: pd.DatetimeIndex) -> fx.FlowSystem:
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -249,14 +249,14 @@ def create_system_with_scenarios(timesteps: pd.DatetimeIndex, scenarios: pd.Inde
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand_df, size=1)],
+ inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand_df, size=1)],
),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -392,12 +392,12 @@ def create_system_with_storage(
flow_system.add_elements(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Grid', outputs=[fx.Flow('P', bus='Elec', size=100, effects_per_flow_hour=0.1)]),
- fx.Sink('Load', inputs=[fx.Flow('P', bus='Elec', fixed_relative_profile=demand, size=1)]),
+ fx.Source('Grid', outputs=[fx.Flow(bus='Elec', flow_id='P', size=100, effects_per_flow_hour=0.1)]),
+ fx.Sink('Load', inputs=[fx.Flow(bus='Elec', flow_id='P', fixed_relative_profile=demand, size=1)]),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=30),
- discharging=fx.Flow('discharge', bus='Elec', size=30),
+ charging=fx.Flow(bus='Elec', size=30),
+ discharging=fx.Flow(bus='Elec', size=30),
capacity_in_flow_hours=100,
relative_loss_per_hour=relative_loss_per_hour,
cluster_mode=cluster_mode,
@@ -579,13 +579,13 @@ def create_system_with_periods(timesteps: pd.DatetimeIndex, periods: pd.Index) -
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -620,14 +620,14 @@ def create_system_with_periods_and_scenarios(
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand_da, size=1)],
+ inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand_da, size=1)],
),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -753,13 +753,13 @@ def create_system_with_peak_demand(timesteps: pd.DatetimeIndex) -> fx.FlowSystem
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -948,13 +948,13 @@ def test_cluster_with_data_vars_subset(self, timesteps_8_days):
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=price)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=price)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
@@ -992,13 +992,13 @@ def test_data_vars_preserves_all_flowsystem_data(self, timesteps_8_days):
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=price)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=price)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
@@ -1025,13 +1025,13 @@ def test_data_vars_optimization_works(self, solver_fixture, timesteps_8_days):
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=price)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=price)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
@@ -1057,13 +1057,13 @@ def test_data_vars_with_multiple_variables(self, timesteps_8_days):
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=price)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=price)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
@@ -1291,12 +1291,12 @@ def test_segmented_total_effects_match_solution(self, solver_fixture, freq):
fs.add_elements(
fx.Source(
'Boiler',
- outputs=[fx.Flow('Q', bus='Heat', size=100, effects_per_flow_hour={'Cost': 50})],
+ outputs=[fx.Flow(bus='Heat', flow_id='Q', size=100, effects_per_flow_hour={'Cost': 50})],
)
)
demand_profile = np.tile([0.5, 1], n_timesteps // 2)
fs.add_elements(
- fx.Sink('Demand', inputs=[fx.Flow('Q', bus='Heat', size=50, fixed_relative_profile=demand_profile)])
+ fx.Sink('Demand', inputs=[fx.Flow(bus='Heat', flow_id='Q', size=50, fixed_relative_profile=demand_profile)])
)
# Cluster with segments -> solve -> expand
@@ -1547,8 +1547,8 @@ def test_startup_shutdown_first_timestep_only(self, solver_fixture, timesteps_8_
'Boiler',
outputs=[
fx.Flow(
- 'Q',
bus='Heat',
+ flow_id='Q',
size=100,
status_parameters=fx.StatusParameters(effects_per_startup={'Cost': 10}),
effects_per_flow_hour={'Cost': 50},
@@ -1561,7 +1561,7 @@ def test_startup_shutdown_first_timestep_only(self, solver_fixture, timesteps_8_
demand_pattern = np.array([0.8] * 12 + [0.0] * 12) # On/off pattern per day (0-1 range)
demand_profile = np.tile(demand_pattern, 8)
fs.add_elements(
- fx.Sink('Demand', inputs=[fx.Flow('Q', bus='Heat', size=50, fixed_relative_profile=demand_profile)])
+ fx.Sink('Demand', inputs=[fx.Flow(bus='Heat', flow_id='Q', size=50, fixed_relative_profile=demand_profile)])
)
# Cluster with segments
@@ -1616,8 +1616,8 @@ def test_startup_timing_preserved_non_segmented(self, solver_fixture, timesteps_
'Boiler',
outputs=[
fx.Flow(
- 'Q',
bus='Heat',
+ flow_id='Q',
size=100,
status_parameters=fx.StatusParameters(effects_per_startup={'Cost': 10}),
effects_per_flow_hour={'Cost': 50},
@@ -1629,7 +1629,7 @@ def test_startup_timing_preserved_non_segmented(self, solver_fixture, timesteps_
demand_pattern = np.array([0.8] * 12 + [0.0] * 12) # On/off pattern per day (0-1 range)
demand_profile = np.tile(demand_pattern, 8)
fs.add_elements(
- fx.Sink('Demand', inputs=[fx.Flow('Q', bus='Heat', size=50, fixed_relative_profile=demand_profile)])
+ fx.Sink('Demand', inputs=[fx.Flow(bus='Heat', flow_id='Q', size=50, fixed_relative_profile=demand_profile)])
)
# Cluster WITHOUT segments
diff --git a/tests/test_clustering/test_clustering_io.py b/tests/test_clustering/test_clustering_io.py
index 527ea645c..92e6842e4 100644
--- a/tests/test_clustering/test_clustering_io.py
+++ b/tests/test_clustering/test_clustering_io.py
@@ -19,8 +19,10 @@ def simple_system_24h():
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=np.ones(24), size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink('demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=np.ones(24), size=10)]),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
@@ -54,8 +56,10 @@ def simple_system_8_days():
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=demand_profile, size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink('demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=demand_profile, size=10)]),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
@@ -224,8 +228,12 @@ def system_with_scenarios(self):
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=demand_profile, size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink(
+ 'demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=demand_profile, size=10)]
+ ),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
@@ -349,8 +357,12 @@ def system_with_periods(self):
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=demand_profile, size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink(
+ 'demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=demand_profile, size=10)]
+ ),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
@@ -434,12 +446,16 @@ def system_with_intercluster_storage(self):
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=demand_profile, size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.1})]),
+ fx.Sink(
+ 'demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=demand_profile, size=10)]
+ ),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.1})]
+ ),
fx.Storage(
'storage',
- charging=fx.Flow('in', bus='heat', size=20),
- discharging=fx.Flow('out', bus='heat', size=20),
+ charging=fx.Flow(bus='heat', flow_id='in', size=20),
+ discharging=fx.Flow(bus='heat', flow_id='out', size=20),
capacity_in_flow_hours=100,
cluster_mode='intercluster', # Key: intercluster mode
),
@@ -576,8 +592,10 @@ def system_with_periods_and_scenarios(self):
fs.add_elements(
fx.Bus('heat'),
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=200, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink('demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=demand, size=1)]),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=200, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
diff --git a/tests/test_clustering/test_integration.py b/tests/test_clustering/test_integration.py
index fcec081aa..d91bdb6e6 100644
--- a/tests/test_clustering/test_integration.py
+++ b/tests/test_clustering/test_integration.py
@@ -142,9 +142,10 @@ def test_clustering_data_returns_dataset(self):
# Add components with time-varying data
demand_data = np.sin(np.linspace(0, 4 * np.pi, n_hours)) + 2
bus = Bus('electricity')
- source = Source('grid', outputs=[Flow('grid_in', bus='electricity', size=100)])
+ source = Source('grid', outputs=[Flow(bus='electricity', flow_id='grid_in', size=100)])
sink = Sink(
- 'demand', inputs=[Flow('demand_out', bus='electricity', size=100, fixed_relative_profile=demand_data)]
+ 'demand',
+ inputs=[Flow(bus='electricity', flow_id='demand_out', size=100, fixed_relative_profile=demand_data)],
)
fs.add_elements(source, sink, bus)
@@ -162,9 +163,10 @@ def test_clustering_data_contains_only_time_varying(self):
# Add components with time-varying and constant data
demand_data = np.sin(np.linspace(0, 4 * np.pi, n_hours)) + 2
bus = Bus('electricity')
- source = Source('grid', outputs=[Flow('grid_in', bus='electricity', size=100)])
+ source = Source('grid', outputs=[Flow(bus='electricity', flow_id='grid_in', size=100)])
sink = Sink(
- 'demand', inputs=[Flow('demand_out', bus='electricity', size=100, fixed_relative_profile=demand_data)]
+ 'demand',
+ inputs=[Flow(bus='electricity', flow_id='demand_out', size=100, fixed_relative_profile=demand_data)],
)
fs.add_elements(source, sink, bus)
@@ -196,9 +198,10 @@ def test_clustering_data_with_periods(self):
)
bus = Bus('electricity')
effect = Effect('costs', '€', is_objective=True)
- source = Source('grid', outputs=[Flow('grid_in', bus='electricity', size=100)])
+ source = Source('grid', outputs=[Flow(bus='electricity', flow_id='grid_in', size=100)])
sink = Sink(
- 'demand', inputs=[Flow('demand_out', bus='electricity', size=100, fixed_relative_profile=demand_data)]
+ 'demand',
+ inputs=[Flow(bus='electricity', flow_id='demand_out', size=100, fixed_relative_profile=demand_data)],
)
fs.add_elements(source, sink, bus, effect)
@@ -238,9 +241,9 @@ def test_cluster_reduces_timesteps(self):
demand_data = np.sin(np.linspace(0, 14 * np.pi, n_hours)) + 2 # Varying demand over 7 days
bus = Bus('electricity')
# Bus label is passed as string to Flow
- grid_flow = Flow('grid_in', bus='electricity', size=100)
+ grid_flow = Flow(bus='electricity', flow_id='grid_in', size=100)
demand_flow = Flow(
- 'demand_out', bus='electricity', size=100, fixed_relative_profile=TimeSeriesData(demand_data / 100)
+ bus='electricity', flow_id='demand_out', size=100, fixed_relative_profile=TimeSeriesData(demand_data / 100)
)
source = Source('grid', outputs=[grid_flow])
sink = Sink('demand', inputs=[demand_flow])
@@ -276,9 +279,9 @@ def basic_flow_system(self):
demand_data = np.sin(np.linspace(0, 14 * np.pi, n_hours)) + 2
bus = Bus('electricity')
- grid_flow = Flow('grid_in', bus='electricity', size=100)
+ grid_flow = Flow(bus='electricity', flow_id='grid_in', size=100)
demand_flow = Flow(
- 'demand_out', bus='electricity', size=100, fixed_relative_profile=TimeSeriesData(demand_data / 100)
+ bus='electricity', flow_id='demand_out', size=100, fixed_relative_profile=TimeSeriesData(demand_data / 100)
)
source = Source('grid', outputs=[grid_flow])
sink = Sink('demand', inputs=[demand_flow])
@@ -349,9 +352,9 @@ def test_metrics_with_periods(self):
demand_data = np.sin(np.linspace(0, 14 * np.pi, n_hours)) + 2
bus = Bus('electricity')
- grid_flow = Flow('grid_in', bus='electricity', size=100)
+ grid_flow = Flow(bus='electricity', flow_id='grid_in', size=100)
demand_flow = Flow(
- 'demand_out', bus='electricity', size=100, fixed_relative_profile=TimeSeriesData(demand_data / 100)
+ bus='electricity', flow_id='demand_out', size=100, fixed_relative_profile=TimeSeriesData(demand_data / 100)
)
source = Source('grid', outputs=[grid_flow])
sink = Sink('demand', inputs=[demand_flow])
diff --git a/tests/test_clustering/test_multiperiod_extremes.py b/tests/test_clustering/test_multiperiod_extremes.py
index 55720f3a0..356866399 100644
--- a/tests/test_clustering/test_multiperiod_extremes.py
+++ b/tests/test_clustering/test_multiperiod_extremes.py
@@ -109,14 +109,14 @@ def create_multiperiod_system_with_different_profiles(
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand_da, size=1)],
+ inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand_da, size=1)],
),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -195,14 +195,14 @@ def create_system_with_extreme_peaks(
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand_input, size=1)],
+ inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand_input, size=1)],
),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -250,14 +250,14 @@ def create_multiperiod_multiscenario_system(
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand_da, size=1)],
+ inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand_da, size=1)],
),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -435,13 +435,13 @@ def test_new_cluster_with_min_value(self, solver_fixture, timesteps_8_days):
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
@@ -737,13 +737,13 @@ def test_cluster_with_scenarios(self, solver_fixture, timesteps_8_days, scenario
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand_da, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand_da, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
diff --git a/tests/test_comparison.py b/tests/test_comparison.py
index b37b1ca44..bca4ba9bd 100644
--- a/tests/test_comparison.py
+++ b/tests/test_comparison.py
@@ -35,26 +35,26 @@ def _build_base_flow_system():
fs.add_elements(
fx.Source(
'Grid',
- outputs=[fx.Flow('P_el', bus='Electricity', size=100, effects_per_flow_hour={'costs': 0.3})],
+ outputs=[fx.Flow(bus='Electricity', flow_id='P_el', size=100, effects_per_flow_hour={'costs': 0.3})],
),
fx.Source(
'GasSupply',
- outputs=[fx.Flow('Q_gas', bus='Gas', size=200, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})],
+ outputs=[fx.Flow(bus='Gas', flow_id='Q_gas', size=200, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})],
),
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q_demand', bus='Heat', size=50, fixed_relative_profile=0.6)],
+ inputs=[fx.Flow(bus='Heat', flow_id='Q_demand', size=50, fixed_relative_profile=0.6)],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- thermal_flow=fx.Flow('Q_th', bus='Heat', size=60),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th', size=60),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
),
fx.Storage(
'ThermalStorage',
- charging=fx.Flow('Q_charge', bus='Heat', size=20),
- discharging=fx.Flow('Q_discharge', bus='Heat', size=20),
+ charging=fx.Flow(bus='Heat', flow_id='Q_charge', size=20),
+ discharging=fx.Flow(bus='Heat', flow_id='Q_discharge', size=20),
capacity_in_flow_hours=40,
initial_charge_state=0.5,
),
@@ -75,38 +75,38 @@ def _build_flow_system_with_chp():
fs.add_elements(
fx.Source(
'Grid',
- outputs=[fx.Flow('P_el', bus='Electricity', size=100, effects_per_flow_hour={'costs': 0.3})],
+ outputs=[fx.Flow(bus='Electricity', flow_id='P_el', size=100, effects_per_flow_hour={'costs': 0.3})],
),
fx.Source(
'GasSupply',
- outputs=[fx.Flow('Q_gas', bus='Gas', size=200, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})],
+ outputs=[fx.Flow(bus='Gas', flow_id='Q_gas', size=200, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})],
),
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q_demand', bus='Heat', size=50, fixed_relative_profile=0.6)],
+ inputs=[fx.Flow(bus='Heat', flow_id='Q_demand', size=50, fixed_relative_profile=0.6)],
),
fx.Sink(
'ElectricitySink',
- inputs=[fx.Flow('P_sink', bus='Electricity', size=100)],
+ inputs=[fx.Flow(bus='Electricity', flow_id='P_sink', size=100)],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- thermal_flow=fx.Flow('Q_th', bus='Heat', size=60),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th', size=60),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
),
fx.linear_converters.CHP(
'CHP',
thermal_efficiency=0.5,
electrical_efficiency=0.3,
- thermal_flow=fx.Flow('Q_th_chp', bus='Heat', size=30),
- electrical_flow=fx.Flow('P_el_chp', bus='Electricity', size=18),
- fuel_flow=fx.Flow('Q_fu_chp', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th_chp', size=30),
+ electrical_flow=fx.Flow(bus='Electricity', flow_id='P_el_chp', size=18),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu_chp'),
),
fx.Storage(
'ThermalStorage',
- charging=fx.Flow('Q_charge', bus='Heat', size=20),
- discharging=fx.Flow('Q_discharge', bus='Heat', size=20),
+ charging=fx.Flow(bus='Heat', flow_id='Q_charge', size=20),
+ discharging=fx.Flow(bus='Heat', flow_id='Q_discharge', size=20),
capacity_in_flow_hours=40,
initial_charge_state=0.5,
),
diff --git a/tests/test_legacy_solution_access.py b/tests/test_legacy_solution_access.py
index 74bcfe917..df83377e2 100644
--- a/tests/test_legacy_solution_access.py
+++ b/tests/test_legacy_solution_access.py
@@ -49,8 +49,10 @@ def test_effect_access(self, optimize):
fs.add_elements(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10, effects_per_flow_hour=1)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Source('Src', outputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, effects_per_flow_hour=1)]),
+ fx.Sink(
+ 'Snk', inputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))]
+ ),
)
fs = optimize(fs)
@@ -68,8 +70,10 @@ def test_flow_rate_access(self, optimize):
fs.add_elements(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Source('Src', outputs=[fx.Flow(bus='Heat', flow_id='heat', size=10)]),
+ fx.Sink(
+ 'Snk', inputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))]
+ ),
)
fs = optimize(fs)
@@ -89,9 +93,15 @@ def test_flow_size_access(self, optimize):
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Source(
'Src',
- outputs=[fx.Flow('heat', bus='Heat', size=fx.InvestParameters(fixed_size=50), effects_per_flow_hour=1)],
+ outputs=[
+ fx.Flow(
+ bus='Heat', flow_id='heat', size=fx.InvestParameters(fixed_size=50), effects_per_flow_hour=1
+ )
+ ],
+ ),
+ fx.Sink(
+ 'Snk', inputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([5, 5]))]
),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([5, 5]))]),
)
fs = optimize(fs)
@@ -109,15 +119,18 @@ def test_storage_charge_state_access(self, optimize):
fs.add_elements(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Grid', outputs=[fx.Flow('elec', bus='Elec', size=100, effects_per_flow_hour=1)]),
+ fx.Source('Grid', outputs=[fx.Flow(bus='Elec', flow_id='elec', size=100, effects_per_flow_hour=1)]),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=10),
- discharging=fx.Flow('discharge', bus='Elec', size=10),
+ charging=fx.Flow(bus='Elec', size=10),
+ discharging=fx.Flow(bus='Elec', size=10),
capacity_in_flow_hours=50,
initial_charge_state=25,
),
- fx.Sink('Load', inputs=[fx.Flow('elec', bus='Elec', size=10, fixed_relative_profile=np.array([1, 1, 1]))]),
+ fx.Sink(
+ 'Load',
+ inputs=[fx.Flow(bus='Elec', flow_id='elec', size=10, fixed_relative_profile=np.array([1, 1, 1]))],
+ ),
)
fs = optimize(fs)
@@ -143,8 +156,11 @@ def test_legacy_access_disabled_by_default(self):
fs.add_elements(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10, effects_per_flow_hour=1)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Source('Src', outputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, effects_per_flow_hour=1)]),
+ fx.Sink(
+ 'Snk',
+ inputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))],
+ ),
)
solver = fx.solvers.HighsSolver(log_to_console=False)
fs.optimize(solver)
@@ -167,8 +183,10 @@ def test_legacy_access_emits_deprecation_warning(self, optimize):
fs.add_elements(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10, effects_per_flow_hour=1)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Source('Src', outputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, effects_per_flow_hour=1)]),
+ fx.Sink(
+ 'Snk', inputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))]
+ ),
)
fs = optimize(fs)
diff --git a/tests/test_math/test_bus.py b/tests/test_math/test_bus.py
index 121b4c747..2e3d635c3 100644
--- a/tests/test_math/test_bus.py
+++ b/tests/test_math/test_bus.py
@@ -27,19 +27,19 @@ def test_merit_order_dispatch(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
fx.Source(
'Src1',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=1, size=20),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=1, size=20),
],
),
fx.Source(
'Src2',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=2, size=20),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=2, size=20),
],
),
)
@@ -70,14 +70,18 @@ def test_imbalance_penalty(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'Src',
outputs=[
fx.Flow(
- 'heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20]), effects_per_flow_hour=1
+ bus='Heat',
+ flow_id='heat',
+ size=1,
+ fixed_relative_profile=np.array([20, 20]),
+ effects_per_flow_hour=1,
),
],
),
@@ -109,33 +113,33 @@ def test_prevent_simultaneous_flow_rates(self, optimize):
fx.Sink(
'Demand1',
inputs=[
- fx.Flow('heat', bus='Heat1', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat1', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Sink(
'Demand2',
inputs=[
- fx.Flow('heat', bus='Heat2', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat2', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'DualSrc',
outputs=[
- fx.Flow('heat1', bus='Heat1', effects_per_flow_hour=1, size=100),
- fx.Flow('heat2', bus='Heat2', effects_per_flow_hour=1, size=100),
+ fx.Flow(bus='Heat1', flow_id='heat1', effects_per_flow_hour=1, size=100),
+ fx.Flow(bus='Heat2', flow_id='heat2', effects_per_flow_hour=1, size=100),
],
prevent_simultaneous_flow_rates=True,
),
fx.Source(
'Backup1',
outputs=[
- fx.Flow('heat', bus='Heat1', effects_per_flow_hour=5),
+ fx.Flow(bus='Heat1', flow_id='heat', effects_per_flow_hour=5),
],
),
fx.Source(
'Backup2',
outputs=[
- fx.Flow('heat', bus='Heat2', effects_per_flow_hour=5),
+ fx.Flow(bus='Heat2', flow_id='heat', effects_per_flow_hour=5),
],
),
)
diff --git a/tests/test_math/test_clustering.py b/tests/test_math/test_clustering.py
index d56366508..15b12e2ae 100644
--- a/tests/test_math/test_clustering.py
+++ b/tests/test_math/test_clustering.py
@@ -41,11 +41,11 @@ def test_clustering_basic_objective(self):
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=demand)],
+ inputs=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=demand)],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs_full.optimize(_SOLVER)
@@ -70,11 +70,11 @@ def test_clustering_basic_objective(self):
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=demand_avg)],
+ inputs=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=demand_avg)],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs_clust.optimize(_SOLVER)
@@ -100,16 +100,16 @@ def test_storage_cluster_mode_cyclic(self):
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 20, 30, 10]))],
+ inputs=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 20, 30, 10]))],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 10, 1, 10]))],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 10, 1, 10]))],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=100),
- discharging=fx.Flow('discharge', bus='Elec', size=100),
+ charging=fx.Flow(bus='Elec', size=100),
+ discharging=fx.Flow(bus='Elec', size=100),
capacity_in_flow_hours=100,
initial_charge_state=0,
eta_charge=1,
@@ -138,16 +138,18 @@ def _build(mode):
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 20, 30, 10]))],
+ inputs=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 20, 30, 10]))
+ ],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 10, 1, 10]))],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 10, 1, 10]))],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=100),
- discharging=fx.Flow('discharge', bus='Elec', size=100),
+ charging=fx.Flow(bus='Elec', size=100),
+ discharging=fx.Flow(bus='Elec', size=100),
capacity_in_flow_hours=100,
initial_charge_state=0,
eta_charge=1,
@@ -184,8 +186,8 @@ def test_status_cluster_mode_cyclic(self):
'Demand',
inputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([10, 10, 10, 10]),
),
@@ -193,15 +195,15 @@ def test_status_cluster_mode_cyclic(self):
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
status_parameters=fx.StatusParameters(
effects_per_startup=10,
@@ -242,11 +244,11 @@ def test_flow_rates_match_demand_per_cluster(self, optimize):
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 20, 30, 40]))],
+ inputs=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 20, 30, 40]))],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs = optimize(fs)
@@ -269,11 +271,11 @@ def test_per_timestep_effects_with_varying_price(self, optimize):
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10, 10]))],
+ inputs=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10, 10]))],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 2, 3, 4]))],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 2, 3, 4]))],
),
)
fs = optimize(fs)
@@ -305,16 +307,16 @@ def test_storage_cyclic_charge_discharge_pattern(self, optimize):
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 50, 0, 50]))],
+ inputs=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 50, 0, 50]))],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100, 1, 100]))],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100, 1, 100]))],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=100),
- discharging=fx.Flow('discharge', bus='Elec', size=100),
+ charging=fx.Flow(bus='Elec', size=100),
+ discharging=fx.Flow(bus='Elec', size=100),
capacity_in_flow_hours=100,
initial_charge_state=0,
eta_charge=1,
@@ -331,7 +333,7 @@ def test_storage_cyclic_charge_discharge_pattern(self, optimize):
assert_allclose(grid_fr.sum(axis=1), 50.0, atol=1e-5) # Total purchase per cluster = 50
# Discharge at expensive timesteps (indices 1, 3)
- discharge_fr = fs.solution['Battery(discharge)|flow_rate'].values[:, :4]
+ discharge_fr = fs.solution['Battery(discharging)|flow_rate'].values[:, :4]
assert_allclose(discharge_fr[:, [1, 3]], [[50, 50], [50, 50]], atol=1e-5)
# Charge state: dims=(cluster, time), 5 entries per cluster (incl. final)
diff --git a/tests/test_math/test_combinations.py b/tests/test_math/test_combinations.py
index 915d4b4c2..251202a28 100644
--- a/tests/test_math/test_combinations.py
+++ b/tests/test_math/test_combinations.py
@@ -40,22 +40,22 @@ def test_piecewise_conversion_with_investment_sizing(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([40, 40])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([40, 40])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas', size=fx.InvestParameters(maximum_size=100))],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=fx.InvestParameters(maximum_size=100))],
outputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=100,
effects_of_investment_per_size=1,
@@ -99,20 +99,20 @@ def test_piecewise_invest_cost_with_optional_skip(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.linear_converters.Boiler(
'InvestBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=100,
piecewise_effects_of_investment=fx.PiecewiseEffects(
@@ -127,8 +127,8 @@ def test_piecewise_invest_cost_with_optional_skip(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -166,8 +166,8 @@ def test_piecewise_nonlinear_conversion_with_startup_cost(self, optimize):
'Demand',
inputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([0, 40, 0, 40]),
),
@@ -175,20 +175,20 @@ def test_piecewise_nonlinear_conversion_with_startup_cost(self, optimize):
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.LinearConverter(
'Converter',
inputs=[
fx.Flow(
- 'fuel',
bus='Gas',
+ flow_id='fuel',
size=100,
previous_flow_rate=0,
status_parameters=fx.StatusParameters(effects_per_startup=100),
)
],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
piecewise_conversion=fx.PiecewiseConversion(
{
# Non-1:1 ratio in operating range!
@@ -227,8 +227,8 @@ def test_piecewise_minimum_load_with_status(self, optimize):
'Demand',
inputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([15, 40]),
),
@@ -236,16 +236,16 @@ def test_piecewise_minimum_load_with_status(self, optimize):
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.Source(
'Backup',
- outputs=[fx.Flow('heat', bus='Heat', effects_per_flow_hour=5)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=5)],
),
fx.LinearConverter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
piecewise_conversion=fx.PiecewiseConversion(
{
'fuel': fx.Piecewise([fx.Piece(0, 0), fx.Piece(20, 50)]),
@@ -292,8 +292,8 @@ def test_piecewise_no_zero_point_with_status(self, optimize):
'Demand',
inputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([5, 35]),
),
@@ -301,23 +301,23 @@ def test_piecewise_no_zero_point_with_status(self, optimize):
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.Source(
'Backup',
- outputs=[fx.Flow('heat', bus='Heat', effects_per_flow_hour=5)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=5)],
),
fx.LinearConverter(
'Converter',
inputs=[
fx.Flow(
- 'fuel',
bus='Gas',
+ flow_id='fuel',
size=100,
status_parameters=fx.StatusParameters(),
)
],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
piecewise_conversion=fx.PiecewiseConversion(
{
# NO off-state piece — operating range only
@@ -360,8 +360,8 @@ def test_piecewise_no_zero_point_startup_cost(self, optimize):
'Demand',
inputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([0, 40, 0, 40]),
),
@@ -369,24 +369,24 @@ def test_piecewise_no_zero_point_startup_cost(self, optimize):
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.Source(
'Backup',
- outputs=[fx.Flow('heat', bus='Heat', effects_per_flow_hour=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=100)],
),
fx.LinearConverter(
'Converter',
inputs=[
fx.Flow(
- 'fuel',
bus='Gas',
+ flow_id='fuel',
size=100,
previous_flow_rate=0,
status_parameters=fx.StatusParameters(effects_per_startup=200),
)
],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
piecewise_conversion=fx.PiecewiseConversion(
{
# NO off-state piece
@@ -431,17 +431,17 @@ def test_three_segment_piecewise(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([40, 40])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([40, 40])),
],
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.LinearConverter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
'fuel': fx.Piecewise([fx.Piece(0, 10), fx.Piece(10, 30), fx.Piece(30, 60)]),
@@ -472,17 +472,17 @@ def test_three_segment_low_load_selection(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([5, 5])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([5, 5])),
],
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.LinearConverter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
'fuel': fx.Piecewise([fx.Piece(0, 10), fx.Piece(10, 30), fx.Piece(30, 60)]),
@@ -513,17 +513,17 @@ def test_three_segment_mid_load_selection(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([18, 18])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([18, 18])),
],
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.LinearConverter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
'fuel': fx.Piecewise([fx.Piece(0, 10), fx.Piece(10, 30), fx.Piece(30, 60)]),
@@ -569,8 +569,8 @@ def test_startup_cost_on_co2_effect(self, optimize):
'Demand',
inputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([0, 20, 0, 20]),
),
@@ -578,15 +578,15 @@ def test_startup_cost_on_co2_effect(self, optimize):
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=0,
@@ -628,20 +628,20 @@ def test_effects_per_active_hour_on_multiple_effects(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20])),
],
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
status_parameters=fx.StatusParameters(
effects_per_active_hour={'costs': 10, 'CO2': 5},
@@ -685,24 +685,24 @@ def test_invest_sizing_respects_relative_minimum(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([5, 50])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([5, 50])),
],
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.Source(
'Backup',
- outputs=[fx.Flow('heat', bus='Heat', effects_per_flow_hour=10)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=10)],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
relative_minimum=0.5,
size=fx.InvestParameters(
maximum_size=100,
@@ -749,20 +749,20 @@ def test_time_varying_effects_per_flow_hour(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=np.array([1, 3])),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=np.array([1, 3])),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -792,28 +792,28 @@ def test_effects_per_flow_hour_with_dual_output_conversion(self, optimize):
fx.Sink(
'HeatDemand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([50, 50])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([50, 50])),
],
),
fx.Sink(
'ElecGrid',
inputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour={'costs': -2, 'CO2': -0.3}),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour={'costs': -2, 'CO2': -0.3}),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour={'costs': 1, 'CO2': 0.5}),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour={'costs': 1, 'CO2': 0.5}),
],
),
fx.linear_converters.CHP(
'CHP',
thermal_efficiency=0.5,
electrical_efficiency=0.4,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
- electrical_flow=fx.Flow('elec', bus='Elec'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
),
)
fs = optimize(fs)
@@ -852,8 +852,8 @@ def test_piecewise_invest_with_startup_cost(self, optimize):
'Demand',
inputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([0, 80, 0, 80]),
),
@@ -861,15 +861,15 @@ def test_piecewise_invest_with_startup_cost(self, optimize):
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
relative_minimum=0.5,
previous_flow_rate=0,
size=fx.InvestParameters(
@@ -922,8 +922,8 @@ def test_startup_limit_with_max_downtime(self, optimize):
'Demand',
inputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([10, 10, 10, 10, 10, 10]),
),
@@ -931,15 +931,15 @@ def test_startup_limit_with_max_downtime(self, optimize):
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.linear_converters.Boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=20,
relative_minimum=0.5,
previous_flow_rate=10,
@@ -952,8 +952,8 @@ def test_startup_limit_with_max_downtime(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -995,8 +995,8 @@ def test_min_uptime_with_min_downtime(self, optimize):
'Demand',
inputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([20, 20, 20, 20, 20, 20]),
),
@@ -1004,15 +1004,15 @@ def test_min_uptime_with_min_downtime(self, optimize):
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.linear_converters.Boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=0,
@@ -1022,8 +1022,8 @@ def test_min_uptime_with_min_downtime(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -1090,20 +1090,20 @@ def test_effect_share_with_investment(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
fixed_size=50,
effects_of_investment={'costs': 50, 'CO2': 10},
@@ -1142,8 +1142,8 @@ def test_effect_maximum_with_status_contribution(self, optimize):
'Demand',
inputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([0, 10, 0, 10]),
),
@@ -1152,16 +1152,16 @@ def test_effect_maximum_with_status_contribution(self, optimize):
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour={'costs': 1, 'CO2': 0.1}),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour={'costs': 1, 'CO2': 0.1}),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=0,
@@ -1202,20 +1202,20 @@ def test_invest_per_size_on_non_cost_effect(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.linear_converters.Boiler(
'InvestBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=100,
mandatory=True,
@@ -1226,8 +1226,8 @@ def test_invest_per_size_on_non_cost_effect(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
diff --git a/tests/test_math/test_components.py b/tests/test_math/test_components.py
index 38730b5b3..00e670172 100644
--- a/tests/test_math/test_components.py
+++ b/tests/test_math/test_components.py
@@ -35,19 +35,19 @@ def test_component_status_startup_cost(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 20, 0, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20, 0, 20])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'Boiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)], # Size required for component status
- outputs=[fx.Flow('heat', bus='Heat', size=100)], # Size required for component status
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)], # Size required for component status
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)], # Size required for component status
conversion_factors=[{'fuel': 1, 'heat': 1}],
status_parameters=fx.StatusParameters(effects_per_startup=100),
),
@@ -74,19 +74,19 @@ def test_component_status_min_uptime(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 10, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 10, 20])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'Boiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)], # Size required
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)], # Size required
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[{'fuel': 1, 'heat': 1}],
status_parameters=fx.StatusParameters(min_uptime=2),
),
@@ -115,27 +115,27 @@ def test_component_status_active_hours_max(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'CheapBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)], # Size required
- outputs=[fx.Flow('heat', bus='Heat', size=100)], # Size required
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)], # Size required
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)], # Size required
conversion_factors=[{'fuel': 1, 'heat': 1}],
status_parameters=fx.StatusParameters(active_hours_max=2),
),
fx.linear_converters.Boiler(
'ExpensiveBackup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -160,19 +160,19 @@ def test_component_status_effects_per_active_hour(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'Boiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[{'fuel': 1, 'heat': 1}],
status_parameters=fx.StatusParameters(effects_per_active_hour=50),
),
@@ -199,26 +199,26 @@ def test_component_status_active_hours_min(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'ExpensiveBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[{'fuel': 1, 'heat': 2}], # eta=0.5 (fuel:heat = 1:2 → eta = 1/2)
status_parameters=fx.StatusParameters(active_hours_min=2),
),
fx.LinearConverter(
'CheapBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[{'fuel': 1, 'heat': 1}],
),
)
@@ -245,26 +245,26 @@ def test_component_status_max_uptime(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'CheapBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100, previous_flow_rate=10)],
- outputs=[fx.Flow('heat', bus='Heat', size=100, previous_flow_rate=10)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100, previous_flow_rate=10)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100, previous_flow_rate=10)],
conversion_factors=[{'fuel': 1, 'heat': 1}],
status_parameters=fx.StatusParameters(max_uptime=2, min_uptime=2),
),
fx.LinearConverter(
'ExpensiveBackup',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[{'fuel': 1, 'heat': 2}], # eta=0.5 (fuel:heat = 1:2 → eta = 1/2)
),
)
@@ -303,26 +303,26 @@ def test_component_status_min_downtime(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 0, 20, 0])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 0, 20, 0])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'CheapBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100, previous_flow_rate=20, relative_minimum=0.1)],
- outputs=[fx.Flow('heat', bus='Heat', size=100, previous_flow_rate=20, relative_minimum=0.1)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100, previous_flow_rate=20, relative_minimum=0.1)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100, previous_flow_rate=20, relative_minimum=0.1)],
conversion_factors=[{'fuel': 1, 'heat': 1}],
status_parameters=fx.StatusParameters(min_downtime=3),
),
fx.LinearConverter(
'ExpensiveBackup',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[
{'fuel': 1, 'heat': 2}
], # eta=0.5 (fuel:heat = 1:2 → eta = 1/2) (1 fuel → 0.5 heat)
@@ -356,19 +356,19 @@ def test_component_status_max_downtime(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'ExpensiveBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=40, previous_flow_rate=20)],
- outputs=[fx.Flow('heat', bus='Heat', size=20, relative_minimum=0.5, previous_flow_rate=10)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=40, previous_flow_rate=20)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=20, relative_minimum=0.5, previous_flow_rate=10)],
conversion_factors=[
{'fuel': 1, 'heat': 2}
], # eta=0.5 (fuel:heat = 1:2 → eta = 1/2) (1 fuel → 0.5 heat)
@@ -376,8 +376,8 @@ def test_component_status_max_downtime(self, optimize):
),
fx.LinearConverter(
'CheapBackup',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[{'fuel': 1, 'heat': 1}],
),
)
@@ -409,26 +409,26 @@ def test_component_status_startup_limit(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 0, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 0, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'CheapBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=20, previous_flow_rate=0, relative_minimum=0.5)],
- outputs=[fx.Flow('heat', bus='Heat', size=20, previous_flow_rate=0, relative_minimum=0.5)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=20, previous_flow_rate=0, relative_minimum=0.5)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=20, previous_flow_rate=0, relative_minimum=0.5)],
conversion_factors=[{'fuel': 1, 'heat': 1}], # eta=1.0
status_parameters=fx.StatusParameters(startup_limit=1),
),
fx.LinearConverter(
'ExpensiveBackup',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[
{'fuel': 1, 'heat': 2}
], # eta=0.5 (fuel:heat = 1:2 → eta = 1/2) (1 fuel → 0.5 heat)
@@ -465,19 +465,19 @@ def test_transmission_relative_losses(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Sink', size=1, fixed_relative_profile=np.array([50, 50])),
+ fx.Flow(bus='Sink', flow_id='heat', size=1, fixed_relative_profile=np.array([50, 50])),
],
),
fx.Source(
'CheapSource',
outputs=[
- fx.Flow('heat', bus='Source', effects_per_flow_hour=1),
+ fx.Flow(bus='Source', flow_id='heat', effects_per_flow_hour=1),
],
),
fx.Transmission(
'Pipe',
- in1=fx.Flow('in', bus='Source', size=200),
- out1=fx.Flow('out', bus='Sink', size=200),
+ in1=fx.Flow(bus='Source', flow_id='in', size=200),
+ out1=fx.Flow(bus='Sink', flow_id='out', size=200),
relative_losses=0.1,
),
)
@@ -504,19 +504,19 @@ def test_transmission_absolute_losses(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Sink', size=1, fixed_relative_profile=np.array([20, 20])),
+ fx.Flow(bus='Sink', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20])),
],
),
fx.Source(
'CheapSource',
outputs=[
- fx.Flow('heat', bus='Source', effects_per_flow_hour=1),
+ fx.Flow(bus='Source', flow_id='heat', effects_per_flow_hour=1),
],
),
fx.Transmission(
'Pipe',
- in1=fx.Flow('in', bus='Source', size=200),
- out1=fx.Flow('out', bus='Sink', size=200),
+ in1=fx.Flow(bus='Source', flow_id='in', size=200),
+ out1=fx.Flow(bus='Sink', flow_id='out', size=200),
absolute_losses=5,
),
)
@@ -542,33 +542,33 @@ def test_transmission_bidirectional(self, optimize):
fx.Sink(
'LeftDemand',
inputs=[
- fx.Flow('heat', bus='Left', size=1, fixed_relative_profile=np.array([20, 0])),
+ fx.Flow(bus='Left', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 0])),
],
),
fx.Sink(
'RightDemand',
inputs=[
- fx.Flow('heat', bus='Right', size=1, fixed_relative_profile=np.array([0, 20])),
+ fx.Flow(bus='Right', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
fx.Source(
'LeftSource',
outputs=[
- fx.Flow('heat', bus='Left', effects_per_flow_hour=1),
+ fx.Flow(bus='Left', flow_id='heat', effects_per_flow_hour=1),
],
),
fx.Source(
'RightSource',
outputs=[
- fx.Flow('heat', bus='Right', effects_per_flow_hour=10), # Expensive
+ fx.Flow(bus='Right', flow_id='heat', effects_per_flow_hour=10), # Expensive
],
),
fx.Transmission(
'Link',
- in1=fx.Flow('left', bus='Left', size=100),
- out1=fx.Flow('right', bus='Right', size=100),
- in2=fx.Flow('right_in', bus='Right', size=100),
- out2=fx.Flow('left_out', bus='Left', size=100),
+ in1=fx.Flow(bus='Left', flow_id='left', size=100),
+ out1=fx.Flow(bus='Right', flow_id='right', size=100),
+ in2=fx.Flow(bus='Right', flow_id='right_in', size=100),
+ out2=fx.Flow(bus='Left', flow_id='left_out', size=100),
),
)
fs = optimize(fs)
@@ -594,25 +594,25 @@ def test_transmission_prevent_simultaneous_bidirectional(self, optimize):
fx.Sink(
'LeftDemand',
inputs=[
- fx.Flow('heat', bus='Left', size=1, fixed_relative_profile=np.array([20, 0])),
+ fx.Flow(bus='Left', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 0])),
],
),
fx.Sink(
'RightDemand',
inputs=[
- fx.Flow('heat', bus='Right', size=1, fixed_relative_profile=np.array([0, 20])),
+ fx.Flow(bus='Right', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
fx.Source(
'LeftSource',
- outputs=[fx.Flow('heat', bus='Left', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Left', flow_id='heat', effects_per_flow_hour=1)],
),
fx.Transmission(
'Link',
- in1=fx.Flow('left', bus='Left', size=100),
- out1=fx.Flow('right', bus='Right', size=100),
- in2=fx.Flow('right_in', bus='Right', size=100),
- out2=fx.Flow('left_out', bus='Left', size=100),
+ in1=fx.Flow(bus='Left', flow_id='left', size=100),
+ out1=fx.Flow(bus='Right', flow_id='right', size=100),
+ in2=fx.Flow(bus='Right', flow_id='right_in', size=100),
+ out2=fx.Flow(bus='Left', flow_id='left_out', size=100),
prevent_simultaneous_flows_in_both_directions=True,
),
)
@@ -643,17 +643,17 @@ def test_transmission_status_startup_cost(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Sink', size=1, fixed_relative_profile=np.array([20, 0, 20, 0])),
+ fx.Flow(bus='Sink', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 0, 20, 0])),
],
),
fx.Source(
'CheapSource',
- outputs=[fx.Flow('heat', bus='Source', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Source', flow_id='heat', effects_per_flow_hour=1)],
),
fx.Transmission(
'Pipe',
- in1=fx.Flow('in', bus='Source', size=200, previous_flow_rate=0, relative_minimum=0.1),
- out1=fx.Flow('out', bus='Sink', size=200, previous_flow_rate=0, relative_minimum=0.1),
+ in1=fx.Flow(bus='Source', flow_id='in', size=200, previous_flow_rate=0, relative_minimum=0.1),
+ out1=fx.Flow(bus='Sink', flow_id='out', size=200, previous_flow_rate=0, relative_minimum=0.1),
status_parameters=fx.StatusParameters(effects_per_startup=50),
),
)
@@ -681,20 +681,20 @@ def test_heatpump_cop(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=1),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1),
],
),
fx.linear_converters.HeatPump(
'HP',
cop=3.0,
- electrical_flow=fx.Flow('elec', bus='Elec'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -716,20 +716,20 @@ def test_heatpump_variable_cop(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=1),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1),
],
),
fx.linear_converters.HeatPump(
'HP',
cop=np.array([2.0, 4.0]),
- electrical_flow=fx.Flow('elec', bus='Elec'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -757,20 +757,20 @@ def test_cooling_tower_specific_electricity(self, optimize):
fx.Source(
'HeatSource',
outputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([100, 100])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([100, 100])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=1),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1),
],
),
fx.linear_converters.CoolingTower(
'CT',
specific_electricity_demand=0.1, # 0.1 kWel per kWth
- thermal_flow=fx.Flow('heat', bus='Heat'),
- electrical_flow=fx.Flow('elec', bus='Elec'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
),
)
fs = optimize(fs)
@@ -798,18 +798,18 @@ def test_power2heat_efficiency(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20])),
],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
fx.linear_converters.Power2Heat(
'P2H',
thermal_efficiency=0.9,
- electrical_flow=fx.Flow('elec', bus='Elec'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -838,23 +838,23 @@ def test_heatpump_with_source_cop(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
fx.Source(
'FreeHeat',
- outputs=[fx.Flow('heat', bus='HeatSource')],
+ outputs=[fx.Flow(bus='HeatSource', flow_id='heat')],
),
fx.linear_converters.HeatPumpWithSource(
'HP',
cop=3.0,
- electrical_flow=fx.Flow('elec', bus='Elec'),
- heat_source_flow=fx.Flow('source', bus='HeatSource'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
+ heat_source_flow=fx.Flow(bus='HeatSource', flow_id='source'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -881,19 +881,19 @@ def test_source_and_sink_prevent_simultaneous(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
fx.Source(
'Solar',
outputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([30, 30, 0])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([30, 30, 0])),
],
),
fx.SourceAndSink(
'GridConnection',
- outputs=[fx.Flow('buy', bus='Elec', size=100, effects_per_flow_hour=5)],
- inputs=[fx.Flow('sell', bus='Elec', size=100, effects_per_flow_hour=-1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='buy', size=100, effects_per_flow_hour=5)],
+ inputs=[fx.Flow(bus='Elec', flow_id='sell', size=100, effects_per_flow_hour=-1)],
prevent_simultaneous_flow_rates=True,
),
)
diff --git a/tests/test_math/test_conversion.py b/tests/test_math/test_conversion.py
index 6a527a338..7c56c79d0 100644
--- a/tests/test_math/test_conversion.py
+++ b/tests/test_math/test_conversion.py
@@ -22,20 +22,20 @@ def test_boiler_efficiency(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 20, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 20, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.8,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -56,20 +56,20 @@ def test_variable_efficiency(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=np.array([0.5, 1.0]),
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -92,28 +92,28 @@ def test_chp_dual_output(self, optimize):
fx.Sink(
'HeatDemand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([50, 50])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([50, 50])),
],
),
fx.Sink(
'ElecGrid',
inputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=-2),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=-2),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.CHP(
'CHP',
thermal_efficiency=0.5,
electrical_efficiency=0.4,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
- electrical_flow=fx.Flow('elec', bus='Elec'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
),
)
fs = optimize(fs)
diff --git a/tests/test_math/test_effects.py b/tests/test_math/test_effects.py
index a69172bbd..7bca7dbcc 100644
--- a/tests/test_math/test_effects.py
+++ b/tests/test_math/test_effects.py
@@ -29,13 +29,13 @@ def test_effects_per_flow_hour(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 20])),
],
),
fx.Source(
'HeatSrc',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 2, 'CO2': 0.5}),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 2, 'CO2': 0.5}),
],
),
)
@@ -64,13 +64,13 @@ def test_share_from_temporal(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'HeatSrc',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 10}),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 10}),
],
),
)
@@ -101,19 +101,19 @@ def test_effect_maximum_total(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'Dirty',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
fx.Source(
'Clean',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 10, 'CO2': 0}),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 10, 'CO2': 0}),
],
),
)
@@ -147,19 +147,19 @@ def test_effect_minimum_total(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'Dirty',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
fx.Source(
'Clean',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 0}),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 0}),
],
),
)
@@ -191,19 +191,19 @@ def test_effect_maximum_per_hour(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([15, 5])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([15, 5])),
],
),
fx.Source(
'Dirty',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
fx.Source(
'Clean',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 5, 'CO2': 0}),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 5, 'CO2': 0}),
],
),
)
@@ -232,13 +232,13 @@ def test_effect_minimum_per_hour(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([5, 5])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([5, 5])),
],
),
fx.Source(
'Dirty',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
)
@@ -267,19 +267,19 @@ def test_effect_maximum_temporal(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'Dirty',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
fx.Source(
'Clean',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 5, 'CO2': 0}),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 5, 'CO2': 0}),
],
),
)
@@ -309,13 +309,13 @@ def test_effect_minimum_temporal(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'Dirty',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
)
@@ -344,22 +344,22 @@ def test_share_from_periodic(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
fixed_size=50,
effects_of_investment={'costs': 100, 'CO2': 5},
@@ -397,22 +397,22 @@ def test_effect_maximum_periodic(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
fixed_size=50,
effects_of_investment={'costs': 10, 'CO2': 100},
@@ -422,10 +422,10 @@ def test_effect_maximum_periodic(self, optimize):
fx.linear_converters.Boiler(
'ExpensiveBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
fixed_size=50,
effects_of_investment={'costs': 50, 'CO2': 10},
@@ -461,22 +461,22 @@ def test_effect_minimum_periodic(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'InvestBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
fixed_size=50,
effects_of_investment={'costs': 100, 'CO2': 50},
@@ -486,8 +486,8 @@ def test_effect_minimum_periodic(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
diff --git a/tests/test_math/test_flow.py b/tests/test_math/test_flow.py
index 940dcdc48..3acae9ba5 100644
--- a/tests/test_math/test_flow.py
+++ b/tests/test_math/test_flow.py
@@ -27,20 +27,20 @@ def test_relative_minimum(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100, relative_minimum=0.4),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100, relative_minimum=0.4),
),
)
fs = optimize(fs)
@@ -68,19 +68,19 @@ def test_relative_maximum(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([60, 60])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([60, 60])),
],
),
fx.Source(
'CheapSrc',
outputs=[
- fx.Flow('heat', bus='Heat', size=100, relative_maximum=0.5, effects_per_flow_hour=1),
+ fx.Flow(bus='Heat', flow_id='heat', size=100, relative_maximum=0.5, effects_per_flow_hour=1),
],
),
fx.Source(
'ExpensiveSrc',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=5),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=5),
],
),
)
@@ -109,19 +109,19 @@ def test_flow_hours_max(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20, 20])),
],
),
fx.Source(
'CheapSrc',
outputs=[
- fx.Flow('heat', bus='Heat', flow_hours_max=30, effects_per_flow_hour=1),
+ fx.Flow(bus='Heat', flow_id='heat', flow_hours_max=30, effects_per_flow_hour=1),
],
),
fx.Source(
'ExpensiveSrc',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=5),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=5),
],
),
)
@@ -150,19 +150,19 @@ def test_flow_hours_min(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
fx.Source(
'CheapSrc',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=1),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=1),
],
),
fx.Source(
'ExpensiveSrc',
outputs=[
- fx.Flow('heat', bus='Heat', flow_hours_min=40, effects_per_flow_hour=5),
+ fx.Flow(bus='Heat', flow_id='heat', flow_hours_min=40, effects_per_flow_hour=5),
],
),
)
@@ -191,19 +191,19 @@ def test_load_factor_max(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([40, 40])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([40, 40])),
],
),
fx.Source(
'CheapSrc',
outputs=[
- fx.Flow('heat', bus='Heat', size=50, load_factor_max=0.5, effects_per_flow_hour=1),
+ fx.Flow(bus='Heat', flow_id='heat', size=50, load_factor_max=0.5, effects_per_flow_hour=1),
],
),
fx.Source(
'ExpensiveSrc',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=5),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=5),
],
),
)
@@ -230,19 +230,19 @@ def test_load_factor_min(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
fx.Source(
'CheapSrc',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=1),
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=1),
],
),
fx.Source(
'ExpensiveSrc',
outputs=[
- fx.Flow('heat', bus='Heat', size=100, load_factor_min=0.3, effects_per_flow_hour=5),
+ fx.Flow(bus='Heat', flow_id='heat', size=100, load_factor_min=0.3, effects_per_flow_hour=5),
],
),
)
diff --git a/tests/test_math/test_flow_invest.py b/tests/test_math/test_flow_invest.py
index f9ae91078..f9dd74a55 100644
--- a/tests/test_math/test_flow_invest.py
+++ b/tests/test_math/test_flow_invest.py
@@ -29,22 +29,22 @@ def test_invest_size_optimized(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 50, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 50, 20])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=200,
effects_of_investment=10,
@@ -78,22 +78,22 @@ def test_invest_optional_not_built(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'InvestBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=100,
effects_of_investment=99999,
@@ -103,8 +103,8 @@ def test_invest_optional_not_built(self, optimize):
fx.linear_converters.Boiler(
'CheapBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -131,22 +131,22 @@ def test_invest_minimum_size(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
minimum_size=100,
maximum_size=200,
@@ -182,22 +182,22 @@ def test_invest_fixed_size(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'FixedBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
fixed_size=80,
effects_of_investment=10,
@@ -207,8 +207,8 @@ def test_invest_fixed_size(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -239,22 +239,22 @@ def test_piecewise_invest_cost(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([80, 80])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([80, 80])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=0.5),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=0.5),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=200,
piecewise_effects_of_investment=fx.PiecewiseEffects(
@@ -293,22 +293,22 @@ def test_invest_mandatory_forces_investment(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'ExpensiveBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
minimum_size=10,
maximum_size=100,
@@ -321,8 +321,8 @@ def test_invest_mandatory_forces_investment(self, optimize):
fx.linear_converters.Boiler(
'CheapBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -353,22 +353,22 @@ def test_invest_not_mandatory_skips_when_uneconomical(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'ExpensiveBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
minimum_size=10,
maximum_size=100,
@@ -380,8 +380,8 @@ def test_invest_not_mandatory_skips_when_uneconomical(self, optimize):
fx.linear_converters.Boiler(
'CheapBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -410,22 +410,22 @@ def test_invest_effects_of_retirement(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'NewBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
minimum_size=10,
maximum_size=100,
@@ -437,8 +437,8 @@ def test_invest_effects_of_retirement(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -468,22 +468,22 @@ def test_invest_retirement_triggers_when_not_investing(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'ExpensiveBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
minimum_size=10,
maximum_size=100,
@@ -495,8 +495,8 @@ def test_invest_retirement_triggers_when_not_investing(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -527,22 +527,22 @@ def test_invest_with_startup_cost(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 20, 0, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20, 0, 20])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
relative_minimum=0.5,
size=fx.InvestParameters(
maximum_size=100,
@@ -578,22 +578,22 @@ def test_invest_with_min_uptime(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 10, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 10, 20])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'InvestBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
relative_minimum=0.1,
size=fx.InvestParameters(
maximum_size=100,
@@ -605,8 +605,8 @@ def test_invest_with_min_uptime(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -638,22 +638,22 @@ def test_invest_with_active_hours_max(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'InvestBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=100,
effects_of_investment_per_size=0.1,
@@ -664,8 +664,8 @@ def test_invest_with_active_hours_max(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
diff --git a/tests/test_math/test_flow_status.py b/tests/test_math/test_flow_status.py
index 66f4de269..bfa331732 100644
--- a/tests/test_math/test_flow_status.py
+++ b/tests/test_math/test_flow_status.py
@@ -30,22 +30,22 @@ def test_startup_cost(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 10, 0, 10, 0])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 10, 0, 10, 0])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
status_parameters=fx.StatusParameters(effects_per_startup=100),
),
@@ -72,22 +72,22 @@ def test_active_hours_max(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 20, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 20, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
status_parameters=fx.StatusParameters(active_hours_max=1),
),
@@ -95,8 +95,8 @@ def test_active_hours_max(self, optimize):
fx.linear_converters.Boiler(
'ExpensiveBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -125,22 +125,22 @@ def test_min_uptime_forces_operation(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([5, 10, 20, 18, 12])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([5, 10, 20, 18, 12])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
previous_flow_rate=0,
status_parameters=fx.StatusParameters(min_uptime=2, max_uptime=2),
@@ -149,8 +149,8 @@ def test_min_uptime_forces_operation(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.2,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -183,22 +183,22 @@ def test_min_downtime_prevents_restart(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 0, 20, 0])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 0, 20, 0])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
previous_flow_rate=20,
status_parameters=fx.StatusParameters(min_downtime=3),
@@ -207,8 +207,8 @@ def test_min_downtime_prevents_restart(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -237,22 +237,22 @@ def test_effects_per_active_hour(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
status_parameters=fx.StatusParameters(effects_per_active_hour=50),
),
@@ -281,22 +281,22 @@ def test_active_hours_min(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'ExpBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
status_parameters=fx.StatusParameters(active_hours_min=2),
),
@@ -304,8 +304,8 @@ def test_active_hours_min(self, optimize):
fx.linear_converters.Boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -344,22 +344,22 @@ def test_max_downtime(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'ExpBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=20,
relative_minimum=0.5,
previous_flow_rate=10,
@@ -369,8 +369,8 @@ def test_max_downtime(self, optimize):
fx.linear_converters.Boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -402,22 +402,22 @@ def test_startup_limit(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 0, 10])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 0, 10])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.8,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=20,
relative_minimum=0.5,
previous_flow_rate=0,
@@ -427,8 +427,8 @@ def test_startup_limit(self, optimize):
fx.linear_converters.Boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -458,8 +458,8 @@ def test_max_uptime_standalone(self, optimize):
'Demand',
inputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([10, 10, 10, 10, 10]),
),
@@ -467,15 +467,15 @@ def test_max_uptime_standalone(self, optimize):
),
fx.Source(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
fx.linear_converters.Boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
previous_flow_rate=0,
status_parameters=fx.StatusParameters(max_uptime=2),
@@ -484,8 +484,8 @@ def test_max_uptime_standalone(self, optimize):
fx.linear_converters.Boiler(
'ExpensiveBackup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -533,22 +533,22 @@ def test_previous_flow_rate_scalar_on_forces_min_uptime(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=10, # Was ON for 1 hour before t=0
@@ -577,22 +577,22 @@ def test_previous_flow_rate_scalar_off_no_carry_over(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=0, # Was OFF before t=0
@@ -623,22 +623,22 @@ def test_previous_flow_rate_array_uptime_satisfied_vs_partial(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=[10, 20], # Was ON for 2 hours → min_uptime=2 satisfied
@@ -669,22 +669,22 @@ def test_previous_flow_rate_array_partial_uptime_forces_continuation(self, optim
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 0, 0])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 0, 0])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=[0, 10], # Off at t=-2, ON at t=-1 (1 hour uptime)
@@ -716,22 +716,22 @@ def test_previous_flow_rate_array_min_downtime_carry_over(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20, 20])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
previous_flow_rate=[10, 0], # ON at t=-2, OFF at t=-1 (1 hour downtime)
status_parameters=fx.StatusParameters(min_downtime=3),
@@ -740,8 +740,8 @@ def test_previous_flow_rate_array_min_downtime_carry_over(self, optimize):
fx.linear_converters.Boiler(
'ExpensiveBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -769,22 +769,22 @@ def test_previous_flow_rate_array_longer_history(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=[0, 10, 20, 30], # Off, then ON for 3 hours
diff --git a/tests/test_math/test_legacy_solution_access.py b/tests/test_math/test_legacy_solution_access.py
index 3686d1aac..a1f7cafda 100644
--- a/tests/test_math/test_legacy_solution_access.py
+++ b/tests/test_math/test_legacy_solution_access.py
@@ -22,8 +22,10 @@ def test_effect_access(self, optimize):
fs.add_elements(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10, effects_per_flow_hour=1)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Source('Src', outputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, effects_per_flow_hour=1)]),
+ fx.Sink(
+ 'Snk', inputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))]
+ ),
)
fs = optimize(fs)
@@ -41,8 +43,10 @@ def test_flow_rate_access(self, optimize):
fs.add_elements(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Source('Src', outputs=[fx.Flow(bus='Heat', flow_id='heat', size=10)]),
+ fx.Sink(
+ 'Snk', inputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))]
+ ),
)
fs = optimize(fs)
@@ -62,9 +66,15 @@ def test_flow_size_access(self, optimize):
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Source(
'Src',
- outputs=[fx.Flow('heat', bus='Heat', size=fx.InvestParameters(fixed_size=50), effects_per_flow_hour=1)],
+ outputs=[
+ fx.Flow(
+ bus='Heat', flow_id='heat', size=fx.InvestParameters(fixed_size=50), effects_per_flow_hour=1
+ )
+ ],
+ ),
+ fx.Sink(
+ 'Snk', inputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([5, 5]))]
),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([5, 5]))]),
)
fs = optimize(fs)
@@ -82,15 +92,18 @@ def test_storage_charge_state_access(self, optimize):
fs.add_elements(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Grid', outputs=[fx.Flow('elec', bus='Elec', size=100, effects_per_flow_hour=1)]),
+ fx.Source('Grid', outputs=[fx.Flow(bus='Elec', flow_id='elec', size=100, effects_per_flow_hour=1)]),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=10),
- discharging=fx.Flow('discharge', bus='Elec', size=10),
+ charging=fx.Flow(bus='Elec', size=10),
+ discharging=fx.Flow(bus='Elec', size=10),
capacity_in_flow_hours=50,
initial_charge_state=25,
),
- fx.Sink('Load', inputs=[fx.Flow('elec', bus='Elec', size=10, fixed_relative_profile=np.array([1, 1, 1]))]),
+ fx.Sink(
+ 'Load',
+ inputs=[fx.Flow(bus='Elec', flow_id='elec', size=10, fixed_relative_profile=np.array([1, 1, 1]))],
+ ),
)
fs = optimize(fs)
@@ -116,8 +129,11 @@ def test_legacy_access_disabled_by_default(self):
fs.add_elements(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10, effects_per_flow_hour=1)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Source('Src', outputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, effects_per_flow_hour=1)]),
+ fx.Sink(
+ 'Snk',
+ inputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))],
+ ),
)
solver = fx.solvers.HighsSolver(log_to_console=False)
fs.optimize(solver)
@@ -140,8 +156,10 @@ def test_legacy_access_emits_deprecation_warning(self, optimize):
fs.add_elements(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10, effects_per_flow_hour=1)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Source('Src', outputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, effects_per_flow_hour=1)]),
+ fx.Sink(
+ 'Snk', inputs=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))]
+ ),
)
fs = optimize(fs)
diff --git a/tests/test_math/test_multi_period.py b/tests/test_math/test_multi_period.py
index d39b0e02f..f9ca231ae 100644
--- a/tests/test_math/test_multi_period.py
+++ b/tests/test_math/test_multi_period.py
@@ -31,12 +31,12 @@ def test_period_weights_affect_objective(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs = optimize(fs)
@@ -62,15 +62,15 @@ def test_flow_hours_max_over_periods(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
fx.Source(
'DirtySource',
outputs=[
fx.Flow(
- 'elec',
bus='Elec',
+ flow_id='elec',
effects_per_flow_hour=1,
flow_hours_max_over_periods=50,
),
@@ -78,7 +78,7 @@ def test_flow_hours_max_over_periods(self, optimize):
),
fx.Source(
'CleanSource',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=10)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=10)],
),
)
fs = optimize(fs)
@@ -104,19 +104,19 @@ def test_flow_hours_min_over_periods(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
fx.Source(
'CheapSource',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
fx.Source(
'ExpensiveSource',
outputs=[
fx.Flow(
- 'elec',
bus='Elec',
+ flow_id='elec',
effects_per_flow_hour=10,
flow_hours_min_over_periods=100,
),
@@ -146,18 +146,18 @@ def test_effect_maximum_over_periods(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
fx.Source(
'DirtySource',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
fx.Source(
'CleanSource',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=10)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=10)],
),
)
fs = optimize(fs)
@@ -184,18 +184,18 @@ def test_effect_minimum_over_periods(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([2, 2, 2])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([2, 2, 2])),
],
),
fx.Source(
'DirtySource',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
fx.Source(
'CheapSource',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs = optimize(fs)
@@ -222,15 +222,15 @@ def test_invest_linked_periods(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
fx.Source(
'Grid',
outputs=[
fx.Flow(
- 'elec',
bus='Elec',
+ flow_id='elec',
size=fx.InvestParameters(
maximum_size=100,
effects_of_investment_per_size=1,
@@ -273,12 +273,12 @@ def test_effect_period_weights(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs = optimize(fs)
@@ -305,19 +305,19 @@ def test_storage_relative_minimum_final_charge_state_scalar(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 0, 80])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 0, 80])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 1, 100])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=50,
relative_minimum_final_charge_state=0.5,
@@ -349,19 +349,19 @@ def test_storage_relative_maximum_final_charge_state_scalar(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([50, 0, 0])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([50, 0, 0])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([100, 1, 1])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([100, 1, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=80,
relative_maximum_final_charge_state=0.2,
diff --git a/tests/test_math/test_piecewise.py b/tests/test_math/test_piecewise.py
index e9da8a1ba..af095ab65 100644
--- a/tests/test_math/test_piecewise.py
+++ b/tests/test_math/test_piecewise.py
@@ -29,19 +29,19 @@ def test_piecewise_selects_cheap_segment(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([45, 45])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([45, 45])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
'fuel': fx.Piecewise([fx.Piece(10, 30), fx.Piece(30, 100)]),
@@ -74,19 +74,19 @@ def test_piecewise_conversion_at_breakpoint(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([15, 15])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([15, 15])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
'fuel': fx.Piecewise([fx.Piece(10, 30), fx.Piece(30, 100)]),
@@ -121,25 +121,25 @@ def test_piecewise_with_gap_forces_minimum_load(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([50, 50])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([50, 50])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.Source(
'CheapSrc',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=10), # More expensive backup
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=10), # More expensive backup
],
),
fx.LinearConverter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
# Gap between 0 and 40: forbidden region (minimum load requirement)
@@ -180,25 +180,25 @@ def test_piecewise_gap_allows_off_state(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=10), # Expensive gas
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=10), # Expensive gas
],
),
fx.Source(
'Backup',
outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=1), # Cheap backup
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=1), # Cheap backup
],
),
fx.LinearConverter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
# Off state (0,0) + operating range with minimum load
@@ -236,19 +236,19 @@ def test_piecewise_varying_efficiency_across_segments(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([35, 35])),
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([35, 35])),
],
),
fx.Source(
'GasSrc',
outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
fx.LinearConverter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
# Low load: less efficient. High load: more efficient.
diff --git a/tests/test_math/test_scenarios.py b/tests/test_math/test_scenarios.py
index 5656681ee..f9d99fa63 100644
--- a/tests/test_math/test_scenarios.py
+++ b/tests/test_math/test_scenarios.py
@@ -44,11 +44,11 @@ def test_scenario_weights_affect_objective(self, optimize):
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=demand)],
+ inputs=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=demand)],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs = optimize(fs)
@@ -76,14 +76,14 @@ def test_scenario_independent_sizes(self, optimize):
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=demand)],
+ inputs=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=demand)],
),
fx.Source(
'Grid',
outputs=[
fx.Flow(
- 'elec',
bus='Elec',
+ flow_id='elec',
size=fx.InvestParameters(maximum_size=100, effects_of_investment_per_size=1),
effects_per_flow_hour=1,
),
@@ -123,15 +123,15 @@ def test_scenario_independent_flow_rates(self, optimize):
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=demand)],
+ inputs=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=demand)],
),
fx.Sink(
'Dump',
- inputs=[fx.Flow('elec', bus='Elec')],
+ inputs=[fx.Flow(bus='Elec', flow_id='elec')],
),
fx.Source(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs = optimize(fs)
@@ -162,19 +162,19 @@ def test_storage_relative_minimum_final_charge_state_scalar(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 0, 80])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 0, 80])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 1, 100])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=50,
relative_minimum_final_charge_state=0.5,
@@ -209,19 +209,19 @@ def test_storage_relative_maximum_final_charge_state_scalar(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([50, 0, 0])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([50, 0, 0])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([100, 1, 1])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([100, 1, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=80,
relative_maximum_final_charge_state=0.2,
diff --git a/tests/test_math/test_storage.py b/tests/test_math/test_storage.py
index faab0c391..a0d1937c1 100644
--- a/tests/test_math/test_storage.py
+++ b/tests/test_math/test_storage.py
@@ -23,19 +23,19 @@ def test_storage_shift_saves_money(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 0, 20])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 0, 20])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([10, 1, 10])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([10, 1, 10])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=100),
- discharging=fx.Flow('discharge', bus='Elec', size=100),
+ charging=fx.Flow(bus='Elec', size=100),
+ discharging=fx.Flow(bus='Elec', size=100),
capacity_in_flow_hours=100,
initial_charge_state=0,
eta_charge=1,
@@ -60,19 +60,19 @@ def test_storage_losses(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 90])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 90])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 1000])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 1000])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=200,
initial_charge_state=0,
eta_charge=1,
@@ -99,19 +99,19 @@ def test_storage_eta_charge_discharge(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 72])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 72])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 1000])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 1000])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=200,
initial_charge_state=0,
eta_charge=0.9,
@@ -141,19 +141,19 @@ def test_storage_soc_bounds(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 60])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 60])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=0,
relative_maximum_charge_state=0.5,
@@ -184,19 +184,19 @@ def test_storage_cyclic_charge_state(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 50])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 50])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state='equals_final',
eta_charge=1,
@@ -226,19 +226,19 @@ def test_storage_minimal_final_charge_state(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 20])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=0,
minimal_final_charge_state=60,
@@ -268,19 +268,19 @@ def test_storage_invest_capacity(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 50])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 50])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 10])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 10])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=fx.InvestParameters(
maximum_size=200,
effects_of_investment_per_size=1,
@@ -320,19 +320,19 @@ def test_prevent_simultaneous_charge_and_discharge(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 20, 10])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 20, 10])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 10, 1])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 10, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=100),
- discharging=fx.Flow('discharge', bus='Elec', size=100),
+ charging=fx.Flow(bus='Elec', size=100),
+ discharging=fx.Flow(bus='Elec', size=100),
capacity_in_flow_hours=100,
initial_charge_state=0,
eta_charge=0.9,
@@ -342,8 +342,8 @@ def test_prevent_simultaneous_charge_and_discharge(self, optimize):
),
)
fs = optimize(fs)
- charge = fs.solution['Battery(charge)|flow_rate'].values[:-1]
- discharge = fs.solution['Battery(discharge)|flow_rate'].values[:-1]
+ charge = fs.solution['Battery(charging)|flow_rate'].values[:-1]
+ discharge = fs.solution['Battery(discharging)|flow_rate'].values[:-1]
# At no timestep should both be > 0
for t in range(len(charge)):
assert not (charge[t] > 1e-5 and discharge[t] > 1e-5), (
@@ -369,19 +369,19 @@ def test_storage_relative_minimum_charge_state(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 80, 0])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 80, 0])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100, 1])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=50,
relative_minimum_charge_state=0.3,
@@ -414,19 +414,19 @@ def test_storage_maximal_final_charge_state(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([50, 0])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([50, 0])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([100, 1])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([100, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=80,
maximal_final_charge_state=20,
@@ -458,19 +458,19 @@ def test_storage_relative_minimum_final_charge_state(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 80])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 80])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=50,
relative_minimum_charge_state=np.array([0, 0]),
@@ -505,19 +505,19 @@ def test_storage_relative_maximum_final_charge_state(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([50, 0])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([50, 0])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([100, 1])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([100, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=80,
relative_maximum_charge_state=np.array([1.0, 1.0]),
@@ -546,19 +546,19 @@ def test_storage_relative_minimum_final_charge_state_scalar(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 80])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 80])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=50,
relative_minimum_final_charge_state=0.5,
@@ -585,19 +585,19 @@ def test_storage_relative_maximum_final_charge_state_scalar(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([50, 0])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([50, 0])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([100, 1])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([100, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=80,
relative_maximum_final_charge_state=0.2,
@@ -627,24 +627,22 @@ def test_storage_balanced_invest(self, optimize):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 80, 80])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 80, 80])),
],
),
fx.Source(
'Grid',
outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100, 100])),
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100, 100])),
],
),
fx.Storage(
'Battery',
charging=fx.Flow(
- 'charge',
bus='Elec',
size=InvestParameters(maximum_size=200, effects_of_investment_per_size=1),
),
discharging=fx.Flow(
- 'discharge',
bus='Elec',
size=InvestParameters(maximum_size=200, effects_of_investment_per_size=1),
),
@@ -664,8 +662,8 @@ def test_storage_balanced_invest(self, optimize):
# Invest: charge_size=160 @1€ = 160€. discharge_size=160 @1€ = 160€. Total invest=320€.
# Ops: 160 @1€ = 160€. Total = 480€.
# Without balanced: charge_size=160, discharge_size=80 → invest 240, ops 160 → 400€.
- charge_size = fs.solution['Battery(charge)|size'].item()
- discharge_size = fs.solution['Battery(discharge)|size'].item()
+ charge_size = fs.solution['Battery(charging)|size'].item()
+ discharge_size = fs.solution['Battery(discharging)|size'].item()
assert_allclose(charge_size, discharge_size, rtol=1e-5)
# With balanced, total cost is higher than without
assert fs.solution['costs'].item() > 400.0 - 1e-5
diff --git a/tests/test_math/test_validation.py b/tests/test_math/test_validation.py
index 5e1e90344..ef0934761 100644
--- a/tests/test_math/test_validation.py
+++ b/tests/test_math/test_validation.py
@@ -29,13 +29,13 @@ def test_source_and_sink_requires_size_with_prevent_simultaneous(self):
fx.Sink(
'Demand',
inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0.1, 0.1, 0.1])),
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0.1, 0.1, 0.1])),
],
),
fx.SourceAndSink(
'GridConnection',
- outputs=[fx.Flow('buy', bus='Elec', effects_per_flow_hour=5)],
- inputs=[fx.Flow('sell', bus='Elec', effects_per_flow_hour=-1)],
+ outputs=[fx.Flow(bus='Elec', flow_id='buy', effects_per_flow_hour=5)],
+ inputs=[fx.Flow(bus='Elec', flow_id='sell', effects_per_flow_hour=-1)],
prevent_simultaneous_flow_rates=True,
),
)
diff --git a/tests/test_scenarios.py b/tests/test_scenarios.py
index f4b07f9f5..8348a95a9 100644
--- a/tests/test_scenarios.py
+++ b/tests/test_scenarios.py
@@ -76,8 +76,8 @@ def test_system():
generator = Source('Generator', outputs=[power_gen])
# Create a storage for electricity
- storage_charge = Flow(electricity_bus.label_full, flow_id='Charge', size=10)
- storage_discharge = Flow(electricity_bus.label_full, flow_id='Discharge', size=10)
+ storage_charge = Flow(electricity_bus.label_full, size=10)
+ storage_discharge = Flow(electricity_bus.label_full, size=10)
storage = Storage(
'Battery',
charging=storage_charge,
@@ -135,13 +135,16 @@ def flow_system_complex_scenarios() -> fx.FlowSystem:
fx.Bus('Fernwärme'),
fx.Bus('Gas'),
fx.Sink(
- 'Wärmelast', inputs=[fx.Flow('Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_load)]
+ 'Wärmelast',
+ inputs=[fx.Flow(bus='Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_load)],
),
fx.Source(
'Gastarif',
- outputs=[fx.Flow('Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': 0.04, 'CO2': 0.3})],
+ outputs=[fx.Flow(bus='Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': 0.04, 'CO2': 0.3})],
+ ),
+ fx.Sink(
+ 'Einspeisung', inputs=[fx.Flow(bus='Strom', flow_id='P_el', effects_per_flow_hour=-1 * electrical_load)]
),
- fx.Sink('Einspeisung', inputs=[fx.Flow('Strom', flow_id='P_el', effects_per_flow_hour=-1 * electrical_load)]),
)
boiler = fx.linear_converters.Boiler(
@@ -149,7 +152,7 @@ def flow_system_complex_scenarios() -> fx.FlowSystem:
thermal_efficiency=0.5,
status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 0, 'CO2': 1000}),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
load_factor_max=1.0,
load_factor_min=0.1,
@@ -173,7 +176,7 @@ def flow_system_complex_scenarios() -> fx.FlowSystem:
),
flow_hours_max=1e6,
),
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu', size=200, relative_minimum=0, relative_maximum=1),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu', size=200, relative_minimum=0, relative_maximum=1),
)
invest_speicher = fx.InvestParameters(
@@ -192,8 +195,8 @@ def flow_system_complex_scenarios() -> fx.FlowSystem:
)
speicher = fx.Storage(
'Speicher',
- charging=fx.Flow('Fernwärme', flow_id='Q_th_load', size=1e4),
- discharging=fx.Flow('Fernwärme', flow_id='Q_th_unload', size=1e4),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_load', size=1e4),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_unload', size=1e4),
capacity_in_flow_hours=invest_speicher,
initial_charge_state=0,
maximal_final_charge_state=10,
@@ -218,10 +221,10 @@ def flow_system_piecewise_conversion_scenarios(flow_system_complex_scenarios) ->
flow_system.add_elements(
fx.LinearConverter(
'KWK',
- inputs=[fx.Flow('Gas', flow_id='Q_fu', size=200)],
+ inputs=[fx.Flow(bus='Gas', flow_id='Q_fu', size=200)],
outputs=[
- fx.Flow('Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10),
- fx.Flow('Fernwärme', flow_id='Q_th', size=100),
+ fx.Flow(bus='Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10),
+ fx.Flow(bus='Fernwärme', flow_id='Q_th', size=100),
],
piecewise_conversion=fx.PiecewiseConversion(
{
@@ -512,7 +515,7 @@ def test_size_equality_constraints():
'solar',
outputs=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=fx.InvestParameters(
minimum_size=10,
@@ -551,7 +554,7 @@ def test_flow_rate_equality_constraints():
'solar',
outputs=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=fx.InvestParameters(
minimum_size=10,
@@ -590,7 +593,7 @@ def test_selective_scenario_independence():
'solar',
outputs=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=fx.InvestParameters(
minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100}
@@ -600,7 +603,7 @@ def test_selective_scenario_independence():
)
sink = fx.Sink(
'demand',
- inputs=[fx.Flow('grid', flow_id='in', size=50)],
+ inputs=[fx.Flow(bus='grid', flow_id='in', size=50)],
)
fs.add_elements(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True))
@@ -649,7 +652,7 @@ def test_scenario_parameters_io_persistence():
'solar',
outputs=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=fx.InvestParameters(
minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100}
@@ -689,7 +692,7 @@ def test_scenario_parameters_io_with_calculation(tmp_path):
'solar',
outputs=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=fx.InvestParameters(
minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100}
@@ -699,7 +702,7 @@ def test_scenario_parameters_io_with_calculation(tmp_path):
)
sink = fx.Sink(
'demand',
- inputs=[fx.Flow('grid', flow_id='in', size=50)],
+ inputs=[fx.Flow(bus='grid', flow_id='in', size=50)],
)
fs.add_elements(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True))
@@ -747,7 +750,7 @@ def test_weights_io_persistence():
'solar',
outputs=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=fx.InvestParameters(
minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100}
@@ -788,7 +791,7 @@ def test_weights_selection():
'solar',
outputs=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=10,
)