diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9efaf699a..170200ab0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1090,7 +1090,7 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp
- **Penalty as first-class Effect**: Users can now add Penalty contributions anywhere effects are used:
```python
- fx.Flow('Q', 'Bus', effects_per_flow_hour={'Penalty': 2.5})
+ fx.Flow(bus='Bus', flow_id='Q', effects_per_flow_hour={'Penalty': 2.5})
fx.InvestParameters(..., effects_of_investment={'Penalty': 100})
```
- **User-definable Penalty**: Optionally define custom Penalty with constraints (auto-created if not defined):
diff --git a/benchmarks/benchmark_model_build.py b/benchmarks/benchmark_model_build.py
index 21695e80c..a3baffd47 100644
--- a/benchmarks/benchmark_model_build.py
+++ b/benchmarks/benchmark_model_build.py
@@ -246,7 +246,7 @@ def create_large_system(
fs.add_elements(
fx.Source(
'GasGrid',
- outputs=[fx.Flow('Gas', bus='Gas', size=5000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2})],
+ outputs=[fx.Flow(bus='Gas', size=5000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2})],
)
)
@@ -255,19 +255,25 @@ def create_large_system(
fx.Source(
'ElecBuy',
outputs=[
- fx.Flow('El', bus='Electricity', size=2000, effects_per_flow_hour={'costs': elec_price, 'CO2': 0.4})
+ fx.Flow(
+ bus='Electricity', flow_id='El', size=2000, effects_per_flow_hour={'costs': elec_price, 'CO2': 0.4}
+ )
],
),
fx.Sink(
'ElecSell',
- inputs=[fx.Flow('El', bus='Electricity', size=1000, effects_per_flow_hour={'costs': -elec_price * 0.8})],
+ inputs=[
+ fx.Flow(bus='Electricity', flow_id='El', size=1000, effects_per_flow_hour={'costs': -elec_price * 0.8})
+ ],
),
)
# Demands
fs.add_elements(
- fx.Sink('HeatDemand', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_profile)]),
- fx.Sink('ElecDemand', inputs=[fx.Flow('El', bus='Electricity', size=1, fixed_relative_profile=elec_profile)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_profile)]),
+ fx.Sink(
+ 'ElecDemand', inputs=[fx.Flow(bus='Electricity', flow_id='El', size=1, fixed_relative_profile=elec_profile)]
+ ),
)
# Converters (CHPs and Boilers)
@@ -294,10 +300,10 @@ def create_large_system(
fs.add_elements(
fx.LinearConverter(
f'CHP_{i}',
- inputs=[fx.Flow('Gas', bus='Gas', size=300)],
+ inputs=[fx.Flow(bus='Gas', size=300)],
outputs=[
- fx.Flow('El', bus='Electricity', size=100),
- fx.Flow('Heat', bus='Heat', size=size_param, status_parameters=status_param),
+ fx.Flow(bus='Electricity', flow_id='El', size=100),
+ fx.Flow(bus='Heat', size=size_param, status_parameters=status_param),
],
piecewise_conversion=fx.PiecewiseConversion(
{
@@ -314,9 +320,9 @@ def create_large_system(
f'CHP_{i}',
thermal_efficiency=0.50,
electrical_efficiency=0.35,
- thermal_flow=fx.Flow('Heat', bus='Heat', size=size_param, status_parameters=status_param),
- electrical_flow=fx.Flow('El', bus='Electricity', size=100),
- fuel_flow=fx.Flow('Gas', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Heat', size=size_param, status_parameters=status_param),
+ electrical_flow=fx.Flow(bus='Electricity', flow_id='El', size=100),
+ fuel_flow=fx.Flow(bus='Gas'),
)
)
else:
@@ -326,13 +332,12 @@ def create_large_system(
f'Boiler_{i}',
thermal_efficiency=0.90,
thermal_flow=fx.Flow(
- 'Heat',
bus='Heat',
size=size_param,
relative_minimum=0.2,
status_parameters=status_param,
),
- fuel_flow=fx.Flow('Gas', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas'),
)
)
@@ -356,8 +361,8 @@ def create_large_system(
eta_charge=0.95,
eta_discharge=0.95,
relative_loss_per_hour=0.001,
- charging=fx.Flow('Charge', bus='Heat', size=100),
- discharging=fx.Flow('Discharge', bus='Heat', size=100),
+ charging=fx.Flow(bus='Heat', size=100),
+ discharging=fx.Flow(bus='Heat', size=100),
)
)
diff --git a/docs/home/quick-start.md b/docs/home/quick-start.md
index 7bbc88172..e3c59d1ec 100644
--- a/docs/home/quick-start.md
+++ b/docs/home/quick-start.md
@@ -51,10 +51,9 @@ solar_profile = np.array([0, 0, 0, 0, 0, 0, 0.2, 0.5, 0.8, 1.0,
1.0, 0.9, 0.8, 0.7, 0.5, 0.3, 0.1, 0,
0, 0, 0, 0, 0, 0])
-solar = fx.Source(
+solar = fx.Port(
'solar',
- outputs=[fx.Flow(
- 'power',
+ imports=[fx.Flow(
bus='electricity',
size=100, # 100 kW capacity
relative_maximum=solar_profile
@@ -66,9 +65,8 @@ demand_profile = np.array([30, 25, 20, 20, 25, 35, 50, 70, 80, 75,
70, 65, 60, 65, 70, 80, 90, 95, 85, 70,
60, 50, 40, 35])
-demand = fx.Sink('demand', inputs=[
- fx.Flow('consumption',
- bus='electricity',
+demand = fx.Port('demand', exports=[
+ fx.Flow(bus='electricity',
size=1,
fixed_relative_profile=demand_profile)
])
@@ -76,8 +74,8 @@ demand = fx.Sink('demand', inputs=[
# Battery storage
battery = fx.Storage(
'battery',
- charging=fx.Flow('charge', bus='electricity', size=50),
- discharging=fx.Flow('discharge', bus='electricity', size=50),
+ charging=fx.Flow(bus='electricity', size=50),
+ discharging=fx.Flow(bus='electricity', size=50),
capacity_in_flow_hours=100, # 100 kWh capacity
initial_charge_state=50, # Start at 50%
eta_charge=0.95,
diff --git a/docs/notebooks/01-quickstart.ipynb b/docs/notebooks/01-quickstart.ipynb
index 47d83d664..f689193b2 100644
--- a/docs/notebooks/01-quickstart.ipynb
+++ b/docs/notebooks/01-quickstart.ipynb
@@ -14,7 +14,7 @@
"- **FlowSystem**: The container for your energy system model\n",
"- **Bus**: Balance nodes where energy flows meet\n",
"- **Effect**: Quantities to track and optimize (costs, emissions)\n",
- "- **Components**: Equipment like boilers, sources, and sinks\n",
+ "- **Components**: Equipment like boilers and ports\n",
"- **Flow**: Connections between components and buses"
]
},
@@ -125,21 +125,21 @@
" # === Effect: What we want to minimize ===\n",
" fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n",
" # === Gas Supply: Unlimited gas at 0.08 €/kWh ===\n",
- " fx.Source(\n",
+ " fx.Port(\n",
" 'GasGrid',\n",
- " outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.08)],\n",
+ " imports=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour=0.08)],\n",
" ),\n",
" # === Boiler: Converts gas to heat at 90% efficiency ===\n",
- " fx.linear_converters.Boiler(\n",
+ " fx.Converter.boiler(\n",
" 'Boiler',\n",
" thermal_efficiency=0.9,\n",
- " thermal_flow=fx.Flow('Heat', bus='Heat', size=100), # 100 kW capacity\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=100), # 100 kW capacity\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Workshop: Heat demand that must be met ===\n",
- " fx.Sink(\n",
+ " fx.Port(\n",
" 'Workshop',\n",
- " inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand.values)],\n",
+ " exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand.values)],\n",
" ),\n",
")"
]
diff --git a/docs/notebooks/02-heat-system.ipynb b/docs/notebooks/02-heat-system.ipynb
index f36a1b7a9..a2aacd855 100644
--- a/docs/notebooks/02-heat-system.ipynb
+++ b/docs/notebooks/02-heat-system.ipynb
@@ -146,16 +146,16 @@
" # === Effect ===\n",
" fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
" # === Gas Supply with time-varying price ===\n",
- " fx.Source(\n",
+ " fx.Port(\n",
" 'GasGrid',\n",
- " outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_price)],\n",
+ " imports=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=gas_price)],\n",
" ),\n",
" # === Gas Boiler: 150 kW, 92% efficiency ===\n",
- " fx.linear_converters.Boiler(\n",
+ " fx.Converter.boiler(\n",
" 'Boiler',\n",
" thermal_efficiency=0.92,\n",
- " thermal_flow=fx.Flow('Heat', bus='Heat', size=150),\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=150),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Thermal Storage: 500 kWh tank ===\n",
" fx.Storage(\n",
@@ -166,13 +166,13 @@
" eta_charge=0.98, # 98% charging efficiency\n",
" eta_discharge=0.98, # 98% discharging efficiency\n",
" relative_loss_per_hour=0.005, # 0.5% heat loss per hour\n",
- " charging=fx.Flow('Charge', bus='Heat', size=100), # Max 100 kW charging\n",
- " discharging=fx.Flow('Discharge', bus='Heat', size=100), # Max 100 kW discharging\n",
+ " charging=fx.Flow(bus='Heat', size=100), # Max 100 kW charging\n",
+ " discharging=fx.Flow(bus='Heat', size=100), # Max 100 kW discharging\n",
" ),\n",
" # === Office Heat Demand ===\n",
- " fx.Sink(\n",
+ " fx.Port(\n",
" 'Office',\n",
- " inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n",
+ " exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n",
" ),\n",
")"
]
diff --git a/docs/notebooks/03-investment-optimization.ipynb b/docs/notebooks/03-investment-optimization.ipynb
index 9cfa0afee..53e54e9b9 100644
--- a/docs/notebooks/03-investment-optimization.ipynb
+++ b/docs/notebooks/03-investment-optimization.ipynb
@@ -139,23 +139,22 @@
" # === Effects ===\n",
" fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n",
" # === Gas Supply ===\n",
- " fx.Source(\n",
+ " fx.Port(\n",
" 'GasGrid',\n",
- " outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=GAS_PRICE)],\n",
+ " imports=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=GAS_PRICE)],\n",
" ),\n",
" # === Gas Boiler (existing, fixed size) ===\n",
- " fx.linear_converters.Boiler(\n",
+ " fx.Converter.boiler(\n",
" 'GasBoiler',\n",
" thermal_efficiency=0.92,\n",
- " thermal_flow=fx.Flow('Heat', bus='Heat', size=200), # 200 kW existing\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=200), # 200 kW existing\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Solar Collectors (size to be optimized) ===\n",
- " fx.Source(\n",
+ " fx.Port(\n",
" 'SolarCollectors',\n",
- " outputs=[\n",
+ " imports=[\n",
" fx.Flow(\n",
- " 'Heat',\n",
" bus='Heat',\n",
" # Investment optimization: find optimal size between 0-500 kW\n",
" size=fx.InvestParameters(\n",
@@ -181,13 +180,13 @@
" eta_charge=0.95,\n",
" eta_discharge=0.95,\n",
" relative_loss_per_hour=0.01, # 1% loss per hour\n",
- " charging=fx.Flow('Charge', bus='Heat', size=200),\n",
- " discharging=fx.Flow('Discharge', bus='Heat', size=200),\n",
+ " charging=fx.Flow(bus='Heat', size=200),\n",
+ " discharging=fx.Flow(bus='Heat', size=200),\n",
" ),\n",
" # === Pool Heat Demand ===\n",
- " fx.Sink(\n",
+ " fx.Port(\n",
" 'Pool',\n",
- " inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=pool_demand)],\n",
+ " exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=pool_demand)],\n",
" ),\n",
")"
]
@@ -404,7 +403,7 @@
"\n",
"### Where to Use InvestParameters\n",
"\n",
- "- **Flow.size**: Optimize converter/source/sink capacity\n",
+ "- **Flow.size**: Optimize converter/port capacity\n",
"- **Storage.capacity_in_flow_hours**: Optimize storage capacity\n",
"\n",
"## Summary\n",
diff --git a/docs/notebooks/04-operational-constraints.ipynb b/docs/notebooks/04-operational-constraints.ipynb
index 401f99393..5d39834ef 100644
--- a/docs/notebooks/04-operational-constraints.ipynb
+++ b/docs/notebooks/04-operational-constraints.ipynb
@@ -124,12 +124,12 @@
" # === Effect ===\n",
" fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
" # === Gas Supply ===\n",
- " fx.Source(\n",
+ " fx.Port(\n",
" 'GasGrid',\n",
- " outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)],\n",
+ " imports=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour=0.06)],\n",
" ),\n",
" # === Main Industrial Boiler (with operational constraints) ===\n",
- " fx.linear_converters.Boiler(\n",
+ " fx.Converter.boiler(\n",
" 'MainBoiler',\n",
" thermal_efficiency=0.94, # High efficiency\n",
" # StatusParameters define on/off behavior\n",
@@ -144,20 +144,20 @@
" size=500,\n",
" relative_minimum=0.3, # Minimum load: 30% = 150 kW\n",
" ),\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas', size=600), # Size required for status_parameters\n",
+ " fuel_flow=fx.Flow(bus='Gas', size=600), # Size required for status_parameters\n",
" ),\n",
" # === Backup Boiler (flexible, but less efficient) ===\n",
- " fx.linear_converters.Boiler(\n",
+ " fx.Converter.boiler(\n",
" 'BackupBoiler',\n",
" thermal_efficiency=0.85, # Lower efficiency\n",
" # No status parameters = can turn on/off freely\n",
- " thermal_flow=fx.Flow('Steam', bus='Steam', size=150),\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Steam', size=150),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Factory Steam Demand ===\n",
- " fx.Sink(\n",
+ " fx.Port(\n",
" 'Factory',\n",
- " inputs=[fx.Flow('Steam', bus='Steam', size=1, fixed_relative_profile=steam_demand)],\n",
+ " exports=[fx.Flow(bus='Steam', size=1, fixed_relative_profile=steam_demand)],\n",
" ),\n",
")"
]
@@ -340,21 +340,21 @@
" fx.Bus('Gas', carrier='gas'),\n",
" fx.Bus('Steam', carrier='steam'),\n",
" fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
- " fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
+ " fx.Port('GasGrid', imports=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
" # Main boiler WITHOUT status parameters\n",
- " fx.linear_converters.Boiler(\n",
+ " fx.Converter.boiler(\n",
" 'MainBoiler',\n",
" thermal_efficiency=0.94,\n",
- " thermal_flow=fx.Flow('Steam', bus='Steam', size=500),\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Steam', size=500),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
- " fx.linear_converters.Boiler(\n",
+ " fx.Converter.boiler(\n",
" 'BackupBoiler',\n",
" thermal_efficiency=0.85,\n",
- " thermal_flow=fx.Flow('Steam', bus='Steam', size=150),\n",
- " fuel_flow=fx.Flow('Gas', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Steam', size=150),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
- " fx.Sink('Factory', inputs=[fx.Flow('Steam', bus='Steam', size=1, fixed_relative_profile=steam_demand)]),\n",
+ " fx.Port('Factory', exports=[fx.Flow(bus='Steam', size=1, fixed_relative_profile=steam_demand)]),\n",
")\n",
"\n",
"fs_unconstrained.optimize(fx.solvers.HighsSolver())\n",
@@ -559,7 +559,7 @@
"\n",
"Set via `Flow.relative_minimum`:\n",
"```python\n",
- "fx.Flow('Steam', bus='Steam', size=500, relative_minimum=0.3) # Min 30% load\n",
+ "fx.Flow(bus='Steam', size=500, relative_minimum=0.3) # Min 30% load\n",
"```\n",
"\n",
"### When Status is Active\n",
diff --git a/docs/notebooks/05-multi-carrier-system.ipynb b/docs/notebooks/05-multi-carrier-system.ipynb
index 3727227f4..1e79fd8ff 100644
--- a/docs/notebooks/05-multi-carrier-system.ipynb
+++ b/docs/notebooks/05-multi-carrier-system.ipynb
@@ -142,11 +142,10 @@
" fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n",
" fx.Effect('CO2', 'kg', 'CO2 Emissions'), # Track emissions too\n",
" # === Gas Supply ===\n",
- " fx.Source(\n",
+ " fx.Port(\n",
" 'GasGrid',\n",
- " outputs=[\n",
+ " imports=[\n",
" fx.Flow(\n",
- " 'Gas',\n",
" bus='Gas',\n",
" size=1000,\n",
" effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2}, # Gas: 0.2 kg CO2/kWh\n",
@@ -154,11 +153,10 @@
" ],\n",
" ),\n",
" # === Electricity Grid (buy) ===\n",
- " fx.Source(\n",
+ " fx.Port(\n",
" 'GridBuy',\n",
- " outputs=[\n",
+ " imports=[\n",
" fx.Flow(\n",
- " 'Electricity',\n",
" bus='Electricity',\n",
" size=500,\n",
" effects_per_flow_hour={'costs': elec_buy_price, 'CO2': 0.4}, # Grid: 0.4 kg CO2/kWh\n",
@@ -166,11 +164,10 @@
" ],\n",
" ),\n",
" # === Electricity Grid (sell) - negative cost = revenue ===\n",
- " fx.Sink(\n",
+ " fx.Port(\n",
" 'GridSell',\n",
- " inputs=[\n",
+ " exports=[\n",
" fx.Flow(\n",
- " 'Electricity',\n",
" bus='Electricity',\n",
" size=200,\n",
" effects_per_flow_hour={'costs': -elec_sell_price}, # Negative = income\n",
@@ -178,7 +175,7 @@
" ],\n",
" ),\n",
" # === CHP Unit (Combined Heat and Power) ===\n",
- " fx.linear_converters.CHP(\n",
+ " fx.Converter.chp(\n",
" 'CHP',\n",
" electrical_efficiency=0.40, # 40% to electricity\n",
" thermal_efficiency=0.50, # 50% to heat (total: 90%)\n",
@@ -186,30 +183,29 @@
" effects_per_startup={'costs': 30},\n",
" min_uptime=3,\n",
" ),\n",
- " electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),\n",
- " thermal_flow=fx.Flow('Q_th', bus='Heat', size=250),\n",
+ " electrical_flow=fx.Flow(bus='Electricity', size=200),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=250),\n",
" fuel_flow=fx.Flow(\n",
- " 'Q_fuel',\n",
" bus='Gas',\n",
" size=500,\n",
" relative_minimum=0.4, # Min 40% load\n",
" ),\n",
" ),\n",
" # === Gas Boiler (heat only) ===\n",
- " fx.linear_converters.Boiler(\n",
+ " fx.Converter.boiler(\n",
" 'Boiler',\n",
" thermal_efficiency=0.92,\n",
- " thermal_flow=fx.Flow('Q_th', bus='Heat', size=400),\n",
- " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=400),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Hospital Loads ===\n",
- " fx.Sink(\n",
+ " fx.Port(\n",
" 'HospitalElec',\n",
- " inputs=[fx.Flow('Load', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],\n",
+ " exports=[fx.Flow(bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],\n",
" ),\n",
- " fx.Sink(\n",
+ " fx.Port(\n",
" 'HospitalHeat',\n",
- " inputs=[fx.Flow('Load', bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n",
+ " exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)],\n",
" ),\n",
")"
]
@@ -303,7 +299,7 @@
"metadata": {},
"outputs": [],
"source": [
- "flow_system.stats.plot.heatmap('CHP(P_el)')"
+ "flow_system.stats.plot.heatmap('CHP(Electricity)')"
]
},
{
@@ -325,9 +321,9 @@
"flow_rates = flow_system.stats.flow_rates\n",
"grid_buy = flow_rates['GridBuy(Electricity)'].sum().item()\n",
"grid_sell = flow_rates['GridSell(Electricity)'].sum().item()\n",
- "chp_elec = flow_rates['CHP(P_el)'].sum().item()\n",
- "chp_heat = flow_rates['CHP(Q_th)'].sum().item()\n",
- "boiler_heat = flow_rates['Boiler(Q_th)'].sum().item()\n",
+ "chp_elec = flow_rates['CHP(Electricity)'].sum().item()\n",
+ "chp_heat = flow_rates['CHP(Heat)'].sum().item()\n",
+ "boiler_heat = flow_rates['Boiler(Heat)'].sum().item()\n",
"\n",
"total_elec = electricity_demand.sum()\n",
"total_heat = heat_demand.sum()\n",
@@ -378,29 +374,26 @@
" fx.Bus('Gas', carrier='gas'),\n",
" fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n",
" fx.Effect('CO2', 'kg', 'CO2 Emissions'),\n",
- " fx.Source(\n",
+ " fx.Port(\n",
" 'GasGrid',\n",
- " outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2})],\n",
+ " imports=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.2})],\n",
" ),\n",
- " fx.Source(\n",
+ " fx.Port(\n",
" 'GridBuy',\n",
- " outputs=[\n",
- " fx.Flow(\n",
- " 'Electricity', bus='Electricity', size=500, effects_per_flow_hour={'costs': elec_buy_price, 'CO2': 0.4}\n",
- " )\n",
- " ],\n",
+ " imports=[fx.Flow(bus='Electricity', size=500, effects_per_flow_hour={'costs': elec_buy_price, 'CO2': 0.4})],\n",
" ),\n",
" # Only boiler for heat\n",
- " fx.linear_converters.Boiler(\n",
+ " fx.Converter.boiler(\n",
" 'Boiler',\n",
" thermal_efficiency=0.92,\n",
- " thermal_flow=fx.Flow('Q_th', bus='Heat', size=500),\n",
- " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=500),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
- " fx.Sink(\n",
- " 'HospitalElec', inputs=[fx.Flow('Load', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)]\n",
+ " fx.Port(\n",
+ " 'HospitalElec',\n",
+ " exports=[fx.Flow(bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],\n",
" ),\n",
- " fx.Sink('HospitalHeat', inputs=[fx.Flow('Load', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n",
+ " fx.Port('HospitalHeat', exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n",
")\n",
"\n",
"fs_no_chp.optimize(fx.solvers.HighsSolver())\n",
@@ -493,21 +486,21 @@
"### CHP Modeling\n",
"\n",
"```python\n",
- "fx.linear_converters.CHP(\n",
+ "fx.Converter.chp(\n",
" 'CHP',\n",
" electrical_efficiency=0.40, # Fuel → Electricity\n",
" thermal_efficiency=0.50, # Fuel → Heat\n",
" # Total efficiency = 0.40 + 0.50 = 0.90 (90%)\n",
- " electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),\n",
- " thermal_flow=fx.Flow('Q_th', bus='Heat', size=250),\n",
- " fuel_flow=fx.Flow('Q_fuel', bus='Gas', size=500),\n",
+ " electrical_flow=fx.Flow(bus='Electricity', size=200),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=250),\n",
+ " fuel_flow=fx.Flow(bus='Gas', size=500),\n",
")\n",
"```\n",
"\n",
"### Electricity Markets\n",
"\n",
- "- **Buy**: Source with positive cost\n",
- "- **Sell**: Sink with negative cost (= revenue)\n",
+ "- **Buy**: Port with `imports` and positive cost\n",
+ "- **Sell**: Port with `exports` and negative cost (= revenue)\n",
"- Different prices for buy vs. sell (spread)\n",
"\n",
"### Tracking Multiple Effects\n",
diff --git a/docs/notebooks/06a-time-varying-parameters.ipynb b/docs/notebooks/06a-time-varying-parameters.ipynb
index 5e1efa331..270467589 100644
--- a/docs/notebooks/06a-time-varying-parameters.ipynb
+++ b/docs/notebooks/06a-time-varying-parameters.ipynb
@@ -167,16 +167,16 @@
" # Effect for cost tracking\n",
" fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
" # Grid electricity source\n",
- " fx.Source('Grid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=0.30)]),\n",
+ " fx.Port('Grid', imports=[fx.Flow(bus='Electricity', size=500, effects_per_flow_hour=0.30)]),\n",
" # Heat pump with TIME-VARYING COP\n",
- " fx.LinearConverter(\n",
+ " fx.Converter(\n",
" 'HeatPump',\n",
- " inputs=[fx.Flow('Elec', bus='Electricity', size=150)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat', size=500)],\n",
- " conversion_factors=[{'Elec': cop, 'Heat': 1}], # <-- Array for time-varying COP\n",
+ " inputs=[fx.Flow(bus='Electricity', size=150)],\n",
+ " outputs=[fx.Flow(bus='Heat', size=500)],\n",
+ " conversion_factors=[{'Electricity': cop, 'Heat': 1}], # <-- Array for time-varying COP\n",
" ),\n",
" # Heat demand\n",
- " fx.Sink('Building', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n",
+ " fx.Port('Building', exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),\n",
")\n",
"\n",
"flow_system.optimize(fx.solvers.HighsSolver());"
@@ -221,7 +221,7 @@
"# Create dataset with solution and input data - xarray auto-aligns by time coordinate\n",
"comparison = xr.Dataset(\n",
" {\n",
- " 'elec_consumption': flow_system.solution['HeatPump(Elec)|flow_rate'],\n",
+ " 'elec_consumption': flow_system.solution['HeatPump(Electricity)|flow_rate'],\n",
" 'heat_output': flow_system.solution['HeatPump(Heat)|flow_rate'],\n",
" 'outdoor_temp': xr.DataArray(outdoor_temp, dims=['time'], coords={'time': timesteps}),\n",
" }\n",
@@ -251,15 +251,15 @@
"\n",
"The `conversion_factors` parameter accepts a list of dictionaries where values can be:\n",
"- **Scalars**: Constant efficiency (e.g., `{'Fuel': 1, 'Heat': 0.9}`)\n",
- "- **Arrays**: Time-varying efficiency (e.g., `{'Elec': cop_array, 'Heat': 1}`)\n",
+ "- **Arrays**: Time-varying efficiency (e.g., `{'Electricity': cop_array, 'Heat': 1}`)\n",
"- **TimeSeriesData**: For more complex data with metadata\n",
"\n",
"```python\n",
- "fx.LinearConverter(\n",
+ "fx.Converter(\n",
" 'HeatPump',\n",
- " inputs=[fx.Flow('Elec', bus='Electricity', size=150)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat', size=500)],\n",
- " conversion_factors=[{'Elec': cop_array, 'Heat': 1}], # Time-varying\n",
+ " inputs=[fx.Flow(bus='Electricity', size=150)],\n",
+ " outputs=[fx.Flow(bus='Heat', size=500)],\n",
+ " conversion_factors=[{'Electricity': cop_array, 'Heat': 1}], # Time-varying\n",
")\n",
"```\n",
"\n",
diff --git a/docs/notebooks/06b-piecewise-conversion.ipynb b/docs/notebooks/06b-piecewise-conversion.ipynb
index 957e3ac34..414ff50a1 100644
--- a/docs/notebooks/06b-piecewise-conversion.ipynb
+++ b/docs/notebooks/06b-piecewise-conversion.ipynb
@@ -64,14 +64,14 @@
"source": [
"piecewise_efficiency = fx.PiecewiseConversion(\n",
" {\n",
- " 'Fuel': fx.Piecewise(\n",
+ " 'Gas': fx.Piecewise(\n",
" [\n",
" fx.Piece(start=78, end=132), # Part load\n",
" fx.Piece(start=132, end=179), # Mid load\n",
" fx.Piece(start=179, end=250), # Full load\n",
" ]\n",
" ),\n",
- " 'Elec': fx.Piecewise(\n",
+ " 'Electricity': fx.Piecewise(\n",
" [\n",
" fx.Piece(start=25, end=50), # 32% -> 38% efficiency\n",
" fx.Piece(start=50, end=75), # 38% -> 42% efficiency\n",
@@ -107,14 +107,14 @@
" fx.Bus('Gas'),\n",
" fx.Bus('Electricity'),\n",
" fx.Effect('costs', '€', is_standard=True, is_objective=True),\n",
- " fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=300, effects_per_flow_hour=0.05)]),\n",
- " fx.LinearConverter(\n",
+ " fx.Port('GasGrid', imports=[fx.Flow(bus='Gas', size=300, effects_per_flow_hour=0.05)]),\n",
+ " fx.Converter(\n",
" 'GasEngine',\n",
- " inputs=[fx.Flow('Fuel', bus='Gas')],\n",
- " outputs=[fx.Flow('Elec', bus='Electricity')],\n",
+ " inputs=[fx.Flow(bus='Gas')],\n",
+ " outputs=[fx.Flow(bus='Electricity')],\n",
" piecewise_conversion=piecewise_efficiency,\n",
" ),\n",
- " fx.Sink('Load', inputs=[fx.Flow('Elec', bus='Electricity', size=1, fixed_relative_profile=elec_demand)]),\n",
+ " fx.Port('Load', exports=[fx.Flow(bus='Electricity', size=1, fixed_relative_profile=elec_demand)]),\n",
")\n",
"\n",
"fs.optimize(fx.solvers.HighsSolver());"
@@ -135,7 +135,7 @@
"metadata": {},
"outputs": [],
"source": [
- "fs.components['GasEngine'].piecewise_conversion.plot(x_flow='Fuel')"
+ "fs.components['GasEngine'].piecewise_conversion.plot(x_flow='Gas')"
]
},
{
@@ -164,8 +164,8 @@
"outputs": [],
"source": [
"# Verify efficiency varies with load\n",
- "fuel = fs.solution['GasEngine(Fuel)|flow_rate']\n",
- "elec = fs.solution['GasEngine(Elec)|flow_rate']\n",
+ "fuel = fs.solution['GasEngine(Gas)|flow_rate']\n",
+ "elec = fs.solution['GasEngine(Electricity)|flow_rate']\n",
"efficiency = elec / fuel\n",
"\n",
"print(f'Efficiency range: {float(efficiency.min()):.1%} - {float(efficiency.max()):.1%}')\n",
diff --git a/docs/notebooks/06c-piecewise-effects.ipynb b/docs/notebooks/06c-piecewise-effects.ipynb
index dd373ab46..d99cf47fb 100644
--- a/docs/notebooks/06c-piecewise-effects.ipynb
+++ b/docs/notebooks/06c-piecewise-effects.ipynb
@@ -167,12 +167,12 @@
" fx.Bus('Elec'),\n",
" fx.Effect('costs', '€', is_standard=True, is_objective=True),\n",
" # Grid with time-varying price\n",
- " fx.Source('Grid', outputs=[fx.Flow('Elec', bus='Elec', size=500, effects_per_flow_hour=elec_price)]),\n",
+ " fx.Port('Grid', imports=[fx.Flow(bus='Elec', size=500, effects_per_flow_hour=elec_price)]),\n",
" # Battery with PIECEWISE investment cost (discrete tiers)\n",
" fx.Storage(\n",
" 'Battery',\n",
- " charging=fx.Flow('charge', bus='Elec', size=fx.InvestParameters(maximum_size=400)),\n",
- " discharging=fx.Flow('discharge', bus='Elec', size=fx.InvestParameters(maximum_size=400)),\n",
+ " charging=fx.Flow(bus='Elec', size=fx.InvestParameters(maximum_size=400)),\n",
+ " discharging=fx.Flow(bus='Elec', size=fx.InvestParameters(maximum_size=400)),\n",
" capacity_in_flow_hours=fx.InvestParameters(\n",
" piecewise_effects_of_investment=piecewise_costs,\n",
" minimum_size=0,\n",
@@ -182,7 +182,7 @@
" eta_discharge=0.95,\n",
" initial_charge_state=0,\n",
" ),\n",
- " fx.Sink('Demand', inputs=[fx.Flow('Elec', bus='Elec', size=1, fixed_relative_profile=demand)]),\n",
+ " fx.Port('Demand', exports=[fx.Flow(bus='Elec', size=1, fixed_relative_profile=demand)]),\n",
")\n",
"\n",
"fs.optimize(fx.solvers.HighsSolver());"
diff --git a/docs/notebooks/07-scenarios-and-periods.ipynb b/docs/notebooks/07-scenarios-and-periods.ipynb
index 1aae7660b..dcc911a39 100644
--- a/docs/notebooks/07-scenarios-and-periods.ipynb
+++ b/docs/notebooks/07-scenarios-and-periods.ipynb
@@ -170,11 +170,10 @@
" # === Effects ===\n",
" fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),\n",
" # === Gas Supply (price varies by period) ===\n",
- " fx.Source(\n",
+ " fx.Port(\n",
" 'GasGrid',\n",
- " outputs=[\n",
+ " imports=[\n",
" fx.Flow(\n",
- " 'Gas',\n",
" bus='Gas',\n",
" size=1000,\n",
" effects_per_flow_hour=gas_prices, # Array = varies by period\n",
@@ -182,12 +181,11 @@
" ],\n",
" ),\n",
" # === CHP Unit (investment decision) ===\n",
- " fx.linear_converters.CHP(\n",
+ " fx.Converter.chp(\n",
" 'CHP',\n",
" electrical_efficiency=0.35,\n",
" thermal_efficiency=0.50,\n",
" electrical_flow=fx.Flow(\n",
- " 'P_el',\n",
" bus='Electricity',\n",
" # Investment optimization: find optimal CHP size\n",
" size=fx.InvestParameters(\n",
@@ -196,22 +194,21 @@
" effects_of_investment_per_size={'costs': 15}, # 15 €/kW/week annualized\n",
" ),\n",
" ),\n",
- " thermal_flow=fx.Flow('Q_th', bus='Heat'),\n",
- " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat'),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Gas Boiler (existing backup) ===\n",
- " fx.linear_converters.Boiler(\n",
+ " fx.Converter.boiler(\n",
" 'Boiler',\n",
" thermal_efficiency=0.90,\n",
- " thermal_flow=fx.Flow('Q_th', bus='Heat', size=500),\n",
- " fuel_flow=fx.Flow('Q_fuel', bus='Gas'),\n",
+ " thermal_flow=fx.Flow(bus='Heat', size=500),\n",
+ " fuel_flow=fx.Flow(bus='Gas'),\n",
" ),\n",
" # === Electricity Sales (revenue varies by period) ===\n",
- " fx.Sink(\n",
+ " fx.Port(\n",
" 'ElecSales',\n",
- " inputs=[\n",
+ " exports=[\n",
" fx.Flow(\n",
- " 'P_el',\n",
" bus='Electricity',\n",
" size=100,\n",
" effects_per_flow_hour=-elec_prices, # Negative = revenue\n",
@@ -219,11 +216,10 @@
" ],\n",
" ),\n",
" # === Heat Demand (varies by scenario) ===\n",
- " fx.Sink(\n",
+ " fx.Port(\n",
" 'HeatDemand',\n",
- " inputs=[\n",
+ " exports=[\n",
" fx.Flow(\n",
- " 'Q_th',\n",
" bus='Heat',\n",
" size=1,\n",
" fixed_relative_profile=heat_demand, # DataFrame with scenario columns\n",
@@ -268,7 +264,7 @@
"metadata": {},
"outputs": [],
"source": [
- "chp_size = flow_system.stats.sizes['CHP(P_el)']\n",
+ "chp_size = flow_system.stats.sizes['CHP(Electricity)']\n",
"\n",
"pd.DataFrame(\n",
" {\n",
@@ -315,7 +311,7 @@
"metadata": {},
"outputs": [],
"source": [
- "flow_system.stats.plot.heatmap('CHP(Q_th)')"
+ "flow_system.stats.plot.heatmap('CHP(Heat)')"
]
},
{
@@ -349,7 +345,7 @@
"outputs": [],
"source": [
"# CHP operation summary by scenario\n",
- "chp_heat = flow_rates['CHP(Q_th)']\n",
+ "chp_heat = flow_rates['CHP(Heat)']\n",
"\n",
"pd.DataFrame(\n",
" {\n",
@@ -383,7 +379,7 @@
"fs_mild = flow_system.transform.sel(scenario='Mild Winter')\n",
"fs_mild.optimize(fx.solvers.HighsSolver(mip_gap=0.01))\n",
"\n",
- "chp_size_mild = float(fs_mild.stats.sizes['CHP(P_el)'].max())\n",
+ "chp_size_mild = float(fs_mild.stats.sizes['CHP(Electricity)'].max())\n",
"chp_size_both = float(chp_size.max())\n",
"\n",
"pd.DataFrame(\n",
diff --git a/docs/notebooks/09-plotting-and-data-access.ipynb b/docs/notebooks/09-plotting-and-data-access.ipynb
index a375fd641..bae32cf22 100644
--- a/docs/notebooks/09-plotting-and-data-access.ipynb
+++ b/docs/notebooks/09-plotting-and-data-access.ipynb
@@ -727,8 +727,8 @@
" 'Heat',\n",
" colors={\n",
" 'Boiler(Heat)': 'orangered',\n",
- " 'ThermalStorage(Charge)': 'steelblue',\n",
- " 'ThermalStorage(Discharge)': 'lightblue',\n",
+ " 'ThermalStorage(charging)': 'steelblue',\n",
+ " 'ThermalStorage(discharging)': 'lightblue',\n",
" 'Office(Heat)': 'forestgreen',\n",
" },\n",
")"
diff --git a/docs/notebooks/10-transmission.ipynb b/docs/notebooks/10-transmission.ipynb
index 065e7d14e..1647e7511 100644
--- a/docs/notebooks/10-transmission.ipynb
+++ b/docs/notebooks/10-transmission.ipynb
@@ -151,32 +151,32 @@
" # === Effect ===\n",
" fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
" # === External supplies ===\n",
- " fx.Source('GasSupply', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
- " fx.Source('ElecGrid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=0.25)]),\n",
+ " fx.Port('GasSupply', imports=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
+ " fx.Port('ElecGrid', imports=[fx.Flow(bus='Electricity', size=500, effects_per_flow_hour=0.25)]),\n",
" # === Site A: Large gas boiler (cheap) ===\n",
- " fx.LinearConverter(\n",
+ " fx.Converter(\n",
" 'GasBoiler_A',\n",
- " inputs=[fx.Flow('Gas', bus='Gas', size=500)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_A', size=400)],\n",
- " conversion_factors=[{'Gas': 1, 'Heat': 0.92}], # 92% efficiency\n",
+ " inputs=[fx.Flow(bus='Gas', size=500)],\n",
+ " outputs=[fx.Flow(bus='Heat_A', size=400)],\n",
+ " conversion_factors=[{'Gas': 1, 'Heat_A': 0.92}], # 92% efficiency\n",
" ),\n",
" # === Site B: Small electric boiler (expensive but flexible) ===\n",
- " fx.LinearConverter(\n",
+ " fx.Converter(\n",
" 'ElecBoiler_B',\n",
- " inputs=[fx.Flow('Elec', bus='Electricity', size=250)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_B', size=250)],\n",
- " conversion_factors=[{'Elec': 1, 'Heat': 0.99}], # 99% efficiency\n",
+ " inputs=[fx.Flow(bus='Electricity', size=250)],\n",
+ " outputs=[fx.Flow(bus='Heat_B', size=250)],\n",
+ " conversion_factors=[{'Electricity': 1, 'Heat_B': 0.99}], # 99% efficiency\n",
" ),\n",
" # === Transmission: A → B (unidirectional) ===\n",
" fx.Transmission(\n",
" 'Pipe_A_to_B',\n",
- " in1=fx.Flow('from_A', bus='Heat_A', size=200), # Input from Site A\n",
- " out1=fx.Flow('to_B', bus='Heat_B', size=200), # Output to Site B\n",
+ " in1=fx.Flow(bus='Heat_A', size=200), # Input from Site A\n",
+ " out1=fx.Flow(bus='Heat_B', size=200), # Output to Site B\n",
" relative_losses=0.05, # 5% heat loss in pipe\n",
" ),\n",
" # === Demands ===\n",
- " fx.Sink('Demand_A', inputs=[fx.Flow('Heat', bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n",
- " fx.Sink('Demand_B', inputs=[fx.Flow('Heat', bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n",
+ " fx.Port('Demand_A', exports=[fx.Flow(bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n",
+ " fx.Port('Demand_B', exports=[fx.Flow(bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n",
")\n",
"\n",
"fs_unidirectional.optimize(fx.solvers.HighsSolver());"
@@ -289,37 +289,37 @@
" # === Effect ===\n",
" fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
" # === External supplies ===\n",
- " fx.Source('GasSupply', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
- " fx.Source('ElecGrid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=elec_price)]),\n",
+ " fx.Port('GasSupply', imports=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
+ " fx.Port('ElecGrid', imports=[fx.Flow(bus='Electricity', size=500, effects_per_flow_hour=elec_price)]),\n",
" # === Site A: Gas boiler ===\n",
- " fx.LinearConverter(\n",
+ " fx.Converter(\n",
" 'GasBoiler_A',\n",
- " inputs=[fx.Flow('Gas', bus='Gas', size=500)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_A', size=400)],\n",
- " conversion_factors=[{'Gas': 1, 'Heat': 0.92}],\n",
+ " inputs=[fx.Flow(bus='Gas', size=500)],\n",
+ " outputs=[fx.Flow(bus='Heat_A', size=400)],\n",
+ " conversion_factors=[{'Gas': 1, 'Heat_A': 0.92}],\n",
" ),\n",
" # === Site B: Heat pump (efficient with variable electricity price) ===\n",
- " fx.LinearConverter(\n",
+ " fx.Converter(\n",
" 'HeatPump_B',\n",
- " inputs=[fx.Flow('Elec', bus='Electricity', size=100)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_B', size=350)],\n",
- " conversion_factors=[{'Elec': 1, 'Heat': 3.5}], # COP = 3.5\n",
+ " inputs=[fx.Flow(bus='Electricity', size=100)],\n",
+ " outputs=[fx.Flow(bus='Heat_B', size=350)],\n",
+ " conversion_factors=[{'Electricity': 1, 'Heat_B': 3.5}], # COP = 3.5\n",
" ),\n",
" # === BIDIRECTIONAL Transmission ===\n",
" fx.Transmission(\n",
" 'Pipe_AB',\n",
" # Direction 1: A → B\n",
- " in1=fx.Flow('from_A', bus='Heat_A', size=200),\n",
- " out1=fx.Flow('to_B', bus='Heat_B', size=200),\n",
+ " in1=fx.Flow(bus='Heat_A', flow_id='from_A', size=200),\n",
+ " out1=fx.Flow(bus='Heat_B', flow_id='to_B', size=200),\n",
" # Direction 2: B → A\n",
- " in2=fx.Flow('from_B', bus='Heat_B', size=200),\n",
- " out2=fx.Flow('to_A', bus='Heat_A', size=200),\n",
+ " in2=fx.Flow(bus='Heat_B', flow_id='from_B', size=200),\n",
+ " out2=fx.Flow(bus='Heat_A', flow_id='to_A', size=200),\n",
" relative_losses=0.05,\n",
" prevent_simultaneous_flows_in_both_directions=True, # Can't flow both ways at once\n",
" ),\n",
" # === Demands ===\n",
- " fx.Sink('Demand_A', inputs=[fx.Flow('Heat', bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n",
- " fx.Sink('Demand_B', inputs=[fx.Flow('Heat', bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n",
+ " fx.Port('Demand_A', exports=[fx.Flow(bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n",
+ " fx.Port('Demand_B', exports=[fx.Flow(bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n",
")\n",
"\n",
"fs_bidirectional.optimize(fx.solvers.HighsSolver());"
@@ -433,28 +433,28 @@
" # === Effect ===\n",
" fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),\n",
" # === External supplies ===\n",
- " fx.Source('GasSupply', outputs=[fx.Flow('Gas', bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
- " fx.Source('ElecGrid', outputs=[fx.Flow('Elec', bus='Electricity', size=500, effects_per_flow_hour=elec_price)]),\n",
+ " fx.Port('GasSupply', imports=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour=0.06)]),\n",
+ " fx.Port('ElecGrid', imports=[fx.Flow(bus='Electricity', size=500, effects_per_flow_hour=elec_price)]),\n",
" # === Site A: Gas boiler ===\n",
- " fx.LinearConverter(\n",
+ " fx.Converter(\n",
" 'GasBoiler_A',\n",
- " inputs=[fx.Flow('Gas', bus='Gas', size=500)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_A', size=400)],\n",
- " conversion_factors=[{'Gas': 1, 'Heat': 0.92}],\n",
+ " inputs=[fx.Flow(bus='Gas', size=500)],\n",
+ " outputs=[fx.Flow(bus='Heat_A', size=400)],\n",
+ " conversion_factors=[{'Gas': 1, 'Heat_A': 0.92}],\n",
" ),\n",
" # === Site B: Heat pump ===\n",
- " fx.LinearConverter(\n",
+ " fx.Converter(\n",
" 'HeatPump_B',\n",
- " inputs=[fx.Flow('Elec', bus='Electricity', size=100)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_B', size=350)],\n",
- " conversion_factors=[{'Elec': 1, 'Heat': 3.5}],\n",
+ " inputs=[fx.Flow(bus='Electricity', size=100)],\n",
+ " outputs=[fx.Flow(bus='Heat_B', size=350)],\n",
+ " conversion_factors=[{'Electricity': 1, 'Heat_B': 3.5}],\n",
" ),\n",
" # === Site B: Backup electric boiler ===\n",
- " fx.LinearConverter(\n",
+ " fx.Converter(\n",
" 'ElecBoiler_B',\n",
- " inputs=[fx.Flow('Elec', bus='Electricity', size=200)],\n",
- " outputs=[fx.Flow('Heat', bus='Heat_B', size=200)],\n",
- " conversion_factors=[{'Elec': 1, 'Heat': 0.99}],\n",
+ " inputs=[fx.Flow(bus='Electricity', size=200)],\n",
+ " outputs=[fx.Flow(bus='Heat_B', size=200)],\n",
+ " conversion_factors=[{'Electricity': 1, 'Heat_B': 0.99}],\n",
" ),\n",
" # === Transmission with INVESTMENT OPTIMIZATION ===\n",
" # Investment parameters are passed via 'size' parameter\n",
@@ -469,7 +469,7 @@
" maximum_size=300,\n",
" ),\n",
" ),\n",
- " out1=fx.Flow('to_B', bus='Heat_B'),\n",
+ " out1=fx.Flow(bus='Heat_B', flow_id='to_B'),\n",
" in2=fx.Flow(\n",
- " 'from_B',\n",
+ " flow_id='from_B',\n",
" bus='Heat_B',\n",
@@ -479,14 +479,14 @@
" maximum_size=300,\n",
" ),\n",
" ),\n",
- " out2=fx.Flow('to_A', bus='Heat_A'),\n",
+ " out2=fx.Flow(bus='Heat_A', flow_id='to_A'),\n",
" relative_losses=0.05,\n",
" balanced=True, # Same capacity in both directions\n",
" prevent_simultaneous_flows_in_both_directions=True,\n",
" ),\n",
" # === Demands ===\n",
- " fx.Sink('Demand_A', inputs=[fx.Flow('Heat', bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n",
- " fx.Sink('Demand_B', inputs=[fx.Flow('Heat', bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n",
+ " fx.Port('Demand_A', exports=[fx.Flow(bus='Heat_A', size=1, fixed_relative_profile=demand_a)]),\n",
+ " fx.Port('Demand_B', exports=[fx.Flow(bus='Heat_B', size=1, fixed_relative_profile=demand_b)]),\n",
")\n",
"\n",
"fs_invest.optimize(fx.solvers.HighsSolver());"
@@ -542,11 +542,11 @@
"fx.Transmission(\n",
" label='pipe_name',\n",
" # Direction 1: A → B\n",
- " in1=fx.Flow('from_A', bus='Bus_A', size=100),\n",
- " out1=fx.Flow('to_B', bus='Bus_B', size=100),\n",
+ " in1=fx.Flow(bus='Bus_A', flow_id='from_A', size=100),\n",
+ " out1=fx.Flow(bus='Bus_B', flow_id='to_B', size=100),\n",
" # Direction 2: B → A (optional - omit for unidirectional)\n",
- " in2=fx.Flow('from_B', bus='Bus_B', size=100),\n",
- " out2=fx.Flow('to_A', bus='Bus_A', size=100),\n",
+ " in2=fx.Flow(bus='Bus_B', flow_id='from_B', size=100),\n",
+ " out2=fx.Flow(bus='Bus_A', flow_id='to_A', size=100),\n",
" # Loss parameters\n",
" relative_losses=0.05, # 5% proportional loss\n",
" absolute_losses=10, # 10 kW fixed loss when active (optional)\n",
diff --git a/docs/notebooks/data/generate_example_systems.py b/docs/notebooks/data/generate_example_systems.py
index 985628e1f..207ff3595 100644
--- a/docs/notebooks/data/generate_example_systems.py
+++ b/docs/notebooks/data/generate_example_systems.py
@@ -119,12 +119,12 @@ def create_simple_system() -> fx.FlowSystem:
fx.Bus('Gas', carrier='gas'),
fx.Bus('Heat', carrier='heat'),
fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),
- fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_price)]),
- fx.linear_converters.Boiler(
+ fx.Port('GasGrid', imports=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=gas_price)]),
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=0.92,
- thermal_flow=fx.Flow('Heat', bus='Heat', size=150),
- fuel_flow=fx.Flow('Gas', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Heat', size=150),
+ fuel_flow=fx.Flow(bus='Gas'),
),
fx.Storage(
'ThermalStorage',
@@ -134,10 +134,10 @@ def create_simple_system() -> fx.FlowSystem:
eta_charge=0.98,
eta_discharge=0.98,
relative_loss_per_hour=0.005,
- charging=fx.Flow('Charge', bus='Heat', size=100),
- discharging=fx.Flow('Discharge', bus='Heat', size=100),
+ charging=fx.Flow(bus='Heat', size=100),
+ discharging=fx.Flow(bus='Heat', size=100),
),
- fx.Sink('Office', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ fx.Port('Office', exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
)
return fs
@@ -195,33 +195,30 @@ def create_complex_system() -> fx.FlowSystem:
fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),
fx.Effect('CO2', 'kg', 'CO2 Emissions'),
# Gas supply
- fx.Source(
+ fx.Port(
'GasGrid',
- outputs=[fx.Flow('Gas', bus='Gas', size=300, effects_per_flow_hour={'costs': gas_price, 'CO2': gas_co2})],
+ imports=[fx.Flow(bus='Gas', size=300, effects_per_flow_hour={'costs': gas_price, 'CO2': gas_co2})],
),
# Electricity grid (import and export)
- fx.Source(
+ fx.Port(
'ElectricityImport',
- outputs=[
+ imports=[
fx.Flow(
- 'El',
bus='Electricity',
size=100,
effects_per_flow_hour={'costs': electricity_price, 'CO2': electricity_co2},
)
],
),
- fx.Sink(
+ fx.Port(
'ElectricityExport',
- inputs=[
- fx.Flow('El', bus='Electricity', size=50, effects_per_flow_hour={'costs': -electricity_price * 0.8})
- ],
+ exports=[fx.Flow(bus='Electricity', size=50, effects_per_flow_hour={'costs': -electricity_price * 0.8})],
),
# CHP with piecewise efficiency (efficiency varies with load)
- fx.LinearConverter(
+ fx.Converter(
'CHP',
- inputs=[fx.Flow('Gas', bus='Gas', size=200)],
- outputs=[fx.Flow('El', bus='Electricity', size=80), fx.Flow('Heat', bus='Heat', size=85)],
+ inputs=[fx.Flow(bus='Gas', size=200)],
+ outputs=[fx.Flow(bus='Electricity', size=80), fx.Flow(bus='Heat', size=85)],
piecewise_conversion=fx.PiecewiseConversion(
{
'Gas': fx.Piecewise(
@@ -230,7 +227,7 @@ def create_complex_system() -> fx.FlowSystem:
fx.Piece(start=160, end=200), # Full load
]
),
- 'El': fx.Piecewise(
+ 'Electricity': fx.Piecewise(
[
fx.Piece(start=25, end=60), # ~31-38% electrical efficiency
fx.Piece(start=60, end=80), # ~38-40% electrical efficiency
@@ -247,10 +244,9 @@ def create_complex_system() -> fx.FlowSystem:
status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 2}),
),
# Heat pump (with investment)
- fx.linear_converters.HeatPump(
+ fx.Converter.heat_pump(
'HeatPump',
thermal_flow=fx.Flow(
- 'Heat',
bus='Heat',
size=fx.InvestParameters(
effects_of_investment={'costs': 500},
@@ -258,14 +254,14 @@ def create_complex_system() -> fx.FlowSystem:
maximum_size=60,
),
),
- electrical_flow=fx.Flow('El', bus='Electricity'),
+ electrical_flow=fx.Flow(bus='Electricity'),
cop=3.5,
),
# Backup boiler
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'BackupBoiler',
- thermal_flow=fx.Flow('Heat', bus='Heat', size=80),
- fuel_flow=fx.Flow('Gas', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Heat', size=80),
+ fuel_flow=fx.Flow(bus='Gas'),
thermal_efficiency=0.90,
),
# Thermal storage (with investment)
@@ -278,13 +274,14 @@ def create_complex_system() -> fx.FlowSystem:
),
eta_charge=0.95,
eta_discharge=0.95,
- charging=fx.Flow('Charge', bus='Heat', size=50),
- discharging=fx.Flow('Discharge', bus='Heat', size=50),
+ charging=fx.Flow(bus='Heat', size=50),
+ discharging=fx.Flow(bus='Heat', size=50),
),
# Demands
- fx.Sink('HeatDemand', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
- fx.Sink(
- 'ElDemand', inputs=[fx.Flow('El', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)]
+ fx.Port('HeatDemand', exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ fx.Port(
+ 'ElDemand',
+ exports=[fx.Flow(bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],
),
)
return fs
@@ -331,13 +328,12 @@ def create_district_heating_system() -> fx.FlowSystem:
fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),
fx.Effect('CO2', 'kg', 'CO2 Emissions'),
# CHP unit with investment
- fx.linear_converters.CHP(
+ fx.Converter.chp(
'CHP',
thermal_efficiency=0.58,
electrical_efficiency=0.22,
- electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),
+ electrical_flow=fx.Flow(bus='Electricity', size=200),
thermal_flow=fx.Flow(
- 'Q_th',
bus='Heat',
size=fx.InvestParameters(
minimum_size=100,
@@ -347,14 +343,13 @@ def create_district_heating_system() -> fx.FlowSystem:
relative_minimum=0.3,
status_parameters=fx.StatusParameters(),
),
- fuel_flow=fx.Flow('Q_fu', bus='Coal'),
+ fuel_flow=fx.Flow(bus='Coal'),
),
# Gas Boiler with investment
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=0.85,
thermal_flow=fx.Flow(
- 'Q_th',
bus='Heat',
size=fx.InvestParameters(
minimum_size=0,
@@ -364,7 +359,7 @@ def create_district_heating_system() -> fx.FlowSystem:
relative_minimum=0.1,
status_parameters=fx.StatusParameters(),
),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas'),
),
# Thermal Storage with investment
fx.Storage(
@@ -378,38 +373,38 @@ def create_district_heating_system() -> fx.FlowSystem:
eta_charge=1,
eta_discharge=1,
relative_loss_per_hour=0.001,
- charging=fx.Flow('Charge', size=137, bus='Heat'),
- discharging=fx.Flow('Discharge', size=158, bus='Heat'),
+ charging=fx.Flow(bus='Heat', size=137),
+ discharging=fx.Flow(bus='Heat', size=158),
),
# Fuel sources
- fx.Source(
+ fx.Port(
'GasGrid',
- outputs=[fx.Flow('Q_Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})],
+ imports=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})],
),
- fx.Source(
+ fx.Port(
'CoalSupply',
- outputs=[fx.Flow('Q_Coal', bus='Coal', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})],
+ imports=[fx.Flow(bus='Coal', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})],
),
# Electricity grid
- fx.Source(
+ fx.Port(
'GridBuy',
- outputs=[
+ imports=[
fx.Flow(
- 'P_el',
bus='Electricity',
size=1000,
effects_per_flow_hour={'costs': electricity_price + 0.5, 'CO2': 0.3},
)
],
),
- fx.Sink(
+ fx.Port(
'GridSell',
- inputs=[fx.Flow('P_el', bus='Electricity', size=1000, effects_per_flow_hour=-(electricity_price - 0.5))],
+ exports=[fx.Flow(bus='Electricity', size=1000, effects_per_flow_hour=-(electricity_price - 0.5))],
),
# Demands
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q_th', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
- fx.Sink(
- 'ElecDemand', inputs=[fx.Flow('P_el', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)]
+ fx.Port('HeatDemand', exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ fx.Port(
+ 'ElecDemand',
+ exports=[fx.Flow(bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],
),
)
return fs
@@ -455,22 +450,21 @@ def create_operational_system() -> fx.FlowSystem:
fx.Effect('costs', '€', 'Total Costs', is_standard=True, is_objective=True),
fx.Effect('CO2', 'kg', 'CO2 Emissions'),
# CHP with startup costs
- fx.linear_converters.CHP(
+ fx.Converter.chp(
'CHP',
thermal_efficiency=0.58,
electrical_efficiency=0.22,
status_parameters=fx.StatusParameters(effects_per_startup=24000),
- electrical_flow=fx.Flow('P_el', bus='Electricity', size=200),
- thermal_flow=fx.Flow('Q_th', bus='Heat', size=200),
- fuel_flow=fx.Flow('Q_fu', bus='Coal', size=288, relative_minimum=87 / 288, previous_flow_rate=100),
+ electrical_flow=fx.Flow(bus='Electricity', size=200),
+ thermal_flow=fx.Flow(bus='Heat', size=200),
+ fuel_flow=fx.Flow(bus='Coal', size=288, relative_minimum=87 / 288, previous_flow_rate=100),
),
# Boiler with startup costs
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=0.85,
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ thermal_flow=fx.Flow(bus='Heat'),
fuel_flow=fx.Flow(
- 'Q_fu',
bus='Gas',
size=95,
relative_minimum=12 / 95,
@@ -489,35 +483,35 @@ def create_operational_system() -> fx.FlowSystem:
eta_discharge=1,
relative_loss_per_hour=0.001,
prevent_simultaneous_charge_and_discharge=True,
- charging=fx.Flow('Charge', size=137, bus='Heat'),
- discharging=fx.Flow('Discharge', size=158, bus='Heat'),
+ charging=fx.Flow(bus='Heat', size=137),
+ discharging=fx.Flow(bus='Heat', size=158),
),
- fx.Source(
+ fx.Port(
'GasGrid',
- outputs=[fx.Flow('Q_Gas', bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})],
+ imports=[fx.Flow(bus='Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})],
),
- fx.Source(
+ fx.Port(
'CoalSupply',
- outputs=[fx.Flow('Q_Coal', bus='Coal', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})],
+ imports=[fx.Flow(bus='Coal', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})],
),
- fx.Source(
+ fx.Port(
'GridBuy',
- outputs=[
+ imports=[
fx.Flow(
- 'P_el',
bus='Electricity',
size=1000,
effects_per_flow_hour={'costs': electricity_price + 0.5, 'CO2': 0.3},
)
],
),
- fx.Sink(
+ fx.Port(
'GridSell',
- inputs=[fx.Flow('P_el', bus='Electricity', size=1000, effects_per_flow_hour=-(electricity_price - 0.5))],
+ exports=[fx.Flow(bus='Electricity', size=1000, effects_per_flow_hour=-(electricity_price - 0.5))],
),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q_th', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
- fx.Sink(
- 'ElecDemand', inputs=[fx.Flow('P_el', bus='Electricity', size=1, fixed_relative_profile=electricity_demand)]
+ fx.Port('HeatDemand', exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ fx.Port(
+ 'ElecDemand',
+ exports=[fx.Flow(bus='Electricity', size=1, fixed_relative_profile=electricity_demand)],
),
)
return fs
@@ -576,11 +570,10 @@ def create_seasonal_storage_system() -> fx.FlowSystem:
fx.Effect('CO2', 'kg', 'CO2 Emissions'),
# Solar thermal collector (investment) - profile includes 70% collector efficiency
# Costs annualized for single-year analysis
- fx.Source(
+ fx.Port(
'SolarThermal',
- outputs=[
+ imports=[
fx.Flow(
- 'Q_th',
bus='Heat',
size=fx.InvestParameters(
minimum_size=0,
@@ -592,11 +585,10 @@ def create_seasonal_storage_system() -> fx.FlowSystem:
],
),
# Gas boiler (backup)
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'GasBoiler',
thermal_efficiency=0.90,
thermal_flow=fx.Flow(
- 'Q_th',
bus='Heat',
size=fx.InvestParameters(
minimum_size=0,
@@ -604,14 +596,13 @@ def create_seasonal_storage_system() -> fx.FlowSystem:
effects_of_investment_per_size={'costs': 20000}, # €/MW (annualized)
),
),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas'),
),
# Gas supply (higher price makes solar+storage more attractive)
- fx.Source(
+ fx.Port(
'GasGrid',
- outputs=[
+ imports=[
fx.Flow(
- 'Q_gas',
bus='Gas',
size=20,
effects_per_flow_hour={'costs': gas_price * 1.5, 'CO2': 0.2}, # €/MWh
@@ -631,20 +622,18 @@ def create_seasonal_storage_system() -> fx.FlowSystem:
eta_discharge=0.95,
relative_loss_per_hour=0.0001, # Very low losses for pit storage
charging=fx.Flow(
- 'Charge',
bus='Heat',
size=fx.InvestParameters(maximum_size=10, effects_of_investment_per_size={'costs': 5000}),
),
discharging=fx.Flow(
- 'Discharge',
bus='Heat',
size=fx.InvestParameters(maximum_size=10, effects_of_investment_per_size={'costs': 5000}),
),
),
# Heat demand
- fx.Sink(
+ fx.Port(
'HeatDemand',
- inputs=[fx.Flow('Q_th', bus='Heat', size=1, fixed_relative_profile=heat_demand)],
+ exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)],
),
)
return fs
@@ -709,12 +698,11 @@ def create_multiperiod_system() -> fx.FlowSystem:
fx.Bus('Gas', carrier='gas'),
fx.Bus('Heat', carrier='heat'),
fx.Effect('costs', '€', 'Operating Costs', is_standard=True, is_objective=True),
- fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=gas_prices)]),
- fx.linear_converters.Boiler(
+ fx.Port('GasGrid', imports=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=gas_prices)]),
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=0.92,
thermal_flow=fx.Flow(
- 'Heat',
bus='Heat',
size=fx.InvestParameters(
effects_of_investment={'costs': 1000},
@@ -722,7 +710,7 @@ def create_multiperiod_system() -> fx.FlowSystem:
maximum_size=250,
),
),
- fuel_flow=fx.Flow('Gas', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas'),
),
fx.Storage(
'ThermalStorage',
@@ -733,10 +721,10 @@ def create_multiperiod_system() -> fx.FlowSystem:
),
eta_charge=0.98,
eta_discharge=0.98,
- charging=fx.Flow('Charge', bus='Heat', size=80),
- discharging=fx.Flow('Discharge', bus='Heat', size=80),
+ charging=fx.Flow(bus='Heat', size=80),
+ discharging=fx.Flow(bus='Heat', size=80),
),
- fx.Sink('Building', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
+ fx.Port('Building', exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=heat_demand)]),
)
return fs
diff --git a/docs/user-guide/building-models/choosing-components.md b/docs/user-guide/building-models/choosing-components.md
index 5f07e82dc..dd45f6f76 100644
--- a/docs/user-guide/building-models/choosing-components.md
+++ b/docs/user-guide/building-models/choosing-components.md
@@ -7,11 +7,11 @@ This guide helps you select the right flixOpt component for your modeling needs.
```mermaid
graph TD
A[What does this element do?] --> B{Brings energy INTO system?}
- B -->|Yes| C[Source]
+ B -->|Yes| C["Port (imports)"]
B -->|No| D{Takes energy OUT of system?}
- D -->|Yes| E[Sink]
+ D -->|Yes| E["Port (exports)"]
D -->|No| F{Converts energy type?}
- F -->|Yes| G[LinearConverter]
+ F -->|Yes| G[Converter]
F -->|No| H{Stores energy?}
H -->|Yes| I[Storage]
H -->|No| J{Transports between locations?}
@@ -21,25 +21,25 @@ graph TD
## Component Comparison
-| Component | Purpose | Inputs | Outputs | Key Parameters |
-|-----------|---------|--------|---------|----------------|
-| **Source** | External supply | None | 1+ flows | `effects_per_flow_hour` |
-| **Sink** | Demand/export | 1+ flows | None | `fixed_relative_profile` |
-| **SourceAndSink** | Bidirectional exchange | 1+ flows | 1+ flows | Both input and output |
-| **LinearConverter** | Transform energy | 1+ flows | 1+ flows | `conversion_factors` |
+| Component | Purpose | Imports | Exports | Key Parameters |
+|-----------|---------|---------|---------|----------------|
+| **Port** (imports only) | External supply | 1+ flows | None | `effects_per_flow_hour` |
+| **Port** (exports only) | Demand/export | None | 1+ flows | `fixed_relative_profile` |
+| **Port** (bidirectional) | Bidirectional exchange | 1+ flows | 1+ flows | Both imports and exports |
+| **Converter** | Transform energy | 1+ flows | 1+ flows | `conversion_factors` |
| **Storage** | Time-shift energy | charge flow | discharge flow | `capacity_in_flow_hours` |
| **Transmission** | Transport energy | in1, in2 | out1, out2 | `relative_losses` |
## Detailed Component Guide
-### Source
+### Port (imports only)
**Use when:** Purchasing or importing energy/material from outside your system boundary.
```python
-fx.Source(
+fx.Port(
'GridElectricity',
- outputs=[fx.Flow('Elec', bus='Electricity', size=1000, effects_per_flow_hour=0.25)]
+ imports=[fx.Flow(bus='Electricity', size=1000, effects_per_flow_hour=0.25)]
)
```
@@ -53,27 +53,27 @@ fx.Source(
| Parameter | Purpose |
|-----------|---------|
-| `outputs` | List of flows leaving this source |
+| `imports` | List of flows coming into the system |
| `effects_per_flow_hour` | Cost/emissions per unit |
| `invest_parameters` | For optimizing connection capacity |
---
-### Sink
+### Port (exports only)
**Use when:** Energy/material leaves your system (demand, export, waste).
```python
# Fixed demand (must be met)
-fx.Sink(
+fx.Port(
'Building',
- inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=demand)]
+ exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=demand)]
)
# Optional export (can sell if profitable)
-fx.Sink(
+fx.Port(
'Export',
- inputs=[fx.Flow('Elec', bus='Electricity', size=100, effects_per_flow_hour=-0.15)]
+ exports=[fx.Flow(bus='Electricity', size=100, effects_per_flow_hour=-0.15)]
)
```
@@ -87,21 +87,21 @@ fx.Sink(
| Parameter | Purpose |
|-----------|---------|
-| `inputs` | List of flows entering this sink |
+| `exports` | List of flows going out of the system |
| `fixed_relative_profile` | Demand profile (on flow) |
| `effects_per_flow_hour` | Negative = revenue |
---
-### SourceAndSink
+### Port (bidirectional)
**Use when:** Bidirectional exchange at a single point (buy AND sell from same connection).
```python
-fx.SourceAndSink(
+fx.Port(
'GridConnection',
- inputs=[fx.Flow('import', bus='Electricity', size=500, effects_per_flow_hour=0.25)],
- outputs=[fx.Flow('export', bus='Electricity', size=500, effects_per_flow_hour=-0.15)],
+ imports=[fx.Flow(bus='Electricity', flow_id='import', size=500, effects_per_flow_hour=0.25)],
+ exports=[fx.Flow(bus='Electricity', flow_id='export', size=500, effects_per_flow_hour=-0.15)],
prevent_simultaneous_flow_rates=True, # Can't buy and sell at same time
)
```
@@ -113,38 +113,38 @@ fx.SourceAndSink(
---
-### LinearConverter
+### Converter
**Use when:** Transforming one energy type to another with a linear relationship.
```python
# Single input, single output
-fx.LinearConverter(
+fx.Converter(
'Boiler',
- inputs=[fx.Flow('Gas', bus='Gas', size=500)],
- outputs=[fx.Flow('Heat', bus='Heat', size=450)],
+ inputs=[fx.Flow(bus='Gas', size=500)],
+ outputs=[fx.Flow(bus='Heat', size=450)],
conversion_factors=[{'Gas': 1, 'Heat': 0.9}],
)
# Multiple outputs (CHP)
-fx.LinearConverter(
+fx.Converter(
'CHP',
- inputs=[fx.Flow('Gas', bus='Gas', size=300)],
+ inputs=[fx.Flow(bus='Gas', size=300)],
outputs=[
- fx.Flow('Elec', bus='Electricity', size=100),
- fx.Flow('Heat', bus='Heat', size=150),
+ fx.Flow(bus='Electricity', size=100),
+ fx.Flow(bus='Heat', size=150),
],
- conversion_factors=[{'Gas': 1, 'Elec': 0.35, 'Heat': 0.50}],
+ conversion_factors=[{'Gas': 1, 'Electricity': 0.35, 'Heat': 0.50}],
)
# Multiple inputs
-fx.LinearConverter(
+fx.Converter(
'CoFiringBoiler',
inputs=[
- fx.Flow('Gas', bus='Gas', size=200),
- fx.Flow('Biomass', bus='Biomass', size=100),
+ fx.Flow(bus='Gas', size=200),
+ fx.Flow(bus='Biomass', size=100),
],
- outputs=[fx.Flow('Heat', bus='Heat', size=270)],
+ outputs=[fx.Flow(bus='Heat', size=270)],
conversion_factors=[{'Gas': 1, 'Biomass': 1, 'Heat': 0.9}],
)
```
@@ -167,24 +167,22 @@ fx.LinearConverter(
#### Pre-built Converters
-flixOpt includes ready-to-use converters in `flixopt.linear_converters`:
+flixOpt includes ready-to-use converters as factory methods on `Converter`:
-| Class | Description | Key Parameters |
-|-------|-------------|----------------|
-| `Boiler` | Fuel → Heat | `thermal_efficiency` |
-| `HeatPump` | Electricity → Heat | `cop` |
-| `HeatPumpWithSource` | Elec + Ambient → Heat | `cop`, source flow |
-| `CHP` | Fuel → Elec + Heat | `electrical_efficiency`, `thermal_efficiency` |
-| `Chiller` | Electricity → Cooling | `cop` |
+| Factory Method | Description | Key Parameters |
+|----------------|-------------|----------------|
+| `Converter.boiler()` | Fuel → Heat | `thermal_efficiency` |
+| `Converter.heat_pump()` | Electricity → Heat | `cop` |
+| `Converter.heat_pump_with_source()` | Elec + Ambient → Heat | `cop`, source flow |
+| `Converter.chp()` | Fuel → Elec + Heat | `electrical_efficiency`, `thermal_efficiency` |
+| `Converter.cooling_tower()` | Electricity → Cooling | `cop` |
```python
-from flixopt.linear_converters import Boiler, HeatPump
-
-boiler = Boiler(
+boiler = fx.Converter.boiler(
'GasBoiler',
thermal_efficiency=0.92,
- fuel_flow=fx.Flow('gas', bus='Gas', size=500, effects_per_flow_hour=0.05),
- thermal_flow=fx.Flow('heat', bus='Heat', size=460),
+ fuel_flow=fx.Flow(bus='Gas', size=500, effects_per_flow_hour=0.05),
+ thermal_flow=fx.Flow(bus='Heat', size=460),
)
```
@@ -197,8 +195,8 @@ boiler = Boiler(
```python
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Electricity', size=100),
- discharging=fx.Flow('discharge', bus='Electricity', size=100),
+ charging=fx.Flow(bus='Electricity', size=100),
+ discharging=fx.Flow(bus='Electricity', size=100),
capacity_in_flow_hours=4, # 4 hours at full rate = 400 kWh
eta_charge=0.95,
eta_discharge=0.95,
@@ -233,18 +231,18 @@ fx.Storage(
# Unidirectional
fx.Transmission(
'HeatPipe',
- in1=fx.Flow('from_A', bus='Heat_A', size=200),
- out1=fx.Flow('to_B', bus='Heat_B', size=200),
+ in1=fx.Flow(bus='Heat_A', size=200),
+ out1=fx.Flow(bus='Heat_B', size=200),
relative_losses=0.05,
)
# Bidirectional
fx.Transmission(
'PowerLine',
- in1=fx.Flow('A_to_B', bus='Elec_A', size=100),
- out1=fx.Flow('at_B', bus='Elec_B', size=100),
- in2=fx.Flow('B_to_A', bus='Elec_B', size=100),
- out2=fx.Flow('at_A', bus='Elec_A', size=100),
+ in1=fx.Flow(bus='Elec_A', flow_id='A_to_B', size=100),
+ out1=fx.Flow(bus='Elec_B', flow_id='at_B', size=100),
+ in2=fx.Flow(bus='Elec_B', flow_id='B_to_A', size=100),
+ out2=fx.Flow(bus='Elec_A', flow_id='at_A', size=100),
relative_losses=0.03,
prevent_simultaneous_flows_in_both_directions=True,
)
@@ -274,7 +272,6 @@ Add `InvestParameters` to flows to let the optimizer choose sizes:
```python
fx.Flow(
- 'Heat',
bus='Heat',
invest_parameters=fx.InvestParameters(
effects_of_investment_per_size={'costs': 100}, # €/kW
@@ -284,7 +281,7 @@ fx.Flow(
)
```
-Works with: Source, Sink, LinearConverter, Storage, Transmission
+Works with: Port, Converter, Storage, Transmission
### Operational Constraints
@@ -292,7 +289,6 @@ Add `StatusParameters` to flows for on/off behavior:
```python
fx.Flow(
- 'Heat',
bus='Heat',
size=500,
status_parameters=fx.StatusParameters(
@@ -310,18 +306,18 @@ Works with: All components with flows
Use `PiecewiseConversion` for load-dependent efficiency:
```python
-fx.LinearConverter(
+fx.Converter(
'GasEngine',
- inputs=[fx.Flow('Fuel', bus='Gas')],
- outputs=[fx.Flow('Elec', bus='Electricity')],
+ inputs=[fx.Flow(bus='Gas')],
+ outputs=[fx.Flow(bus='Electricity')],
piecewise_conversion=fx.PiecewiseConversion({
- 'Fuel': fx.Piecewise([fx.Piece(100, 200), fx.Piece(200, 300)]),
- 'Elec': fx.Piecewise([fx.Piece(35, 80), fx.Piece(80, 110)]),
+ 'Gas': fx.Piecewise([fx.Piece(100, 200), fx.Piece(200, 300)]),
+ 'Electricity': fx.Piecewise([fx.Piece(35, 80), fx.Piece(80, 110)]),
}),
)
```
-Works with: LinearConverter
+Works with: Converter
## Common Modeling Patterns
@@ -332,10 +328,10 @@ Model N identical units that can operate independently:
```python
for i in range(3):
flow_system.add_elements(
- fx.LinearConverter(
+ fx.Converter(
f'Boiler_{i}',
- inputs=[fx.Flow('Gas', bus='Gas', size=100)],
- outputs=[fx.Flow('Heat', bus='Heat', size=90)],
+ inputs=[fx.Flow(bus='Gas', size=100)],
+ outputs=[fx.Flow(bus='Heat', size=90)],
conversion_factors=[{'Gas': 1, 'Heat': 0.9}],
)
)
@@ -347,14 +343,14 @@ Model waste heat recovery from one process to another:
```python
# Process that generates waste heat
-process = fx.LinearConverter(
+process = fx.Converter(
'Process',
- inputs=[fx.Flow('Elec', bus='Electricity', size=100)],
+ inputs=[fx.Flow(bus='Electricity', size=100)],
outputs=[
- fx.Flow('Product', bus='Products', size=80),
- fx.Flow('WasteHeat', bus='Heat', size=20), # Recovered heat
+ fx.Flow(bus='Products', size=80),
+ fx.Flow(bus='Heat', size=20), # Recovered heat
],
- conversion_factors=[{'Elec': 1, 'Product': 0.8, 'WasteHeat': 0.2}],
+ conversion_factors=[{'Electricity': 1, 'Products': 0.8, 'Heat': 0.2}],
)
```
@@ -363,13 +359,13 @@ process = fx.LinearConverter(
Model a component that can use multiple fuels:
```python
-flex_boiler = fx.LinearConverter(
+flex_boiler = fx.Converter(
'FlexBoiler',
inputs=[
- fx.Flow('Gas', bus='Gas', size=200, effects_per_flow_hour=0.05),
- fx.Flow('Oil', bus='Oil', size=200, effects_per_flow_hour=0.08),
+ fx.Flow(bus='Gas', size=200, effects_per_flow_hour=0.05),
+ fx.Flow(bus='Oil', size=200, effects_per_flow_hour=0.08),
],
- outputs=[fx.Flow('Heat', bus='Heat', size=180)],
+ outputs=[fx.Flow(bus='Heat', size=180)],
conversion_factors=[{'Gas': 1, 'Oil': 1, 'Heat': 0.9}],
)
```
diff --git a/docs/user-guide/building-models/index.md b/docs/user-guide/building-models/index.md
index 248c7ada5..cf3fc2e3d 100644
--- a/docs/user-guide/building-models/index.md
+++ b/docs/user-guide/building-models/index.md
@@ -82,72 +82,66 @@ heat_bus = fx.Bus(
Components are the equipment in your system. Choose based on function:
-### Sources — External Inputs
+### Ports — External Inputs and Demands
-Use for **purchasing** energy or materials from outside:
+Use for **importing** energy or materials from outside, or for **consuming** energy (demands, exports):
```python
-# Grid electricity with time-varying price
-grid = fx.Source(
+# Grid electricity with time-varying price (importing into the system)
+grid = fx.Port(
'Grid',
- outputs=[fx.Flow('Elec', bus='Electricity', size=1000, effects_per_flow_hour=price_profile)]
+ imports=[fx.Flow(bus='Electricity', size=1000, effects_per_flow_hour=price_profile)]
)
-# Natural gas with fixed price
-gas_supply = fx.Source(
+# Natural gas with fixed price (importing into the system)
+gas_supply = fx.Port(
'GasSupply',
- outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=0.05)]
+ imports=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=0.05)]
)
-```
-
-### Sinks — Demands
-
-Use for **consuming** energy or materials (demands, exports):
-```python
-# Heat demand (must be met exactly)
-building = fx.Sink(
+# Heat demand (must be met exactly, exporting from the system)
+building = fx.Port(
'Building',
- inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=demand_profile)]
+ exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=demand_profile)]
)
# Optional export (can sell but not required)
-export = fx.Sink(
+export = fx.Port(
'Export',
- inputs=[fx.Flow('Elec', bus='Electricity', size=100, effects_per_flow_hour=-0.15)] # Negative = revenue
+ exports=[fx.Flow(bus='Electricity', size=100, effects_per_flow_hour=-0.15)] # Negative = revenue
)
```
-### LinearConverter — Transformations
+### Converter — Transformations
Use for **converting** one form of energy to another:
```python
# Gas boiler: Gas → Heat
-boiler = fx.LinearConverter(
+boiler = fx.Converter(
'Boiler',
- inputs=[fx.Flow('Gas', bus='Gas', size=500)],
- outputs=[fx.Flow('Heat', bus='Heat', size=450)],
+ inputs=[fx.Flow(bus='Gas', size=500)],
+ outputs=[fx.Flow(bus='Heat', size=450)],
conversion_factors=[{'Gas': 1, 'Heat': 0.9}], # 90% efficiency
)
# Heat pump: Electricity → Heat
-heat_pump = fx.LinearConverter(
+heat_pump = fx.Converter(
'HeatPump',
- inputs=[fx.Flow('Elec', bus='Electricity', size=100)],
- outputs=[fx.Flow('Heat', bus='Heat', size=350)],
- conversion_factors=[{'Elec': 1, 'Heat': 3.5}], # COP = 3.5
+ inputs=[fx.Flow(bus='Electricity', size=100)],
+ outputs=[fx.Flow(bus='Heat', size=350)],
+ conversion_factors=[{'Electricity': 1, 'Heat': 3.5}], # COP = 3.5
)
# CHP: Gas → Electricity + Heat (multiple outputs)
-chp = fx.LinearConverter(
+chp = fx.Converter(
'CHP',
- inputs=[fx.Flow('Gas', bus='Gas', size=300)],
+ inputs=[fx.Flow(bus='Gas', size=300)],
outputs=[
- fx.Flow('Elec', bus='Electricity', size=100),
- fx.Flow('Heat', bus='Heat', size=150),
+ fx.Flow(bus='Electricity', size=100),
+ fx.Flow(bus='Heat', size=150),
],
- conversion_factors=[{'Gas': 1, 'Elec': 0.35, 'Heat': 0.50}],
+ conversion_factors=[{'Gas': 1, 'Electricity': 0.35, 'Heat': 0.50}],
)
```
@@ -159,8 +153,8 @@ Use for **storing** energy or materials:
# Thermal storage
tank = fx.Storage(
'ThermalTank',
- charging=fx.Flow('charge', bus='Heat', size=200),
- discharging=fx.Flow('discharge', bus='Heat', size=200),
+ charging=fx.Flow(bus='Heat', size=200),
+ discharging=fx.Flow(bus='Heat', size=200),
capacity_in_flow_hours=10, # 10 hours at full charge/discharge rate
eta_charge=0.95,
eta_discharge=0.95,
@@ -177,8 +171,8 @@ Use for **connecting** different locations:
# District heating pipe
pipe = fx.Transmission(
'HeatPipe',
- in1=fx.Flow('from_A', bus='Heat_A', size=200),
- out1=fx.Flow('to_B', bus='Heat_B', size=200),
+ in1=fx.Flow(bus='Heat_A', size=200),
+ out1=fx.Flow(bus='Heat_B', size=200),
relative_losses=0.05, # 5% loss
)
```
@@ -212,10 +206,10 @@ Effects are typically assigned per flow hour:
```python
# Gas costs 0.05 €/kWh
-fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})
+fx.Flow(bus='Gas', size=500, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})
# Shorthand when only one effect (the standard one)
-fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=0.05)
+fx.Flow(bus='Gas', size=500, effects_per_flow_hour=0.05)
```
## Step 5: Add Everything to FlowSystem
@@ -234,14 +228,14 @@ flow_system.add_elements(
fx.Effect('costs', '€', is_standard=True, is_objective=True),
# Components
- fx.Source('GasGrid', outputs=[fx.Flow('Gas', bus='Gas', size=500, effects_per_flow_hour=0.05)]),
- fx.LinearConverter(
+ fx.Port('GasGrid', imports=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=0.05)]),
+ fx.Converter(
'Boiler',
- inputs=[fx.Flow('Gas', bus='Gas', size=500)],
- outputs=[fx.Flow('Heat', bus='Heat', size=450)],
+ inputs=[fx.Flow(bus='Gas', size=500)],
+ outputs=[fx.Flow(bus='Heat', size=450)],
conversion_factors=[{'Gas': 1, 'Heat': 0.9}],
),
- fx.Sink('Building', inputs=[fx.Flow('Heat', bus='Heat', size=1, fixed_relative_profile=demand)]),
+ fx.Port('Building', exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=demand)]),
)
```
@@ -255,14 +249,14 @@ Gas → Boiler → Heat
flow_system.add_elements(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Gas', outputs=[fx.Flow('gas', bus=None, size=500, effects_per_flow_hour=0.05)]),
- fx.LinearConverter(
+ fx.Port('Gas', imports=[fx.Flow(bus='Gas', size=500, effects_per_flow_hour=0.05)]),
+ fx.Converter(
'Boiler',
- inputs=[fx.Flow('gas', bus=None, size=500)], # Inline source
- outputs=[fx.Flow('heat', bus='Heat', size=450)],
- conversion_factors=[{'gas': 1, 'heat': 0.9}],
+ inputs=[fx.Flow(bus='Gas', size=500)],
+ outputs=[fx.Flow(bus='Heat', size=450)],
+ conversion_factors=[{'Gas': 1, 'Heat': 0.9}],
),
- fx.Sink('Demand', inputs=[fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=demand)]),
+ fx.Port('Demand', exports=[fx.Flow(bus='Heat', size=1, fixed_relative_profile=demand)]),
)
```
@@ -276,13 +270,13 @@ flow_system.add_elements(
fx.Effect('costs', '€', is_standard=True, is_objective=True),
# Option 1: Gas boiler (cheap gas, moderate efficiency)
- fx.LinearConverter('Boiler', ...),
+ fx.Converter('Boiler', ...),
# Option 2: Heat pump (expensive electricity, high efficiency)
- fx.LinearConverter('HeatPump', ...),
+ fx.Converter('HeatPump', ...),
# Demand
- fx.Sink('Building', ...),
+ fx.Port('Building', ...),
)
```
@@ -298,13 +292,13 @@ flow_system.add_elements(
fx.Effect('costs', '€', is_standard=True, is_objective=True),
# Generation
- fx.LinearConverter('Boiler', ...),
+ fx.Converter('Boiler', ...),
# Storage (can shift load in time)
fx.Storage('Tank', ...),
# Demand
- fx.Sink('Building', ...),
+ fx.Port('Building', ...),
)
```
@@ -312,13 +306,13 @@ flow_system.add_elements(
| I need to... | Use this component |
|-------------|-------------------|
-| Buy/import energy | `Source` |
-| Sell/export energy | `Sink` with negative effects |
-| Meet a demand | `Sink` with `fixed_relative_profile` |
-| Convert energy type | `LinearConverter` |
+| Buy/import energy | `Port` with `imports` |
+| Sell/export energy | `Port` with `exports` and negative effects |
+| Meet a demand | `Port` with `exports` and `fixed_relative_profile` |
+| Convert energy type | `Converter` |
| Store energy | `Storage` |
| Transport between sites | `Transmission` |
-| Model combined heat & power | `LinearConverter` with multiple outputs |
+| Model combined heat & power | `Converter` with multiple outputs |
For detailed component selection, see [Choosing Components](choosing-components.md).
@@ -384,7 +378,7 @@ graph LR
B -->|create variables &
constraints| C["Model Layer
FlowsModel, StoragesModel, ..."]
```
-1. **User Layer** — The Python objects you create (`Flow`, `Bus`, `LinearConverter`, etc.) with their parameters.
+1. **User Layer** — The Python objects you create (`Flow`, `Bus`, `Converter`, etc.) with their parameters.
2. **Data Layer** — `*Data` classes (`FlowsData`, `StoragesData`, etc.) batch parameters from all elements of the same type into `xr.DataArray` arrays and validate them.
3. **Model Layer** — `*Model` classes (`FlowsModel`, `StoragesModel`, etc.) create linopy variables and constraints from the batched data.
diff --git a/docs/user-guide/mathematical-notation/elements/LinearConverter.md b/docs/user-guide/mathematical-notation/elements/LinearConverter.md
index 915537d60..071563c68 100644
--- a/docs/user-guide/mathematical-notation/elements/LinearConverter.md
+++ b/docs/user-guide/mathematical-notation/elements/LinearConverter.md
@@ -1,6 +1,6 @@
-# LinearConverter
+# Converter
-A LinearConverter transforms inputs into outputs with fixed ratios.
+A Converter transforms inputs into outputs with fixed ratios.
## Basic: Conversion Equation
@@ -13,7 +13,7 @@ $$
$0.9 \cdot p_{gas}(t) = p_{heat}(t)$
```python
- boiler = fx.LinearConverter(
+ boiler = fx.Converter(
label='boiler',
inputs=[fx.Flow(label='gas', bus=gas_bus, size=111)],
outputs=[fx.Flow(label='heat', bus=heat_bus, size=100)],
@@ -26,7 +26,7 @@ $$
$3.5 \cdot p_{el}(t) = p_{heat}(t)$
```python
- hp = fx.LinearConverter(
+ hp = fx.Converter(
label='hp',
inputs=[fx.Flow(label='el', bus=elec_bus, size=100)],
outputs=[fx.Flow(label='heat', bus=heat_bus, size=350)],
@@ -39,7 +39,7 @@ $$
Two constraints linking fuel to outputs:
```python
- chp = fx.LinearConverter(
+ chp = fx.Converter(
label='chp',
inputs=[fx.Flow(label='fuel', bus=gas_bus, size=100)],
outputs=[
@@ -62,7 +62,7 @@ Pass a list for time-dependent conversion:
```python
cop = np.array([3.0, 3.2, 3.5, 4.0, 3.8, ...]) # Varies with ambient temperature
-hp = fx.LinearConverter(
+hp = fx.Converter(
...,
conversion_factors=[{'el': cop, 'heat': 1}],
)
@@ -103,7 +103,7 @@ chp = fx.linear_converters.CHP(
A component is active when any of its flows is non-zero. Add startup costs, minimum run times:
```python
- gen = fx.LinearConverter(
+ gen = fx.Converter(
...,
status_parameters=fx.StatusParameters(
effects_per_startup={'costs': 1000},
@@ -119,17 +119,17 @@ chp = fx.linear_converters.CHP(
For variable efficiency — all flows change together based on operating point:
```python
- chp = fx.LinearConverter(
+ chp = fx.Converter(
label='CHP',
- inputs=[fx.Flow('fuel', bus=gas_bus)],
+ inputs=[fx.Flow(bus='Gas')],
outputs=[
- fx.Flow('el', bus=elec_bus, size=60),
- fx.Flow('heat', bus=heat_bus),
+ fx.Flow(bus='Electricity', size=60),
+ fx.Flow(bus='Heat'),
],
piecewise_conversion=fx.PiecewiseConversion({
- 'el': fx.Piecewise([fx.Piece(5, 30), fx.Piece(40, 60)]),
- 'heat': fx.Piecewise([fx.Piece(6, 35), fx.Piece(45, 100)]),
- 'fuel': fx.Piecewise([fx.Piece(12, 70), fx.Piece(90, 200)]),
+ 'Electricity': fx.Piecewise([fx.Piece(5, 30), fx.Piece(40, 60)]),
+ 'Heat': fx.Piecewise([fx.Piece(6, 35), fx.Piece(45, 100)]),
+ 'Gas': fx.Piecewise([fx.Piece(12, 70), fx.Piece(90, 200)]),
}),
)
```
@@ -148,4 +148,4 @@ The converter creates **constraints** linking flows, not new variables.
| $a_f$ | $\mathbb{R}$ | Conversion factor for input flow $f$ |
| $b_f$ | $\mathbb{R}$ | Conversion factor for output flow $f$ |
-**Classes:** [`LinearConverter`][flixopt.components.LinearConverter], [`LinearConverterModel`][flixopt.components.LinearConverterModel]
+**Classes:** [`Converter`][flixopt.components.Converter], [`ConvertersModel`][flixopt.elements.ConvertersModel]
diff --git a/flixopt/__init__.py b/flixopt/__init__.py
index 1bded9aaa..fe9b63cdc 100644
--- a/flixopt/__init__.py
+++ b/flixopt/__init__.py
@@ -22,7 +22,9 @@
from .carrier import Carrier, CarrierContainer
from .comparison import Comparison
from .components import (
+ Converter,
LinearConverter,
+ Port,
Sink,
Source,
SourceAndSink,
@@ -52,6 +54,8 @@
'IdList',
'PENALTY_EFFECT_ID',
'PENALTY_EFFECT_LABEL',
+ 'Converter',
+ 'Port',
'Source',
'Sink',
'SourceAndSink',
diff --git a/flixopt/batched.py b/flixopt/batched.py
index 4bc732b62..e0067e6fc 100644
--- a/flixopt/batched.py
+++ b/flixopt/batched.py
@@ -13,14 +13,14 @@
import logging
from functools import cached_property
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
import numpy as np
import pandas as pd
import xarray as xr
-from .core import PlausibilityError
-from .features import fast_isnull, fast_notnull, stack_along_dim
+from .core import PlausibilityError, align_effects_to_coords, align_to_coords
+from .features import stack_along_dim
from .id_list import IdList, element_id_list
from .interface import InvestParameters, StatusParameters
from .modeling import _scalar_safe_isel_drop
@@ -107,6 +107,8 @@ def __init__(
effect_ids: list[str] | None = None,
timestep_duration: xr.DataArray | float | None = None,
previous_states: dict[str, xr.DataArray] | None = None,
+ coords: dict[str, pd.Index] | None = None,
+ normalize_effects: Any = None,
):
self._params = params
self._dim = dim_name
@@ -114,6 +116,8 @@ def __init__(
self._effect_ids = effect_ids or []
self._timestep_duration = timestep_duration
self._previous_states = previous_states or {}
+ self._coords = coords
+ self._normalize_effects = normalize_effects
@property
def ids(self) -> list[str]:
@@ -264,7 +268,23 @@ def previous_downtime(self) -> xr.DataArray | None:
def _build_effects(self, attr: str) -> xr.DataArray | None:
"""Build effect factors array for a status effect attribute."""
ids = self._categorize(lambda p: getattr(p, attr))
- dicts = {eid: getattr(self._params[eid], attr) for eid in ids}
+ if not ids:
+ return None
+ norm = self._normalize_effects or (lambda x: x)
+ dicts = {}
+ for eid in ids:
+ raw = getattr(self._params[eid], attr)
+ normalized = norm(raw) or {}
+ if self._coords is not None:
+ aligned = align_effects_to_coords(
+ normalized,
+ self._coords,
+ prefix=eid,
+ suffix=attr,
+ )
+ dicts[eid] = aligned or {}
+ else:
+ dicts[eid] = normalized
return build_effects_array(dicts, self._effect_ids, self._dim)
@cached_property
@@ -295,11 +315,25 @@ def __init__(
params: dict[str, InvestParameters],
dim_name: str,
effect_ids: list[str] | None = None,
+ coords: dict[str, pd.Index] | None = None,
+ normalize_effects: Any = None,
):
self._params = params
self._dim = dim_name
self._ids = list(params.keys())
self._effect_ids = effect_ids or []
+ self._coords = coords
+ self._normalize_effects = normalize_effects
+ self._validate()
+
+ def _validate(self) -> None:
+ """Validate investment parameters."""
+ for eid, p in self._params.items():
+ if p.fixed_size is None and p.maximum_size is None:
+ raise PlausibilityError(
+ f'InvestParameters for "{eid}" requires either fixed_size or maximum_size to be set. '
+ f'An upper bound is needed to properly scale the optimization model.'
+ )
@property
def ids(self) -> list[str]:
@@ -398,7 +432,22 @@ def _build_effects(self, attr: str, ids: list[str] | None = None) -> xr.DataArra
"""Build effect factors array for an investment effect attribute."""
if ids is None:
ids = self._categorize(lambda p: getattr(p, attr))
- dicts = {eid: getattr(self._params[eid], attr) for eid in ids}
+ norm = self._normalize_effects or (lambda x: x)
+ dicts = {}
+ for eid in ids:
+ raw = getattr(self._params[eid], attr)
+ normalized = norm(raw) or {}
+ if self._coords is not None:
+ aligned = align_effects_to_coords(
+ normalized,
+ self._coords,
+ prefix=eid,
+ suffix=attr,
+ dims=['period', 'scenario'],
+ )
+ dicts[eid] = aligned or {}
+ else:
+ dicts[eid] = normalized
return build_effects_array(dicts, self._effect_ids, self._dim)
@cached_property
@@ -526,7 +575,13 @@ class StoragesData:
"""
def __init__(
- self, storages: list, dim_name: str, effect_ids: list[str], timesteps_extra: pd.DatetimeIndex | None = None
+ self,
+ storages: list,
+ dim_name: str,
+ effect_ids: list[str],
+ timesteps_extra: pd.DatetimeIndex | None = None,
+ coords: dict[str, pd.Index] | None = None,
+ normalize_effects: Any = None,
):
"""Initialize StoragesData.
@@ -536,11 +591,15 @@ def __init__(
effect_ids: List of effect IDs for building effect arrays.
timesteps_extra: Extended timesteps (time + 1 final step) for charge state bounds.
Required for StoragesModel, None for InterclusterStoragesModel.
+ coords: Coordinate indexes for alignment (time, period, scenario).
+ normalize_effects: Callable to normalize raw effect values.
"""
self._storages = storages
self._dim_name = dim_name
self._effect_ids = effect_ids
self._timesteps_extra = timesteps_extra
+ self._coords = coords
+ self._normalize_effects = normalize_effects
self._by_id = {s.id: s for s in storages}
@cached_property
@@ -570,6 +629,11 @@ def __getitem__(self, label: str):
def __len__(self) -> int:
return len(self._storages)
+ def _align(self, storage_id: str, attr: str, dims: list[str] | None = None) -> xr.DataArray | None:
+ """Align a single storage attribute value to model coords."""
+ raw = getattr(self._by_id[storage_id], attr)
+ return align_to_coords(raw, self._coords, name=f'{storage_id}|{attr}', dims=dims)
+
# === Categorization ===
@cached_property
@@ -608,6 +672,8 @@ def investment_data(self) -> InvestmentData | None:
params=self.invest_params,
dim_name=self._dim_name,
effect_ids=self._effect_ids,
+ coords=self._coords,
+ normalize_effects=self._normalize_effects,
)
# === Stacked Storage Parameters ===
@@ -615,27 +681,33 @@ def investment_data(self) -> InvestmentData | None:
@cached_property
def eta_charge(self) -> xr.DataArray:
"""(element, [time]) - charging efficiency."""
- return stack_along_dim([s.eta_charge for s in self._storages], self._dim_name, self.ids)
+ return stack_along_dim([self._align(s.id, 'eta_charge') for s in self._storages], self._dim_name, self.ids)
@cached_property
def eta_discharge(self) -> xr.DataArray:
"""(element, [time]) - discharging efficiency."""
- return stack_along_dim([s.eta_discharge for s in self._storages], self._dim_name, self.ids)
+ return stack_along_dim([self._align(s.id, 'eta_discharge') for s in self._storages], self._dim_name, self.ids)
@cached_property
def relative_loss_per_hour(self) -> xr.DataArray:
"""(element, [time]) - relative loss per hour."""
- return stack_along_dim([s.relative_loss_per_hour for s in self._storages], self._dim_name, self.ids)
+ return stack_along_dim(
+ [self._align(s.id, 'relative_loss_per_hour') for s in self._storages], self._dim_name, self.ids
+ )
@cached_property
def relative_minimum_charge_state(self) -> xr.DataArray:
"""(element, [time]) - relative minimum charge state."""
- return stack_along_dim([s.relative_minimum_charge_state for s in self._storages], self._dim_name, self.ids)
+ return stack_along_dim(
+ [self._align(s.id, 'relative_minimum_charge_state') for s in self._storages], self._dim_name, self.ids
+ )
@cached_property
def relative_maximum_charge_state(self) -> xr.DataArray:
"""(element, [time]) - relative maximum charge state."""
- return stack_along_dim([s.relative_maximum_charge_state for s in self._storages], self._dim_name, self.ids)
+ return stack_along_dim(
+ [self._align(s.id, 'relative_maximum_charge_state') for s in self._storages], self._dim_name, self.ids
+ )
@cached_property
def charging_flow_ids(self) -> list[str]:
@@ -647,6 +719,20 @@ def discharging_flow_ids(self) -> list[str]:
"""Flow IDs for discharging flows, aligned with self.ids."""
return [s.discharging.id for s in self._storages]
+ def aligned_initial_charge_state(self, storage) -> xr.DataArray | None:
+ """Get aligned initial_charge_state for a storage (None if string or None)."""
+ if storage.initial_charge_state is None or isinstance(storage.initial_charge_state, str):
+ return None
+ return self._align(storage.id, 'initial_charge_state', dims=['period', 'scenario'])
+
+ def aligned_minimal_final_charge_state(self, storage) -> xr.DataArray | None:
+ """Get aligned minimal_final_charge_state for a storage."""
+ return self._align(storage.id, 'minimal_final_charge_state', dims=['period', 'scenario'])
+
+ def aligned_maximal_final_charge_state(self, storage) -> xr.DataArray | None:
+ """Get aligned maximal_final_charge_state for a storage."""
+ return self._align(storage.id, 'maximal_final_charge_state', dims=['period', 'scenario'])
+
# === Capacity and Charge State Bounds ===
@cached_property
@@ -659,7 +745,7 @@ def capacity_lower(self) -> xr.DataArray:
elif isinstance(s.capacity_in_flow_hours, InvestParameters):
values.append(s.capacity_in_flow_hours.minimum_or_fixed_size)
else:
- values.append(s.capacity_in_flow_hours)
+ values.append(self._align(s.id, 'capacity_in_flow_hours', dims=['period', 'scenario']))
return stack_along_dim(values, self._dim_name, self.ids)
@cached_property
@@ -672,7 +758,7 @@ def capacity_upper(self) -> xr.DataArray:
elif isinstance(s.capacity_in_flow_hours, InvestParameters):
values.append(s.capacity_in_flow_hours.maximum_or_fixed_size)
else:
- values.append(s.capacity_in_flow_hours)
+ values.append(self._align(s.id, 'capacity_in_flow_hours', dims=['period', 'scenario']))
return stack_along_dim(values, self._dim_name, self.ids)
def _relative_bounds_extra(self) -> tuple[xr.DataArray, xr.DataArray]:
@@ -685,19 +771,21 @@ def _relative_bounds_extra(self) -> tuple[xr.DataArray, xr.DataArray]:
rel_mins = []
rel_maxs = []
for s in self._storages:
- rel_min = s.relative_minimum_charge_state
- rel_max = s.relative_maximum_charge_state
+ rel_min = self._align(s.id, 'relative_minimum_charge_state')
+ rel_max = self._align(s.id, 'relative_maximum_charge_state')
# Get final values
- if s.relative_minimum_final_charge_state is None:
+ rel_min_final = self._align(s.id, 'relative_minimum_final_charge_state', dims=['period', 'scenario'])
+ rel_max_final = self._align(s.id, 'relative_maximum_final_charge_state', dims=['period', 'scenario'])
+ if rel_min_final is None:
min_final_value = _scalar_safe_isel_drop(rel_min, 'time', -1)
else:
- min_final_value = s.relative_minimum_final_charge_state
+ min_final_value = rel_min_final
- if s.relative_maximum_final_charge_state is None:
+ if rel_max_final is None:
max_final_value = _scalar_safe_isel_drop(rel_max, 'time', -1)
else:
- max_final_value = s.relative_maximum_final_charge_state
+ max_final_value = rel_max_final
# Build bounds arrays for timesteps_extra
if 'time' in rel_min.dims:
@@ -762,10 +850,6 @@ def charge_state_upper_bounds(self) -> xr.DataArray:
def validate(self) -> None:
"""Validate all storages (config + DataArray checks).
- Performs both:
- - Config validation via Storage.validate_config()
- - DataArray validation (post-transformation checks)
-
Raises:
PlausibilityError: If any validation check fails.
"""
@@ -774,59 +858,92 @@ def validate(self) -> None:
errors: list[str] = []
for storage in self._storages:
- storage.validate_config()
sid = storage.id
- # Capacity required for non-default relative bounds (DataArray checks)
+ # Config checks (moved from Storage.validate_config / Component.validate_config)
+ storage._check_unique_flow_ids()
+ if storage.status_parameters:
+ for flow in storage.flows.values():
+ if flow.size is None:
+ raise PlausibilityError(
+ f'"{storage.id}": Flow "{flow.flow_id}" must have a defined size '
+ f'because {storage.id} has status_parameters. '
+ f'A size is required for big-M constraints.'
+ )
+
+ if isinstance(storage.initial_charge_state, str):
+ if storage.initial_charge_state != 'equals_final':
+ raise PlausibilityError(f'initial_charge_state has undefined value: {storage.initial_charge_state}')
+
if storage.capacity_in_flow_hours is None:
- if np.any(storage.relative_minimum_charge_state > 0):
+ if storage.relative_minimum_final_charge_state is not None:
+ raise PlausibilityError(
+ f'Storage "{sid}" has relative_minimum_final_charge_state but no capacity_in_flow_hours. '
+ f'A capacity is required for relative final charge state constraints.'
+ )
+ if storage.relative_maximum_final_charge_state is not None:
+ raise PlausibilityError(
+ f'Storage "{sid}" has relative_maximum_final_charge_state but no capacity_in_flow_hours. '
+ f'A capacity is required for relative final charge state constraints.'
+ )
+
+ if storage.balanced:
+ if not isinstance(storage.charging.size, InvestParameters) or not isinstance(
+ storage.discharging.size, InvestParameters
+ ):
+ raise PlausibilityError(
+ f'Balancing charging and discharging Flows in {sid} is only possible with Investments.'
+ )
+
+ # DataArray checks (use aligned values)
+ rel_min = self._align(sid, 'relative_minimum_charge_state')
+ rel_max = self._align(sid, 'relative_maximum_charge_state')
+
+ if storage.capacity_in_flow_hours is None:
+ if np.any(rel_min > 0):
errors.append(
f'Storage "{sid}" has relative_minimum_charge_state > 0 but no capacity_in_flow_hours. '
f'A capacity is required because the lower bound is capacity * relative_minimum_charge_state.'
)
- if np.any(storage.relative_maximum_charge_state < 1):
+ if np.any(rel_max < 1):
errors.append(
f'Storage "{sid}" has relative_maximum_charge_state < 1 but no capacity_in_flow_hours. '
f'A capacity is required because the upper bound is capacity * relative_maximum_charge_state.'
)
- # Initial charge state vs capacity bounds (DataArray checks)
if storage.capacity_in_flow_hours is not None:
if isinstance(storage.capacity_in_flow_hours, InvestParameters):
minimum_capacity = storage.capacity_in_flow_hours.minimum_or_fixed_size
maximum_capacity = storage.capacity_in_flow_hours.maximum_or_fixed_size
else:
- maximum_capacity = storage.capacity_in_flow_hours
- minimum_capacity = storage.capacity_in_flow_hours
+ aligned_cap = self._align(sid, 'capacity_in_flow_hours', dims=['period', 'scenario'])
+ maximum_capacity = aligned_cap
+ minimum_capacity = aligned_cap
- min_initial_at_max_capacity = maximum_capacity * _scalar_safe_isel(
- storage.relative_minimum_charge_state, {'time': 0}
- )
- max_initial_at_min_capacity = minimum_capacity * _scalar_safe_isel(
- storage.relative_maximum_charge_state, {'time': 0}
- )
+ min_initial_at_max_capacity = maximum_capacity * _scalar_safe_isel(rel_min, {'time': 0})
+ max_initial_at_min_capacity = minimum_capacity * _scalar_safe_isel(rel_max, {'time': 0})
initial_equals_final = isinstance(storage.initial_charge_state, str)
if not initial_equals_final and storage.initial_charge_state is not None:
- if (storage.initial_charge_state > max_initial_at_min_capacity).any():
+ initial = self._align(sid, 'initial_charge_state', dims=['period', 'scenario'])
+ if (initial > max_initial_at_min_capacity).any():
errors.append(
f'{sid}: initial_charge_state={storage.initial_charge_state} '
f'is constraining the investment decision. Choose a value <= {max_initial_at_min_capacity}.'
)
- if (storage.initial_charge_state < min_initial_at_max_capacity).any():
+ if (initial < min_initial_at_max_capacity).any():
errors.append(
f'{sid}: initial_charge_state={storage.initial_charge_state} '
f'is constraining the investment decision. Choose a value >= {min_initial_at_max_capacity}.'
)
- # Balanced charging/discharging size compatibility (DataArray checks)
if storage.balanced:
charging_min = storage.charging.size.minimum_or_fixed_size
charging_max = storage.charging.size.maximum_or_fixed_size
discharging_min = storage.discharging.size.minimum_or_fixed_size
discharging_max = storage.discharging.size.maximum_or_fixed_size
- if (charging_min > discharging_max).any() or (charging_max < discharging_min).any():
+ if np.any(charging_min > discharging_max) or np.any(charging_max < discharging_min):
errors.append(
f'Balancing charging and discharging Flows in {sid} need compatible minimum and maximum sizes. '
f'Got: charging.size.minimum={charging_min}, charging.size.maximum={charging_max} and '
@@ -838,787 +955,261 @@ def validate(self) -> None:
class FlowsData:
- """Batched data container for all flows with indexed access.
+ """Thin wrapper around flows xr.Dataset.
Provides:
- Element lookup by id: `flows['Boiler(gas_in)']` or `flows.get('id')`
- Categorizations as list[str]: `flows.with_status`, `flows.with_investment`
- - Batched parameters as xr.DataArray with flow dimension
+ - Dataset access via `flows.ds['variable_name']`
This separates data access from mathematical modeling (FlowsModel).
+ No FlowSystem reference — takes explicit params only.
"""
- def __init__(self, flows: list[Flow], flow_system: FlowSystem):
- """Initialize FlowsData.
+ def __init__(
+ self,
+ flows: list[Flow],
+ coords: dict[str, pd.Index],
+ effect_ids: list[str],
+ timestep_duration: xr.DataArray | float | None = None,
+ normalize_effects: Any = None,
+ ):
+ from .datasets import build_flows_dataset
- Args:
- flows: List of all Flow elements.
- flow_system: Parent FlowSystem for model coordinates.
- """
self.elements: IdList = element_id_list(flows)
- self._fs = flow_system
+ self.ds: xr.Dataset = build_flows_dataset(flows, coords, effect_ids, timestep_duration, normalize_effects)
+
+ # Non-Dataset attributes (raw Python objects needed by features)
+ self.invest_params: dict[str, InvestParameters] = {
+ f.id: f.size for f in flows if isinstance(f.size, InvestParameters)
+ }
+ self.status_params: dict[str, StatusParameters] = {
+ f.id: f.status_parameters for f in flows if f.status_parameters is not None
+ }
+ self.previous_states: dict[str, xr.DataArray] = _build_previous_states(flows)
+
+ @classmethod
+ def from_elements(
+ cls,
+ flows: list[Flow],
+ coords: dict[str, pd.Index],
+ effect_ids: list[str],
+ timestep_duration: xr.DataArray | float | None = None,
+ normalize_effects: Any = None,
+ ) -> FlowsData:
+ return cls(flows, coords, effect_ids, timestep_duration, normalize_effects)
+
+ # === Element access ===
def __getitem__(self, label: str) -> Flow:
- """Get a flow by its id."""
return self.elements[label]
def get(self, label: str, default: Flow | None = None) -> Flow | None:
- """Get a flow by id, returning default if not found."""
return self.elements.get(label, default)
def __len__(self) -> int:
return len(self.elements)
def __iter__(self):
- """Iterate over flow IDs."""
return iter(self.elements)
+ # === TypeModel protocol ===
+
@property
def ids(self) -> list[str]:
- """List of all flow IDs."""
return list(self.elements.keys())
@property
def element_ids(self) -> list[str]:
- """List of all flow IDs (alias for ids)."""
return self.ids
- @cached_property
- def _ids_index(self) -> pd.Index:
- """Cached pd.Index of flow IDs for fast DataArray creation."""
- return pd.Index(self.ids)
-
- def _categorize(self, condition) -> list[str]:
- """Return IDs of flows matching condition(flow) -> bool."""
- return [f.id for f in self.elements.values() if condition(f)]
-
- def _mask(self, condition) -> xr.DataArray:
- """Return boolean DataArray mask for condition(flow) -> bool."""
- return xr.DataArray(
- [condition(f) for f in self.elements.values()],
- dims=['flow'],
- coords={'flow': self._ids_index},
- )
-
- # === Flow Categorizations ===
- # All return list[str] of element IDs.
-
- @cached_property
- def with_status(self) -> list[str]:
- """IDs of flows with status parameters."""
- return self._categorize(lambda f: f.status_parameters is not None)
-
- # === Boolean Masks (PyPSA-style) ===
- # These enable efficient batched constraint creation using linopy's mask= parameter.
-
- @cached_property
- def has_status(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows with status parameters."""
- return self._mask(lambda f: f.status_parameters is not None)
-
- @cached_property
- def has_investment(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows with investment parameters."""
- return self._mask(lambda f: isinstance(f.size, InvestParameters))
-
- @cached_property
- def has_optional_investment(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows with optional (non-mandatory) investment."""
- return self._mask(lambda f: isinstance(f.size, InvestParameters) and not f.size.mandatory)
-
- @cached_property
- def has_mandatory_investment(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows with mandatory investment."""
- return self._mask(lambda f: isinstance(f.size, InvestParameters) and f.size.mandatory)
-
- @cached_property
- def has_fixed_size(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows with fixed (non-investment) size."""
- return self._mask(lambda f: f.size is not None and not isinstance(f.size, InvestParameters))
-
- @cached_property
- def has_size(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows with any size (fixed or investment)."""
- return self._mask(lambda f: f.size is not None)
-
- @cached_property
- def has_effects(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows with effects_per_flow_hour."""
- return self._mask(lambda f: bool(f.effects_per_flow_hour))
-
- @cached_property
- def has_flow_hours_min(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows with flow_hours_min constraint."""
- return self._mask(lambda f: f.flow_hours_min is not None)
-
- @cached_property
- def has_flow_hours_max(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows with flow_hours_max constraint."""
- return self._mask(lambda f: f.flow_hours_max is not None)
-
- @cached_property
- def has_load_factor_min(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows with load_factor_min constraint."""
- return self._mask(lambda f: f.load_factor_min is not None)
-
- @cached_property
- def has_load_factor_max(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows with load_factor_max constraint."""
- return self._mask(lambda f: f.load_factor_max is not None)
-
- @cached_property
- def has_startup_tracking(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows needing startup/shutdown tracking."""
- mask = np.zeros(len(self.ids), dtype=bool)
- if self._status_data:
- for i, fid in enumerate(self.ids):
- mask[i] = fid in self._status_data.with_startup_tracking
- return xr.DataArray(mask, dims=['flow'], coords={'flow': self._ids_index})
-
- @cached_property
- def has_uptime_tracking(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows needing uptime duration tracking."""
- mask = np.zeros(len(self.ids), dtype=bool)
- if self._status_data:
- for i, fid in enumerate(self.ids):
- mask[i] = fid in self._status_data.with_uptime_tracking
- return xr.DataArray(mask, dims=['flow'], coords={'flow': self._ids_index})
-
- @cached_property
- def has_downtime_tracking(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows needing downtime tracking."""
- mask = np.zeros(len(self.ids), dtype=bool)
- if self._status_data:
- for i, fid in enumerate(self.ids):
- mask[i] = fid in self._status_data.with_downtime_tracking
- return xr.DataArray(mask, dims=['flow'], coords={'flow': self._ids_index})
+ @property
+ def dim_name(self) -> str:
+ return 'flow'
- @cached_property
- def has_startup_limit(self) -> xr.DataArray:
- """(flow,) - boolean mask for flows with startup limit."""
- mask = np.zeros(len(self.ids), dtype=bool)
- if self._status_data:
- for i, fid in enumerate(self.ids):
- mask[i] = fid in self._status_data.with_startup_limit
- return xr.DataArray(mask, dims=['flow'], coords={'flow': self._ids_index})
+ # === Piecewise metadata (from ds.attrs) ===
@property
- def with_startup_tracking(self) -> list[str]:
- """IDs of flows that need startup/shutdown tracking."""
- return self._status_data.with_startup_tracking if self._status_data else []
+ def piecewise_element_ids(self) -> list[str]:
+ return self.ds.attrs.get('piecewise_element_ids', [])
@property
- def with_downtime_tracking(self) -> list[str]:
- """IDs of flows that need downtime (inactive) tracking."""
- return self._status_data.with_downtime_tracking if self._status_data else []
+ def piecewise_max_segments(self) -> int:
+ return self.ds.attrs.get('piecewise_max_segments', 0)
@property
- def with_uptime_tracking(self) -> list[str]:
- """IDs of flows that need uptime duration tracking."""
- return self._status_data.with_uptime_tracking if self._status_data else []
+ def piecewise_effect_names(self) -> list[str]:
+ return self.ds.attrs.get('piecewise_effect_names', [])
- @property
- def with_startup_limit(self) -> list[str]:
- """IDs of flows with startup limit."""
- return self._status_data.with_startup_limit if self._status_data else []
+ # === Categorization helpers (from Dataset masks) ===
+
+ def _ids_where(self, mask_name: str) -> list[str]:
+ return list(self.ds['flow'].values[self.ds[mask_name].values])
@cached_property
- def without_size(self) -> list[str]:
- """IDs of flows without size."""
- return self._categorize(lambda f: f.size is None)
+ def with_status(self) -> list[str]:
+ return self._ids_where('has_status')
@cached_property
def with_investment(self) -> list[str]:
- """IDs of flows with investment parameters."""
- return self._categorize(lambda f: isinstance(f.size, InvestParameters))
+ return self._ids_where('has_investment')
- @property
+ @cached_property
def with_optional_investment(self) -> list[str]:
- """IDs of flows with optional (non-mandatory) investment."""
- return self._investment_data.with_optional if self._investment_data else []
+ return self._ids_where('has_optional_investment')
- @property
+ @cached_property
def with_mandatory_investment(self) -> list[str]:
- """IDs of flows with mandatory investment."""
- return self._investment_data.with_mandatory if self._investment_data else []
+ return self._ids_where('has_mandatory_investment')
+
+ @cached_property
+ def without_size(self) -> list[str]:
+ return [fid for fid, has in zip(self.ids, self.ds['has_size'].values, strict=False) if not has]
@cached_property
def with_status_only(self) -> list[str]:
- """IDs of flows with status but no investment and a fixed size."""
return sorted(set(self.with_status) - set(self.with_investment) - set(self.without_size))
@cached_property
def with_investment_only(self) -> list[str]:
- """IDs of flows with investment but no status."""
return sorted(set(self.with_investment) - set(self.with_status))
@cached_property
def with_status_and_investment(self) -> list[str]:
- """IDs of flows with both status and investment."""
return sorted(set(self.with_status) & set(self.with_investment))
@cached_property
def with_flow_hours_min(self) -> list[str]:
- """IDs of flows with explicit flow_hours_min constraint."""
- return self._categorize(lambda f: f.flow_hours_min is not None)
+ return self._ids_where('has_flow_hours_min')
@cached_property
def with_flow_hours_max(self) -> list[str]:
- """IDs of flows with explicit flow_hours_max constraint."""
- return self._categorize(lambda f: f.flow_hours_max is not None)
+ return self._ids_where('has_flow_hours_max')
@cached_property
def with_flow_hours_over_periods_min(self) -> list[str]:
- """IDs of flows with explicit flow_hours_min_over_periods constraint."""
- return self._categorize(lambda f: f.flow_hours_min_over_periods is not None)
+ return [f.id for f in self.elements.values() if f.flow_hours_min_over_periods is not None]
@cached_property
def with_flow_hours_over_periods_max(self) -> list[str]:
- """IDs of flows with explicit flow_hours_max_over_periods constraint."""
- return self._categorize(lambda f: f.flow_hours_max_over_periods is not None)
+ return [f.id for f in self.elements.values() if f.flow_hours_max_over_periods is not None]
@cached_property
def with_load_factor_min(self) -> list[str]:
- """IDs of flows with explicit load_factor_min constraint."""
- return self._categorize(lambda f: f.load_factor_min is not None)
+ return self._ids_where('has_load_factor_min')
@cached_property
def with_load_factor_max(self) -> list[str]:
- """IDs of flows with explicit load_factor_max constraint."""
- return self._categorize(lambda f: f.load_factor_max is not None)
+ return self._ids_where('has_load_factor_max')
@cached_property
def with_effects(self) -> list[str]:
- """IDs of flows with effects_per_flow_hour defined."""
- return self._categorize(lambda f: f.effects_per_flow_hour)
+ return self._ids_where('has_effects')
@cached_property
def with_previous_flow_rate(self) -> list[str]:
- """IDs of flows with previous_flow_rate defined (for startup/shutdown tracking)."""
- return self._categorize(lambda f: f.previous_flow_rate is not None)
-
- # === Parameter Dicts ===
-
- @cached_property
- def invest_params(self) -> dict[str, InvestParameters]:
- """Investment parameters for flows with investment, keyed by id."""
- return {fid: self[fid].size for fid in self.with_investment}
-
- @cached_property
- def status_params(self) -> dict[str, StatusParameters]:
- """Status parameters for flows with status, keyed by id."""
- return {fid: self[fid].status_parameters for fid in self.with_status}
-
- @cached_property
- def _status_data(self) -> StatusData | None:
- """Batched status data for flows with status."""
- if not self.with_status:
- return None
- return StatusData(
- params=self.status_params,
- dim_name='flow',
- effect_ids=list(self._fs.effects.keys()),
- timestep_duration=self._fs.timestep_duration,
- previous_states=self.previous_states,
- )
-
- @cached_property
- def _investment_data(self) -> InvestmentData | None:
- """Batched investment data for flows with investment."""
- if not self.with_investment:
- return None
- return InvestmentData(
- params=self.invest_params,
- dim_name='flow',
- effect_ids=list(self._fs.effects.keys()),
- )
-
- # === Batched Parameters ===
- # Properties return xr.DataArray only for relevant flows (based on categorizations).
-
- @cached_property
- def flow_hours_minimum(self) -> xr.DataArray | None:
- """(flow, period, scenario) - minimum total flow hours for flows with explicit min."""
- return self._batched_parameter(self.with_flow_hours_min, 'flow_hours_min', ['period', 'scenario'])
-
- @cached_property
- def flow_hours_maximum(self) -> xr.DataArray | None:
- """(flow, period, scenario) - maximum total flow hours for flows with explicit max."""
- return self._batched_parameter(self.with_flow_hours_max, 'flow_hours_max', ['period', 'scenario'])
-
- @cached_property
- def flow_hours_minimum_over_periods(self) -> xr.DataArray | None:
- """(flow, scenario) - minimum flow hours over all periods for flows with explicit min."""
- return self._batched_parameter(
- self.with_flow_hours_over_periods_min, 'flow_hours_min_over_periods', ['scenario']
- )
-
- @cached_property
- def flow_hours_maximum_over_periods(self) -> xr.DataArray | None:
- """(flow, scenario) - maximum flow hours over all periods for flows with explicit max."""
- return self._batched_parameter(
- self.with_flow_hours_over_periods_max, 'flow_hours_max_over_periods', ['scenario']
- )
-
- @cached_property
- def load_factor_minimum(self) -> xr.DataArray | None:
- """(flow, period, scenario) - minimum load factor for flows with explicit min."""
- return self._batched_parameter(self.with_load_factor_min, 'load_factor_min', ['period', 'scenario'])
-
- @cached_property
- def load_factor_maximum(self) -> xr.DataArray | None:
- """(flow, period, scenario) - maximum load factor for flows with explicit max."""
- return self._batched_parameter(self.with_load_factor_max, 'load_factor_max', ['period', 'scenario'])
-
- @cached_property
- def relative_minimum(self) -> xr.DataArray:
- """(flow, time, period, scenario) - relative lower bound on flow rate."""
- values = [f.relative_minimum for f in self.elements.values()]
- arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(None))
- return self._ensure_canonical_order(arr)
-
- @cached_property
- def relative_maximum(self) -> xr.DataArray:
- """(flow, time, period, scenario) - relative upper bound on flow rate."""
- values = [f.relative_maximum for f in self.elements.values()]
- arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(None))
- return self._ensure_canonical_order(arr)
-
- @cached_property
- def fixed_relative_profile(self) -> xr.DataArray:
- """(flow, time, period, scenario) - fixed profile. NaN = not fixed."""
- values = [
- f.fixed_relative_profile if f.fixed_relative_profile is not None else np.nan for f in self.elements.values()
- ]
- arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(None))
- return self._ensure_canonical_order(arr)
-
- @cached_property
- def effective_relative_minimum(self) -> xr.DataArray:
- """(flow, time, period, scenario) - effective lower bound (uses fixed_profile if set)."""
- fixed = self.fixed_relative_profile
- rel_min = self.relative_minimum
- # Use DataArray.where with fast_isnull (faster than xr.where)
- return rel_min.where(fast_isnull(fixed), fixed)
-
- @cached_property
- def effective_relative_maximum(self) -> xr.DataArray:
- """(flow, time, period, scenario) - effective upper bound (uses fixed_profile if set)."""
- fixed = self.fixed_relative_profile
- rel_max = self.relative_maximum
- # Use DataArray.where with fast_isnull (faster than xr.where)
- return rel_max.where(fast_isnull(fixed), fixed)
-
- @cached_property
- def fixed_size(self) -> xr.DataArray:
- """(flow, period, scenario) - fixed size for non-investment flows. NaN for investment/no-size flows."""
- values = []
- for f in self.elements.values():
- if f.size is None or isinstance(f.size, InvestParameters):
- values.append(np.nan)
- else:
- values.append(f.size)
- arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(['period', 'scenario']))
- return self._ensure_canonical_order(arr)
-
- @cached_property
- def effective_size_lower(self) -> xr.DataArray:
- """(flow, period, scenario) - effective lower size for bounds.
-
- - Fixed size flows: the size value
- - Investment flows: minimum_or_fixed_size
- - No size: NaN
- """
- values = []
- for f in self.elements.values():
- if f.size is None:
- values.append(np.nan)
- elif isinstance(f.size, InvestParameters):
- values.append(f.size.minimum_or_fixed_size)
- else:
- values.append(f.size)
- arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(['period', 'scenario']))
- return self._ensure_canonical_order(arr)
-
- @cached_property
- def effective_size_upper(self) -> xr.DataArray:
- """(flow, period, scenario) - effective upper size for bounds.
-
- - Fixed size flows: the size value
- - Investment flows: maximum_or_fixed_size
- - No size: NaN
- """
- values = []
- for f in self.elements.values():
- if f.size is None:
- values.append(np.nan)
- elif isinstance(f.size, InvestParameters):
- values.append(f.size.maximum_or_fixed_size)
- else:
- values.append(f.size)
- arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(['period', 'scenario']))
- return self._ensure_canonical_order(arr)
-
- @cached_property
- def absolute_lower_bounds(self) -> xr.DataArray:
- """(flow, cluster, time, period, scenario) - absolute lower bounds for flow rate.
-
- Logic:
- - Status flows → 0 (status variable controls activation)
- - Optional investment → 0 (invested variable controls)
- - Mandatory investment → relative_min * effective_size_lower
- - Fixed size → relative_min * effective_size_lower
- - No size → 0
- """
- # Base: relative_min * size_lower
- base = self.effective_relative_minimum * self.effective_size_lower
-
- # Build mask for flows that should have lb=0 (use pre-computed boolean masks)
- is_zero = self.has_status | self.has_optional_investment | fast_isnull(self.effective_size_lower)
- # Use DataArray.where (faster than xr.where)
- result = base.where(~is_zero, 0.0).fillna(0.0)
- return self._ensure_canonical_order(result)
-
- @cached_property
- def absolute_upper_bounds(self) -> xr.DataArray:
- """(flow, cluster, time, period, scenario) - absolute upper bounds for flow rate.
-
- Logic:
- - Investment flows → relative_max * effective_size_upper
- - Fixed size → relative_max * effective_size_upper
- - No size → inf
- """
- # Base: relative_max * size_upper
- base = self.effective_relative_maximum * self.effective_size_upper
-
- # Inf for flows without size (use DataArray.where, faster than xr.where)
- result = base.where(fast_notnull(self.effective_size_upper), np.inf)
- return self._ensure_canonical_order(result)
-
- # --- Investment Bounds (delegated to InvestmentData) ---
-
- @property
- def investment_size_minimum(self) -> xr.DataArray | None:
- """(flow, period, scenario) - minimum size for flows with investment."""
- if not self._investment_data:
- return None
- # InvestmentData.size_minimum already has flow dim via stack_along_dim
- raw = self._investment_data.size_minimum
- return self._broadcast_existing(raw, dims=['period', 'scenario'])
-
- @property
- def investment_size_maximum(self) -> xr.DataArray | None:
- """(flow, period, scenario) - maximum size for flows with investment."""
- if not self._investment_data:
- return None
- raw = self._investment_data.size_maximum
- return self._broadcast_existing(raw, dims=['period', 'scenario'])
-
- @property
- def optional_investment_size_minimum(self) -> xr.DataArray | None:
- """(flow, period, scenario) - minimum size for optional investment flows."""
- if not self._investment_data:
- return None
- raw = self._investment_data.optional_size_minimum
- if raw is None:
- return None
- return self._broadcast_existing(raw, dims=['period', 'scenario'])
-
- @property
- def optional_investment_size_maximum(self) -> xr.DataArray | None:
- """(flow, period, scenario) - maximum size for optional investment flows."""
- if not self._investment_data:
- return None
- raw = self._investment_data.optional_size_maximum
- if raw is None:
- return None
- return self._broadcast_existing(raw, dims=['period', 'scenario'])
-
- # --- All-Flows Bounds (for mask-based variable creation) ---
+ return [f.id for f in self.elements.values() if f.previous_flow_rate is not None]
@cached_property
- def size_minimum_all(self) -> xr.DataArray:
- """(flow, period, scenario) - size minimum for ALL flows. NaN for non-investment flows."""
- if self.investment_size_minimum is not None:
- return self.investment_size_minimum.reindex({self.dim_name: self._ids_index})
- return xr.DataArray(
- np.nan,
- dims=[self.dim_name],
- coords={self.dim_name: self._ids_index},
- )
-
- @cached_property
- def size_maximum_all(self) -> xr.DataArray:
- """(flow, period, scenario) - size maximum for ALL flows. NaN for non-investment flows."""
- if self.investment_size_maximum is not None:
- return self.investment_size_maximum.reindex({self.dim_name: self._ids_index})
- return xr.DataArray(
- np.nan,
- dims=[self.dim_name],
- coords={self.dim_name: self._ids_index},
- )
-
- @property
- def dim_name(self) -> str:
- """Dimension name for this data container."""
- return 'flow'
+ def with_startup_tracking(self) -> list[str]:
+ return self._ids_where('has_startup_tracking')
@cached_property
- def effects_per_flow_hour(self) -> xr.DataArray | None:
- """(flow, effect, ...) - effect factors per flow hour.
-
- Missing (flow, effect) combinations are 0 (pre-filled for efficient computation).
- """
- if not self.with_effects:
- return None
-
- effect_ids = list(self._fs.effects.keys())
- if not effect_ids:
- return None
-
- dicts = {fid: self[fid].effects_per_flow_hour for fid in self.with_effects}
- return build_effects_array(dicts, effect_ids, 'flow')
-
- # --- Investment Parameters ---
+ def with_downtime_tracking(self) -> list[str]:
+ return self._ids_where('has_downtime_tracking')
@cached_property
- def linked_periods(self) -> xr.DataArray | None:
- """(flow, period) - period linking mask. 1=linked, 0=not linked, NaN=no linking."""
- has_linking = any(
- isinstance(f.size, InvestParameters) and f.size.linked_periods is not None for f in self.elements.values()
- )
- if not has_linking:
- return None
-
- values = []
- for f in self.elements.values():
- if not isinstance(f.size, InvestParameters) or f.size.linked_periods is None:
- values.append(np.nan)
- else:
- values.append(f.size.linked_periods)
- arr = stack_along_dim(values, 'flow', self.ids, self._model_coords(['period']))
- return self._ensure_canonical_order(arr)
-
- # --- Status Effects (delegated to StatusData) ---
-
- @property
- def effects_per_active_hour(self) -> xr.DataArray | None:
- """(flow, effect, ...) - effect factors per active hour for flows with status."""
- return self._status_data.effects_per_active_hour if self._status_data else None
-
- @property
- def effects_per_startup(self) -> xr.DataArray | None:
- """(flow, effect, ...) - effect factors per startup for flows with status."""
- return self._status_data.effects_per_startup if self._status_data else None
-
- # --- Previous Status ---
+ def with_uptime_tracking(self) -> list[str]:
+ return self._ids_where('has_uptime_tracking')
@cached_property
- def previous_states(self) -> dict[str, xr.DataArray]:
- """Previous status for flows with previous_flow_rate, keyed by id.
-
- Returns:
- Dict mapping flow_id -> binary DataArray (time dimension).
- """
- from .config import CONFIG
- from .modeling import ModelingUtilitiesAbstract
-
- result = {}
- for fid in self.with_previous_flow_rate:
- flow = self[fid]
- if flow.previous_flow_rate is not None:
- result[fid] = ModelingUtilitiesAbstract.to_binary(
- values=xr.DataArray(
- [flow.previous_flow_rate] if np.isscalar(flow.previous_flow_rate) else flow.previous_flow_rate,
- dims='time',
- ),
- epsilon=CONFIG.Modeling.epsilon,
- dims='time',
- )
- return result
-
- # --- Status Bounds (delegated to StatusData) ---
-
- @property
- def min_uptime(self) -> xr.DataArray | None:
- """(flow,) - minimum uptime for flows with uptime tracking. NaN = no constraint."""
- return self._status_data.min_uptime if self._status_data else None
-
- @property
- def max_uptime(self) -> xr.DataArray | None:
- """(flow,) - maximum uptime for flows with uptime tracking. NaN = no constraint."""
- return self._status_data.max_uptime if self._status_data else None
-
- @property
- def min_downtime(self) -> xr.DataArray | None:
- """(flow,) - minimum downtime for flows with downtime tracking. NaN = no constraint."""
- return self._status_data.min_downtime if self._status_data else None
-
- @property
- def max_downtime(self) -> xr.DataArray | None:
- """(flow,) - maximum downtime for flows with downtime tracking. NaN = no constraint."""
- return self._status_data.max_downtime if self._status_data else None
-
- @property
- def startup_limit_values(self) -> xr.DataArray | None:
- """(flow,) - startup limit for flows with startup limit."""
- return self._status_data.startup_limit if self._status_data else None
-
- @property
- def previous_uptime(self) -> xr.DataArray | None:
- """(flow,) - previous uptime duration for flows with uptime tracking."""
- return self._status_data.previous_uptime if self._status_data else None
-
- @property
- def previous_downtime(self) -> xr.DataArray | None:
- """(flow,) - previous downtime duration for flows with downtime tracking."""
- return self._status_data.previous_downtime if self._status_data else None
-
- # === Helper Methods ===
-
- def _batched_parameter(
- self,
- ids: list[str],
- attr: str,
- dims: list[str] | None,
- ) -> xr.DataArray | None:
- """Build a batched parameter array from per-flow attributes.
-
- Args:
- ids: Flow IDs to include (typically from a with_* property).
- attr: Attribute name to extract from each Flow.
- dims: Model dimensions to broadcast to (e.g., ['period', 'scenario']).
-
- Returns:
- DataArray with (flow, *dims) or None if ids is empty.
- """
- if not ids:
- return None
- values = [getattr(self[fid], attr) for fid in ids]
- arr = stack_along_dim(values, 'flow', ids, self._model_coords(dims))
- return self._ensure_canonical_order(arr)
-
- def _model_coords(self, dims: list[str] | None = None) -> dict[str, pd.Index | np.ndarray]:
- """Get model coordinates for broadcasting.
-
- Args:
- dims: Dimensions to include. None = all (time, period, scenario).
-
- Returns:
- Dict of dim name -> coordinate values.
- """
- if dims is None:
- dims = ['time', 'period', 'scenario']
- indexes = self._fs.indexes
- return {dim: indexes[dim] for dim in dims if dim in indexes}
-
- def _ensure_canonical_order(self, arr: xr.DataArray) -> xr.DataArray:
- """Ensure array has canonical dimension order and coord dict order.
-
- Args:
- arr: Input DataArray.
-
- Returns:
- DataArray with dims in order (flow, cluster, time, period, scenario, ...) and
- coords dict matching dims order. Additional dims are appended at the end.
- """
- # Note: cluster comes before time to match FlowSystem.dims ordering
- canonical_order = ['flow', 'cluster', 'time', 'period', 'scenario']
- # Start with canonical dims that exist in arr
- actual_dims = [d for d in canonical_order if d in arr.dims]
- # Append any additional dims not in canonical order
- for d in arr.dims:
- if d not in actual_dims:
- actual_dims.append(d)
-
- if list(arr.dims) != actual_dims:
- arr = arr.transpose(*actual_dims)
-
- # Ensure coords dict order matches dims order (linopy uses coords order)
- if list(arr.coords.keys()) != list(arr.dims):
- ordered_coords = {d: arr.coords[d] for d in arr.dims}
- arr = xr.DataArray(arr.values, dims=arr.dims, coords=ordered_coords)
-
- return arr
-
- def _broadcast_existing(self, arr: xr.DataArray, dims: list[str] | None = None) -> xr.DataArray:
- """Broadcast an existing DataArray (with element dim) to model coordinates.
-
- Use this for arrays that already have the flow dimension (e.g., from InvestmentData).
-
- Args:
- arr: DataArray with flow dimension.
- dims: Model dimensions to add. None = all (time, period, scenario).
-
- Returns:
- DataArray with dimensions in canonical order: (flow, time, period, scenario)
- """
- coords_to_add = self._model_coords(dims)
-
- if not coords_to_add:
- return self._ensure_canonical_order(arr)
-
- # Broadcast to include new dimensions
- for dim_name, coord in coords_to_add.items():
- if dim_name not in arr.dims:
- arr = arr.expand_dims({dim_name: coord})
-
- return self._ensure_canonical_order(arr)
+ def with_startup_limit(self) -> list[str]:
+ return self._ids_where('has_startup_limit')
# === Validation ===
def _any_per_flow(self, arr: xr.DataArray) -> xr.DataArray:
- """Reduce to (flow,) by collapsing all non-flow dims with .any()."""
non_flow_dims = [d for d in arr.dims if d != self.dim_name]
return arr.any(dim=non_flow_dims) if non_flow_dims else arr
def _flagged_ids(self, mask: xr.DataArray) -> list[str]:
- """Return flow IDs where mask is True."""
return [fid for fid, flag in zip(self.ids, mask.values, strict=False) if flag]
def validate(self) -> None:
- """Validate all flows (config + DataArray checks).
-
- Performs both:
- - Config validation via Flow.validate_config()
- - DataArray validation (post-transformation checks)
-
- Raises:
- PlausibilityError: If any validation check fails.
- """
+ """Validate all flows (config + DataArray checks)."""
if not self.elements:
return
for flow in self.elements.values():
- flow.validate_config()
+ if flow.status_parameters is not None and flow.size is None:
+ raise PlausibilityError(
+ f'Flow "{flow.id}" has status_parameters but no size defined. '
+ f'A size is required when using status_parameters to bound the flow rate.'
+ )
+
+ if flow.size is None and flow.fixed_relative_profile is not None:
+ raise PlausibilityError(
+ f'Flow "{flow.id}" has a fixed_relative_profile but no size defined. '
+ f'A size is required because flow_rate = size * fixed_relative_profile.'
+ )
+
+ if flow.size is None and flow.load_factor_min is not None:
+ raise PlausibilityError(
+ f'Flow "{flow.id}" has load_factor_min but no size defined. '
+ f'A size is required because the constraint is total_flow_hours >= size * load_factor_min * hours.'
+ )
+
+ if flow.size is None and flow.load_factor_max is not None:
+ raise PlausibilityError(
+ f'Flow "{flow.id}" has load_factor_max but no size defined. '
+ f'A size is required because the constraint is total_flow_hours <= size * load_factor_max * hours.'
+ )
+
+ if flow.previous_flow_rate is not None:
+ if not any(
+ [
+ isinstance(flow.previous_flow_rate, np.ndarray) and flow.previous_flow_rate.ndim == 1,
+ isinstance(flow.previous_flow_rate, (int, float, list)),
+ ]
+ ):
+ raise TypeError(
+ f'previous_flow_rate must be None, a scalar, a list of scalars or a 1D-numpy-array. '
+ f'Got {type(flow.previous_flow_rate)}. '
+ f'Different values in different periods or scenarios are not yet supported.'
+ )
+
+ if flow.fixed_relative_profile is not None and flow.status_parameters is not None:
+ logger.warning(
+ f'Flow {flow.id} has both a fixed_relative_profile and status_parameters. '
+ f'This will allow the flow to be switched active and inactive, '
+ f'effectively differing from the fixed_flow_rate.'
+ )
errors: list[str] = []
- # Batched checks: relative_minimum <= relative_maximum
- invalid_bounds = self._any_per_flow(self.relative_minimum > self.relative_maximum)
+ invalid_bounds = self._any_per_flow(self.ds['relative_minimum'] > self.ds['relative_maximum'])
if invalid_bounds.any():
errors.append(f'relative_minimum > relative_maximum for flows: {self._flagged_ids(invalid_bounds)}')
- # Check: size required when relative_minimum > 0
- has_nonzero_min = self._any_per_flow(self.relative_minimum > 0)
- if (has_nonzero_min & ~self.has_size).any():
+ has_nonzero_min = self._any_per_flow(self.ds['relative_minimum'] > 0)
+ has_size = self.ds['has_size']
+ if (has_nonzero_min & ~has_size).any():
errors.append(
f'relative_minimum > 0 but no size defined for flows: '
- f'{self._flagged_ids(has_nonzero_min & ~self.has_size)}. '
+ f'{self._flagged_ids(has_nonzero_min & ~has_size)}. '
f'A size is required because the lower bound is size * relative_minimum.'
)
- # Check: size required when relative_maximum < 1
- has_nondefault_max = self._any_per_flow(self.relative_maximum < 1)
- if (has_nondefault_max & ~self.has_size).any():
+ has_nondefault_max = self._any_per_flow(self.ds['relative_maximum'] < 1)
+ if (has_nondefault_max & ~has_size).any():
errors.append(
f'relative_maximum < 1 but no size defined for flows: '
- f'{self._flagged_ids(has_nondefault_max & ~self.has_size)}. '
+ f'{self._flagged_ids(has_nondefault_max & ~has_size)}. '
f'A size is required because the upper bound is size * relative_maximum.'
)
- # Warning: relative_minimum > 0 without status_parameters prevents switching inactive
- has_nonzero_min_no_status = has_nonzero_min & ~self.has_status
+ has_status = self.ds['has_status']
+ has_nonzero_min_no_status = has_nonzero_min & ~has_status
if has_nonzero_min_no_status.any():
logger.warning(
f'Flows {self._flagged_ids(has_nonzero_min_no_status)} have relative_minimum > 0 '
@@ -1626,8 +1217,7 @@ def validate(self) -> None:
f'Consider using status_parameters to allow switching active and inactive.'
)
- # Warning: status_parameters with relative_minimum=0 allows status=1 with flow=0
- has_zero_min_with_status = ~has_nonzero_min & self.has_status
+ has_zero_min_with_status = ~has_nonzero_min & has_status
if has_zero_min_with_status.any():
logger.warning(
f'Flows {self._flagged_ids(has_zero_min_with_status)} have status_parameters but '
@@ -1639,6 +1229,25 @@ def validate(self) -> None:
raise PlausibilityError('\n'.join(errors))
+def _build_previous_states(flows: list) -> dict[str, xr.DataArray]:
+ """Build previous_states dict from flows with previous_flow_rate."""
+ from .config import CONFIG
+ from .modeling import ModelingUtilitiesAbstract
+
+ result = {}
+ for f in flows:
+ if f.previous_flow_rate is not None:
+ result[f.id] = ModelingUtilitiesAbstract.to_binary(
+ values=xr.DataArray(
+ [f.previous_flow_rate] if np.isscalar(f.previous_flow_rate) else f.previous_flow_rate,
+ dims='time',
+ ),
+ epsilon=CONFIG.Modeling.epsilon,
+ dims='time',
+ )
+ return result
+
+
class EffectsData:
"""Batched data container for all effects.
@@ -1647,9 +1256,11 @@ class EffectsData:
modeling (EffectsModel).
"""
- def __init__(self, effect_collection: EffectCollection):
+ def __init__(self, effect_collection: EffectCollection, coords: dict[str, pd.Index], default_period_weights):
self._collection = effect_collection
self._effects: list[Effect] = list(effect_collection.values())
+ self._coords = coords
+ self._default_period_weights = default_period_weights
@cached_property
def effect_ids(self) -> list[str]:
@@ -1685,45 +1296,97 @@ def _effect_values(self, attr_name: str, default: float) -> list:
values.append(default if val is None else val)
return values
+ def _align(self, effect_id: str, attr: str, dims: list[str] | None = None) -> xr.DataArray | None:
+ """Align a single effect attribute value to model coords."""
+ raw = getattr(self._collection[effect_id], attr)
+ return align_to_coords(raw, self._coords, name=f'{effect_id}|{attr}', dims=dims)
+
+ def _aligned_values(self, attr_name: str, default: float, dims: list[str] | None = None) -> list:
+ """Extract per-effect attribute values, aligned to model coords."""
+ values = []
+ for effect in self._effects:
+ aligned = self._align(effect.id, attr_name, dims=dims)
+ values.append(default if aligned is None else aligned)
+ return values
+
+ def aligned_share_from_temporal(self, effect: Effect) -> dict[str, xr.DataArray]:
+ """Get aligned share_from_temporal for a specific effect."""
+ return (
+ align_effects_to_coords(
+ effect.share_from_temporal,
+ self._coords,
+ suffix=f'(temporal)->{effect.id}(temporal)',
+ )
+ or {}
+ )
+
+ def aligned_share_from_periodic(self, effect: Effect) -> dict[str, xr.DataArray]:
+ """Get aligned share_from_periodic for a specific effect."""
+ return (
+ align_effects_to_coords(
+ effect.share_from_periodic,
+ self._coords,
+ suffix=f'(periodic)->{effect.id}(periodic)',
+ dims=['period', 'scenario'],
+ )
+ or {}
+ )
+
@cached_property
def minimum_periodic(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('minimum_periodic', -np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('minimum_periodic', -np.inf, dims=['period', 'scenario']), 'effect', self.effect_ids
+ )
@cached_property
def maximum_periodic(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('maximum_periodic', np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('maximum_periodic', np.inf, dims=['period', 'scenario']), 'effect', self.effect_ids
+ )
@cached_property
def minimum_temporal(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('minimum_temporal', -np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('minimum_temporal', -np.inf, dims=['period', 'scenario']), 'effect', self.effect_ids
+ )
@cached_property
def maximum_temporal(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('maximum_temporal', np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('maximum_temporal', np.inf, dims=['period', 'scenario']), 'effect', self.effect_ids
+ )
@cached_property
def minimum_per_hour(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('minimum_per_hour', -np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(self._aligned_values('minimum_per_hour', -np.inf), 'effect', self.effect_ids)
@cached_property
def maximum_per_hour(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('maximum_per_hour', np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(self._aligned_values('maximum_per_hour', np.inf), 'effect', self.effect_ids)
@cached_property
def minimum_total(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('minimum_total', -np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('minimum_total', -np.inf, dims=['period', 'scenario']), 'effect', self.effect_ids
+ )
@cached_property
def maximum_total(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('maximum_total', np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('maximum_total', np.inf, dims=['period', 'scenario']), 'effect', self.effect_ids
+ )
@cached_property
def minimum_over_periods(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('minimum_over_periods', -np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('minimum_over_periods', -np.inf, dims=['scenario']), 'effect', self.effect_ids
+ )
@cached_property
def maximum_over_periods(self) -> xr.DataArray:
- return stack_along_dim(self._effect_values('maximum_over_periods', np.inf), 'effect', self.effect_ids)
+ return stack_along_dim(
+ self._aligned_values('maximum_over_periods', np.inf, dims=['scenario']), 'effect', self.effect_ids
+ )
@cached_property
def effects_with_over_periods(self) -> list[Effect]:
@@ -1734,14 +1397,13 @@ def period_weights(self) -> dict[str, xr.DataArray]:
"""Get period weights for each effect, keyed by effect id."""
result = {}
for effect in self._effects:
- effect_weights = effect.period_weights
- default_weights = effect._flow_system.period_weights
- if effect_weights is not None:
- result[effect.id] = effect_weights
- elif default_weights is not None:
- result[effect.id] = default_weights
+ aligned = self._align(effect.id, 'period_weights', dims=['period', 'scenario'])
+ if aligned is not None:
+ result[effect.id] = aligned
+ elif self._default_period_weights is not None:
+ result[effect.id] = self._default_period_weights
else:
- result[effect.id] = effect._fit_coords(name='period_weights', data=1, dims=['period'])
+ result[effect.id] = align_to_coords(1, self._coords, name='period_weights', dims=['period'])
return result
def effects(self) -> list[Effect]:
@@ -1763,8 +1425,16 @@ def validate(self) -> None:
- Individual effect config validation
- Collection-level validation (circular loops in share mappings, unknown effect refs)
"""
+ has_periods = 'period' in self._coords
+
for effect in self._effects:
- effect.validate_config()
+ # Check that minimum_over_periods and maximum_over_periods require a period dimension
+ if (effect.minimum_over_periods is not None or effect.maximum_over_periods is not None) and not has_periods:
+ raise PlausibilityError(
+ f"Effect '{effect.id}': minimum_over_periods and maximum_over_periods require "
+ f"the FlowSystem to have a 'period' dimension. Please define periods when creating "
+ f'the FlowSystem, or remove these constraints.'
+ )
# Collection-level validation (share structure)
self._validate_share_structure()
@@ -1799,9 +1469,10 @@ def _validate_share_structure(self) -> None:
class BusesData:
"""Batched data container for buses."""
- def __init__(self, buses: list[Bus]):
+ def __init__(self, buses: list[Bus], coords: dict[str, pd.Index]):
self._buses = buses
self.elements: IdList = element_id_list(buses)
+ self._coords = coords
@property
def element_ids(self) -> list[str]:
@@ -1821,6 +1492,14 @@ def imbalance_elements(self) -> list[Bus]:
"""Bus objects that allow imbalance."""
return [b for b in self._buses if b.allows_imbalance]
+ def aligned_imbalance_penalty(self, bus: Bus) -> xr.DataArray | None:
+ """Get aligned imbalance penalty for a specific bus."""
+ return align_to_coords(
+ bus.imbalance_penalty_per_flow_hour,
+ self._coords,
+ name=f'{bus.id}|imbalance_penalty_per_flow_hour',
+ )
+
@cached_property
def balance_coefficients(self) -> dict[tuple[str, str], float]:
"""Sparse (bus_id, flow_id) -> +1/-1 coefficients for bus balance."""
@@ -1833,17 +1512,16 @@ def balance_coefficients(self) -> dict[tuple[str, str], float]:
return coefficients
def validate(self) -> None:
- """Validate all buses (config + DataArray checks).
-
- Performs both:
- - Config validation via Bus.validate_config()
- - DataArray validation (post-transformation checks)
- """
+ """Validate all buses (config + DataArray checks)."""
for bus in self._buses:
- bus.validate_config()
+ # Config validation (moved from Bus.validate_config)
+ if len(bus.inputs) == 0 and len(bus.outputs) == 0:
+ raise ValueError(f'Bus "{bus.id}" has no Flows connected to it. Please remove it from the FlowSystem')
+
# Warning: imbalance_penalty == 0 (DataArray check)
if bus.imbalance_penalty_per_flow_hour is not None:
- zero_penalty = np.all(np.equal(bus.imbalance_penalty_per_flow_hour, 0))
+ aligned = self.aligned_imbalance_penalty(bus)
+ zero_penalty = np.all(np.equal(aligned, 0))
if zero_penalty:
logger.warning(
f'In Bus {bus.id}, the imbalance_penalty_per_flow_hour is 0. Use "None" or a value > 0.'
@@ -1860,12 +1538,16 @@ def __init__(
flows_data: FlowsData,
effect_ids: list[str],
timestep_duration: xr.DataArray | float,
+ coords: dict[str, pd.Index] | None = None,
+ normalize_effects: Any = None,
):
self._components_with_status = components_with_status
self._all_components = all_components
self._flows_data = flows_data
self._effect_ids = effect_ids
self._timestep_duration = timestep_duration
+ self._coords = coords
+ self._normalize_effects = normalize_effects
self.elements: IdList = element_id_list(components_with_status)
@property
@@ -1955,6 +1637,8 @@ def status_data(self) -> StatusData:
effect_ids=self._effect_ids,
timestep_duration=self._timestep_duration,
previous_states=self.previous_status_dict,
+ coords=self._coords,
+ normalize_effects=self._normalize_effects,
)
@cached_property
@@ -1977,7 +1661,7 @@ def flow_mask(self) -> xr.DataArray:
@cached_property
def flow_count(self) -> xr.DataArray:
"""(component,) number of flows per component."""
- counts = [len(c.inputs) + len(c.outputs) for c in self._components_with_status]
+ counts = [len(list(c.flows)) for c in self._components_with_status]
return xr.DataArray(
counts,
dims=['component'],
@@ -1993,17 +1677,35 @@ def validate(self) -> None:
from .components import LinearConverter, Storage, Transmission
for component in self._all_components:
- if not isinstance(component, (Storage, LinearConverter, Transmission)):
- component.validate_config()
+ if isinstance(component, (Storage, LinearConverter, Transmission)):
+ continue
+
+ component._check_unique_flow_ids()
+
+ if component.status_parameters is not None:
+ flows_without_size = [flow.flow_id for flow in component.flows.values() if flow.size is None]
+ if flows_without_size:
+ raise PlausibilityError(
+ f'Component "{component.id}" has status_parameters, but the following flows '
+ f'have no size: {flows_without_size}. All flows need explicit sizes when the '
+ f'component uses status_parameters (required for big-M constraints).'
+ )
class ConvertersData:
"""Batched data container for converters."""
- def __init__(self, converters: list[LinearConverter], flow_ids: list[str], timesteps: pd.DatetimeIndex):
+ def __init__(
+ self,
+ converters: list[LinearConverter],
+ flow_ids: list[str],
+ timesteps: pd.DatetimeIndex,
+ coords: dict[str, pd.Index],
+ ):
self._converters = converters
self._flow_ids = flow_ids
self._timesteps = timesteps
+ self._coords = coords
self.elements: IdList = element_id_list(converters)
@property
@@ -2024,6 +1726,22 @@ def with_piecewise(self) -> list[LinearConverter]:
"""Converters with piecewise_conversion."""
return [c for c in self._converters if c.piecewise_conversion]
+ def aligned_conversion_factors(self, converter: LinearConverter) -> list[dict[str, xr.DataArray]]:
+ """Align all conversion factors for a converter to model coords."""
+ result = []
+ for idx, conv_factor in enumerate(converter.conversion_factors):
+ aligned_dict = {}
+ for flow_label, values in conv_factor.items():
+ flow_id = converter.flows[flow_label].id
+ aligned = align_to_coords(values, self._coords, name=f'{flow_id}|conversion_factor{idx}')
+ if aligned is None:
+ raise PlausibilityError(
+ f'{converter.id}: conversion factor for flow "{flow_label}" must not be None'
+ )
+ aligned_dict[flow_label] = aligned
+ result.append(aligned_dict)
+ return result
+
# === Linear Conversion Properties ===
@cached_property
@@ -2075,10 +1793,14 @@ def signed_coefficients(self) -> dict[tuple[str, str], float | xr.DataArray]:
for conv in self.with_factors:
flow_map = {fl.flow_id: fl.id for fl in conv.flows.values()}
# +1 for inputs, -1 for outputs
- flow_signs = {f.id: 1.0 for f in conv.inputs.values() if f.id in all_flow_ids_set}
- flow_signs.update({f.id: -1.0 for f in conv.outputs.values() if f.id in all_flow_ids_set})
-
- for eq_idx, conv_factors in enumerate(conv.conversion_factors):
+ flow_signs = {
+ f.id: (1.0 if f.is_input_in_component else -1.0)
+ for f in conv.flows.values()
+ if f.id in all_flow_ids_set
+ }
+
+ aligned_factors = self.aligned_conversion_factors(conv)
+ for eq_idx, conv_factors in enumerate(aligned_factors):
for flow_label, coeff in conv_factors.items():
flow_id = flow_map.get(flow_label)
sign = flow_signs.get(flow_id, 0.0) if flow_id else 0.0
@@ -2227,17 +1949,59 @@ def piecewise_breakpoints(self) -> xr.Dataset | None:
return xr.Dataset({'starts': starts_combined, 'ends': ends_combined})
def validate(self) -> None:
- """Validate all converters (config checks, no DataArray operations needed)."""
- for converter in self._converters:
- converter.validate_config()
+ """Validate all converters."""
+ for conv in self._converters:
+ # Checks from LinearConverter.validate_config
+ conv._check_unique_flow_ids()
+ # Validate flow sizes for status_parameters
+ if conv.status_parameters:
+ for flow in conv.flows.values():
+ if flow.size is None:
+ raise PlausibilityError(
+ f'"{conv.id}": Flow "{flow.flow_id}" must have a defined size '
+ f'because {conv.id} has status_parameters. '
+ f'A size is required for big-M constraints.'
+ )
+
+ if not conv.conversion_factors and not conv.piecewise_conversion:
+ raise PlausibilityError('Either conversion_factors or piecewise_conversion must be defined!')
+ if conv.conversion_factors and conv.piecewise_conversion:
+ raise PlausibilityError(
+ 'Only one of conversion_factors or piecewise_conversion can be defined, not both!'
+ )
+
+ if conv.conversion_factors:
+ if conv.degrees_of_freedom <= 0:
+ n_flows = len(list(conv.flows))
+ raise PlausibilityError(
+ f'Too Many conversion_factors_specified. Care that you use less conversion_factors '
+ f'then inputs + outputs!! With {n_flows} inputs and outputs, '
+ f'use not more than {n_flows - 1} conversion_factors!'
+ )
+
+ for conversion_factor in conv.conversion_factors:
+ for flow in conversion_factor:
+ if flow not in conv.flows:
+ raise PlausibilityError(
+ f'{conv.id}: Flow {flow} in conversion_factors is not in inputs/outputs'
+ )
+ if conv.piecewise_conversion:
+ for flow in conv.flows.values():
+ if isinstance(flow.size, InvestParameters) and flow.size.fixed_size is None:
+ logger.warning(
+ f'Using a Flow with variable size (InvestParameters without fixed_size) '
+ f'and a piecewise_conversion in {conv.id} is uncommon. Please verify intent '
+ f'({flow.id}).'
+ )
class TransmissionsData:
"""Batched data container for transmissions."""
- def __init__(self, transmissions: list[Transmission], flow_ids: list[str]):
+ def __init__(self, transmissions: list[Transmission], flow_ids: list[str], coords: dict[str, pd.Index]):
self._transmissions = transmissions
self._flow_ids = flow_ids
+ self._coords = coords
self.elements: IdList = element_id_list(transmissions)
@property
@@ -2325,6 +2089,11 @@ def balanced_in2_mask(self) -> xr.DataArray:
# === Loss Properties ===
+ def _align(self, transmission_id: str, attr: str) -> xr.DataArray | None:
+ """Align a single transmission attribute value to model coords."""
+ raw = getattr(self.elements[transmission_id], attr)
+ return align_to_coords(raw, self._coords, name=f'{transmission_id}|{attr}')
+
@cached_property
def relative_losses(self) -> xr.DataArray:
"""(transmission, [time, ...]) relative losses. 0 if None."""
@@ -2332,8 +2101,8 @@ def relative_losses(self) -> xr.DataArray:
return xr.DataArray()
values = []
for t in self._transmissions:
- loss = t.relative_losses if t.relative_losses is not None else 0
- values.append(loss)
+ aligned = self._align(t.id, 'relative_losses')
+ values.append(aligned if aligned is not None else 0)
return stack_along_dim(values, self.dim_name, self.element_ids)
@cached_property
@@ -2343,8 +2112,8 @@ def absolute_losses(self) -> xr.DataArray:
return xr.DataArray()
values = []
for t in self._transmissions:
- loss = t.absolute_losses if t.absolute_losses is not None else 0
- values.append(loss)
+ aligned = self._align(t.id, 'absolute_losses')
+ values.append(aligned if aligned is not None else 0)
return stack_along_dim(values, self.dim_name, self.element_ids)
@cached_property
@@ -2367,19 +2136,45 @@ def transmissions_with_abs_losses(self) -> list[str]:
def validate(self) -> None:
"""Validate all transmissions (config + DataArray checks).
- Performs both:
- - Config validation via Transmission.validate_config()
- - DataArray validation (post-transformation checks)
-
Raises:
PlausibilityError: If any validation check fails.
"""
- for transmission in self._transmissions:
- transmission.validate_config()
-
errors: list[str] = []
for transmission in self._transmissions:
+ # Config checks (moved from Transmission.validate_config / Component.validate_config)
+ transmission._check_unique_flow_ids()
+ if transmission.status_parameters:
+ for flow in transmission.flows.values():
+ if flow.size is None:
+ raise PlausibilityError(
+ f'"{transmission.id}": Flow "{flow.flow_id}" must have a defined size '
+ f'because {transmission.id} has status_parameters. '
+ f'A size is required for big-M constraints.'
+ )
+
+ # Bus consistency checks
+ if transmission.in2 is not None:
+ if transmission.in2.bus != transmission.out1.bus:
+ raise ValueError(
+ f'Output 1 and Input 2 do not start/end at the same Bus: '
+ f'{transmission.out1.bus=}, {transmission.in2.bus=}'
+ )
+ if transmission.out2 is not None:
+ if transmission.out2.bus != transmission.in1.bus:
+ raise ValueError(
+ f'Input 1 and Output 2 do not start/end at the same Bus: '
+ f'{transmission.in1.bus=}, {transmission.out2.bus=}'
+ )
+
+ # Balanced requires InvestParameters on both in-Flows
+ if transmission.balanced:
+ if transmission.in2 is None:
+ raise ValueError('Balanced Transmission needs InvestParameters in both in-Flows')
+ if not isinstance(transmission.in1.size, InvestParameters) or not isinstance(
+ transmission.in2.size, InvestParameters
+ ):
+ raise ValueError('Balanced Transmission needs InvestParameters in both in-Flows')
tid = transmission.id
# Balanced size compatibility (DataArray check)
@@ -2389,7 +2184,7 @@ def validate(self) -> None:
in2_min = transmission.in2.size.minimum_or_fixed_size
in2_max = transmission.in2.size.maximum_or_fixed_size
- if (in1_min > in2_max).any() or (in1_max < in2_min).any():
+ if np.any(in1_min > in2_max) or np.any(in1_max < in2_min):
errors.append(
f'Balanced Transmission {tid} needs compatible minimum and maximum sizes. '
f'Got: in1.size.minimum={in1_min}, in1.size.maximum={in1_max} and '
@@ -2429,25 +2224,33 @@ def flows(self) -> FlowsData:
"""Get or create FlowsData for all flows in the system."""
if self._flows is None:
all_flows = list(self._fs.flows.values())
- self._flows = FlowsData(all_flows, self._fs)
+ self._flows = FlowsData.from_elements(
+ all_flows,
+ coords=self._fs.indexes,
+ effect_ids=list(self._fs.effects.keys()),
+ timestep_duration=self._fs.timestep_duration,
+ normalize_effects=self._fs.effects.create_effect_values_dict,
+ )
return self._flows
@property
def storages(self) -> StoragesData:
"""Get or create StoragesData for basic storages (excludes intercluster)."""
if self._storages is None:
- from .components import Storage
-
clustering = self._fs.clustering
basic_storages = [
c
- for c in self._fs.components.values()
- if isinstance(c, Storage)
- and not (clustering is not None and c.cluster_mode in ('intercluster', 'intercluster_cyclic'))
+ for c in self._fs.storages.values()
+ if not (clustering is not None and c.cluster_mode in ('intercluster', 'intercluster_cyclic'))
]
effect_ids = list(self._fs.effects.keys())
self._storages = StoragesData(
- basic_storages, 'storage', effect_ids, timesteps_extra=self._fs.timesteps_extra
+ basic_storages,
+ 'storage',
+ effect_ids,
+ timesteps_extra=self._fs.timesteps_extra,
+ coords=self._fs.indexes,
+ normalize_effects=self._fs.effects.create_effect_values_dict,
)
return self._storages
@@ -2455,32 +2258,36 @@ def storages(self) -> StoragesData:
def intercluster_storages(self) -> StoragesData:
"""Get or create StoragesData for intercluster storages."""
if self._intercluster_storages is None:
- from .components import Storage
-
clustering = self._fs.clustering
intercluster = [
c
- for c in self._fs.components.values()
- if isinstance(c, Storage)
- and clustering is not None
- and c.cluster_mode in ('intercluster', 'intercluster_cyclic')
+ for c in self._fs.storages.values()
+ if clustering is not None and c.cluster_mode in ('intercluster', 'intercluster_cyclic')
]
effect_ids = list(self._fs.effects.keys())
- self._intercluster_storages = StoragesData(intercluster, 'intercluster_storage', effect_ids)
+ self._intercluster_storages = StoragesData(
+ intercluster,
+ 'intercluster_storage',
+ effect_ids,
+ coords=self._fs.indexes,
+ normalize_effects=self._fs.effects.create_effect_values_dict,
+ )
return self._intercluster_storages
@property
def buses(self) -> BusesData:
"""Get or create BusesData for all buses."""
if self._buses is None:
- self._buses = BusesData(list(self._fs.buses.values()))
+ self._buses = BusesData(list(self._fs.buses.values()), coords=self._fs.indexes)
return self._buses
@property
def effects(self) -> EffectsData:
"""Get or create EffectsData for all effects."""
if self._effects is None:
- self._effects = EffectsData(self._fs.effects)
+ self._effects = EffectsData(
+ self._fs.effects, coords=self._fs.indexes, default_period_weights=self._fs.period_weights
+ )
return self._effects
@property
@@ -2495,6 +2302,8 @@ def components(self) -> ComponentsData:
flows_data=self.flows,
effect_ids=list(self._fs.effects.keys()),
timestep_duration=self._fs.timestep_duration,
+ coords=self._fs.indexes,
+ normalize_effects=self._fs.effects.create_effect_values_dict,
)
return self._components
@@ -2502,20 +2311,25 @@ def components(self) -> ComponentsData:
def converters(self) -> ConvertersData:
"""Get or create ConvertersData for all converters."""
if self._converters is None:
- from .components import LinearConverter
-
- converters = [c for c in self._fs.components.values() if isinstance(c, LinearConverter)]
- self._converters = ConvertersData(converters, flow_ids=self.flows.element_ids, timesteps=self._fs.timesteps)
+ converters = list(self._fs.converters.values())
+ self._converters = ConvertersData(
+ converters,
+ flow_ids=self.flows.element_ids,
+ timesteps=self._fs.timesteps,
+ coords=self._fs.indexes,
+ )
return self._converters
@property
def transmissions(self) -> TransmissionsData:
"""Get or create TransmissionsData for all transmissions."""
if self._transmissions is None:
- from .components import Transmission
-
- transmissions = [c for c in self._fs.components.values() if isinstance(c, Transmission)]
- self._transmissions = TransmissionsData(transmissions, flow_ids=self.flows.element_ids)
+ transmissions = list(self._fs.transmissions.values())
+ self._transmissions = TransmissionsData(
+ transmissions,
+ flow_ids=self.flows.element_ids,
+ coords=self._fs.indexes,
+ )
return self._transmissions
def _reset(self) -> None:
diff --git a/flixopt/carrier.py b/flixopt/carrier.py
index ca4ac0de0..13f4edf0f 100644
--- a/flixopt/carrier.py
+++ b/flixopt/carrier.py
@@ -8,19 +8,17 @@
from __future__ import annotations
from .id_list import IdList
-from .structure import Interface, register_class_for_io
+from .structure import register_class_for_io
@register_class_for_io
-class Carrier(Interface):
+class Carrier:
"""Definition of an energy or material carrier type.
Carriers represent the type of energy or material flowing through a Bus.
They provide consistent color, unit, and description across all visualizations
and can be shared between multiple buses of the same type.
- Inherits from Interface to provide serialization capabilities.
-
Args:
name: Identifier for the carrier (e.g., 'electricity', 'heat', 'gas').
color: Hex color string for visualizations (e.g., '#FFD700').
@@ -94,16 +92,6 @@ def __init__(
self.unit = unit
self.description = description
- def transform_data(self, name_prefix: str = '') -> None:
- """Transform data to match FlowSystem dimensions.
-
- Carriers don't have time-series data, so this is a no-op.
-
- Args:
- name_prefix: Ignored for Carrier.
- """
- pass # Carriers have no data to transform
-
@property
def label(self) -> str:
"""Label for container keying (alias for name)."""
diff --git a/flixopt/components.py b/flixopt/components.py
index 9837bc7bf..1e41ac82f 100644
--- a/flixopt/components.py
+++ b/flixopt/components.py
@@ -7,24 +7,29 @@
import functools
import logging
import warnings
-from typing import TYPE_CHECKING, Literal
+from dataclasses import dataclass
+from functools import cached_property
+from typing import TYPE_CHECKING, ClassVar, Literal
import numpy as np
import xarray as xr
from . import io as fx_io
-from .core import PlausibilityError
-from .elements import Component, Flow
+from .elements import Component, Flow, _connect_and_validate_flows
from .features import MaskHelpers, stack_along_dim
+from .id_list import IdList, flow_id_list
from .interface import InvestParameters, PiecewiseConversion, StatusParameters
from .modeling import _scalar_safe_reduce
from .structure import (
+ CLASS_REGISTRY,
+ Element,
FlowSystemModel,
FlowVarName,
InterclusterStorageVarName,
StorageVarName,
TypeModel,
register_class_for_io,
+ valid_id,
)
if TYPE_CHECKING:
@@ -36,229 +41,452 @@
logger = logging.getLogger('flixopt')
+def check_bounds(
+ value,
+ parameter_label: str,
+ element_label: str,
+ lower_bound,
+ upper_bound,
+) -> None:
+ """Check if the value is within the bounds. The bounds are exclusive. If not, log a warning."""
+ value_arr = np.asarray(value)
+ if not np.all(value_arr > lower_bound):
+ logger.warning(
+ f"'{element_label}.{parameter_label}' <= lower bound {lower_bound}. "
+ f'{parameter_label}.min={float(np.min(value_arr))}, shape={np.shape(value_arr)}'
+ )
+ if not np.all(value_arr < upper_bound):
+ logger.warning(
+ f"'{element_label}.{parameter_label}' >= upper bound {upper_bound}. "
+ f'{parameter_label}.max={float(np.max(value_arr))}, shape={np.shape(value_arr)}'
+ )
+
+
@register_class_for_io
-class LinearConverter(Component):
+class Converter(Element):
+ """Converts input-Flows into output-Flows via linear conversion factors.
+
+ Self-contained component class that handles its own flows directly.
+ Supports both simple conversion factors and piecewise conversion.
+
+ Use factory classmethods (``Converter.boiler()``, ``Converter.chp()``, etc.)
+ for common component types.
+
+ Args:
+ id: Element identifier.
+ inputs: Input Flows feeding into the converter.
+ outputs: Output Flows produced by the converter.
+ conversion_factors: Linear relationships between flows.
+ piecewise_conversion: Piecewise linear relationships between flow rates.
+ status_parameters: Binary operation constraints and costs.
+ meta_data: Additional metadata stored in results.
+ color: Visualization color.
"""
- Converts input-Flows into output-Flows via linear conversion factors.
- LinearConverter models equipment that transforms one or more input flows into one or
- more output flows through linear relationships. This includes heat exchangers,
- electrical converters, chemical reactors, and other equipment where the
- relationship between inputs and outputs can be expressed as linear equations.
+ _io_exclude: ClassVar[set[str]] = {'prevent_simultaneous_flows'}
+
+ def __init__(
+ self,
+ id: str,
+ inputs: list[Flow],
+ outputs: list[Flow],
+ conversion_factors: list[dict[str, Numeric_TPS]] | None = None,
+ piecewise_conversion: PiecewiseConversion | None = None,
+ status_parameters: StatusParameters | None = None,
+ prevent_simultaneous_flows: list[Flow] | None = None,
+ meta_data: dict | None = None,
+ color: str | None = None,
+ ):
+ self.id = valid_id(id)
+ self.conversion_factors = conversion_factors or []
+ self.piecewise_conversion = piecewise_conversion
+ self.status_parameters = status_parameters
+ self.prevent_simultaneous_flows = list(prevent_simultaneous_flows) if prevent_simultaneous_flows else []
+ self.meta_data = meta_data or {}
+ self.color = color
+
+ _connect_and_validate_flows(self.id, inputs, outputs, self.prevent_simultaneous_flows)
+ self.inputs: IdList = flow_id_list(inputs, display_name='inputs')
+ self.outputs: IdList = flow_id_list(outputs, display_name='outputs')
- The component supports two modeling approaches: simple conversion factors for
- straightforward linear relationships, or piecewise conversion for complex non-linear
- behavior approximated through piecewise linear segments.
+ @cached_property
+ def flows(self) -> IdList:
+ """All flows (inputs and outputs) as an IdList."""
+ return self.inputs + self.outputs
- Mathematical Formulation:
- See
+ @property
+ def degrees_of_freedom(self):
+ return len(self.inputs + self.outputs) - len(self.conversion_factors)
- Args:
- id: The id of the Element. Used to identify it in the FlowSystem.
- inputs: list of input Flows that feed into the converter.
- outputs: list of output Flows that are produced by the converter.
- status_parameters: Information about active and inactive state of LinearConverter.
- Component is active/inactive if all connected Flows are active/inactive. This induces a
- status variable (binary) in all Flows! If possible, use StatusParameters in a
- single Flow instead to keep the number of binary variables low.
- conversion_factors: Linear relationships between flows expressed as a list of
- dictionaries. Each dictionary maps flow ids to their coefficients in one
- linear equation. The number of conversion factors must be less than the total
- number of flows to ensure degrees of freedom > 0. Either 'conversion_factors'
- OR 'piecewise_conversion' can be used, but not both.
- For examples also look into the linear_converters.py file.
- piecewise_conversion: Define piecewise linear relationships between flow rates
- of different flows. Enables modeling of non-linear conversion behavior through
- linear approximation. Either 'conversion_factors' or 'piecewise_conversion'
- can be used, but not both.
- meta_data: Used to store additional information about the Element. Not used
- internally, but saved in results. Only use Python native types.
+ def _propagate_status_parameters(self) -> None:
+ if self.status_parameters:
+ for flow in self.flows.values():
+ if flow.status_parameters is None:
+ flow.status_parameters = StatusParameters()
+ if self.prevent_simultaneous_flows:
+ for flow in self.prevent_simultaneous_flows:
+ if flow.status_parameters is None:
+ flow.status_parameters = StatusParameters()
- Examples:
- Simple 1:1 heat exchanger with 95% efficiency:
+ def _check_unique_flow_ids(self, inputs: list = None, outputs: list = None):
+ if inputs is None:
+ inputs = list(self.inputs.values())
+ if outputs is None:
+ outputs = list(self.outputs.values())
+ all_flow_ids = [flow.flow_id for flow in inputs + outputs]
+ if len(set(all_flow_ids)) != len(all_flow_ids):
+ duplicates = {fid for fid in all_flow_ids if all_flow_ids.count(fid) > 1}
+ raise ValueError(f'Flow names must be unique! "{self.id}" got 2 or more of: {duplicates}')
- ```python
- heat_exchanger = LinearConverter(
- id='primary_hx',
- inputs=[hot_water_in],
- outputs=[hot_water_out],
- conversion_factors=[{'hot_water_in': 0.95, 'hot_water_out': 1}],
+ def __repr__(self) -> str:
+ return fx_io.build_repr_from_init(
+ self, excluded_params={'self', 'id', 'inputs', 'outputs', 'kwargs'}, skip_default_size=True
+ ) + fx_io.format_flow_details(self)
+
+ # === Factory classmethods for common converter types ===
+
+ @classmethod
+ def boiler(
+ cls,
+ id: str,
+ *,
+ thermal_efficiency,
+ fuel_flow: Flow,
+ thermal_flow: Flow,
+ status_parameters: StatusParameters | None = None,
+ meta_data: dict | None = None,
+ color: str | None = None,
+ ) -> Converter:
+ """Create a fuel-fired boiler.
+
+ Args:
+ id: Element identifier.
+ thermal_efficiency: Thermal efficiency (0-1).
+ fuel_flow: Fuel input flow.
+ thermal_flow: Thermal output flow.
+ status_parameters: Optional status parameters.
+ meta_data: Optional metadata.
+ color: Optional visualization color.
+ """
+ check_bounds(thermal_efficiency, 'thermal_efficiency', id, 0, 1)
+ fuel_id = fuel_flow.flow_id or (fuel_flow.bus if isinstance(fuel_flow.bus, str) else str(fuel_flow.bus))
+ thermal_id = thermal_flow.flow_id or (
+ thermal_flow.bus if isinstance(thermal_flow.bus, str) else str(thermal_flow.bus)
+ )
+ return cls(
+ id,
+ inputs=[fuel_flow],
+ outputs=[thermal_flow],
+ conversion_factors=[{fuel_id: thermal_efficiency, thermal_id: 1}],
+ status_parameters=status_parameters,
+ meta_data=meta_data,
+ color=color,
)
- ```
- Multi-input heat pump with COP=3:
+ @classmethod
+ def power2heat(
+ cls,
+ id: str,
+ *,
+ thermal_efficiency,
+ electrical_flow: Flow,
+ thermal_flow: Flow,
+ status_parameters: StatusParameters | None = None,
+ meta_data: dict | None = None,
+ color: str | None = None,
+ ) -> Converter:
+ """Create an electric resistance heater / power-to-heat converter.
- ```python
- heat_pump = LinearConverter(
- id='air_source_hp',
- inputs=[electricity_in],
- outputs=[heat_output],
- conversion_factors=[{'electricity_in': 3, 'heat_output': 1}],
+ Args:
+ id: Element identifier.
+ thermal_efficiency: Thermal efficiency (0-1).
+ electrical_flow: Electrical input flow.
+ thermal_flow: Thermal output flow.
+ status_parameters: Optional status parameters.
+ meta_data: Optional metadata.
+ color: Optional visualization color.
+ """
+ check_bounds(thermal_efficiency, 'thermal_efficiency', id, 0, 1)
+ elec_id = electrical_flow.flow_id or (
+ electrical_flow.bus if isinstance(electrical_flow.bus, str) else str(electrical_flow.bus)
+ )
+ thermal_id = thermal_flow.flow_id or (
+ thermal_flow.bus if isinstance(thermal_flow.bus, str) else str(thermal_flow.bus)
+ )
+ return cls(
+ id,
+ inputs=[electrical_flow],
+ outputs=[thermal_flow],
+ conversion_factors=[{elec_id: thermal_efficiency, thermal_id: 1}],
+ status_parameters=status_parameters,
+ meta_data=meta_data,
+ color=color,
)
- ```
- Combined heat and power (CHP) unit with multiple outputs:
+ @classmethod
+ def heat_pump(
+ cls,
+ id: str,
+ *,
+ cop,
+ electrical_flow: Flow,
+ thermal_flow: Flow,
+ status_parameters: StatusParameters | None = None,
+ meta_data: dict | None = None,
+ color: str | None = None,
+ ) -> Converter:
+ """Create a heat pump.
- ```python
- chp_unit = LinearConverter(
- id='gas_chp',
- inputs=[natural_gas],
- outputs=[electricity_out, heat_out],
+ Args:
+ id: Element identifier.
+ cop: Coefficient of Performance (typically 1-20).
+ electrical_flow: Electrical input flow.
+ thermal_flow: Thermal output flow.
+ status_parameters: Optional status parameters.
+ meta_data: Optional metadata.
+ color: Optional visualization color.
+ """
+ check_bounds(cop, 'cop', id, 1, 20)
+ elec_id = electrical_flow.flow_id or (
+ electrical_flow.bus if isinstance(electrical_flow.bus, str) else str(electrical_flow.bus)
+ )
+ thermal_id = thermal_flow.flow_id or (
+ thermal_flow.bus if isinstance(thermal_flow.bus, str) else str(thermal_flow.bus)
+ )
+ return cls(
+ id,
+ inputs=[electrical_flow],
+ outputs=[thermal_flow],
+ conversion_factors=[{elec_id: cop, thermal_id: 1}],
+ status_parameters=status_parameters,
+ meta_data=meta_data,
+ color=color,
+ )
+
+ @classmethod
+ def cooling_tower(
+ cls,
+ id: str,
+ *,
+ specific_electricity_demand,
+ electrical_flow: Flow,
+ thermal_flow: Flow,
+ status_parameters: StatusParameters | None = None,
+ meta_data: dict | None = None,
+ color: str | None = None,
+ ) -> Converter:
+ """Create a cooling tower.
+
+ Args:
+ id: Element identifier.
+ specific_electricity_demand: Auxiliary electricity per unit cooling (0-1).
+ electrical_flow: Electrical input flow.
+ thermal_flow: Thermal input flow (waste heat).
+ status_parameters: Optional status parameters.
+ meta_data: Optional metadata.
+ color: Optional visualization color.
+ """
+ check_bounds(specific_electricity_demand, 'specific_electricity_demand', id, 0, 1)
+ elec_id = electrical_flow.flow_id or (
+ electrical_flow.bus if isinstance(electrical_flow.bus, str) else str(electrical_flow.bus)
+ )
+ thermal_id = thermal_flow.flow_id or (
+ thermal_flow.bus if isinstance(thermal_flow.bus, str) else str(thermal_flow.bus)
+ )
+ return cls(
+ id,
+ inputs=[electrical_flow, thermal_flow],
+ outputs=[],
+ conversion_factors=[{elec_id: -1, thermal_id: specific_electricity_demand}],
+ status_parameters=status_parameters,
+ meta_data=meta_data,
+ color=color,
+ )
+
+ @classmethod
+ def chp(
+ cls,
+ id: str,
+ *,
+ thermal_efficiency,
+ electrical_efficiency,
+ fuel_flow: Flow,
+ electrical_flow: Flow,
+ thermal_flow: Flow,
+ status_parameters: StatusParameters | None = None,
+ meta_data: dict | None = None,
+ color: str | None = None,
+ ) -> Converter:
+ """Create a combined heat and power (CHP) unit.
+
+ Args:
+ id: Element identifier.
+ thermal_efficiency: Thermal efficiency (0-1).
+ electrical_efficiency: Electrical efficiency (0-1).
+ fuel_flow: Fuel input flow.
+ electrical_flow: Electrical output flow.
+ thermal_flow: Thermal output flow.
+ status_parameters: Optional status parameters.
+ meta_data: Optional metadata.
+ color: Optional visualization color.
+ """
+ check_bounds(thermal_efficiency, 'thermal_efficiency', id, 0, 1)
+ check_bounds(electrical_efficiency, 'electrical_efficiency', id, 0, 1)
+ check_bounds(electrical_efficiency + thermal_efficiency, 'thermal_efficiency+electrical_efficiency', id, 0, 1)
+ fuel_id = fuel_flow.flow_id or (fuel_flow.bus if isinstance(fuel_flow.bus, str) else str(fuel_flow.bus))
+ elec_id = electrical_flow.flow_id or (
+ electrical_flow.bus if isinstance(electrical_flow.bus, str) else str(electrical_flow.bus)
+ )
+ thermal_id = thermal_flow.flow_id or (
+ thermal_flow.bus if isinstance(thermal_flow.bus, str) else str(thermal_flow.bus)
+ )
+ return cls(
+ id,
+ inputs=[fuel_flow],
+ outputs=[thermal_flow, electrical_flow],
conversion_factors=[
- {'natural_gas': 0.35, 'electricity_out': 1},
- {'natural_gas': 0.45, 'heat_out': 1},
+ {fuel_id: thermal_efficiency, thermal_id: 1},
+ {fuel_id: electrical_efficiency, elec_id: 1},
],
+ status_parameters=status_parameters,
+ meta_data=meta_data,
+ color=color,
)
- ```
- Electrolyzer with multiple conversion relationships:
+ @classmethod
+ def heat_pump_with_source(
+ cls,
+ id: str,
+ *,
+ cop,
+ electrical_flow: Flow,
+ heat_source_flow: Flow,
+ thermal_flow: Flow,
+ status_parameters: StatusParameters | None = None,
+ meta_data: dict | None = None,
+ color: str | None = None,
+ ) -> Converter:
+ """Create a heat pump with explicit heat source modeling.
- ```python
- electrolyzer = LinearConverter(
- id='pem_electrolyzer',
- inputs=[electricity_in, water_in],
- outputs=[hydrogen_out, oxygen_out],
+ Args:
+ id: Element identifier.
+        cop: Coefficient of Performance (must be strictly greater than 1; typically up to 20).
+ electrical_flow: Electrical input flow.
+ heat_source_flow: Heat source input flow.
+ thermal_flow: Thermal output flow.
+ status_parameters: Optional status parameters.
+ meta_data: Optional metadata.
+ color: Optional visualization color.
+ """
+ check_bounds(cop, 'cop', id, 1, 20)
+ if np.any(np.asarray(cop) == 1):
+ raise ValueError(f'{id}.cop must be strictly !=1 for heat_pump_with_source.')
+ elec_id = electrical_flow.flow_id or (
+ electrical_flow.bus if isinstance(electrical_flow.bus, str) else str(electrical_flow.bus)
+ )
+ source_id = heat_source_flow.flow_id or (
+ heat_source_flow.bus if isinstance(heat_source_flow.bus, str) else str(heat_source_flow.bus)
+ )
+ thermal_id = thermal_flow.flow_id or (
+ thermal_flow.bus if isinstance(thermal_flow.bus, str) else str(thermal_flow.bus)
+ )
+ return cls(
+ id,
+ inputs=[electrical_flow, heat_source_flow],
+ outputs=[thermal_flow],
conversion_factors=[
- {'electricity_in': 1, 'hydrogen_out': 50}, # 50 kWh/kg H2
- {'water_in': 1, 'hydrogen_out': 9}, # 9 kg H2O/kg H2
- {'hydrogen_out': 8, 'oxygen_out': 1}, # Mass balance
+ {elec_id: cop, thermal_id: 1},
+ {source_id: cop / (cop - 1), thermal_id: 1},
],
+ status_parameters=status_parameters,
+ meta_data=meta_data,
+ color=color,
)
- ```
- Complex converter with piecewise efficiency:
- ```python
- variable_efficiency_converter = LinearConverter(
- id='variable_converter',
- inputs=[fuel_in],
- outputs=[power_out],
- piecewise_conversion=PiecewiseConversion(
- {
- 'fuel_in': Piecewise(
- [
- Piece(0, 10), # Low load operation
- Piece(10, 25), # High load operation
- ]
- ),
- 'power_out': Piecewise(
- [
- Piece(0, 3.5), # Lower efficiency at part load
- Piece(3.5, 10), # Higher efficiency at full load
- ]
- ),
- }
- ),
- )
- ```
+# Backward compatibility alias
+LinearConverter = Converter
+
+# Register under old name for IO backward compat with saved files
+CLASS_REGISTRY['LinearConverter'] = Converter
- Note:
- Conversion factors define linear relationships where the sum of (coefficient × flow_rate)
- equals zero for each equation: factor1×flow1 + factor2×flow2 + ... = 0
- Conversion factors define linear relationships:
- `{flow1: a1, flow2: a2, ...}` yields `a1×flow_rate1 + a2×flow_rate2 + ... = 0`.
- Note: The input format may be unintuitive. For example,
- `{"electricity": 1, "H2": 50}` implies `1×electricity = 50×H2`,
- i.e., 50 units of electricity produce 1 unit of H2.
- The system must have fewer conversion factors than total flows (degrees of freedom > 0)
- to avoid over-constraining the problem. For n total flows, use at most n-1 conversion factors.
+@register_class_for_io
+class Port(Element):
+ """A Port represents a system boundary for importing/exporting energy or material.
- When using piecewise_conversion, the converter operates on one piece at a time,
- with binary variables determining which piece is active.
+ Ports replace Source, Sink, and SourceAndSink with a unified interface.
+ Imports are flows coming INTO the system (supply), exports are flows going OUT
+ (demand).
+ Args:
+ id: Element identifier.
+ imports: Flows supplying energy/material into the system (component outputs to buses).
+ exports: Flows consuming energy/material from the system (component inputs from buses).
+ prevent_simultaneous_flow_rates: If True, prevents simultaneous import and export.
+ status_parameters: Binary operation constraints and costs.
+ meta_data: Additional metadata stored in results.
+ color: Visualization color.
"""
+ _io_exclude: ClassVar[set[str]] = {'prevent_simultaneous_flows'}
+
def __init__(
self,
- id: str | None = None,
- inputs: list[Flow] | None = None,
- outputs: list[Flow] | None = None,
+ id: str,
+ imports: list[Flow] | None = None,
+ exports: list[Flow] | None = None,
+ prevent_simultaneous_flow_rates: bool = False,
status_parameters: StatusParameters | None = None,
- conversion_factors: list[dict[str, Numeric_TPS]] | None = None,
- piecewise_conversion: PiecewiseConversion | None = None,
meta_data: dict | None = None,
color: str | None = None,
- **kwargs,
):
- super().__init__(id, inputs, outputs, status_parameters, meta_data=meta_data, color=color, **kwargs)
- self.conversion_factors = conversion_factors or []
- self.piecewise_conversion = piecewise_conversion
+ self.id = valid_id(id)
+ self.imports = imports or []
+ self.exports = exports or []
+ self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates
+ self.status_parameters = status_parameters
+ self.meta_data = meta_data or {}
+ self.color = color
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to parent Component and piecewise_conversion."""
- super().link_to_flow_system(flow_system, prefix)
- if self.piecewise_conversion is not None:
- self.piecewise_conversion.link_to_flow_system(flow_system, self._sub_prefix('PiecewiseConversion'))
+ # imports go TO buses (is_input=False in component terms, i.e. outputs of the component)
+ # exports come FROM buses (is_input=True in component terms, i.e. inputs of the component)
+ self.prevent_simultaneous_flows = self.imports + self.exports if prevent_simultaneous_flow_rates else []
+ _connect_and_validate_flows(self.id, self.exports, self.imports, self.prevent_simultaneous_flows)
- def validate_config(self) -> None:
- """Validate configuration consistency.
+ # For backward compat with code that accesses .inputs/.outputs
+ self.inputs: IdList = flow_id_list(list(self.exports), display_name='inputs')
+ self.outputs: IdList = flow_id_list(list(self.imports), display_name='outputs')
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- super().validate_config()
- if not self.conversion_factors and not self.piecewise_conversion:
- raise PlausibilityError('Either conversion_factors or piecewise_conversion must be defined!')
- if self.conversion_factors and self.piecewise_conversion:
- raise PlausibilityError('Only one of conversion_factors or piecewise_conversion can be defined, not both!')
-
- if self.conversion_factors:
- if self.degrees_of_freedom <= 0:
- raise PlausibilityError(
- f'Too Many conversion_factors_specified. Care that you use less conversion_factors '
- f'then inputs + outputs!! With {len(self.inputs + self.outputs)} inputs and outputs, '
- f'use not more than {len(self.inputs + self.outputs) - 1} conversion_factors!'
- )
+ @cached_property
+ def flows(self) -> IdList:
+ """All flows as an IdList."""
+ return flow_id_list(list(self.imports) + list(self.exports))
- for conversion_factor in self.conversion_factors:
- for flow in conversion_factor:
- if flow not in self.flows:
- raise PlausibilityError(
- f'{self.id}: Flow {flow} in conversion_factors is not in inputs/outputs'
- )
- if self.piecewise_conversion:
+ def _propagate_status_parameters(self) -> None:
+ if self.status_parameters:
for flow in self.flows.values():
- if isinstance(flow.size, InvestParameters) and flow.size.fixed_size is None:
- logger.warning(
- f'Using a Flow with variable size (InvestParameters without fixed_size) '
- f'and a piecewise_conversion in {self.id} is uncommon. Please verify intent '
- f'({flow.id}).'
- )
+ if flow.status_parameters is None:
+ flow.status_parameters = StatusParameters()
+ if self.prevent_simultaneous_flows:
+ for flow in self.prevent_simultaneous_flows:
+ if flow.status_parameters is None:
+ flow.status_parameters = StatusParameters()
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config()."""
- self.validate_config()
-
- def transform_data(self) -> None:
- super().transform_data()
- if self.conversion_factors:
- self.conversion_factors = self._transform_conversion_factors()
- if self.piecewise_conversion:
- self.piecewise_conversion.has_time_dim = True
- self.piecewise_conversion.transform_data()
-
- def _transform_conversion_factors(self) -> list[dict[str, xr.DataArray]]:
- """Converts all conversion factors to internal datatypes"""
- list_of_conversion_factors = []
- for idx, conversion_factor in enumerate(self.conversion_factors):
- transformed_dict = {}
- for flow, values in conversion_factor.items():
- # TODO: Might be better to use the label of the component instead of the flow
- ts = self._fit_coords(f'{self.flows[flow].id}|conversion_factor{idx}', values)
- if ts is None:
- raise PlausibilityError(f'{self.id}: conversion factor for flow "{flow}" must not be None')
- transformed_dict[flow] = ts
- list_of_conversion_factors.append(transformed_dict)
- return list_of_conversion_factors
+ def _check_unique_flow_ids(self, inputs: list = None, outputs: list = None):
+ all_flow_ids = [flow.flow_id for flow in self.flows.values()]
+ if len(set(all_flow_ids)) != len(all_flow_ids):
+ duplicates = {fid for fid in all_flow_ids if all_flow_ids.count(fid) > 1}
+ raise ValueError(f'Flow names must be unique! "{self.id}" got 2 or more of: {duplicates}')
- @property
- def degrees_of_freedom(self):
- return len(self.inputs + self.outputs) - len(self.conversion_factors)
+ def __repr__(self) -> str:
+ return fx_io.build_repr_from_init(
+ self, excluded_params={'self', 'id', 'imports', 'exports', 'kwargs'}, skip_default_size=True
+ ) + fx_io.format_flow_details(self)
@register_class_for_io
-class Storage(Component):
+class Storage(Element):
"""
A Storage models the temporary storage and release of energy or material.
@@ -407,11 +635,13 @@ class Storage(Component):
With flow rates in m3/h, the charge state is therefore in m3.
"""
+ _io_exclude: ClassVar[set[str]] = {'inputs', 'outputs', 'prevent_simultaneous_flows'}
+
def __init__(
self,
- id: str | None = None,
- charging: Flow | None = None,
- discharging: Flow | None = None,
+ id: str,
+ charging: Flow,
+ discharging: Flow,
capacity_in_flow_hours: Numeric_PS | InvestParameters | None = None,
relative_minimum_charge_state: Numeric_TPS = 0,
relative_maximum_charge_state: Numeric_TPS = 1,
@@ -426,128 +656,62 @@ def __init__(
prevent_simultaneous_charge_and_discharge: bool = True,
balanced: bool = False,
cluster_mode: Literal['independent', 'cyclic', 'intercluster', 'intercluster_cyclic'] = 'intercluster_cyclic',
- meta_data: dict | None = None,
- color: str | None = None,
**kwargs,
):
- # TODO: fixed_relative_chargeState implementieren
- super().__init__(
- id,
- inputs=[charging],
- outputs=[discharging],
- prevent_simultaneous_flows=[charging, discharging] if prevent_simultaneous_charge_and_discharge else None,
- meta_data=meta_data,
- color=color,
- **kwargs,
- )
-
+ self.id = valid_id(id)
+ # Store all params as attributes
self.charging = charging
self.discharging = discharging
self.capacity_in_flow_hours = capacity_in_flow_hours
- self.relative_minimum_charge_state: Numeric_TPS = relative_minimum_charge_state
- self.relative_maximum_charge_state: Numeric_TPS = relative_maximum_charge_state
-
- self.relative_minimum_final_charge_state = relative_minimum_final_charge_state
- self.relative_maximum_final_charge_state = relative_maximum_final_charge_state
-
+ self.relative_minimum_charge_state = relative_minimum_charge_state
+ self.relative_maximum_charge_state = relative_maximum_charge_state
self.initial_charge_state = initial_charge_state
self.minimal_final_charge_state = minimal_final_charge_state
self.maximal_final_charge_state = maximal_final_charge_state
-
- self.eta_charge: Numeric_TPS = eta_charge
- self.eta_discharge: Numeric_TPS = eta_discharge
- self.relative_loss_per_hour: Numeric_TPS = relative_loss_per_hour
+ self.relative_minimum_final_charge_state = relative_minimum_final_charge_state
+ self.relative_maximum_final_charge_state = relative_maximum_final_charge_state
+ self.eta_charge = eta_charge
+ self.eta_discharge = eta_discharge
+ self.relative_loss_per_hour = relative_loss_per_hour
self.prevent_simultaneous_charge_and_discharge = prevent_simultaneous_charge_and_discharge
self.balanced = balanced
self.cluster_mode = cluster_mode
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to parent Component and capacity_in_flow_hours if it's InvestParameters."""
- super().link_to_flow_system(flow_system, prefix)
- if isinstance(self.capacity_in_flow_hours, InvestParameters):
- self.capacity_in_flow_hours.link_to_flow_system(flow_system, self._sub_prefix('InvestParameters'))
-
- def transform_data(self) -> None:
- super().transform_data()
- self.relative_minimum_charge_state = self._fit_coords(
- f'{self.prefix}|relative_minimum_charge_state', self.relative_minimum_charge_state
- )
- self.relative_maximum_charge_state = self._fit_coords(
- f'{self.prefix}|relative_maximum_charge_state', self.relative_maximum_charge_state
- )
- self.eta_charge = self._fit_coords(f'{self.prefix}|eta_charge', self.eta_charge)
- self.eta_discharge = self._fit_coords(f'{self.prefix}|eta_discharge', self.eta_discharge)
- self.relative_loss_per_hour = self._fit_coords(
- f'{self.prefix}|relative_loss_per_hour', self.relative_loss_per_hour
- )
- if self.initial_charge_state is not None and not isinstance(self.initial_charge_state, str):
- self.initial_charge_state = self._fit_coords(
- f'{self.prefix}|initial_charge_state', self.initial_charge_state, dims=['period', 'scenario']
- )
- self.minimal_final_charge_state = self._fit_coords(
- f'{self.prefix}|minimal_final_charge_state', self.minimal_final_charge_state, dims=['period', 'scenario']
- )
- self.maximal_final_charge_state = self._fit_coords(
- f'{self.prefix}|maximal_final_charge_state', self.maximal_final_charge_state, dims=['period', 'scenario']
- )
- self.relative_minimum_final_charge_state = self._fit_coords(
- f'{self.prefix}|relative_minimum_final_charge_state',
- self.relative_minimum_final_charge_state,
- dims=['period', 'scenario'],
- )
- self.relative_maximum_final_charge_state = self._fit_coords(
- f'{self.prefix}|relative_maximum_final_charge_state',
- self.relative_maximum_final_charge_state,
- dims=['period', 'scenario'],
- )
- if isinstance(self.capacity_in_flow_hours, InvestParameters):
- self.capacity_in_flow_hours.transform_data()
- else:
- self.capacity_in_flow_hours = self._fit_coords(
- f'{self.prefix}|capacity_in_flow_hours', self.capacity_in_flow_hours, dims=['period', 'scenario']
- )
+ self.status_parameters = kwargs.get('status_parameters')
+ self.meta_data = kwargs.get('meta_data') or {}
+ self.color = kwargs.get('color')
- def validate_config(self) -> None:
- """Validate configuration consistency.
+ # Default flow_ids to 'charging'/'discharging' when not explicitly set
+ self.charging.flow_id = self.charging.flow_id or 'charging'
+ self.discharging.flow_id = self.discharging.flow_id or 'discharging'
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- super().validate_config()
-
- # Validate string values for initial_charge_state
- if isinstance(self.initial_charge_state, str):
- if self.initial_charge_state != 'equals_final':
- raise PlausibilityError(f'initial_charge_state has undefined value: {self.initial_charge_state}')
-
- # Capacity is required for final charge state constraints (simple None checks)
- if self.capacity_in_flow_hours is None:
- if self.relative_minimum_final_charge_state is not None:
- raise PlausibilityError(
- f'Storage "{self.id}" has relative_minimum_final_charge_state but no capacity_in_flow_hours. '
- f'A capacity is required for relative final charge state constraints.'
- )
- if self.relative_maximum_final_charge_state is not None:
- raise PlausibilityError(
- f'Storage "{self.id}" has relative_maximum_final_charge_state but no capacity_in_flow_hours. '
- f'A capacity is required for relative final charge state constraints.'
- )
+ self.prevent_simultaneous_flows = (
+ [self.charging, self.discharging] if prevent_simultaneous_charge_and_discharge else []
+ )
+ _connect_and_validate_flows(self.id, [self.charging], [self.discharging], self.prevent_simultaneous_flows)
+ self.inputs: IdList = flow_id_list([self.charging], display_name='inputs')
+ self.outputs: IdList = flow_id_list([self.discharging], display_name='outputs')
- # Balanced requires InvestParameters on charging/discharging flows
- if self.balanced:
- if not isinstance(self.charging.size, InvestParameters) or not isinstance(
- self.discharging.size, InvestParameters
- ):
- raise PlausibilityError(
- f'Balancing charging and discharging Flows in {self.id} is only possible with Investments.'
- )
+ @cached_property
+ def flows(self) -> IdList:
+ """All flows (charging and discharging) as an IdList."""
+ return flow_id_list([self.charging, self.discharging])
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config().
+ def _propagate_status_parameters(self) -> None:
+ if self.status_parameters:
+ for flow in self.flows.values():
+ if flow.status_parameters is None:
+ flow.status_parameters = StatusParameters()
+ if self.prevent_simultaneous_flows:
+ for flow in self.prevent_simultaneous_flows:
+ if flow.status_parameters is None:
+ flow.status_parameters = StatusParameters()
- DataArray-based checks moved to StoragesData.validate().
- """
- self.validate_config()
+ def _check_unique_flow_ids(self, inputs: list = None, outputs: list = None):
+ all_flow_ids = [flow.flow_id for flow in self.flows.values()]
+ if len(set(all_flow_ids)) != len(all_flow_ids):
+ duplicates = {fid for fid in all_flow_ids if all_flow_ids.count(fid) > 1}
+ raise ValueError(f'Flow names must be unique! "{self.id}" got 2 or more of: {duplicates}')
def __repr__(self) -> str:
"""Return string representation."""
@@ -560,121 +724,32 @@ def __repr__(self) -> str:
@register_class_for_io
-class Transmission(Component):
- """
- Models transmission infrastructure that transports flows between two locations with losses.
-
- Transmission components represent physical infrastructure like pipes, cables,
- transmission lines, or conveyor systems that transport energy or materials between
- two points. They can model both unidirectional and bidirectional flow with
- configurable loss mechanisms and operational constraints.
-
- The component supports complex transmission scenarios including relative losses
- (proportional to flow), absolute losses (fixed when active), and bidirectional
- operation with flow direction constraints.
+class Transmission(Element):
+ """Models transmission infrastructure that transports flows between two locations with losses.
Args:
id: The id of the Element. Used to identify it in the FlowSystem.
in1: The primary inflow (side A). Pass InvestParameters here for capacity optimization.
out1: The primary outflow (side B).
in2: Optional secondary inflow (side B) for bidirectional operation.
- If in1 has InvestParameters, in2 will automatically have matching capacity.
out2: Optional secondary outflow (side A) for bidirectional operation.
relative_losses: Proportional losses as fraction of throughput (e.g., 0.02 for 2% loss).
- Applied as: output = input × (1 - relative_losses)
absolute_losses: Fixed losses that occur when transmission is active.
- Automatically creates binary variables for active/inactive states.
status_parameters: Parameters defining binary operation constraints and costs.
prevent_simultaneous_flows_in_both_directions: If True, prevents simultaneous
- flow in both directions. Increases binary variables but reflects physical
- reality for most transmission systems. Default is True.
- balanced: Whether to equate the size of the in1 and in2 Flow. Needs InvestParameters in both Flows.
- meta_data: Used to store additional information. Not used internally but saved
- in results. Only use Python native types.
-
- Examples:
- Simple electrical transmission line:
-
- ```python
- power_line = Transmission(
- id='110kv_line',
- in1=substation_a_out,
- out1=substation_b_in,
- relative_losses=0.03, # 3% line losses
- )
- ```
-
- Bidirectional natural gas pipeline:
-
- ```python
- gas_pipeline = Transmission(
- id='interstate_pipeline',
- in1=compressor_station_a,
- out1=distribution_hub_b,
- in2=compressor_station_b,
- out2=distribution_hub_a,
- relative_losses=0.005, # 0.5% friction losses
- absolute_losses=50, # 50 kW compressor power when active
- prevent_simultaneous_flows_in_both_directions=True,
- )
- ```
-
- District heating network with investment optimization:
-
- ```python
- heating_network = Transmission(
- id='dh_main_line',
- in1=Flow(
- label='heat_supply',
- bus=central_plant_bus,
- size=InvestParameters(
- minimum_size=1000, # Minimum 1 MW capacity
- maximum_size=10000, # Maximum 10 MW capacity
- specific_effects={'cost': 200}, # €200/kW capacity
- fix_effects={'cost': 500000}, # €500k fixed installation
- ),
- ),
- out1=district_heat_demand,
- relative_losses=0.15, # 15% thermal losses in distribution
- )
- ```
-
- Material conveyor with active/inactive status:
-
- ```python
- conveyor_belt = Transmission(
- id='material_transport',
- in1=loading_station,
- out1=unloading_station,
- absolute_losses=25, # 25 kW motor power when running
- status_parameters=StatusParameters(
- effects_per_startup={'maintenance': 0.1},
- min_uptime=2, # Minimum 2-hour operation
- startup_limit=10, # Maximum 10 starts per period
- ),
- )
- ```
-
- Note:
- The transmission equation balances flows with losses:
- output_flow = input_flow × (1 - relative_losses) - absolute_losses
-
- For bidirectional transmission, each direction has independent loss calculations.
-
- When using InvestParameters on in1, the capacity automatically applies to in2
- to maintain consistent bidirectional capacity without additional investment variables.
-
- Absolute losses force the creation of binary on/inactive variables, which increases
- computational complexity but enables realistic modeling of equipment with
- standby power consumption.
-
+ flow in both directions. Default is True.
+ balanced: Whether to equate the size of the in1 and in2 Flow.
+ meta_data: Additional metadata stored in results.
+ color: Visualization color.
"""
+ _io_exclude: ClassVar[set[str]] = {'prevent_simultaneous_flows'}
+
def __init__(
self,
- id: str | None = None,
- in1: Flow | None = None,
- out1: Flow | None = None,
+ id: str,
+ in1: Flow,
+ out1: Flow,
in2: Flow | None = None,
out2: Flow | None = None,
relative_losses: Numeric_TPS | None = None,
@@ -684,69 +759,47 @@ def __init__(
balanced: bool = False,
meta_data: dict | None = None,
color: str | None = None,
- **kwargs,
):
- super().__init__(
- id,
- inputs=[flow for flow in (in1, in2) if flow is not None],
- outputs=[flow for flow in (out1, out2) if flow is not None],
- status_parameters=status_parameters,
- prevent_simultaneous_flows=None
- if in2 is None or prevent_simultaneous_flows_in_both_directions is False
- else [in1, in2],
- meta_data=meta_data,
- color=color,
- **kwargs,
- )
+ self.id = valid_id(id)
self.in1 = in1
self.out1 = out1
self.in2 = in2
self.out2 = out2
-
self.relative_losses = relative_losses
self.absolute_losses = absolute_losses
+ self.status_parameters = status_parameters
+ self.prevent_simultaneous_flows_in_both_directions = prevent_simultaneous_flows_in_both_directions
self.balanced = balanced
+ self.meta_data = meta_data or {}
+ self.color = color
- def validate_config(self) -> None:
- """Validate configuration consistency.
-
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- super().validate_config()
- # Check buses consistency
- if self.in2 is not None:
- if self.in2.bus != self.out1.bus:
- raise ValueError(
- f'Output 1 and Input 2 do not start/end at the same Bus: {self.out1.bus=}, {self.in2.bus=}'
- )
- if self.out2 is not None:
- if self.out2.bus != self.in1.bus:
- raise ValueError(
- f'Input 1 and Output 2 do not start/end at the same Bus: {self.in1.bus=}, {self.out2.bus=}'
- )
-
- # Balanced requires InvestParameters on both in-Flows
- if self.balanced:
- if self.in2 is None:
- raise ValueError('Balanced Transmission needs InvestParameters in both in-Flows')
- if not isinstance(self.in1.size, InvestParameters) or not isinstance(self.in2.size, InvestParameters):
- raise ValueError('Balanced Transmission needs InvestParameters in both in-Flows')
-
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config().
+ inputs = [f for f in (self.in1, self.in2) if f is not None]
+ outputs = [f for f in (self.out1, self.out2) if f is not None]
+ self.prevent_simultaneous_flows = (
+ [self.in1, self.in2] if self.in2 is not None and prevent_simultaneous_flows_in_both_directions else []
+ )
+ _connect_and_validate_flows(self.id, inputs, outputs, self.prevent_simultaneous_flows)
+ self.inputs: IdList = flow_id_list(inputs, display_name='inputs')
+ self.outputs: IdList = flow_id_list(outputs, display_name='outputs')
- DataArray-based checks moved to TransmissionsData.validate().
- """
- self.validate_config()
+ @cached_property
+ def flows(self) -> IdList:
+ """All flows (inputs and outputs) as an IdList."""
+ return self.inputs + self.outputs
def _propagate_status_parameters(self) -> None:
- super()._propagate_status_parameters()
+ if self.status_parameters:
+ for flow in self.flows.values():
+ if flow.status_parameters is None:
+ flow.status_parameters = StatusParameters()
+ if self.prevent_simultaneous_flows:
+ for flow in self.prevent_simultaneous_flows:
+ if flow.status_parameters is None:
+ flow.status_parameters = StatusParameters()
# Transmissions with absolute_losses need status variables on input flows
# Also need relative_minimum > 0 to link status to flow rate properly
if self.absolute_losses is not None and np.any(self.absolute_losses != 0):
from .config import CONFIG
- from .interface import StatusParameters
input_flows = [self.in1]
if self.in2 is not None:
@@ -754,7 +807,6 @@ def _propagate_status_parameters(self) -> None:
for flow in input_flows:
if flow.status_parameters is None:
flow.status_parameters = StatusParameters()
- flow.status_parameters.link_to_flow_system(self._flow_system, f'{flow.id}|status_parameters')
rel_min = flow.relative_minimum
needs_update = (
rel_min is None
@@ -764,10 +816,16 @@ def _propagate_status_parameters(self) -> None:
if needs_update:
flow.relative_minimum = CONFIG.Modeling.epsilon
- def transform_data(self) -> None:
- super().transform_data()
- self.relative_losses = self._fit_coords(f'{self.prefix}|relative_losses', self.relative_losses)
- self.absolute_losses = self._fit_coords(f'{self.prefix}|absolute_losses', self.absolute_losses)
+ def _check_unique_flow_ids(self, inputs: list = None, outputs: list = None):
+ all_flow_ids = [flow.flow_id for flow in self.flows.values()]
+ if len(set(all_flow_ids)) != len(all_flow_ids):
+ duplicates = {fid for fid in all_flow_ids if all_flow_ids.count(fid) > 1}
+ raise ValueError(f'Flow names must be unique! "{self.id}" got 2 or more of: {duplicates}')
+
+ def __repr__(self) -> str:
+ return fx_io.build_repr_from_init(
+ self, excluded_params={'self', 'id', 'in1', 'out1', 'in2', 'out2', 'kwargs'}, skip_default_size=True
+ ) + fx_io.format_flow_details(self)
class StoragesModel(TypeModel):
@@ -808,10 +866,6 @@ def __init__(
super().__init__(model, data)
self._flows_model = flows_model
- # Set reference on each storage element
- for storage in self.elements.values():
- storage._storages_model = self
-
self.create_variables()
self.create_constraints()
self.create_investment_model()
@@ -1109,13 +1163,15 @@ def _add_batched_initial_final_constraints(self, charge_state) -> None:
if isinstance(storage.initial_charge_state, str): # 'equals_final'
storages_equals_final.append(storage)
else:
- storages_numeric_initial.append((storage, storage.initial_charge_state))
+ storages_numeric_initial.append((storage, self.data.aligned_initial_charge_state(storage)))
- if storage.maximal_final_charge_state is not None:
- storages_max_final.append((storage, storage.maximal_final_charge_state))
+ aligned_max_final = self.data.aligned_maximal_final_charge_state(storage)
+ if aligned_max_final is not None:
+ storages_max_final.append((storage, aligned_max_final))
- if storage.minimal_final_charge_state is not None:
- storages_min_final.append((storage, storage.minimal_final_charge_state))
+ aligned_min_final = self.data.aligned_minimal_final_charge_state(storage)
+ if aligned_min_final is not None:
+ storages_min_final.append((storage, aligned_min_final))
dim = self.dim_name
@@ -1328,14 +1384,16 @@ def _add_initial_final_constraints_legacy(self, storage, cs) -> None:
name=f'storage|{storage.id}|initial_charge_state',
)
else:
+ aligned_initial = self.data.aligned_initial_charge_state(storage)
self.model.add_constraints(
- cs.isel(time=0) == storage.initial_charge_state,
+ cs.isel(time=0) == aligned_initial,
name=f'storage|{storage.id}|initial_charge_state',
)
- if storage.maximal_final_charge_state is not None:
+ aligned_min_final = self.data.aligned_minimal_final_charge_state(storage)
+ if aligned_min_final is not None:
self.model.add_constraints(
- cs.isel(time=-1) >= storage.minimal_final_charge_state,
+ cs.isel(time=-1) >= aligned_min_final,
name=f'storage|{storage.id}|final_charge_min',
)
@@ -1717,7 +1775,7 @@ def _add_cyclic_or_initial_constraints(self) -> None:
cyclic_ids.append(storage.id)
else:
initial_fixed_ids.append(storage.id)
- initial_values.append(initial)
+ initial_values.append(self.data.aligned_initial_charge_state(storage))
# Add cyclic constraints
if cyclic_ids:
@@ -1948,6 +2006,7 @@ def create_effect_shares(self) -> None:
@register_class_for_io
+@dataclass(eq=False, repr=False)
class SourceAndSink(Component):
"""
A SourceAndSink combines both supply and demand capabilities in a single component.
@@ -2033,32 +2092,24 @@ class SourceAndSink(Component):
The deprecated `sink` and `source` kwargs are accepted for compatibility but will be removed in future releases.
"""
- def __init__(
- self,
- id: str | None = None,
- inputs: list[Flow] | None = None,
- outputs: list[Flow] | None = None,
- prevent_simultaneous_flow_rates: bool = True,
- meta_data: dict | None = None,
- color: str | None = None,
- **kwargs,
- ):
- # Convert dict to list for deserialization compatibility (IdLists serialize as dicts)
- _inputs_list = list(inputs.values()) if isinstance(inputs, dict) else (inputs or [])
- _outputs_list = list(outputs.values()) if isinstance(outputs, dict) else (outputs or [])
- super().__init__(
- id,
- inputs=_inputs_list,
- outputs=_outputs_list,
- prevent_simultaneous_flows=_inputs_list + _outputs_list if prevent_simultaneous_flow_rates else None,
- meta_data=meta_data,
- color=color,
- **kwargs,
+ _io_exclude: ClassVar[set[str]] = {'prevent_simultaneous_flows'}
+
+ prevent_simultaneous_flow_rates: bool = True
+
+ def __post_init__(self):
+ warnings.warn(
+ 'SourceAndSink is deprecated. Use Port(imports=..., exports=...) instead. '
+ 'Will be removed in a future release.',
+ DeprecationWarning,
+ stacklevel=2,
)
- self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates
+ if self.prevent_simultaneous_flow_rates:
+ self.prevent_simultaneous_flows = (self.inputs or []) + (self.outputs or [])
+ super().__post_init__()
@register_class_for_io
+@dataclass(eq=False, repr=False)
class Source(Component):
"""
A Source generates or provides energy or material flows into the system.
@@ -2134,27 +2185,23 @@ class Source(Component):
The deprecated `source` kwarg is accepted for compatibility but will be removed in future releases.
"""
- def __init__(
- self,
- id: str | None = None,
- outputs: list[Flow] | None = None,
- meta_data: dict | None = None,
- prevent_simultaneous_flow_rates: bool = False,
- color: str | None = None,
- **kwargs,
- ):
- self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates
- super().__init__(
- id,
- outputs=outputs,
- meta_data=meta_data,
- prevent_simultaneous_flows=outputs if prevent_simultaneous_flow_rates else None,
- color=color,
- **kwargs,
+ _io_exclude: ClassVar[set[str]] = {'inputs', 'prevent_simultaneous_flows'}
+
+ prevent_simultaneous_flow_rates: bool = False
+
+ def __post_init__(self):
+ warnings.warn(
+ 'Source is deprecated. Use Port(imports=[...]) instead. Will be removed in a future release.',
+ DeprecationWarning,
+ stacklevel=2,
)
+ if self.prevent_simultaneous_flow_rates:
+ self.prevent_simultaneous_flows = self.outputs or []
+ super().__post_init__()
@register_class_for_io
+@dataclass(eq=False, repr=False)
class Sink(Component):
"""
A Sink consumes energy or material flows from the system.
@@ -2231,32 +2278,16 @@ class Sink(Component):
The deprecated `sink` kwarg is accepted for compatibility but will be removed in future releases.
"""
- def __init__(
- self,
- id: str | None = None,
- inputs: list[Flow] | None = None,
- meta_data: dict | None = None,
- prevent_simultaneous_flow_rates: bool = False,
- color: str | None = None,
- **kwargs,
- ):
- """Initialize a Sink (consumes flow from the system).
+ _io_exclude: ClassVar[set[str]] = {'outputs', 'prevent_simultaneous_flows'}
- Args:
- id: Unique element id.
- inputs: Input flows for the sink.
- meta_data: Arbitrary metadata attached to the element.
- prevent_simultaneous_flow_rates: If True, prevents simultaneous nonzero flow rates
- across the element's inputs by wiring that restriction into the base Component setup.
- color: Optional color for visualizations.
- """
+ prevent_simultaneous_flow_rates: bool = False
- self.prevent_simultaneous_flow_rates = prevent_simultaneous_flow_rates
- super().__init__(
- id,
- inputs=inputs,
- meta_data=meta_data,
- prevent_simultaneous_flows=inputs if prevent_simultaneous_flow_rates else None,
- color=color,
- **kwargs,
+ def __post_init__(self):
+ warnings.warn(
+ 'Sink is deprecated. Use Port(exports=[...]) instead. Will be removed in a future release.',
+ DeprecationWarning,
+ stacklevel=2,
)
+ if self.prevent_simultaneous_flow_rates:
+ self.prevent_simultaneous_flows = self.inputs or []
+ super().__post_init__()
diff --git a/flixopt/core.py b/flixopt/core.py
index aca380f5e..6fd4af3d5 100644
--- a/flixopt/core.py
+++ b/flixopt/core.py
@@ -474,6 +474,16 @@ def to_dataarray(
# Scalar values - create scalar DataArray
intermediate = xr.DataArray(data.item() if hasattr(data, 'item') else data)
+ elif isinstance(data, list):
+ # Plain Python list (e.g. from IO roundtrip) — convert to ndarray
+ data = np.asarray(data)
+ if data.ndim == 0:
+ intermediate = xr.DataArray(data.item())
+ elif data.ndim == 1:
+ intermediate = cls._match_1d_array_by_length(data, validated_coords, target_dims)
+ else:
+ intermediate = cls._match_multidim_array_by_shape_permutation(data, validated_coords, target_dims)
+
elif isinstance(data, np.ndarray):
# NumPy arrays - dispatch based on dimensionality
if data.ndim == 0:
@@ -522,6 +532,7 @@ def to_dataarray(
'np.integer',
'np.floating',
'np.bool_',
+ 'list',
'np.ndarray',
'pd.Series',
'pd.DataFrame',
@@ -588,6 +599,104 @@ def _validate_and_prepare_target_coordinates(
return validated_coords, tuple(dimension_names)
+def align_to_coords(
+ data: NumericOrBool | None,
+ coords: dict[str, pd.Index],
+ name: str = '',
+ dims: list[str] | None = None,
+) -> xr.DataArray | None:
+ """Convert any raw input to a DataArray aligned with model coordinates.
+
+ Standalone replacement for the ``FlowSystem.fit_to_model_coords`` →
+ ``DataConverter.to_dataarray`` chain. Handles every type users may pass:
+
+ * **scalar** (int / float / bool / np.number) → 0-d DataArray
+ * **1-D array** (np.ndarray / list) → matched to a dim by length
+ * **pd.Series** → matched by index
+ * **TimeSeriesData** → aligned via its own ``fit_to_coords``
+ * **xr.DataArray** (e.g. from IO roundtrip) → validated, returned as-is
+ * **None** → returns None (pass-through)
+
+ Args:
+ data: Raw input value. ``None`` is a legal no-op.
+ coords: Model coordinate mapping, e.g.
+ ``{'time': DatetimeIndex, 'period': Index, 'scenario': Index}``.
+ name: Optional name assigned to the resulting DataArray.
+ dims: If given, only these coordinate keys are considered for
+ alignment (subset of *coords*).
+
+ Returns:
+ DataArray aligned to *coords*, or ``None`` when *data* is ``None``.
+
+ Raises:
+ ConversionError: If the input cannot be mapped to the target
+ coordinates (length mismatch, incompatible dims, …).
+ """
+ if data is None:
+ return None
+
+ # Restrict coords to requested dims
+ if dims is not None:
+ coords = {k: v for k, v in coords.items() if k in dims}
+
+ # TimeSeriesData carries clustering metadata — delegate to its own method
+ if isinstance(data, TimeSeriesData):
+ try:
+ return data.fit_to_coords(coords, name=name or None)
+ except ConversionError as e:
+ raise ConversionError(
+ f'Could not align TimeSeriesData "{name}" to model coords:\n{data}\nOriginal error: {e}'
+ ) from e
+
+ # Everything else goes through DataConverter
+ try:
+ da = DataConverter.to_dataarray(data, coords=coords)
+ except ConversionError as e:
+ raise ConversionError(f'Could not align data "{name}" to model coords:\n{data}\nOriginal error: {e}') from e
+
+ if name:
+ da = da.rename(name)
+ return da
+
+
+def align_effects_to_coords(
+ effect_values: dict | None,
+ coords: dict[str, pd.Index],
+ prefix: str = '',
+ suffix: str = '',
+ dims: list[str] | None = None,
+ delimiter: str = '|',
+) -> dict[str, xr.DataArray] | None:
+ """Align a dict of effect values to model coordinates.
+
+ Convenience wrapper around :func:`align_to_coords` for
+ ``effects_per_flow_hour`` and similar effect dicts.
+
+ Args:
+ effect_values: ``{effect_id: numeric_value}`` mapping, or ``None``.
+ coords: Model coordinate mapping.
+ prefix: Label prefix for DataArray names.
+ suffix: Label suffix for DataArray names.
+ dims: Passed through to :func:`align_to_coords`.
+ delimiter: Separator between prefix, effect id, and suffix.
+
+ Returns:
+ ``{effect_id: DataArray}`` or ``None``.
+ """
+ if effect_values is None:
+ return None
+
+ return {
+ effect_id: align_to_coords(
+ value,
+ coords,
+ name=delimiter.join(filter(None, [prefix, effect_id, suffix])),
+ dims=dims,
+ )
+ for effect_id, value in effect_values.items()
+ }
+
+
def get_dataarray_stats(arr: xr.DataArray) -> dict:
"""Generate statistical summary of a DataArray."""
stats = {}
diff --git a/flixopt/datasets.py b/flixopt/datasets.py
new file mode 100644
index 000000000..df815b684
--- /dev/null
+++ b/flixopt/datasets.py
@@ -0,0 +1,388 @@
+"""Dataset builders for FlowSystem elements.
+
+Functions that eagerly build xr.Dataset containers from element lists,
+replacing lazy cached_property getters with a single upfront computation.
+"""
+
+from __future__ import annotations
+
+import logging
+from typing import TYPE_CHECKING, Any
+
+import numpy as np
+import pandas as pd
+import xarray as xr
+
+from .core import align_effects_to_coords, align_to_coords
+from .features import fast_isnull, stack_along_dim
+from .interface import InvestParameters
+
+if TYPE_CHECKING:
+ from .elements import Flow
+ from .interface import StatusParameters
+
+logger = logging.getLogger('flixopt')
+
+# Canonical dimension ordering for all arrays
+_CANONICAL_ORDER = ['flow', 'cluster', 'time', 'period', 'scenario']
+
+
+def _ensure_canonical_order(arr: xr.DataArray) -> xr.DataArray:
+ """Ensure array has canonical dimension order and coord dict order."""
+ actual_dims = [d for d in _CANONICAL_ORDER if d in arr.dims]
+ for d in arr.dims:
+ if d not in actual_dims:
+ actual_dims.append(d)
+
+ if list(arr.dims) != actual_dims:
+ arr = arr.transpose(*actual_dims)
+
+ if list(arr.coords.keys()) != list(arr.dims):
+ ordered_coords = {d: arr.coords[d] for d in arr.dims}
+ arr = xr.DataArray(arr.values, dims=arr.dims, coords=ordered_coords)
+
+ return arr
+
+
+def build_flows_dataset(
+ flows: list[Flow],
+ coords: dict[str, pd.Index],
+ effect_ids: list[str],
+ timestep_duration: xr.DataArray | float | None = None,
+ normalize_effects: Any = None,
+) -> xr.Dataset:
+ """Build an xr.Dataset containing all numeric flow data.
+
+ Args:
+ flows: List of all Flow elements.
+ coords: Model coordinate indexes (time, period, scenario).
+ effect_ids: List of effect IDs for building effect arrays.
+ timestep_duration: Duration per timestep (for previous duration computation).
+ normalize_effects: Callable to normalize raw effect values.
+
+ Returns:
+ Dataset with all flow parameters as variables, boolean masks, and attrs.
+ """
+ from .batched import build_effects_array
+
+ if not flows:
+ return xr.Dataset()
+
+ flow_ids = [f.id for f in flows]
+ ids_index = pd.Index(flow_ids)
+ dim = 'flow'
+
+ def _align(flow, attr, dims=None):
+ raw = getattr(flow, attr)
+ return align_to_coords(raw, coords, name=f'{flow.id}|{attr}', dims=dims)
+
+ def _model_coords(dims=None):
+ if dims is None:
+ dims = ['time', 'period', 'scenario']
+ return {d: coords[d] for d in dims if d in coords}
+
+ def _batched_parameter(ids, attr, dims):
+ if not ids:
+ return None
+ by_id = {f.id: f for f in flows}
+ values = [align_to_coords(getattr(by_id[fid], attr), coords, name=f'{fid}|{attr}', dims=dims) for fid in ids]
+ arr = stack_along_dim(values, dim, ids, _model_coords(dims))
+ return _ensure_canonical_order(arr)
+
+ ds = xr.Dataset()
+
+ # === Boolean masks ===
+ def _mask(condition):
+ return xr.DataArray([condition(f) for f in flows], dims=[dim], coords={dim: ids_index})
+
+ ds['has_status'] = _mask(lambda f: f.status_parameters is not None)
+ ds['has_investment'] = _mask(lambda f: isinstance(f.size, InvestParameters))
+ ds['has_optional_investment'] = _mask(lambda f: isinstance(f.size, InvestParameters) and not f.size.mandatory)
+ ds['has_mandatory_investment'] = _mask(lambda f: isinstance(f.size, InvestParameters) and f.size.mandatory)
+ ds['has_fixed_size'] = _mask(lambda f: f.size is not None and not isinstance(f.size, InvestParameters))
+ ds['has_size'] = _mask(lambda f: f.size is not None)
+ ds['has_effects'] = _mask(lambda f: f.effects_per_flow_hour is not None)
+ ds['has_flow_hours_min'] = _mask(lambda f: f.flow_hours_min is not None)
+ ds['has_flow_hours_max'] = _mask(lambda f: f.flow_hours_max is not None)
+ ds['has_load_factor_min'] = _mask(lambda f: f.load_factor_min is not None)
+ ds['has_load_factor_max'] = _mask(lambda f: f.load_factor_max is not None)
+
+ # Status tracking masks (inline StatusData logic)
+ status_params = {f.id: f.status_parameters for f in flows if f.status_parameters is not None}
+
+ def _status_mask(condition):
+ mask = np.zeros(len(flow_ids), dtype=bool)
+ for i, fid in enumerate(flow_ids):
+ if fid in status_params:
+ mask[i] = condition(status_params[fid])
+ return xr.DataArray(mask, dims=[dim], coords={dim: ids_index})
+
+ ds['has_startup_tracking'] = _status_mask(
+ lambda p: (
+ p.effects_per_startup
+ or p.min_uptime is not None
+ or p.max_uptime is not None
+ or p.startup_limit is not None
+ or p.force_startup_tracking
+ )
+ )
+ ds['has_uptime_tracking'] = _status_mask(lambda p: p.min_uptime is not None or p.max_uptime is not None)
+ ds['has_downtime_tracking'] = _status_mask(lambda p: p.min_downtime is not None or p.max_downtime is not None)
+ ds['has_startup_limit'] = _status_mask(lambda p: p.startup_limit is not None)
+
+ # === Relative bounds ===
+ rel_min_values = [_align(f, 'relative_minimum') for f in flows]
+ ds['relative_minimum'] = _ensure_canonical_order(
+ stack_along_dim(rel_min_values, dim, flow_ids, _model_coords(None))
+ )
+
+ rel_max_values = [_align(f, 'relative_maximum') for f in flows]
+ ds['relative_maximum'] = _ensure_canonical_order(
+ stack_along_dim(rel_max_values, dim, flow_ids, _model_coords(None))
+ )
+
+ # Fixed relative profile
+ fixed_values = [
+ _align(f, 'fixed_relative_profile') if f.fixed_relative_profile is not None else np.nan for f in flows
+ ]
+ ds['fixed_relative_profile'] = _ensure_canonical_order(
+ stack_along_dim(fixed_values, dim, flow_ids, _model_coords(None))
+ )
+
+ # Effective relative bounds
+ fixed = ds['fixed_relative_profile']
+ ds['effective_relative_minimum'] = ds['relative_minimum'].where(fast_isnull(fixed), fixed)
+ ds['effective_relative_maximum'] = ds['relative_maximum'].where(fast_isnull(fixed), fixed)
+
+ # === Size arrays ===
+ fixed_size_values = []
+ eff_size_lower_values = []
+ eff_size_upper_values = []
+ for f in flows:
+ if f.size is None:
+ fixed_size_values.append(np.nan)
+ eff_size_lower_values.append(np.nan)
+ eff_size_upper_values.append(np.nan)
+ elif isinstance(f.size, InvestParameters):
+ fixed_size_values.append(np.nan)
+ eff_size_lower_values.append(f.size.minimum_or_fixed_size)
+ eff_size_upper_values.append(f.size.maximum_or_fixed_size)
+ else:
+ aligned = _align(f, 'size', ['period', 'scenario'])
+ fixed_size_values.append(aligned)
+ eff_size_lower_values.append(aligned)
+ eff_size_upper_values.append(aligned)
+
+ ds['fixed_size'] = _ensure_canonical_order(
+ stack_along_dim(fixed_size_values, dim, flow_ids, _model_coords(['period', 'scenario']))
+ )
+ ds['effective_size_lower'] = _ensure_canonical_order(
+ stack_along_dim(eff_size_lower_values, dim, flow_ids, _model_coords(['period', 'scenario']))
+ )
+ ds['effective_size_upper'] = _ensure_canonical_order(
+ stack_along_dim(eff_size_upper_values, dim, flow_ids, _model_coords(['period', 'scenario']))
+ )
+
+ # === Investment size bounds (all flows, NaN for non-investment) ===
+ invest_ids = [f.id for f in flows if isinstance(f.size, InvestParameters)]
+ if invest_ids:
+ invest_params = {f.id: f.size for f in flows if isinstance(f.size, InvestParameters)}
+
+ inv_min_values = [
+ invest_params[fid].minimum_or_fixed_size if invest_params[fid].mandatory else 0.0 for fid in invest_ids
+ ]
+ inv_min = stack_along_dim(inv_min_values, dim, invest_ids)
+
+ inv_max_values = [invest_params[fid].maximum_or_fixed_size for fid in invest_ids]
+ inv_max = stack_along_dim(inv_max_values, dim, invest_ids)
+
+ ds['size_minimum_all'] = _ensure_canonical_order(inv_min.reindex({dim: ids_index}))
+ ds['size_maximum_all'] = _ensure_canonical_order(inv_max.reindex({dim: ids_index}))
+ else:
+ nan_arr = xr.DataArray(np.nan, dims=[dim], coords={dim: ids_index})
+ ds['size_minimum_all'] = nan_arr
+ ds['size_maximum_all'] = nan_arr
+
+ # === Flow hours / load factor bounds (subset arrays) ===
+ fh_min_ids = [f.id for f in flows if f.flow_hours_min is not None]
+ fh = _batched_parameter(fh_min_ids, 'flow_hours_min', ['period', 'scenario'])
+ if fh is not None:
+ ds['flow_hours_minimum'] = fh
+
+ fh_max_ids = [f.id for f in flows if f.flow_hours_max is not None]
+ fh = _batched_parameter(fh_max_ids, 'flow_hours_max', ['period', 'scenario'])
+ if fh is not None:
+ ds['flow_hours_maximum'] = fh
+
+ fh_op_min_ids = [f.id for f in flows if f.flow_hours_min_over_periods is not None]
+ fh = _batched_parameter(fh_op_min_ids, 'flow_hours_min_over_periods', ['scenario'])
+ if fh is not None:
+ ds['flow_hours_minimum_over_periods'] = fh
+
+ fh_op_max_ids = [f.id for f in flows if f.flow_hours_max_over_periods is not None]
+ fh = _batched_parameter(fh_op_max_ids, 'flow_hours_max_over_periods', ['scenario'])
+ if fh is not None:
+ ds['flow_hours_maximum_over_periods'] = fh
+
+ lf_min_ids = [f.id for f in flows if f.load_factor_min is not None]
+ lf = _batched_parameter(lf_min_ids, 'load_factor_min', ['period', 'scenario'])
+ if lf is not None:
+ ds['load_factor_minimum'] = lf
+
+ lf_max_ids = [f.id for f in flows if f.load_factor_max is not None]
+ lf = _batched_parameter(lf_max_ids, 'load_factor_max', ['period', 'scenario'])
+ if lf is not None:
+ ds['load_factor_maximum'] = lf
+
+ # === Effects per flow hour ===
+ with_effects = [f.id for f in flows if f.effects_per_flow_hour is not None]
+ if with_effects and effect_ids:
+ norm = normalize_effects or (lambda x: x)
+ by_id = {f.id: f for f in flows}
+ dicts = {}
+ for fid in with_effects:
+ raw = by_id[fid].effects_per_flow_hour
+ normalized = norm(raw) or {}
+ aligned = align_effects_to_coords(normalized, coords, prefix=fid, suffix='per_flow_hour')
+ dicts[fid] = aligned or {}
+ arr = build_effects_array(dicts, effect_ids, dim)
+ if arr is not None:
+ ds['effects_per_flow_hour'] = arr
+
+ # Note: linked_periods is NOT computed here — it's handled directly via
+ # InvestParameters in InvestmentBuilder.add_linked_periods_constraints()
+
+ # === Investment effects (delegated to InvestmentData patterns) ===
+ if invest_ids:
+ invest_params_dict = {f.id: f.size for f in flows if isinstance(f.size, InvestParameters)}
+ _build_investment_effects(ds, invest_params_dict, dim, effect_ids, coords, normalize_effects)
+
+ # === Status effects and bounds ===
+ if status_params:
+ _build_status_data(ds, status_params, dim, effect_ids, timestep_duration, flows, coords, normalize_effects)
+
+ return ds
+
+
+def _build_investment_effects(
+ ds: xr.Dataset,
+ invest_params: dict[str, InvestParameters],
+ dim: str,
+ effect_ids: list[str],
+ coords: dict[str, pd.Index],
+ normalize_effects: Any,
+) -> None:
+ """Add investment-related effect arrays to the dataset."""
+ from .batched import InvestmentData
+
+ inv = InvestmentData(
+ params=invest_params,
+ dim_name=dim,
+ effect_ids=effect_ids,
+ coords=coords,
+ normalize_effects=normalize_effects,
+ )
+
+ # Effects per size
+ if inv.effects_per_size is not None:
+ ds['invest_effects_per_size'] = inv.effects_per_size
+
+ # Effects of investment (optional)
+ if inv.effects_of_investment is not None:
+ ds['invest_effects_of_investment'] = inv.effects_of_investment
+
+ # Effects of retirement (optional)
+ if inv.effects_of_retirement is not None:
+ ds['invest_effects_of_retirement'] = inv.effects_of_retirement
+
+ # Mandatory investment effects
+ if inv.effects_of_investment_mandatory is not None:
+ ds['invest_effects_of_investment_mandatory'] = inv.effects_of_investment_mandatory
+
+ # Constant retirement effects
+ if inv.effects_of_retirement_constant is not None:
+ ds['invest_effects_of_retirement_constant'] = inv.effects_of_retirement_constant
+
+ # Optional investment size bounds
+ if inv.optional_size_minimum is not None:
+ ds['optional_investment_size_minimum'] = inv.optional_size_minimum
+ if inv.optional_size_maximum is not None:
+ ds['optional_investment_size_maximum'] = inv.optional_size_maximum
+
+ # Piecewise effects
+ if inv.piecewise_element_ids:
+ ds.attrs['piecewise_element_ids'] = inv.piecewise_element_ids
+ ds.attrs['piecewise_max_segments'] = inv.piecewise_max_segments
+ ds.attrs['piecewise_effect_names'] = inv.piecewise_effect_names
+ if inv.piecewise_segment_mask is not None:
+ ds['piecewise_segment_mask'] = inv.piecewise_segment_mask
+ if inv.piecewise_origin_starts is not None:
+ ds['piecewise_origin_starts'] = inv.piecewise_origin_starts
+ if inv.piecewise_origin_ends is not None:
+ ds['piecewise_origin_ends'] = inv.piecewise_origin_ends
+ if inv.piecewise_effect_starts is not None:
+ ds['piecewise_effect_starts'] = inv.piecewise_effect_starts
+ if inv.piecewise_effect_ends is not None:
+ ds['piecewise_effect_ends'] = inv.piecewise_effect_ends
+
+
+def _build_status_data(
+ ds: xr.Dataset,
+ status_params: dict[str, StatusParameters],
+ dim: str,
+ effect_ids: list[str],
+ timestep_duration: xr.DataArray | float | None,
+ flows: list[Flow],
+ coords: dict[str, pd.Index],
+ normalize_effects: Any,
+) -> None:
+ """Add status-related arrays to the dataset."""
+ from .batched import StatusData
+
+ # Build previous_states for duration computation
+ from .config import CONFIG
+ from .modeling import ModelingUtilitiesAbstract
+
+ previous_states = {}
+ for f in flows:
+ if f.previous_flow_rate is not None:
+ previous_states[f.id] = ModelingUtilitiesAbstract.to_binary(
+ values=xr.DataArray(
+ [f.previous_flow_rate] if np.isscalar(f.previous_flow_rate) else f.previous_flow_rate,
+ dims='time',
+ ),
+ epsilon=CONFIG.Modeling.epsilon,
+ dims='time',
+ )
+
+ sd = StatusData(
+ params=status_params,
+ dim_name=dim,
+ effect_ids=effect_ids,
+ timestep_duration=timestep_duration,
+ previous_states=previous_states,
+ coords=coords,
+ normalize_effects=normalize_effects,
+ )
+
+ # Effects
+ if sd.effects_per_active_hour is not None:
+ ds['effects_per_active_hour'] = sd.effects_per_active_hour
+ if sd.effects_per_startup is not None:
+ ds['effects_per_startup'] = sd.effects_per_startup
+
+ # Duration bounds
+ if sd.min_uptime is not None:
+ ds['min_uptime'] = sd.min_uptime
+ if sd.max_uptime is not None:
+ ds['max_uptime'] = sd.max_uptime
+ if sd.min_downtime is not None:
+ ds['min_downtime'] = sd.min_downtime
+ if sd.max_downtime is not None:
+ ds['max_downtime'] = sd.max_downtime
+ if sd.startup_limit is not None:
+ ds['startup_limit'] = sd.startup_limit
+ if sd.previous_uptime is not None:
+ ds['previous_uptime'] = sd.previous_uptime
+ if sd.previous_downtime is not None:
+ ds['previous_downtime'] = sd.previous_downtime
diff --git a/flixopt/effects.py b/flixopt/effects.py
index 4ad339eeb..2c3cfaccb 100644
--- a/flixopt/effects.py
+++ b/flixopt/effects.py
@@ -9,18 +9,20 @@
import logging
from collections import deque
+from dataclasses import dataclass, field
from typing import TYPE_CHECKING
import linopy
import numpy as np
import xarray as xr
-from .core import PlausibilityError
+from . import io as fx_io
from .id_list import IdList
from .structure import (
Element,
FlowSystemModel,
register_class_for_io,
+ valid_id,
)
if TYPE_CHECKING:
@@ -38,6 +40,7 @@
@register_class_for_io
+@dataclass(eq=False, repr=False)
class Effect(Element):
"""Represents system-wide impacts like costs, emissions, or resource consumption.
@@ -188,131 +191,43 @@ class Effect(Element):
"""
- def __init__(
- self,
- id: str | None = None,
- unit: str = '',
- description: str = '',
- meta_data: dict | None = None,
- is_standard: bool = False,
- is_objective: bool = False,
- period_weights: Numeric_PS | None = None,
- share_from_temporal: Effect_TPS | Numeric_TPS | None = None,
- share_from_periodic: Effect_PS | Numeric_PS | None = None,
- minimum_temporal: Numeric_PS | None = None,
- maximum_temporal: Numeric_PS | None = None,
- minimum_periodic: Numeric_PS | None = None,
- maximum_periodic: Numeric_PS | None = None,
- minimum_per_hour: Numeric_TPS | None = None,
- maximum_per_hour: Numeric_TPS | None = None,
- minimum_total: Numeric_PS | None = None,
- maximum_total: Numeric_PS | None = None,
- minimum_over_periods: Numeric_S | None = None,
- maximum_over_periods: Numeric_S | None = None,
- **kwargs,
- ):
- super().__init__(id, meta_data=meta_data, **kwargs)
- self.unit = unit
- self.description = description
- self.is_standard = is_standard
-
+ id: str
+ unit: str = ''
+ description: str = ''
+ is_standard: bool = False
+ is_objective: bool = False
+ period_weights: Numeric_PS | None = None
+ share_from_temporal: Effect_TPS | Numeric_TPS | None = None
+ share_from_periodic: Effect_PS | Numeric_PS | None = None
+ minimum_temporal: Numeric_PS | None = None
+ maximum_temporal: Numeric_PS | None = None
+ minimum_periodic: Numeric_PS | None = None
+ maximum_periodic: Numeric_PS | None = None
+ minimum_per_hour: Numeric_TPS | None = None
+ maximum_per_hour: Numeric_TPS | None = None
+ minimum_total: Numeric_PS | None = None
+ maximum_total: Numeric_PS | None = None
+ minimum_over_periods: Numeric_S | None = None
+ maximum_over_periods: Numeric_S | None = None
+ meta_data: dict = field(default_factory=dict)
+ color: str | None = None
+
+ def __post_init__(self):
+ self.id = valid_id(self.id)
# Validate that Penalty cannot be set as objective
- if is_objective and id == PENALTY_EFFECT_ID:
+ if self.is_objective and self.id == PENALTY_EFFECT_ID:
raise ValueError(
f'The Penalty effect ("{PENALTY_EFFECT_ID}") cannot be set as the objective effect. '
f'Please use a different effect as the optimization objective.'
)
+ # Default to {} when None (no shares defined)
+ if self.share_from_temporal is None:
+ self.share_from_temporal = {}
+ if self.share_from_periodic is None:
+ self.share_from_periodic = {}
- self.is_objective = is_objective
- self.period_weights = period_weights
- # Share parameters accept Effect_* | Numeric_* unions (dict or single value).
- # Store as-is here; transform_data() will normalize via fit_effects_to_model_coords().
- # Default to {} when None (no shares defined).
- self.share_from_temporal = share_from_temporal if share_from_temporal is not None else {}
- self.share_from_periodic = share_from_periodic if share_from_periodic is not None else {}
-
- # Set attributes directly
- self.minimum_temporal = minimum_temporal
- self.maximum_temporal = maximum_temporal
- self.minimum_periodic = minimum_periodic
- self.maximum_periodic = maximum_periodic
- self.minimum_per_hour = minimum_per_hour
- self.maximum_per_hour = maximum_per_hour
- self.minimum_total = minimum_total
- self.maximum_total = maximum_total
- self.minimum_over_periods = minimum_over_periods
- self.maximum_over_periods = maximum_over_periods
-
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Link this effect to a FlowSystem.
-
- Elements use their id as prefix by default, ignoring the passed prefix.
- """
- super().link_to_flow_system(flow_system, self.id)
-
- def transform_data(self) -> None:
- self.minimum_per_hour = self._fit_coords(f'{self.prefix}|minimum_per_hour', self.minimum_per_hour)
- self.maximum_per_hour = self._fit_coords(f'{self.prefix}|maximum_per_hour', self.maximum_per_hour)
-
- self.share_from_temporal = self._fit_effect_coords(
- prefix=None,
- effect_values=self.share_from_temporal,
- suffix=f'(temporal)->{self.prefix}(temporal)',
- )
- self.share_from_periodic = self._fit_effect_coords(
- prefix=None,
- effect_values=self.share_from_periodic,
- suffix=f'(periodic)->{self.prefix}(periodic)',
- dims=['period', 'scenario'],
- )
-
- self.minimum_temporal = self._fit_coords(
- f'{self.prefix}|minimum_temporal', self.minimum_temporal, dims=['period', 'scenario']
- )
- self.maximum_temporal = self._fit_coords(
- f'{self.prefix}|maximum_temporal', self.maximum_temporal, dims=['period', 'scenario']
- )
- self.minimum_periodic = self._fit_coords(
- f'{self.prefix}|minimum_periodic', self.minimum_periodic, dims=['period', 'scenario']
- )
- self.maximum_periodic = self._fit_coords(
- f'{self.prefix}|maximum_periodic', self.maximum_periodic, dims=['period', 'scenario']
- )
- self.minimum_total = self._fit_coords(
- f'{self.prefix}|minimum_total', self.minimum_total, dims=['period', 'scenario']
- )
- self.maximum_total = self._fit_coords(
- f'{self.prefix}|maximum_total', self.maximum_total, dims=['period', 'scenario']
- )
- self.minimum_over_periods = self._fit_coords(
- f'{self.prefix}|minimum_over_periods', self.minimum_over_periods, dims=['scenario']
- )
- self.maximum_over_periods = self._fit_coords(
- f'{self.prefix}|maximum_over_periods', self.maximum_over_periods, dims=['scenario']
- )
- self.period_weights = self._fit_coords(
- f'{self.prefix}|period_weights', self.period_weights, dims=['period', 'scenario']
- )
-
- def validate_config(self) -> None:
- """Validate configuration consistency.
-
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- # Check that minimum_over_periods and maximum_over_periods require a period dimension
- if (
- self.minimum_over_periods is not None or self.maximum_over_periods is not None
- ) and self.flow_system.periods is None:
- raise PlausibilityError(
- f"Effect '{self.id}': minimum_over_periods and maximum_over_periods require "
- f"the FlowSystem to have a 'period' dimension. Please define periods when creating "
- f'the FlowSystem, or remove these constraints.'
- )
-
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config()."""
- self.validate_config()
+ def __repr__(self) -> str:
+ return fx_io.build_repr_from_init(self, excluded_params={'self', 'id', 'kwargs'}, skip_default_size=True)
class EffectsModel:
@@ -723,13 +638,13 @@ def _add_share_between_effects(self):
for target_effect in self.data.values():
target_id = target_effect.id
# 1. temporal: <- receiving temporal shares from other effects
- for source_effect, time_series in target_effect.share_from_temporal.items():
+ for source_effect, time_series in self.data.aligned_share_from_temporal(target_effect).items():
source_id = self.data[source_effect].id
source_per_timestep = self.get_per_timestep(source_id)
expr = (source_per_timestep * time_series).expand_dims(effect=[target_id], contributor=[source_id])
self.add_temporal_contribution(expr)
# 2. periodic: <- receiving periodic shares from other effects
- for source_effect, factor in target_effect.share_from_periodic.items():
+ for source_effect, factor in self.data.aligned_share_from_periodic(target_effect).items():
source_id = self.data[source_effect].id
source_periodic = self.get_periodic(source_id)
expr = (source_periodic * factor).expand_dims(effect=[target_id], contributor=[source_id])
@@ -832,22 +747,6 @@ def get_effect_id(eff: str | None) -> str:
return {get_effect_id(effect): value for effect, value in effect_values_user.items()}
return {self.standard_effect.id: effect_values_user}
- def validate_config(self) -> None:
- """Deprecated: Validation is now handled by EffectsData.validate().
-
- This method is kept for backwards compatibility but does nothing.
- Collection-level validation (cycles, unknown refs) is now in EffectsData._validate_share_structure().
- """
- pass
-
- def _plausibility_checks(self) -> None:
- """Deprecated: Legacy validation method.
-
- Kept for backwards compatibility but does nothing.
- Validation is now handled by EffectsData.validate().
- """
- pass
-
def __getitem__(self, effect: str | Effect | None) -> Effect:
"""
Get an effect by id, or return the standard effect if None is passed
diff --git a/flixopt/elements.py b/flixopt/elements.py
index 7513eb8f9..1b6753daa 100644
--- a/flixopt/elements.py
+++ b/flixopt/elements.py
@@ -5,8 +5,10 @@
from __future__ import annotations
import logging
+import warnings
+from dataclasses import dataclass, field
from functools import cached_property
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, ClassVar
import numpy as np
import pandas as pd
@@ -14,10 +16,10 @@
from . import io as fx_io
from .config import CONFIG
-from .core import PlausibilityError
from .features import (
MaskHelpers,
StatusBuilder,
+ fast_isnull,
fast_notnull,
sparse_multiply_sum,
sparse_weighted_sum,
@@ -35,6 +37,7 @@
TransmissionVarName,
TypeModel,
register_class_for_io,
+ valid_id,
)
if TYPE_CHECKING:
@@ -42,7 +45,6 @@
from .batched import BusesData, ComponentsData, ConvertersData, FlowsData, TransmissionsData
from .types import (
- Effect_TPS,
Numeric_PS,
Numeric_S,
Numeric_TPS,
@@ -92,87 +94,85 @@ def _add_prevent_simultaneous_constraints(
)
-@register_class_for_io
-class Component(Element):
- """
- Base class for all system components that transform, convert, or process flows.
+def _connect_flow(flow: Flow, component_id: str, is_input: bool) -> None:
+ """Connect a flow to its owning component.
- Components are the active elements in energy systems that define how input and output
- Flows interact with each other. They represent equipment, processes, or logical
- operations that transform energy or materials between different states, carriers,
- or locations.
+ Sets component name, defaults flow_id to bus name, and sets is_input_in_component.
+ """
+ if flow.flow_id is None:
+ flow.flow_id = valid_id(flow.bus if isinstance(flow.bus, str) else str(flow.bus))
+ if flow.component not in ('UnknownComponent', component_id):
+ raise ValueError(
+ f'Flow "{flow.id}" already assigned to component "{flow.component}". Cannot attach to "{component_id}".'
+ )
+ flow.component = component_id
+ flow.is_input_in_component = is_input
- Components serve as connection points between Buses through their associated Flows,
- enabling the modeling of complex energy system topologies and operational constraints.
- Args:
- id: The id of the Element. Used to identify it in the FlowSystem.
- inputs: list of input Flows feeding into the component. These represent
- energy/material consumption by the component.
- outputs: list of output Flows leaving the component. These represent
- energy/material production by the component.
- status_parameters: Defines binary operation constraints and costs when the
- component has discrete active/inactive states. Creates binary variables for all
- connected Flows. For better performance, prefer defining StatusParameters
- on individual Flows when possible.
- prevent_simultaneous_flows: list of Flows that cannot be active simultaneously.
- Creates binary variables to enforce mutual exclusivity. Use sparingly as
- it increases computational complexity.
- meta_data: Used to store additional information. Not used internally but saved
- in results. Only use Python native types.
+def _connect_and_validate_flows(
+ component_id: str,
+ input_flows: list[Flow],
+ output_flows: list[Flow],
+ prevent_simultaneous: list[Flow] | None = None,
+) -> None:
+ """Connect flows and validate uniqueness. Shared by all component-like classes."""
+ for flow in input_flows:
+ _connect_flow(flow, component_id, is_input=True)
+ for flow in output_flows:
+ _connect_flow(flow, component_id, is_input=False)
+
+ all_flows = input_flows + output_flows
+ all_ids = [f.flow_id for f in all_flows]
+ if len(set(all_ids)) != len(all_ids):
+ dupes = {fid for fid in all_ids if all_ids.count(fid) > 1}
+ raise ValueError(f'Flow names must be unique! "{component_id}" got 2 or more of: {dupes}')
+
+ if prevent_simultaneous:
+ # Deduplicate while preserving order
+ seen = set()
+ prevent_simultaneous[:] = [f for f in prevent_simultaneous if id(f) not in seen and not seen.add(id(f))]
+ local = set(all_flows)
+ foreign = [f for f in prevent_simultaneous if f not in local]
+ if foreign:
+ names = ', '.join(f.id for f in foreign)
+ raise ValueError(
+ f'prevent_simultaneous_flows for "{component_id}" must reference its own flows. '
+ f'Foreign flows detected: {names}'
+ )
- Note:
- Component operational state is determined by its connected Flows:
- - Component is "active" if ANY of its Flows is active (flow_rate > 0)
- - Component is "inactive" only when ALL Flows are inactive (flow_rate = 0)
- Binary variables and constraints:
- - status_parameters creates binary variables for ALL connected Flows
- - prevent_simultaneous_flows creates binary variables for specified Flows
- - For better computational performance, prefer Flow-level StatusParameters
+@register_class_for_io
+@dataclass(eq=False, repr=False)
+class Component(Element):
+ """Deprecated base class for flow-owning elements.
- Component is an abstract base class. In practice, use specialized subclasses:
- - LinearConverter: Linear input/output relationships
- - Storage: Temporal energy/material storage
- - Transmission: Transport between locations
- - Source/Sink: System boundaries
+ Use Converter, Port, or Storage directly instead. Component is kept only
+ as an internal base class for Transmission and the deprecated Source/Sink/SourceAndSink.
+ Args:
+ id: The id of the Element. Used to identify it in the FlowSystem.
+ inputs: list of input Flows.
+ outputs: list of output Flows.
+ status_parameters: Binary operation constraints and costs.
+ prevent_simultaneous_flows: Flows that cannot be active simultaneously.
+ meta_data: Additional metadata.
"""
- def __init__(
- self,
- id: str | None = None,
- inputs: list[Flow] | dict[str, Flow] | None = None,
- outputs: list[Flow] | dict[str, Flow] | None = None,
- status_parameters: StatusParameters | None = None,
- prevent_simultaneous_flows: list[Flow] | None = None,
- meta_data: dict | None = None,
- color: str | None = None,
- **kwargs,
- ):
- super().__init__(id, meta_data=meta_data, color=color, **kwargs)
- self.status_parameters = status_parameters
- if isinstance(prevent_simultaneous_flows, dict):
- prevent_simultaneous_flows = list(prevent_simultaneous_flows.values())
- self.prevent_simultaneous_flows: list[Flow] = prevent_simultaneous_flows or []
-
- # IdLists serialize as dicts, but constructor expects lists
- if isinstance(inputs, dict):
- inputs = list(inputs.values())
- if isinstance(outputs, dict):
- outputs = list(outputs.values())
-
- _inputs = inputs or []
- _outputs = outputs or []
-
- # Check uniqueness on raw lists (before connecting)
- all_flow_ids = [flow.flow_id for flow in _inputs + _outputs]
- if len(set(all_flow_ids)) != len(all_flow_ids):
- duplicates = {fid for fid in all_flow_ids if all_flow_ids.count(fid) > 1}
- raise ValueError(f'Flow names must be unique! "{self.id}" got 2 or more of: {duplicates}')
+ id: str
+ inputs: list[Flow] = field(default_factory=list)
+ outputs: list[Flow] = field(default_factory=list)
+ status_parameters: StatusParameters | None = None
+ prevent_simultaneous_flows: list[Flow] = field(default_factory=list)
+ meta_data: dict = field(default_factory=dict)
+ color: str | None = None
+
+ def __post_init__(self):
+ self.id = valid_id(self.id)
- # Connect flows (sets component name) before creating IdLists
- self._connect_flows(_inputs, _outputs)
+ _inputs = self.inputs or []
+ _outputs = self.outputs or []
+
+ _connect_and_validate_flows(self.id, _inputs, _outputs, self.prevent_simultaneous_flows)
# Now flow.id is qualified, so IdList can key by it
self.inputs: IdList = flow_id_list(_inputs, display_name='inputs')
@@ -183,26 +183,6 @@ def flows(self) -> IdList:
"""All flows (inputs and outputs) as an IdList."""
return self.inputs + self.outputs
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested Interface objects and flows.
-
- Elements use their id_full as prefix by default, ignoring the passed prefix.
- """
- super().link_to_flow_system(flow_system, self.id)
- if self.status_parameters is not None:
- self.status_parameters.link_to_flow_system(flow_system, self._sub_prefix('status_parameters'))
- for flow in self.flows.values():
- flow.link_to_flow_system(flow_system)
-
- def transform_data(self) -> None:
- self._propagate_status_parameters()
-
- if self.status_parameters is not None:
- self.status_parameters.transform_data()
-
- for flow in self.flows.values():
- flow.transform_data()
-
def _propagate_status_parameters(self) -> None:
"""Propagate status parameters from this component to flows that need them.
@@ -216,12 +196,10 @@ def _propagate_status_parameters(self) -> None:
for flow in self.flows.values():
if flow.status_parameters is None:
flow.status_parameters = StatusParameters()
- flow.status_parameters.link_to_flow_system(self._flow_system, f'{flow.id}|status_parameters')
if self.prevent_simultaneous_flows:
for flow in self.prevent_simultaneous_flows:
if flow.status_parameters is None:
flow.status_parameters = StatusParameters()
- flow.status_parameters.link_to_flow_system(self._flow_system, f'{flow.id}|status_parameters')
def _check_unique_flow_ids(self, inputs: list = None, outputs: list = None):
if inputs is None:
@@ -234,67 +212,6 @@ def _check_unique_flow_ids(self, inputs: list = None, outputs: list = None):
duplicates = {fid for fid in all_flow_ids if all_flow_ids.count(fid) > 1}
raise ValueError(f'Flow names must be unique! "{self.id}" got 2 or more of: {duplicates}')
- def validate_config(self) -> None:
- """Validate configuration consistency.
-
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- self._check_unique_flow_ids()
-
- # Component with status_parameters requires all flows to have sizes set
- # (status_parameters are propagated to flows in _do_modeling, which need sizes for big-M constraints)
- if self.status_parameters is not None:
- flows_without_size = [flow.flow_id for flow in self.flows.values() if flow.size is None]
- if flows_without_size:
- raise PlausibilityError(
- f'Component "{self.id}" has status_parameters, but the following flows have no size: '
- f'{flows_without_size}. All flows need explicit sizes when the component uses status_parameters '
- f'(required for big-M constraints).'
- )
-
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config()."""
- self.validate_config()
-
- def _connect_flows(self, inputs=None, outputs=None):
- if inputs is None:
- inputs = list(self.inputs.values())
- if outputs is None:
- outputs = list(self.outputs.values())
- # Inputs
- for flow in inputs:
- if flow.component not in ('UnknownComponent', self.id):
- raise ValueError(
- f'Flow "{flow.id}" already assigned to component "{flow.component}". Cannot attach to "{self.id}".'
- )
- flow.component = self.id
- flow.is_input_in_component = True
- # Outputs
- for flow in outputs:
- if flow.component not in ('UnknownComponent', self.id):
- raise ValueError(
- f'Flow "{flow.id}" already assigned to component "{flow.component}". Cannot attach to "{self.id}".'
- )
- flow.component = self.id
- flow.is_input_in_component = False
-
- # Validate prevent_simultaneous_flows: only allow local flows
- if self.prevent_simultaneous_flows:
- # Deduplicate while preserving order
- seen = set()
- self.prevent_simultaneous_flows = [
- f for f in self.prevent_simultaneous_flows if id(f) not in seen and not seen.add(id(f))
- ]
- local = set(inputs + outputs)
- foreign = [f for f in self.prevent_simultaneous_flows if f not in local]
- if foreign:
- names = ', '.join(f.id for f in foreign)
- raise ValueError(
- f'prevent_simultaneous_flows for "{self.id}" must reference its own flows. '
- f'Foreign flows detected: {names}'
- )
-
def __repr__(self) -> str:
"""Return string representation with flow information."""
return fx_io.build_repr_from_init(
@@ -303,6 +220,7 @@ def __repr__(self) -> str:
@register_class_for_io
+@dataclass(eq=False, repr=False)
class Bus(Element):
"""
Buses represent nodal balances between flow rates, serving as connection points.
@@ -316,7 +234,7 @@ class Bus(Element):
See
Args:
- label: The label of the Element. Used to identify it in the FlowSystem.
+ id: The id of the Element. Used to identify it in the FlowSystem.
carrier: Name of the energy/material carrier type (e.g., 'electricity', 'heat', 'gas').
Carriers are registered via ``flow_system.add_carrier()`` or available as
predefined defaults in CONFIG.Carriers. Used for automatic color assignment in plots.
@@ -330,8 +248,8 @@ class Bus(Element):
Using predefined carrier names:
```python
- electricity_bus = Bus(label='main_grid', carrier='electricity')
- heat_bus = Bus(label='district_heating', carrier='heat')
+ electricity_bus = Bus(id='main_grid', carrier='electricity')
+ heat_bus = Bus(id='district_heating', carrier='heat')
```
Registering custom carriers on FlowSystem:
@@ -341,14 +259,14 @@ class Bus(Element):
fs = fx.FlowSystem(timesteps)
fs.add_carrier(fx.Carrier('biogas', '#228B22', 'kW'))
- biogas_bus = fx.Bus(label='biogas_network', carrier='biogas')
+ biogas_bus = fx.Bus(id='biogas_network', carrier='biogas')
```
Heat network with penalty for imbalances:
```python
heat_bus = Bus(
- label='district_heating',
+ id='district_heating',
carrier='heat',
imbalance_penalty_per_flow_hour=1000,
)
@@ -366,71 +284,54 @@ class Bus(Element):
by the FlowSystem during system setup.
"""
- def __init__(
- self,
- id: str | None = None,
- carrier: str | None = None,
- imbalance_penalty_per_flow_hour: Numeric_TPS | None = None,
- meta_data: dict | None = None,
- **kwargs,
- ):
- # Handle Bus-specific deprecated kwarg before passing kwargs to super
- old_penalty = kwargs.pop('excess_penalty_per_flow_hour', None)
- super().__init__(id, meta_data=meta_data, **kwargs)
- if old_penalty is not None:
- imbalance_penalty_per_flow_hour = self._handle_deprecated_kwarg(
- {'excess_penalty_per_flow_hour': old_penalty},
- 'excess_penalty_per_flow_hour',
- 'imbalance_penalty_per_flow_hour',
- imbalance_penalty_per_flow_hour,
+ _io_exclude: ClassVar[set[str]] = {'excess_penalty_per_flow_hour'}
+
+ id: str
+ carrier: str | None = None
+ imbalance_penalty_per_flow_hour: Numeric_TPS | None = None
+ excess_penalty_per_flow_hour: Numeric_TPS | None = field(default=None, repr=False)
+ meta_data: dict = field(default_factory=dict)
+ color: str | None = None
+ # Internal state (populated by FlowSystem._connect_network)
+ inputs: IdList = field(default_factory=lambda: flow_id_list(display_name='inputs'), init=False, repr=False)
+ outputs: IdList = field(default_factory=lambda: flow_id_list(display_name='outputs'), init=False, repr=False)
+
+ def __post_init__(self):
+ self.id = valid_id(self.id)
+ # Handle deprecated excess_penalty_per_flow_hour
+ if self.excess_penalty_per_flow_hour is not None:
+ from .config import DEPRECATION_REMOVAL_VERSION
+
+ warnings.warn(
+ f'The use of the "excess_penalty_per_flow_hour" argument is deprecated. '
+ f'Use the "imbalance_penalty_per_flow_hour" argument instead. '
+ f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
+ DeprecationWarning,
+ stacklevel=2,
)
- self.carrier = carrier.lower() if carrier else None # Store as lowercase string
- self.imbalance_penalty_per_flow_hour = imbalance_penalty_per_flow_hour
- self.inputs: IdList = flow_id_list(display_name='inputs')
- self.outputs: IdList = flow_id_list(display_name='outputs')
+ if self.imbalance_penalty_per_flow_hour is not None:
+ raise ValueError(
+ 'Either excess_penalty_per_flow_hour or imbalance_penalty_per_flow_hour can be specified, but not both.'
+ )
+ self.imbalance_penalty_per_flow_hour = self.excess_penalty_per_flow_hour
+ self.excess_penalty_per_flow_hour = None
+ if self.carrier:
+ self.carrier = self.carrier.lower()
@property
def flows(self) -> IdList:
"""All flows (inputs and outputs) as an IdList."""
return self.inputs + self.outputs
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested flows.
-
- Elements use their id_full as prefix by default, ignoring the passed prefix.
- """
- super().link_to_flow_system(flow_system, self.id)
- for flow in self.flows.values():
- flow.link_to_flow_system(flow_system)
-
- def transform_data(self) -> None:
- self.imbalance_penalty_per_flow_hour = self._fit_coords(
- f'{self.prefix}|imbalance_penalty_per_flow_hour', self.imbalance_penalty_per_flow_hour
- )
-
- def validate_config(self) -> None:
- """Validate configuration consistency.
-
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- if len(self.inputs) == 0 and len(self.outputs) == 0:
- raise ValueError(f'Bus "{self.id}" has no Flows connected to it. Please remove it from the FlowSystem')
-
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config().
-
- DataArray-based checks (imbalance_penalty warning) moved to BusesData.validate().
- """
- self.validate_config()
-
@property
def allows_imbalance(self) -> bool:
return self.imbalance_penalty_per_flow_hour is not None
def __repr__(self) -> str:
"""Return string representation."""
- return super().__repr__() + fx_io.format_flow_details(self)
+ return fx_io.build_repr_from_init(
+ self, excluded_params={'self', 'id', 'kwargs'}, skip_default_size=True
+ ) + fx_io.format_flow_details(self)
@register_class_for_io
@@ -447,6 +348,7 @@ def __init__(self):
@register_class_for_io
+@dataclass(eq=False, repr=False)
class Flow(Element):
"""Define a directed flow of energy or material between bus and component.
@@ -470,7 +372,7 @@ class Flow(Element):
See
Args:
- bus: Bus this flow connects to (string id). First positional argument.
+ bus: Bus this flow connects to (string id).
flow_id: Unique flow identifier within its component. Defaults to the bus name.
size: Flow capacity. Scalar, InvestParameters, or None (unbounded).
relative_minimum: Minimum flow rate as fraction of size (0-1). Default: 0.
@@ -496,8 +398,7 @@ class Flow(Element):
```python
generator_output = Flow(
- 'electricity_grid',
- flow_id='electricity_out',
+ bus='electricity_grid',
size=100, # 100 MW capacity
relative_minimum=0.4, # Cannot operate below 40 MW
effects_per_flow_hour={'fuel_cost': 45, 'co2_emissions': 0.8},
@@ -508,7 +409,7 @@ class Flow(Element):
```python
battery_flow = Flow(
- 'electricity_grid',
+ bus='electricity_grid',
size=InvestParameters(
minimum_size=10, # Minimum 10 MWh
maximum_size=100, # Maximum 100 MWh
@@ -521,8 +422,7 @@ class Flow(Element):
```python
heat_pump = Flow(
- 'heating_network',
- flow_id='heat_output',
+ bus='heating_network',
size=50, # 50 kW thermal
relative_minimum=0.3, # Minimum 15 kW output when active
effects_per_flow_hour={'electricity_cost': 25, 'maintenance': 2},
@@ -539,8 +439,7 @@ class Flow(Element):
```python
solar_generation = Flow(
- 'electricity_grid',
- flow_id='solar_power',
+ bus='electricity_grid',
size=25, # 25 MW installed capacity
fixed_relative_profile=np.array([0, 0.1, 0.4, 0.8, 0.9, 0.7, 0.3, 0.1, 0]),
effects_per_flow_hour={'maintenance_costs': 5}, # €5/MWh maintenance
@@ -551,8 +450,7 @@ class Flow(Element):
```python
production_line = Flow(
- 'product_market',
- flow_id='product_output',
+ bus='product_market',
size=1000, # 1000 units/hour capacity
load_factor_min=0.6, # Must achieve 60% annual utilization
load_factor_max=0.85, # Cannot exceed 85% for maintenance
@@ -582,286 +480,66 @@ class Flow(Element):
"""
- def __init__(
- self,
- *args,
- bus: str | None = None,
- flow_id: str | None = None,
- size: Numeric_PS | InvestParameters | None = None,
- fixed_relative_profile: Numeric_TPS | None = None,
- relative_minimum: Numeric_TPS = 0,
- relative_maximum: Numeric_TPS = 1,
- effects_per_flow_hour: Effect_TPS | Numeric_TPS | None = None,
- status_parameters: StatusParameters | None = None,
- flow_hours_max: Numeric_PS | None = None,
- flow_hours_min: Numeric_PS | None = None,
- flow_hours_max_over_periods: Numeric_S | None = None,
- flow_hours_min_over_periods: Numeric_S | None = None,
- load_factor_min: Numeric_PS | None = None,
- load_factor_max: Numeric_PS | None = None,
- previous_flow_rate: Scalar | list[Scalar] | None = None,
- meta_data: dict | None = None,
- label: str | None = None,
- id: str | None = None,
- **kwargs,
- ):
- # --- Resolve positional args + deprecation bridge ---
- import warnings
-
- from .config import DEPRECATION_REMOVAL_VERSION
-
- # Handle deprecated 'id' kwarg (use flow_id instead)
- if id is not None:
- warnings.warn(
- f'Flow(id=...) is deprecated. Use Flow(flow_id=...) instead. '
- f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
- DeprecationWarning,
- stacklevel=2,
- )
- if flow_id is not None:
- raise ValueError('Either id or flow_id can be specified, but not both.')
- flow_id = id
-
- if len(args) == 2:
- # Old API: Flow(label, bus)
- warnings.warn(
- f'Flow(label, bus) positional form is deprecated. '
- f'Use Flow(bus, flow_id=...) instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
- DeprecationWarning,
- stacklevel=2,
- )
- if flow_id is None and label is None:
- flow_id = args[0]
- if bus is None:
- bus = args[1]
- elif len(args) == 1:
- if bus is not None:
- # Old API: Flow(label, bus=...)
- warnings.warn(
- f'Flow(label, bus=...) positional form is deprecated. '
- f'Use Flow(bus, flow_id=...) instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
- DeprecationWarning,
- stacklevel=2,
- )
- if flow_id is None and label is None:
- flow_id = args[0]
- else:
- # New API: Flow(bus) — bus is the positional arg
- bus = args[0]
- elif len(args) > 2:
- raise TypeError(f'Flow() takes at most 2 positional arguments ({len(args)} given)')
-
- # Handle deprecated label kwarg
- if label is not None:
- warnings.warn(
- f'The "label" argument is deprecated. Use "flow_id" instead. '
- f'Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
- DeprecationWarning,
- stacklevel=2,
- )
- if flow_id is not None:
- raise ValueError('Either label or flow_id can be specified, but not both.')
- flow_id = label
-
- # Default flow_id to bus name
- if flow_id is None:
- if bus is None:
- raise TypeError('Flow() requires a bus argument.')
- flow_id = bus if isinstance(bus, str) else str(bus)
-
- if bus is None:
- raise TypeError('Flow() requires a bus argument.')
-
- super().__init__(flow_id, meta_data=meta_data, **kwargs)
- self.size = size
- self.relative_minimum = relative_minimum
- self.relative_maximum = relative_maximum
- self.fixed_relative_profile = fixed_relative_profile
-
- self.load_factor_min = load_factor_min
- self.load_factor_max = load_factor_max
-
- self.effects_per_flow_hour = effects_per_flow_hour if effects_per_flow_hour is not None else {}
- self.flow_hours_max = flow_hours_max
- self.flow_hours_min = flow_hours_min
- self.flow_hours_max_over_periods = flow_hours_max_over_periods
- self.flow_hours_min_over_periods = flow_hours_min_over_periods
- self.status_parameters = status_parameters
-
- self.previous_flow_rate = previous_flow_rate
-
- self.component: str = 'UnknownComponent'
- self.is_input_in_component: bool | None = None
- if isinstance(bus, Bus):
+ bus: str
+ flow_id: str | None = None
+ size: Numeric_PS | InvestParameters | None = None
+ relative_minimum: Numeric_TPS = 0
+ relative_maximum: Numeric_TPS = 1
+ fixed_relative_profile: Numeric_TPS | None = None
+ effects_per_flow_hour: Numeric_TPS | dict | None = None
+ status_parameters: StatusParameters | None = None
+ flow_hours_max: Numeric_PS | None = None
+ flow_hours_min: Numeric_PS | None = None
+ flow_hours_max_over_periods: Numeric_S | None = None
+ flow_hours_min_over_periods: Numeric_S | None = None
+ load_factor_min: Numeric_PS | None = None
+ load_factor_max: Numeric_PS | None = None
+ previous_flow_rate: Scalar | list[Scalar] | None = None
+ meta_data: dict = field(default_factory=dict)
+ color: str | None = None
+ # Internal state (not user-facing)
+ component: str = field(default='UnknownComponent', init=False)
+ is_input_in_component: bool | None = field(default=None, init=False)
+
+ def __post_init__(self):
+ if isinstance(self.bus, Bus):
raise TypeError(
- f'Bus {bus.id} is passed as a Bus object to Flow {self.id}. '
+ f'Bus {self.bus.id} is passed as a Bus object to Flow {self.flow_id or self.bus}. '
f'This is no longer supported. Add the Bus to the FlowSystem and pass its id (string) to the Flow.'
)
- self.bus = bus
-
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested Interface objects.
-
- Elements use their id_full as prefix by default, ignoring the passed prefix.
- """
- super().link_to_flow_system(flow_system, self.id)
- if self.status_parameters is not None:
- self.status_parameters.link_to_flow_system(flow_system, self._sub_prefix('status_parameters'))
- if isinstance(self.size, InvestParameters):
- self.size.link_to_flow_system(flow_system, self._sub_prefix('InvestParameters'))
-
- def transform_data(self) -> None:
- self.relative_minimum = self._fit_coords(f'{self.prefix}|relative_minimum', self.relative_minimum)
- self.relative_maximum = self._fit_coords(f'{self.prefix}|relative_maximum', self.relative_maximum)
- self.fixed_relative_profile = self._fit_coords(
- f'{self.prefix}|fixed_relative_profile', self.fixed_relative_profile
- )
- self.effects_per_flow_hour = self._fit_effect_coords(self.prefix, self.effects_per_flow_hour, 'per_flow_hour')
- self.flow_hours_max = self._fit_coords(
- f'{self.prefix}|flow_hours_max', self.flow_hours_max, dims=['period', 'scenario']
- )
- self.flow_hours_min = self._fit_coords(
- f'{self.prefix}|flow_hours_min', self.flow_hours_min, dims=['period', 'scenario']
- )
- self.flow_hours_max_over_periods = self._fit_coords(
- f'{self.prefix}|flow_hours_max_over_periods', self.flow_hours_max_over_periods, dims=['scenario']
- )
- self.flow_hours_min_over_periods = self._fit_coords(
- f'{self.prefix}|flow_hours_min_over_periods', self.flow_hours_min_over_periods, dims=['scenario']
- )
- self.load_factor_max = self._fit_coords(
- f'{self.prefix}|load_factor_max', self.load_factor_max, dims=['period', 'scenario']
- )
- self.load_factor_min = self._fit_coords(
- f'{self.prefix}|load_factor_min', self.load_factor_min, dims=['period', 'scenario']
- )
-
- if self.status_parameters is not None:
- self.status_parameters.transform_data()
- if isinstance(self.size, InvestParameters):
- self.size.transform_data()
- elif self.size is not None:
- self.size = self._fit_coords(f'{self.prefix}|size', self.size, dims=['period', 'scenario'])
-
- def validate_config(self) -> None:
- """Validate configuration consistency.
-
- Called BEFORE transformation via FlowSystem._run_config_validation().
- These are simple checks that don't require DataArray operations.
- """
- # Size is required when using StatusParameters (for big-M constraints)
- if self.status_parameters is not None and self.size is None:
- raise PlausibilityError(
- f'Flow "{self.id}" has status_parameters but no size defined. '
- f'A size is required when using status_parameters to bound the flow rate.'
- )
-
- if self.size is None and self.fixed_relative_profile is not None:
- raise PlausibilityError(
- f'Flow "{self.id}" has a fixed_relative_profile but no size defined. '
- f'A size is required because flow_rate = size * fixed_relative_profile.'
- )
-
- # Size is required for load factor constraints (total_flow_hours / size)
- if self.size is None and self.load_factor_min is not None:
- raise PlausibilityError(
- f'Flow "{self.id}" has load_factor_min but no size defined. '
- f'A size is required because the constraint is total_flow_hours >= size * load_factor_min * hours.'
- )
-
- if self.size is None and self.load_factor_max is not None:
- raise PlausibilityError(
- f'Flow "{self.id}" has load_factor_max but no size defined. '
- f'A size is required because the constraint is total_flow_hours <= size * load_factor_max * hours.'
- )
-
- # Validate previous_flow_rate type
- if self.previous_flow_rate is not None:
- if not any(
- [
- isinstance(self.previous_flow_rate, np.ndarray) and self.previous_flow_rate.ndim == 1,
- isinstance(self.previous_flow_rate, (int, float, list)),
- ]
- ):
- raise TypeError(
- f'previous_flow_rate must be None, a scalar, a list of scalars or a 1D-numpy-array. '
- f'Got {type(self.previous_flow_rate)}. '
- f'Different values in different periods or scenarios are not yet supported.'
- )
-
- # Warning: fixed_relative_profile + status_parameters is unusual
- if self.fixed_relative_profile is not None and self.status_parameters is not None:
- logger.warning(
- f'Flow {self.id} has both a fixed_relative_profile and status_parameters. '
- f'This will allow the flow to be switched active and inactive, effectively differing from the fixed_flow_rate.'
- )
-
- def _plausibility_checks(self) -> None:
- """Legacy validation method - delegates to validate_config().
-
- DataArray-based validation is now done in FlowsData.validate().
- """
- self.validate_config()
-
- @property
- def flow_id(self) -> str:
- """The short flow identifier (e.g. ``'Heat'``).
-
- This is the user-facing name. Defaults to the bus name if not set explicitly.
- """
- return self._short_id
-
- @flow_id.setter
- def flow_id(self, value: str) -> None:
- self._short_id = value
+ if self.flow_id is not None:
+ self.flow_id = valid_id(self.flow_id)
@property
def id(self) -> str:
"""The qualified identifier: ``component(flow_id)``."""
- return f'{self.component}({self._short_id})'
-
- @id.setter
- def id(self, value: str) -> None:
- self._short_id = value
-
- # =========================================================================
- # Type-Level Model Access (for FlowsModel integration)
- # =========================================================================
-
- _flows_model: FlowsModel | None = None # Set by FlowsModel during creation
-
- def set_flows_model(self, flows_model: FlowsModel) -> None:
- """Set reference to the type-level FlowsModel.
-
- Called by FlowsModel during initialization to enable element access.
- """
- self._flows_model = flows_model
+ return f'{self.component}({self.flow_id})'
@property
- def flow_rate_from_type_model(self) -> linopy.Variable | None:
- """Get flow_rate from FlowsModel (if using type-level modeling).
+ def label(self) -> str:
+ """Deprecated: Use ``flow_id`` instead."""
+ from .config import DEPRECATION_REMOVAL_VERSION
- Returns the slice of the batched variable for this specific flow.
- """
- if self._flows_model is None:
- return None
- return self._flows_model.get_variable(FlowVarName.RATE, self.id)
+ warnings.warn(
+ f'Accessing ".label" is deprecated. Use ".flow_id" instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.flow_id
- @property
- def total_flow_hours_from_type_model(self) -> linopy.Variable | None:
- """Get total_flow_hours from FlowsModel (if using type-level modeling)."""
- if self._flows_model is None:
- return None
- return self._flows_model.get_variable(FlowVarName.TOTAL_FLOW_HOURS, self.id)
+ @label.setter
+ def label(self, value: str) -> None:
+ from .config import DEPRECATION_REMOVAL_VERSION
- @property
- def status_from_type_model(self) -> linopy.Variable | None:
- """Get status from FlowsModel (if using type-level modeling)."""
- if self._flows_model is None or FlowVarName.STATUS not in self._flows_model:
- return None
- if self.id not in self._flows_model.status_ids:
- return None
- return self._flows_model.get_variable(FlowVarName.STATUS, self.id)
+ warnings.warn(
+ f'Setting ".label" is deprecated. Use ".flow_id" instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.flow_id = value
+
+ def __repr__(self) -> str:
+ return fx_io.build_repr_from_init(self, excluded_params={'self', 'id'}, skip_default_size=True)
@property
def size_is_fixed(self) -> bool:
@@ -907,12 +585,20 @@ class FlowsModel(TypeModel):
@cached_property
def rate(self) -> linopy.Variable:
"""(flow, time, ...) - flow rate variable for ALL flows."""
- return self.add_variables(
- FlowVarName.RATE,
- lower=self.data.absolute_lower_bounds,
- upper=self.data.absolute_upper_bounds,
- dims=None,
- )
+ from .datasets import _ensure_canonical_order
+
+ ds = self.data.ds
+
+ # Lower bounds (inline from former FlowsData.absolute_lower_bounds)
+ base_lower = ds['effective_relative_minimum'] * ds['effective_size_lower']
+ is_zero = ds['has_status'] | ds['has_optional_investment'] | fast_isnull(ds['effective_size_lower'])
+ lower = _ensure_canonical_order(base_lower.where(~is_zero, 0.0).fillna(0.0))
+
+ # Upper bounds (inline from former FlowsData.absolute_upper_bounds)
+ base_upper = ds['effective_relative_maximum'] * ds['effective_size_upper']
+ upper = _ensure_canonical_order(base_upper.where(fast_notnull(ds['effective_size_upper']), np.inf))
+
+ return self.add_variables(FlowVarName.RATE, lower=lower, upper=upper, dims=None)
@cached_property
def status(self) -> linopy.Variable | None:
@@ -922,7 +608,7 @@ def status(self) -> linopy.Variable | None:
return self.add_variables(
FlowVarName.STATUS,
dims=None,
- mask=self.data.has_status,
+ mask=self.data.ds['has_status'],
binary=True,
)
@@ -931,12 +617,13 @@ def size(self) -> linopy.Variable | None:
"""(flow, period, scenario) - size variable, masked to flows with investment."""
if not self.data.with_investment:
return None
+ ds = self.data.ds
return self.add_variables(
FlowVarName.SIZE,
- lower=self.data.size_minimum_all,
- upper=self.data.size_maximum_all,
+ lower=ds['size_minimum_all'],
+ upper=ds['size_maximum_all'],
dims=('period', 'scenario'),
- mask=self.data.has_investment,
+ mask=ds['has_investment'],
)
@cached_property
@@ -947,7 +634,7 @@ def invested(self) -> linopy.Variable | None:
return self.add_variables(
FlowVarName.INVESTED,
dims=('period', 'scenario'),
- mask=self.data.has_optional_investment,
+ mask=self.data.ds['has_optional_investment'],
binary=True,
)
@@ -997,12 +684,13 @@ def constraint_investment(self) -> None:
# Optional investment: size controlled by invested binary
if self.invested is not None:
+ ds = self.data.ds
InvestmentBuilder.add_optional_size_bounds(
model=self.model,
size_var=self.size,
invested_var=self.invested,
- min_bounds=self.data.optional_investment_size_minimum,
- max_bounds=self.data.optional_investment_size_maximum,
+ min_bounds=ds.get('optional_investment_size_minimum'),
+ max_bounds=ds.get('optional_investment_size_maximum'),
element_ids=self.data.with_optional_investment,
dim_name=dim,
name_prefix='flow',
@@ -1025,22 +713,24 @@ def constraint_investment(self) -> None:
def constraint_flow_hours(self) -> None:
"""Constrain sum_temporal(rate) for flows with flow_hours bounds."""
dim = self.dim_name
+ ds = self.data.ds
# Min constraint
- if self.data.flow_hours_minimum is not None:
- flow_ids = self.data.with_flow_hours_min
+ flow_ids = self.data.with_flow_hours_min
+ if flow_ids:
hours = self.model.sum_temporal(self.rate.sel({dim: flow_ids}))
- self.add_constraints(hours >= self.data.flow_hours_minimum, name='hours_min')
+ self.add_constraints(hours >= ds['flow_hours_minimum'].sel(flow=flow_ids), name='hours_min')
# Max constraint
- if self.data.flow_hours_maximum is not None:
- flow_ids = self.data.with_flow_hours_max
+ flow_ids = self.data.with_flow_hours_max
+ if flow_ids:
hours = self.model.sum_temporal(self.rate.sel({dim: flow_ids}))
- self.add_constraints(hours <= self.data.flow_hours_maximum, name='hours_max')
+ self.add_constraints(hours <= ds['flow_hours_maximum'].sel(flow=flow_ids), name='hours_max')
def constraint_flow_hours_over_periods(self) -> None:
"""Constrain weighted sum of hours across periods."""
dim = self.dim_name
+ ds = self.data.ds
def compute_hours_over_periods(flow_ids: list[str]):
rate_subset = self.rate.sel({dim: flow_ids})
@@ -1051,36 +741,41 @@ def compute_hours_over_periods(flow_ids: list[str]):
return hours_per_period
# Min constraint
- if self.data.flow_hours_minimum_over_periods is not None:
- flow_ids = self.data.with_flow_hours_over_periods_min
+ flow_ids = self.data.with_flow_hours_over_periods_min
+ if flow_ids:
hours = compute_hours_over_periods(flow_ids)
- self.add_constraints(hours >= self.data.flow_hours_minimum_over_periods, name='hours_over_periods_min')
+ self.add_constraints(
+ hours >= ds['flow_hours_minimum_over_periods'].sel(flow=flow_ids), name='hours_over_periods_min'
+ )
# Max constraint
- if self.data.flow_hours_maximum_over_periods is not None:
- flow_ids = self.data.with_flow_hours_over_periods_max
+ flow_ids = self.data.with_flow_hours_over_periods_max
+ if flow_ids:
hours = compute_hours_over_periods(flow_ids)
- self.add_constraints(hours <= self.data.flow_hours_maximum_over_periods, name='hours_over_periods_max')
+ self.add_constraints(
+ hours <= ds['flow_hours_maximum_over_periods'].sel(flow=flow_ids), name='hours_over_periods_max'
+ )
def constraint_load_factor(self) -> None:
"""Load factor min/max constraints for flows that have them."""
dim = self.dim_name
+ ds = self.data.ds
total_time = self.model.temporal_weight.sum(self.model.temporal_dims)
# Min constraint: hours >= total_time * load_factor_min * size
- if self.data.load_factor_minimum is not None:
- flow_ids = self.data.with_load_factor_min
+ flow_ids = self.data.with_load_factor_min
+ if flow_ids:
hours = self.model.sum_temporal(self.rate.sel({dim: flow_ids}))
- size = self.data.effective_size_lower.sel({dim: flow_ids}).fillna(0)
- rhs = total_time * self.data.load_factor_minimum * size
+ size = ds['effective_size_lower'].sel({dim: flow_ids}).fillna(0)
+ rhs = total_time * ds['load_factor_minimum'].sel(flow=flow_ids) * size
self.add_constraints(hours >= rhs, name='load_factor_min')
# Max constraint: hours <= total_time * load_factor_max * size
- if self.data.load_factor_maximum is not None:
- flow_ids = self.data.with_load_factor_max
+ flow_ids = self.data.with_load_factor_max
+ if flow_ids:
hours = self.model.sum_temporal(self.rate.sel({dim: flow_ids}))
- size = self.data.effective_size_upper.sel({dim: flow_ids}).fillna(np.inf)
- rhs = total_time * self.data.load_factor_maximum * size
+ size = ds['effective_size_upper'].sel({dim: flow_ids}).fillna(np.inf)
+ rhs = total_time * ds['load_factor_maximum'].sel(flow=flow_ids) * size
self.add_constraints(hours <= rhs, name='load_factor_max')
def __init__(self, model: FlowSystemModel, data: FlowsData):
@@ -1092,10 +787,6 @@ def __init__(self, model: FlowSystemModel, data: FlowsData):
"""
super().__init__(model, data)
- # Set reference on each flow element for element access pattern
- for flow in self.elements.values():
- flow.set_flows_model(self)
-
self.create_variables()
self.create_status_model()
self.create_constraints()
@@ -1148,16 +839,18 @@ def _constraint_investment_bounds(self) -> None:
if not mask.any():
return
+ ds = self.data.ds
+
# Upper bound: rate <= size * relative_max
self.model.add_constraints(
- self.rate <= self.size * self.data.effective_relative_maximum,
+ self.rate <= self.size * ds['effective_relative_maximum'],
name=f'{self.dim_name}|invest_ub', # TODO Rename to size_ub
mask=mask,
)
# Lower bound: rate >= size * relative_min
self.model.add_constraints(
- self.rate >= self.size * self.data.effective_relative_minimum,
+ self.rate >= self.size * ds['effective_relative_minimum'],
name=f'{self.dim_name}|invest_lb', # TODO Rename to size_lb
mask=mask,
)
@@ -1168,13 +861,14 @@ def _constraint_status_bounds(self) -> None:
rate <= status * size * relative_max, rate >= status * epsilon."""
flow_ids = self.data.with_status_only
dim = self.dim_name
+ ds = self.data.ds
flow_rate = self.rate.sel({dim: flow_ids})
status = self.status.sel({dim: flow_ids})
# Get effective relative bounds and fixed size for the subset
- rel_max = self.data.effective_relative_maximum.sel({dim: flow_ids})
- rel_min = self.data.effective_relative_minimum.sel({dim: flow_ids})
- size = self.data.fixed_size.sel({dim: flow_ids})
+ rel_max = ds['effective_relative_maximum'].sel({dim: flow_ids})
+ rel_min = ds['effective_relative_minimum'].sel({dim: flow_ids})
+ size = ds['fixed_size'].sel({dim: flow_ids})
# Upper bound: rate <= status * size * relative_max
upper_bounds = rel_max * size
@@ -1194,14 +888,15 @@ def _constraint_status_investment_bounds(self) -> None:
"""
flow_ids = self.data.with_status_and_investment
dim = self.dim_name
+ ds = self.data.ds
flow_rate = self.rate.sel({dim: flow_ids})
size = self.size.sel({dim: flow_ids})
status = self.status.sel({dim: flow_ids})
# Get effective relative bounds and effective_size_upper for the subset
- rel_max = self.data.effective_relative_maximum.sel({dim: flow_ids})
- rel_min = self.data.effective_relative_minimum.sel({dim: flow_ids})
- max_size = self.data.effective_size_upper.sel({dim: flow_ids})
+ rel_max = ds['effective_relative_maximum'].sel({dim: flow_ids})
+ rel_min = ds['effective_relative_minimum'].sel({dim: flow_ids})
+ max_size = ds['effective_size_upper'].sel({dim: flow_ids})
# Upper bound 1: rate <= status * M where M = max_size * relative_max
big_m_upper = max_size * rel_max
@@ -1233,18 +928,19 @@ def _create_piecewise_effects(self) -> None:
if size_var is None:
return
- inv = self.data._investment_data
- if inv is None or not inv.piecewise_element_ids:
+ element_ids = self.data.piecewise_element_ids
+ if not element_ids:
return
- element_ids = inv.piecewise_element_ids
- segment_mask = inv.piecewise_segment_mask
- origin_starts = inv.piecewise_origin_starts
- origin_ends = inv.piecewise_origin_ends
- effect_starts = inv.piecewise_effect_starts
- effect_ends = inv.piecewise_effect_ends
- effect_names = inv.piecewise_effect_names
- max_segments = inv.piecewise_max_segments
+ ds = self.data.ds
+ # Piecewise arrays are auto-aligned to all flows in the Dataset — select back to subset
+ segment_mask = ds['piecewise_segment_mask'].sel(flow=element_ids)
+ origin_starts = ds['piecewise_origin_starts'].sel(flow=element_ids)
+ origin_ends = ds['piecewise_origin_ends'].sel(flow=element_ids)
+ effect_starts = ds['piecewise_effect_starts'].sel(flow=element_ids)
+ effect_ends = ds['piecewise_effect_ends'].sel(flow=element_ids)
+ effect_names = self.data.piecewise_effect_names
+ max_segments = self.data.piecewise_max_segments
# Create batched piecewise variables
base_coords = self.model.get_coords(['period', 'scenario'])
@@ -1334,54 +1030,66 @@ def add_effect_contributions(self, effects_model) -> None:
# === Temporal: rate * effects_per_flow_hour * dt ===
# Batched over flows and effects - _accumulate_shares handles effect dim internally
- factors = self.data.effects_per_flow_hour
- if factors is not None:
- flow_ids = factors.coords[dim].values
+ ds = self.data.ds
+ flow_ids = self.data.with_effects
+ if flow_ids:
+ factors = ds['effects_per_flow_hour'].sel(flow=flow_ids)
rate_subset = self.rate.sel({dim: flow_ids})
effects_model.add_temporal_contribution(rate_subset * (factors * dt), contributor_dim=dim)
# === Temporal: status effects ===
if self.status is not None:
# effects_per_active_hour
- factor = self.data.effects_per_active_hour
+ factor = ds.get('effects_per_active_hour')
if factor is not None:
+ factor = factor.dropna(dim='flow', how='all')
flow_ids = factor.coords[dim].values
status_subset = self.status.sel({dim: flow_ids})
effects_model.add_temporal_contribution(status_subset * (factor * dt), contributor_dim=dim)
# effects_per_startup
- factor = self.data.effects_per_startup
+ factor = ds.get('effects_per_startup')
if self.startup is not None and factor is not None:
+ factor = factor.dropna(dim='flow', how='all')
flow_ids = factor.coords[dim].values
startup_subset = self.startup.sel({dim: flow_ids})
effects_model.add_temporal_contribution(startup_subset * factor, contributor_dim=dim)
# === Periodic: size * effects_per_size ===
- inv = self.data._investment_data
- if inv is not None and inv.effects_per_size is not None:
- factors = inv.effects_per_size
+ def _get_subset(name):
+ """Get investment effect array from ds, dropping auto-aligned NaN rows."""
+ arr = ds.get(name)
+ if arr is None:
+ return None
+ return arr.dropna(dim=dim, how='all')
+
+ factors = _get_subset('invest_effects_per_size')
+ if factors is not None:
flow_ids = factors.coords[dim].values
size_subset = self.size.sel({dim: flow_ids})
effects_model.add_periodic_contribution(size_subset * factors, contributor_dim=dim)
# === Investment/retirement effects (optional investments) ===
- if inv is not None and self.invested is not None:
- if (ff := inv.effects_of_investment) is not None:
+ if self.invested is not None:
+ ff = _get_subset('invest_effects_of_investment')
+ if ff is not None:
flow_ids = ff.coords[dim].values
invested_subset = self.invested.sel({dim: flow_ids})
effects_model.add_periodic_contribution(invested_subset * ff, contributor_dim=dim)
- if (ff := inv.effects_of_retirement) is not None:
+ ff = _get_subset('invest_effects_of_retirement')
+ if ff is not None:
flow_ids = ff.coords[dim].values
invested_subset = self.invested.sel({dim: flow_ids})
effects_model.add_periodic_contribution(invested_subset * (-ff), contributor_dim=dim)
# === Constants: mandatory fixed + retirement ===
- if inv is not None:
- if inv.effects_of_investment_mandatory is not None:
- effects_model.add_periodic_contribution(inv.effects_of_investment_mandatory, contributor_dim=dim)
- if inv.effects_of_retirement_constant is not None:
- effects_model.add_periodic_contribution(inv.effects_of_retirement_constant, contributor_dim=dim)
+ ff = _get_subset('invest_effects_of_investment_mandatory')
+ if ff is not None:
+ effects_model.add_periodic_contribution(ff, contributor_dim=dim)
+ ff = _get_subset('invest_effects_of_retirement_constant')
+ if ff is not None:
+ effects_model.add_periodic_contribution(ff, contributor_dim=dim)
# === Status Variables (cached_property) ===
@@ -1441,10 +1149,12 @@ def startup_count(self) -> linopy.Variable | None:
ids = self.data.with_startup_limit
if not ids:
return None
+ startup_limit = self.data.ds.get('startup_limit')
+ upper = startup_limit.sel(flow=ids) if startup_limit is not None else None
return self.add_variables(
FlowVarName.STARTUP_COUNT,
lower=0,
- upper=self.data.startup_limit_values,
+ upper=upper,
dims=('period', 'scenario'),
element_ids=ids,
)
@@ -1457,15 +1167,26 @@ def uptime(self) -> linopy.Variable | None:
return None
from .features import StatusBuilder
- prev = sd.previous_uptime
+ ds = sd.ds
+ ids = sd.with_uptime_tracking
+ min_up = ds.get('min_uptime')
+ if min_up is not None:
+ min_up = min_up.sel(flow=ids)
+ max_up = ds.get('max_uptime')
+ if max_up is not None:
+ max_up = max_up.sel(flow=ids)
+ prev = ds.get('previous_uptime')
+ if prev is not None:
+ prev = prev.sel(flow=ids)
+
var = StatusBuilder.add_batched_duration_tracking(
model=self.model,
- state=self.status.sel({self.dim_name: sd.with_uptime_tracking}),
+ state=self.status.sel({self.dim_name: ids}),
name=FlowVarName.UPTIME,
dim_name=self.dim_name,
timestep_duration=self.model.timestep_duration,
- minimum_duration=sd.min_uptime,
- maximum_duration=sd.max_uptime,
+ minimum_duration=min_up,
+ maximum_duration=max_up,
previous_duration=prev if prev is not None and fast_notnull(prev).any() else None,
)
self._variables[FlowVarName.UPTIME] = var
@@ -1479,15 +1200,26 @@ def downtime(self) -> linopy.Variable | None:
return None
from .features import StatusBuilder
- prev = sd.previous_downtime
+ ds = sd.ds
+ ids = sd.with_downtime_tracking
+ min_down = ds.get('min_downtime')
+ if min_down is not None:
+ min_down = min_down.sel(flow=ids)
+ max_down = ds.get('max_downtime')
+ if max_down is not None:
+ max_down = max_down.sel(flow=ids)
+ prev = ds.get('previous_downtime')
+ if prev is not None:
+ prev = prev.sel(flow=ids)
+
var = StatusBuilder.add_batched_duration_tracking(
model=self.model,
state=self.inactive,
name=FlowVarName.DOWNTIME,
dim_name=self.dim_name,
timestep_duration=self.model.timestep_duration,
- minimum_duration=sd.min_downtime,
- maximum_duration=sd.max_downtime,
+ minimum_duration=min_down,
+ maximum_duration=max_down,
previous_duration=prev if prev is not None and fast_notnull(prev).any() else None,
)
self._variables[FlowVarName.DOWNTIME] = var
@@ -1712,10 +1444,6 @@ def __init__(self, model: FlowSystemModel, data: BusesData, flows_model: FlowsMo
# Element ID lists for subsets
self.imbalance_ids: list[str] = data.with_imbalance
- # Set reference on each bus element
- for bus in self.elements.values():
- bus._buses_model = self
-
self.create_variables()
self.create_constraints()
self.create_effect_shares()
@@ -1800,7 +1528,7 @@ def collect_penalty_share_specs(self) -> list[tuple[str, xr.DataArray]]:
penalty_specs = []
for bus in self.buses_with_imbalance:
bus_label = bus.id
- imbalance_penalty = bus.imbalance_penalty_per_flow_hour * self.model.timestep_duration
+ imbalance_penalty = self.data.aligned_imbalance_penalty(bus) * self.model.timestep_duration
virtual_supply = self[BusVarName.VIRTUAL_SUPPLY].sel({dim: bus_label})
virtual_demand = self[BusVarName.VIRTUAL_DEMAND].sel({dim: bus_label})
@@ -1907,8 +1635,8 @@ def create_constraints(self) -> None:
flow_sum = sparse_weighted_sum(flow_status, mask, sum_dim='flow', group_dim='component')
# Separate single-flow vs multi-flow components
- single_flow_ids = [c.id for c in self.components if len(c.inputs) + len(c.outputs) == 1]
- multi_flow_ids = [c.id for c in self.components if len(c.inputs) + len(c.outputs) > 1]
+ single_flow_ids = [c.id for c in self.components if len(list(c.flows)) == 1]
+ multi_flow_ids = [c.id for c in self.components if len(list(c.flows)) > 1]
# Single-flow: exact equality
if single_flow_ids:
diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py
index beb314148..338a0e9bf 100644
--- a/flixopt/flow_system.py
+++ b/flixopt/flow_system.py
@@ -8,7 +8,6 @@
import logging
import pathlib
import warnings
-from itertools import chain
from typing import TYPE_CHECKING, Any, Literal
import pandas as pd
@@ -16,7 +15,7 @@
from . import io as fx_io
from .batched import BatchedAccessor
-from .components import Storage
+from .components import Converter, Port, Storage, Transmission
from .config import CONFIG, DEPRECATION_REMOVAL_VERSION
from .core import (
ConversionError,
@@ -35,7 +34,8 @@
CompositeContainerMixin,
Element,
FlowSystemModel,
- Interface,
+ create_reference_structure,
+ replace_references_with_stats,
)
from .topology_accessor import TopologyAccessor
from .transform_accessor import TransformAccessor
@@ -204,7 +204,7 @@ def __contains__(self, key):
return key in object.__getattribute__(self, '_dataset')
-class FlowSystem(Interface, CompositeContainerMixin[Element]):
+class FlowSystem(CompositeContainerMixin[Element]):
"""
A FlowSystem organizes the high level Elements (Components, Buses, Effects & Flows).
@@ -250,7 +250,7 @@ class FlowSystem(Interface, CompositeContainerMixin[Element]):
>>> boiler = fx.Component('Boiler', inputs=[heat_flow], status_parameters=...)
>>> heat_bus = fx.Bus('Heat', imbalance_penalty_per_flow_hour=1e4)
>>> costs = fx.Effect('costs', is_objective=True, is_standard=True)
- >>> flow_system.add_elements(boiler, heat_bus, costs)
+ >>> flow_system.add(boiler, heat_bus, costs)
Unified dict-like access (recommended for most cases):
@@ -353,8 +353,11 @@ def __init__(
fit_to_model_coords=self.fit_to_model_coords,
)
- # Element collections
- self.components: IdList[Component] = element_id_list(display_name='components', truncate_repr=10)
+ # Element collections — component sub-containers
+ self.converters: IdList[Converter] = element_id_list(display_name='converters', truncate_repr=10)
+ self.ports: IdList[Port] = element_id_list(display_name='ports', truncate_repr=10)
+ self.storages: IdList[Storage] = element_id_list(display_name='storages', truncate_repr=10)
+ self.transmissions: IdList[Transmission] = element_id_list(display_name='transmissions', truncate_repr=10)
self.buses: IdList[Bus] = element_id_list(display_name='buses', truncate_repr=10)
self.effects: EffectCollection = EffectCollection(truncate_repr=10)
self.model: FlowSystemModel | None = None
@@ -362,9 +365,14 @@ def __init__(
self._connected_and_transformed = False
self._used_in_optimization = False
+ # Registry for runtime state (populated during model building, not stored on elements)
+ self._element_variable_names: dict[str, list[str]] = {}
+ self._element_constraint_names: dict[str, list[str]] = {}
+ self._registered_elements: set[int] = set() # Python id() for ownership check
+
self._network_app = None
self._flows_cache: IdList[Flow] | None = None
- self._storages_cache: IdList[Storage] | None = None
+ self._components_cache: IdList | None = None
# Solution dataset - populated after optimization or loaded from file
self._solution: xr.Dataset | None = None
@@ -397,37 +405,50 @@ def __init__(
def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]:
"""
Override Interface method to handle FlowSystem-specific serialization.
- Combines custom FlowSystem logic with Interface pattern for nested objects.
+
+ Uses path-based DataArray keys via standalone ``create_reference_structure``:
+ ``{container}|{id}|{param}``, e.g. ``converters|Boiler|...``, ``buses|Heat|...``, ``effects|costs|...``.
Returns:
Tuple of (reference_structure, extracted_arrays_dict)
"""
- # Start with Interface base functionality for constructor parameters
- reference_structure, all_extracted_arrays = super()._create_reference_structure()
+ coords = self.indexes
+
+ # Start with standalone function for FlowSystem's own constructor params
+ reference_structure, all_extracted_arrays = create_reference_structure(self, coords=coords)
# Remove timesteps, as it's directly stored in dataset index
reference_structure.pop('timesteps', None)
- # Extract from components
- components_structure = {}
- for comp_id, component in self.components.items():
- comp_structure, comp_arrays = component._create_reference_structure()
- all_extracted_arrays.update(comp_arrays)
- components_structure[comp_id] = comp_structure
- reference_structure['components'] = components_structure
+ # Extract from component containers with path prefix
+ for container_key, container in [
+ ('converters', self.converters),
+ ('ports', self.ports),
+ ('storages', self.storages),
+ ('transmissions', self.transmissions),
+ ]:
+ container_structure = {}
+ for comp_id, component in container.items():
+ comp_structure, comp_arrays = create_reference_structure(
+ component, f'{container_key}|{comp_id}', coords=coords
+ )
+ all_extracted_arrays.update(comp_arrays)
+ container_structure[comp_id] = comp_structure
+ if container_structure:
+ reference_structure[container_key] = container_structure
- # Extract from buses
+ # Extract from buses with path prefix
buses_structure = {}
for bus_id, bus in self.buses.items():
- bus_structure, bus_arrays = bus._create_reference_structure()
+ bus_structure, bus_arrays = create_reference_structure(bus, f'buses|{bus_id}', coords=coords)
all_extracted_arrays.update(bus_arrays)
buses_structure[bus_id] = bus_structure
reference_structure['buses'] = buses_structure
- # Extract from effects
+ # Extract from effects with path prefix
effects_structure = {}
for effect in self.effects.values():
- effect_structure, effect_arrays = effect._create_reference_structure()
+ effect_structure, effect_arrays = create_reference_structure(effect, f'effects|{effect.id}', coords=coords)
all_extracted_arrays.update(effect_arrays)
effects_structure[effect.id] = effect_structure
reference_structure['effects'] = effects_structure
@@ -468,8 +489,9 @@ def to_dataset(self, include_solution: bool = True, include_original_data: bool
logger.info('FlowSystem is not connected_and_transformed. Connecting and transforming data now.')
self.connect_and_transform()
- # Get base dataset from parent class
- base_ds = super().to_dataset()
+ # Build base dataset from FlowSystem's own _create_reference_structure
+ reference_structure, extracted_arrays = self._create_reference_structure()
+ base_ds = xr.Dataset(extracted_arrays, attrs=reference_structure)
# Add FlowSystem-specific data (solution, clustering, metadata)
return fx_io.flow_system_to_dataset(self, base_ds, include_solution, include_original_data)
@@ -560,7 +582,11 @@ def from_netcdf(cls, path: str | pathlib.Path) -> FlowSystem:
FlowSystem instance with name set from filename
"""
path = pathlib.Path(path)
- flow_system = super().from_netcdf(path)
+ try:
+ ds = fx_io.load_dataset_from_netcdf(path)
+ flow_system = cls.from_dataset(ds)
+ except Exception as e:
+ raise OSError(f'Failed to load FlowSystem from NetCDF file {path}: {e}') from e
# Derive name from filename (without extension)
flow_system.name = path.stem
return flow_system
@@ -728,12 +754,12 @@ def copy(self) -> FlowSystem:
Examples:
>>> original = FlowSystem(timesteps)
- >>> original.add_elements(boiler, bus)
+ >>> original.add(boiler, bus)
>>> original.optimize(solver) # Original now has solution
>>>
>>> # Create a copy to try different parameters
>>> variant = original.copy() # No solution, can be modified
- >>> variant.add_elements(new_component)
+ >>> variant.add(new_component)
>>> variant.optimize(solver)
"""
ds = self.to_dataset(include_solution=False)
@@ -760,7 +786,14 @@ def get_structure(self, clean: bool = False, stats: bool = False) -> dict:
logger.warning('FlowSystem is not connected. Calling connect_and_transform() now.')
self.connect_and_transform()
- return super().get_structure(clean, stats)
+ reference_structure, extracted_arrays = self._create_reference_structure()
+
+ if stats:
+ reference_structure = replace_references_with_stats(reference_structure, extracted_arrays)
+
+ if clean:
+ return fx_io.remove_none_and_empty(reference_structure)
+ return reference_structure
def to_json(self, path: str | pathlib.Path):
"""
@@ -776,7 +809,11 @@ def to_json(self, path: str | pathlib.Path):
)
self.connect_and_transform()
- super().to_json(path)
+ try:
+ data = self.get_structure(clean=True, stats=True)
+ fx_io.save_json(data, path)
+ except Exception as e:
+ raise OSError(f'Failed to save FlowSystem to JSON file {path}: {e}') from e
def fit_to_model_coords(
self,
@@ -877,13 +914,11 @@ def connect_and_transform(self):
self._register_missing_carriers()
self._assign_element_colors()
- # Prepare effects BEFORE transform_data,
- # so the penalty Effect gets transformed too.
- # Note: status parameter propagation happens inside Component.transform_data()
+ # Create penalty effect if needed (must happen before validation)
self._prepare_effects()
- for element in chain(self.components.values(), self.effects.values(), self.buses.values()):
- element.transform_data()
+ # Propagate status parameters from components to flows
+ self._propagate_all_status_parameters()
# Validate cross-element references after transformation
self._validate_system_integrity()
@@ -933,13 +968,11 @@ def _assign_element_colors(self) -> None:
self.components[element_id].color = color
logger.debug(f"Auto-assigned color '{color}' to component '{element_id}'")
- def add_elements(self, *elements: Element) -> None:
- """
- Add Components(Storages, Boilers, Heatpumps, ...), Buses or Effects to the FlowSystem
+ def add(self, *elements: Element) -> None:
+ """Add elements (Converters, Ports, Storages, Buses, Effects, ...) to the FlowSystem.
Args:
- *elements: childs of Element like Boiler, HeatPump, Bus,...
- modeling Elements
+ *elements: Element instances to add (Converter, Port, Storage, Bus, Effect, ...).
Raises:
RuntimeError: If the FlowSystem is locked (has a solution).
@@ -962,7 +995,7 @@ def add_elements(self, *elements: Element) -> None:
for new_element in list(elements):
# Validate element type first
- if not isinstance(new_element, (Component, Effect, Bus)):
+ if not isinstance(new_element, (Converter, Port, Storage, Transmission, Component, Effect, Bus)):
raise TypeError(
f'Tried to add incompatible object to FlowSystem: {type(new_element)=}: {new_element=} '
)
@@ -972,7 +1005,7 @@ def add_elements(self, *elements: Element) -> None:
self._check_if_element_is_unique(new_element)
# Dispatch to type-specific handlers
- if isinstance(new_element, Component):
+ if isinstance(new_element, (Converter, Port, Storage, Transmission, Component)):
self._add_components(new_element)
elif isinstance(new_element, Effect):
self._add_effects(new_element)
@@ -983,6 +1016,15 @@ def add_elements(self, *elements: Element) -> None:
element_type = type(new_element).__name__
logger.info(f'Registered new {element_type}: {new_element.id}')
+ def add_elements(self, *elements: Element) -> None:
+ """Deprecated. Use :meth:`add` instead."""
+ warnings.warn(
+ 'add_elements() is deprecated. Use add() instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.add(*elements)
+
def add_carriers(self, *carriers: Carrier) -> None:
"""Register a custom carrier for this FlowSystem.
@@ -1008,7 +1050,7 @@ def add_carriers(self, *carriers: Carrier) -> None:
# Now buses can reference this carrier by name
bus = fx.Bus('BioGasNetwork', carrier='biogas')
- fs.add_elements(bus)
+ fs.add(bus)
# The carrier color will be used in plots automatically
```
@@ -1282,7 +1324,7 @@ def status(self) -> FlowSystemStatus:
>>> fs = FlowSystem(timesteps)
>>> fs.status
- >>> fs.add_elements(bus, component)
+ >>> fs.add(bus, component)
>>> fs.connect_and_transform()
>>> fs.status
@@ -1356,9 +1398,9 @@ def reset(self) -> FlowSystem:
Examples:
>>> flow_system.optimize(solver) # FlowSystem is now locked
- >>> flow_system.add_elements(new_bus) # Raises RuntimeError
+ >>> flow_system.add(new_bus) # Raises RuntimeError
>>> flow_system.reset() # Unlock the FlowSystem
- >>> flow_system.add_elements(new_bus) # Now works
+ >>> flow_system.add(new_bus) # Now works
"""
self.solution = None # Also clears _statistics via setter
self._invalidate_model()
@@ -1425,11 +1467,10 @@ def optimize(self) -> OptimizeAccessor:
>>> flow_system.optimize(HighsSolver())
>>> print(flow_system.solution['Boiler(Q_th)|flow_rate'])
- Access element solutions directly:
+ Access solution data:
>>> flow_system.optimize(solver)
- >>> boiler = flow_system.components['Boiler']
- >>> print(boiler.solution)
+ >>> print(flow_system.solution['flow|rate'])
Future specialized modes:
@@ -1669,24 +1710,29 @@ def _check_if_element_already_assigned(self, element: Element) -> None:
Raises:
ValueError: If element is already assigned to a different FlowSystem
"""
- if element._flow_system is not None and element._flow_system is not self:
- raise ValueError(
- f'Element "{element.id}" is already assigned to another FlowSystem. '
- f'Each element can only belong to one FlowSystem at a time. '
- f'To use this element in multiple systems, create a copy: '
- f'flow_system.add_elements(element.copy())'
- )
+ if id(element) in self._registered_elements:
+ return # Already registered to this FlowSystem
+ # Assignment to a *different* FlowSystem cannot be detected with id()-based tracking alone;
+ # duplicate ids within this FlowSystem are caught by _check_if_element_is_unique()
+
+ def _propagate_all_status_parameters(self) -> None:
+ """Propagate status parameters from components to their flows.
+
+ Components with status_parameters or prevent_simultaneous_flows require
+ certain flows to have StatusParameters. Transmissions with absolute_losses
+ additionally need status variables on input flows.
+ """
+ for component in self.components.values():
+ component._propagate_status_parameters()
def _prepare_effects(self) -> None:
"""Create the penalty effect if needed.
- Called before transform_data() so the penalty effect gets transformed.
Validation is done after transformation via _run_validation().
"""
if self.effects._penalty_effect is None:
penalty = self.effects._create_penalty_effect()
- if penalty._flow_system is None:
- penalty.link_to_flow_system(self)
+ self._registered_elements.add(id(penalty))
def _run_validation(self) -> None:
"""Run all validation through batched *Data classes.
@@ -1695,7 +1741,7 @@ def _run_validation(self) -> None:
- Config validation (simple checks)
- DataArray validation (post-transformation checks)
- Called after transform_data(). The cached *Data instances are
+ Called during connect_and_transform(). The cached *Data instances are
reused during model building.
"""
batched = self.batched
@@ -1729,55 +1775,59 @@ def _validate_system_integrity(self) -> None:
raise ValueError(
f'Flow "{flow.id}" references bus "{flow.bus}" which does not exist in FlowSystem. '
f'Available buses: {available_buses}. '
- f'Did you forget to add the bus using flow_system.add_elements(Bus("{flow.bus}"))?'
+ f'Did you forget to add the bus using flow_system.add(Bus("{flow.bus}"))?'
)
def _add_effects(self, *args: Effect) -> None:
for effect in args:
- effect.link_to_flow_system(self) # Link element to FlowSystem
+ self._registered_elements.add(id(effect))
self.effects.add_effects(*args)
- def _add_components(self, *components: Component) -> None:
+ def _add_components(self, *components) -> None:
for new_component in list(components):
- new_component.link_to_flow_system(self) # Link element to FlowSystem
- self.components.add(new_component) # Add to existing components
- # Invalidate cache once after all additions
+ self._registered_elements.add(id(new_component))
+ for flow in new_component.flows.values():
+ self._registered_elements.add(id(flow))
+ # Dispatch to the right container
+ if isinstance(new_component, Converter):
+ self.converters.add(new_component)
+ elif isinstance(new_component, Port):
+ self.ports.add(new_component)
+ elif isinstance(new_component, Storage):
+ self.storages.add(new_component)
+ elif isinstance(new_component, Transmission):
+ self.transmissions.add(new_component)
+ else:
+ # Legacy Component subclass (Source, Sink, SourceAndSink) → ports
+ self.ports.add(new_component)
+ # Invalidate caches once after all additions
if components:
self._flows_cache = None
- self._storages_cache = None
+ self._components_cache = None
def _add_buses(self, *buses: Bus):
for new_bus in list(buses):
- new_bus.link_to_flow_system(self) # Link element to FlowSystem
+ self._registered_elements.add(id(new_bus))
self.buses.add(new_bus) # Add to existing buses
# Invalidate cache once after all additions
if buses:
self._flows_cache = None
- self._storages_cache = None
def _connect_network(self):
- """Connects the network of components and buses. Can be rerun without changes if no elements were added"""
- for component in self.components.values():
- for flow in component.flows.values():
- flow.component = component.id
- flow.is_input_in_component = flow.id in component.inputs
-
- # Connect Buses
- bus = self.buses.get(flow.bus)
- if bus is None:
- raise KeyError(
- f'Bus {flow.bus} not found in the FlowSystem, but used by "{flow.id}". Please add it first.'
- )
- if flow.is_input_in_component and flow.id not in bus.outputs:
- bus.outputs.add(flow)
- elif not flow.is_input_in_component and flow.id not in bus.inputs:
- bus.inputs.add(flow)
+ """Connect flows to their buses. Flow ownership is already set in each component's __init__."""
+ for flow in self.flows.values():
+ bus = self.buses.get(flow.bus)
+ if bus is None:
+ raise KeyError(
+ f'Bus {flow.bus} not found in the FlowSystem, but used by "{flow.id}". Please add it first.'
+ )
+ if flow.is_input_in_component and flow.id not in bus.outputs:
+ bus.outputs.add(flow)
+ elif not flow.is_input_in_component and flow.id not in bus.inputs:
+ bus.inputs.add(flow)
- # Count flows manually to avoid triggering cache rebuild
- flow_count = sum(len(c.inputs) + len(c.outputs) for c in self.components.values())
logger.debug(
- f'Connected {len(self.buses)} Buses and {len(self.components)} '
- f'via {flow_count} Flows inside the FlowSystem.'
+ f'Connected {len(self.buses)} Buses and {len(self.components)} Components via {len(self.flows)} Flows.'
)
def __repr__(self) -> str:
@@ -1845,36 +1895,50 @@ def __eq__(self, other: FlowSystem):
def _get_container_groups(self) -> dict[str, IdList]:
"""Return ordered container groups for CompositeContainerMixin."""
- return {
- 'Components': self.components,
- 'Buses': self.buses,
- 'Effects': self.effects,
- 'Flows': self.flows,
- }
+ groups: dict[str, IdList] = {}
+ if self.converters:
+ groups['Converters'] = self.converters
+ if self.ports:
+ groups['Ports'] = self.ports
+ if self.storages:
+ groups['Storages'] = self.storages
+ if self.transmissions:
+ groups['Transmissions'] = self.transmissions
+ groups['Buses'] = self.buses
+ groups['Effects'] = self.effects
+ groups['Flows'] = self.flows
+ return groups
+
+ @property
+ def components(self) -> IdList:
+ """All component-like elements as a combined IdList (backward compat).
+
+ Prefer accessing specific containers directly:
+ ``self.converters``, ``self.ports``, ``self.storages``, ``self.transmissions``.
+ """
+ if self._components_cache is None:
+ all_comps = (
+ list(self.converters.values())
+ + list(self.ports.values())
+ + list(self.storages.values())
+ + list(self.transmissions.values())
+ )
+ all_comps.sort(key=lambda c: c.id.lower())
+ self._components_cache = element_id_list(all_comps, display_name='components', truncate_repr=10)
+ return self._components_cache
@property
def flows(self) -> IdList[Flow]:
if self._flows_cache is None:
- flows = [f for c in self.components.values() for f in c.flows.values()]
+ flows = []
+ for container in (self.converters, self.ports, self.storages, self.transmissions):
+ for c in container.values():
+ flows.extend(c.flows.values())
# Deduplicate by id and sort for reproducibility
flows = sorted({id(f): f for f in flows}.values(), key=lambda f: f.id.lower())
self._flows_cache = element_id_list(flows, display_name='flows', truncate_repr=10)
return self._flows_cache
- @property
- def storages(self) -> IdList[Storage]:
- """All storage components as an IdList.
-
- Returns:
- IdList containing all Storage components in the FlowSystem,
- sorted by id for reproducibility.
- """
- if self._storages_cache is None:
- storages = [c for c in self.components.values() if isinstance(c, Storage)]
- storages = sorted(storages, key=lambda s: s.id.lower())
- self._storages_cache = element_id_list(storages, display_name='storages', truncate_repr=10)
- return self._storages_cache
-
# --- Forwarding properties for model coordinate state ---
@property
diff --git a/flixopt/flow_system_status.py b/flixopt/flow_system_status.py
index aef8c0957..47e39cf43 100644
--- a/flixopt/flow_system_status.py
+++ b/flixopt/flow_system_status.py
@@ -111,10 +111,9 @@ def _clear_solved(fs: FlowSystem) -> None:
def _clear_model_built(fs: FlowSystem) -> None:
"""Clear artifacts from MODEL_BUILT status."""
- # Clear element variable/constraint name mappings
- for element in fs.values():
- element._variable_names = []
- element._constraint_names = []
+ # Clear element variable/constraint name registries
+ fs._element_variable_names.clear()
+ fs._element_constraint_names.clear()
# Reset the model-built flag so status downgrades to MODEL_CREATED
if fs.model is not None:
fs.model._is_built = False
diff --git a/flixopt/interface.py b/flixopt/interface.py
index 227a63c7a..363b74379 100644
--- a/flixopt/interface.py
+++ b/flixopt/interface.py
@@ -6,6 +6,7 @@
from __future__ import annotations
import logging
+from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Literal
import numpy as np
@@ -14,8 +15,9 @@
import xarray as xr
from .config import CONFIG
+from .io import build_repr_from_init
from .plot_result import PlotResult
-from .structure import Interface, register_class_for_io
+from .structure import register_class_for_io
if TYPE_CHECKING: # for type checking and preventing circular imports
from collections.abc import Iterator
@@ -25,8 +27,21 @@
logger = logging.getLogger('flixopt')
+def _has_value(param: Any) -> bool:
+ """Check if a parameter has a meaningful value.
+
+ Returns False for None and empty collections, True for everything else.
+ """
+ if param is None:
+ return False
+ if isinstance(param, (dict, list, tuple, set, frozenset)) and len(param) == 0:
+ return False
+ return True
+
+
@register_class_for_io
-class Piece(Interface):
+@dataclass(eq=False)
+class Piece:
"""Define a single linear segment with specified domain boundaries.
This class represents one linear segment that will be combined with other
@@ -71,19 +86,16 @@ class Piece(Interface):
"""
- def __init__(self, start: Numeric_TPS, end: Numeric_TPS):
- self.start = start
- self.end = end
- self.has_time_dim = False
+ start: Numeric_TPS
+ end: Numeric_TPS
- def transform_data(self) -> None:
- dims = None if self.has_time_dim else ['period', 'scenario']
- self.start = self._fit_coords(f'{self.prefix}|start', self.start, dims=dims)
- self.end = self._fit_coords(f'{self.prefix}|end', self.end, dims=dims)
+ def __repr__(self) -> str:
+ return build_repr_from_init(self)
@register_class_for_io
-class Piecewise(Interface):
+@dataclass(eq=False)
+class Piecewise:
"""Define piecewise linear approximations for modeling non-linear relationships.
Enables modeling of non-linear relationships through piecewise linear segments
@@ -199,19 +211,7 @@ class Piecewise(Interface):
"""
- def __init__(self, pieces: list[Piece]):
- self.pieces = pieces
- self._has_time_dim = False
-
- @property
- def has_time_dim(self):
- return self._has_time_dim
-
- @has_time_dim.setter
- def has_time_dim(self, value):
- self._has_time_dim = value
- for piece in self.pieces:
- piece.has_time_dim = value
+ pieces: list[Piece]
def __len__(self):
"""
@@ -228,19 +228,13 @@ def __getitem__(self, index) -> Piece:
def __iter__(self) -> Iterator[Piece]:
return iter(self.pieces) # Enables iteration like for piece in piecewise: ...
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested Piece objects."""
- super().link_to_flow_system(flow_system, prefix)
- for i, piece in enumerate(self.pieces):
- piece.link_to_flow_system(flow_system, self._sub_prefix(f'Piece{i}'))
-
- def transform_data(self) -> None:
- for piece in self.pieces:
- piece.transform_data()
+ def __repr__(self) -> str:
+ return build_repr_from_init(self)
@register_class_for_io
-class PiecewiseConversion(Interface):
+@dataclass(eq=False)
+class PiecewiseConversion:
"""Define coordinated piecewise linear relationships between multiple flows.
This class models conversion processes where multiple flows (inputs, outputs,
@@ -436,20 +430,7 @@ class PiecewiseConversion(Interface):
"""
- def __init__(self, piecewises: dict[str, Piecewise]):
- self.piecewises = piecewises
- self._has_time_dim = True
- self.has_time_dim = True # Initial propagation
-
- @property
- def has_time_dim(self):
- return self._has_time_dim
-
- @has_time_dim.setter
- def has_time_dim(self, value):
- self._has_time_dim = value
- for piecewise in self.piecewises.values():
- piecewise.has_time_dim = value
+ piecewises: dict[str, Piecewise]
def items(self):
"""
@@ -460,16 +441,6 @@ def items(self):
"""
return self.piecewises.items()
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested Piecewise objects."""
- super().link_to_flow_system(flow_system, prefix)
- for name, piecewise in self.piecewises.items():
- piecewise.link_to_flow_system(flow_system, self._sub_prefix(name))
-
- def transform_data(self) -> None:
- for piecewise in self.piecewises.values():
- piecewise.transform_data()
-
def plot(
self,
x_flow: str | None = None,
@@ -484,10 +455,6 @@ def plot(
is shown in a separate subplot (faceted by flow). Pieces are distinguished
by line dash style. If boundaries vary over time, color shows time progression.
- Note:
- Requires FlowSystem to be connected and transformed (call
- flow_system.connect_and_transform() first).
-
Args:
x_flow: Flow label to use for X-axis. Defaults to first flow in dict.
title: Plot title.
@@ -502,15 +469,10 @@ def plot(
PlotResult containing the figure and underlying piecewise data.
Examples:
- >>> flow_system.connect_and_transform()
>>> chp.piecewise_conversion.plot(x_flow='Gas', title='CHP Curves')
>>> # Select specific time range
>>> chp.piecewise_conversion.plot(select={'time': slice(0, 12)})
"""
- if not self.flow_system.connected_and_transformed:
- logger.debug('Connecting flow_system for plotting PiecewiseConversion')
- self.flow_system.connect_and_transform()
-
colorscale = colorscale or CONFIG.Plotting.default_sequential_colorscale
flow_labels = list(self.piecewises.keys())
@@ -524,6 +486,9 @@ def plot(
x_piecewise = self.piecewises[x_label]
+ def _ensure_da(v):
+ return v if isinstance(v, xr.DataArray) else xr.DataArray(v)
+
# Build Dataset with all piece data
datasets = []
for y_label in y_flows:
@@ -531,8 +496,8 @@ def plot(
for i, (x_piece, y_piece) in enumerate(zip(x_piecewise, y_piecewise, strict=False)):
ds = xr.Dataset(
{
- x_label: xr.concat([x_piece.start, x_piece.end], dim='point'),
- 'output': xr.concat([y_piece.start, y_piece.end], dim='point'),
+ x_label: xr.concat([_ensure_da(x_piece.start), _ensure_da(x_piece.end)], dim='point'),
+ 'output': xr.concat([_ensure_da(y_piece.start), _ensure_da(y_piece.end)], dim='point'),
}
)
ds = ds.assign_coords(point=['start', 'end'])
@@ -606,9 +571,13 @@ def plot(
return result
+ def __repr__(self) -> str:
+ return build_repr_from_init(self)
+
@register_class_for_io
-class PiecewiseEffects(Interface):
+@dataclass(eq=False)
+class PiecewiseEffects:
"""Define how a single decision variable contributes to system effects with piecewise rates.
This class models situations where a decision variable (the origin) generates
@@ -797,34 +766,8 @@ class PiecewiseEffects(Interface):
"""
- def __init__(self, piecewise_origin: Piecewise, piecewise_shares: dict[str, Piecewise]):
- self.piecewise_origin = piecewise_origin
- self.piecewise_shares = piecewise_shares
- self._has_time_dim = False
- self.has_time_dim = False # Initial propagation
-
- @property
- def has_time_dim(self):
- return self._has_time_dim
-
- @has_time_dim.setter
- def has_time_dim(self, value):
- self._has_time_dim = value
- self.piecewise_origin.has_time_dim = value
- for piecewise in self.piecewise_shares.values():
- piecewise.has_time_dim = value
-
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested Piecewise objects."""
- super().link_to_flow_system(flow_system, prefix)
- self.piecewise_origin.link_to_flow_system(flow_system, self._sub_prefix('origin'))
- for effect, piecewise in self.piecewise_shares.items():
- piecewise.link_to_flow_system(flow_system, self._sub_prefix(effect))
-
- def transform_data(self) -> None:
- self.piecewise_origin.transform_data()
- for piecewise in self.piecewise_shares.values():
- piecewise.transform_data()
+ piecewise_origin: Piecewise
+ piecewise_shares: dict[str, Piecewise]
def plot(
self,
@@ -839,10 +782,6 @@ def plot(
and its effect shares. Each effect is shown in a separate subplot (faceted
by effect). Pieces are distinguished by line dash style.
- Note:
- Requires FlowSystem to be connected and transformed (call
- flow_system.connect_and_transform() first).
-
Args:
title: Plot title.
select: xarray-style selection dict to filter data,
@@ -856,19 +795,17 @@ def plot(
PlotResult containing the figure and underlying piecewise data.
Examples:
- >>> flow_system.connect_and_transform()
>>> invest_params.piecewise_effects_of_investment.plot(title='Investment Effects')
"""
- if not self.flow_system.connected_and_transformed:
- logger.debug('Connecting flow_system for plotting PiecewiseEffects')
- self.flow_system.connect_and_transform()
-
colorscale = colorscale or CONFIG.Plotting.default_sequential_colorscale
effect_labels = list(self.piecewise_shares.keys())
if not effect_labels:
raise ValueError('Need at least one effect share to plot')
+ def _ensure_da(v):
+ return v if isinstance(v, xr.DataArray) else xr.DataArray(v)
+
# Build Dataset with all piece data
datasets = []
for effect_label in effect_labels:
@@ -876,8 +813,8 @@ def plot(
for i, (x_piece, y_piece) in enumerate(zip(self.piecewise_origin, y_piecewise, strict=False)):
ds = xr.Dataset(
{
- 'origin': xr.concat([x_piece.start, x_piece.end], dim='point'),
- 'share': xr.concat([y_piece.start, y_piece.end], dim='point'),
+ 'origin': xr.concat([_ensure_da(x_piece.start), _ensure_da(x_piece.end)], dim='point'),
+ 'share': xr.concat([_ensure_da(y_piece.start), _ensure_da(y_piece.end)], dim='point'),
}
)
ds = ds.assign_coords(point=['start', 'end'])
@@ -951,9 +888,13 @@ def plot(
return result
+ def __repr__(self) -> str:
+ return build_repr_from_init(self)
+
@register_class_for_io
-class InvestParameters(Interface):
+@dataclass(eq=False)
+class InvestParameters:
"""Define investment decision parameters with flexible sizing and effect modeling.
This class models investment decisions in optimization problems, supporting
@@ -1143,100 +1084,25 @@ class InvestParameters(Interface):
"""
- def __init__(
- self,
- fixed_size: Numeric_PS | None = None,
- minimum_size: Numeric_PS | None = None,
- maximum_size: Numeric_PS | None = None,
- mandatory: bool = False,
- effects_of_investment: Effect_PS | Numeric_PS | None = None,
- effects_of_investment_per_size: Effect_PS | Numeric_PS | None = None,
- effects_of_retirement: Effect_PS | Numeric_PS | None = None,
- piecewise_effects_of_investment: PiecewiseEffects | None = None,
- linked_periods: Numeric_PS | tuple[int, int] | None = None,
- ):
- self.effects_of_investment = effects_of_investment if effects_of_investment is not None else {}
- self.effects_of_retirement = effects_of_retirement if effects_of_retirement is not None else {}
- self.fixed_size = fixed_size
- self.mandatory = mandatory
- self.effects_of_investment_per_size = (
- effects_of_investment_per_size if effects_of_investment_per_size is not None else {}
- )
- self.piecewise_effects_of_investment = piecewise_effects_of_investment
- self.minimum_size = minimum_size if minimum_size is not None else CONFIG.Modeling.epsilon
- self.maximum_size = maximum_size
- self.linked_periods = linked_periods
-
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- """Propagate flow_system reference to nested PiecewiseEffects object if present."""
- super().link_to_flow_system(flow_system, prefix)
- if self.piecewise_effects_of_investment is not None:
- self.piecewise_effects_of_investment.link_to_flow_system(flow_system, self._sub_prefix('PiecewiseEffects'))
-
- def transform_data(self) -> None:
- # Validate that either fixed_size or maximum_size is set
- if self.fixed_size is None and self.maximum_size is None:
- raise ValueError(
- f'InvestParameters in "{self.prefix}" requires either fixed_size or maximum_size to be set. '
- f'An upper bound is needed to properly scale the optimization model.'
- )
- self.effects_of_investment = self._fit_effect_coords(
- prefix=self.prefix,
- effect_values=self.effects_of_investment,
- suffix='effects_of_investment',
- dims=['period', 'scenario'],
- )
- self.effects_of_retirement = self._fit_effect_coords(
- prefix=self.prefix,
- effect_values=self.effects_of_retirement,
- suffix='effects_of_retirement',
- dims=['period', 'scenario'],
- )
- self.effects_of_investment_per_size = self._fit_effect_coords(
- prefix=self.prefix,
- effect_values=self.effects_of_investment_per_size,
- suffix='effects_of_investment_per_size',
- dims=['period', 'scenario'],
- )
-
- if self.piecewise_effects_of_investment is not None:
- self.piecewise_effects_of_investment.has_time_dim = False
- self.piecewise_effects_of_investment.transform_data()
-
- self.minimum_size = self._fit_coords(
- f'{self.prefix}|minimum_size', self.minimum_size, dims=['period', 'scenario']
- )
- self.maximum_size = self._fit_coords(
- f'{self.prefix}|maximum_size', self.maximum_size, dims=['period', 'scenario']
- )
- # Convert tuple (first_period, last_period) to DataArray if needed
- if isinstance(self.linked_periods, (tuple, list)):
- if len(self.linked_periods) != 2:
- raise TypeError(
- f'If you provide a tuple to "linked_periods", it needs to be len=2. Got {len(self.linked_periods)=}'
- )
- if self.flow_system.periods is None:
- raise ValueError(
- f'Cannot use linked_periods={self.linked_periods} when FlowSystem has no periods defined. '
- f'Please define periods in FlowSystem or use linked_periods=None.'
- )
- logger.debug(f'Computing linked_periods from {self.linked_periods}')
- start, end = self.linked_periods
- if start not in self.flow_system.periods.values:
- logger.warning(
- f'Start of linked periods ({start} not found in periods directly: {self.flow_system.periods.values}'
- )
- if end not in self.flow_system.periods.values:
- logger.warning(
- f'End of linked periods ({end} not found in periods directly: {self.flow_system.periods.values}'
- )
- self.linked_periods = self.compute_linked_periods(start, end, self.flow_system.periods)
- logger.debug(f'Computed {self.linked_periods=}')
-
- self.linked_periods = self._fit_coords(
- f'{self.prefix}|linked_periods', self.linked_periods, dims=['period', 'scenario']
- )
- self.fixed_size = self._fit_coords(f'{self.prefix}|fixed_size', self.fixed_size, dims=['period', 'scenario'])
+ fixed_size: Numeric_PS | None = None
+ minimum_size: Numeric_PS | None = None
+ maximum_size: Numeric_PS | None = None
+ mandatory: bool = False
+ effects_of_investment: Effect_PS | Numeric_PS | None = None
+ effects_of_investment_per_size: Effect_PS | Numeric_PS | None = None
+ effects_of_retirement: Effect_PS | Numeric_PS | None = None
+ piecewise_effects_of_investment: PiecewiseEffects | None = None
+ linked_periods: Numeric_PS | tuple[int, int] | None = None
+
+ def __post_init__(self):
+ if self.effects_of_investment is None:
+ self.effects_of_investment = {}
+ if self.effects_of_retirement is None:
+ self.effects_of_retirement = {}
+ if self.effects_of_investment_per_size is None:
+ self.effects_of_investment_per_size = {}
+ if self.minimum_size is None:
+ self.minimum_size = CONFIG.Modeling.epsilon
@property
def minimum_or_fixed_size(self) -> Numeric_PS:
@@ -1278,9 +1144,13 @@ def compute_linked_periods(first_period: int, last_period: int, periods: pd.Inde
coords=(pd.Index(periods, name='period'),),
).rename('linked_periods')
+ def __repr__(self) -> str:
+ return build_repr_from_init(self)
+
@register_class_for_io
-class StatusParameters(Interface):
+@dataclass(eq=False)
+class StatusParameters:
"""Define operational constraints and effects for binary status equipment behavior.
This class models equipment that operates in discrete states (active/inactive) rather than
@@ -1468,56 +1338,23 @@ class StatusParameters(Interface):
"""
- def __init__(
- self,
- effects_per_startup: Effect_TPS | Numeric_TPS | None = None,
- effects_per_active_hour: Effect_TPS | Numeric_TPS | None = None,
- active_hours_min: Numeric_PS | None = None,
- active_hours_max: Numeric_PS | None = None,
- min_uptime: Numeric_TPS | None = None,
- max_uptime: Numeric_TPS | None = None,
- min_downtime: Numeric_TPS | None = None,
- max_downtime: Numeric_TPS | None = None,
- startup_limit: Numeric_PS | None = None,
- force_startup_tracking: bool = False,
- cluster_mode: Literal['relaxed', 'cyclic'] = 'relaxed',
- ):
- self.effects_per_startup = effects_per_startup if effects_per_startup is not None else {}
- self.effects_per_active_hour = effects_per_active_hour if effects_per_active_hour is not None else {}
- self.active_hours_min = active_hours_min
- self.active_hours_max = active_hours_max
- self.min_uptime = min_uptime
- self.max_uptime = max_uptime
- self.min_downtime = min_downtime
- self.max_downtime = max_downtime
- self.startup_limit = startup_limit
- self.force_startup_tracking: bool = force_startup_tracking
- self.cluster_mode = cluster_mode
-
- def transform_data(self) -> None:
- self.effects_per_startup = self._fit_effect_coords(
- prefix=self.prefix,
- effect_values=self.effects_per_startup,
- suffix='per_startup',
- )
- self.effects_per_active_hour = self._fit_effect_coords(
- prefix=self.prefix,
- effect_values=self.effects_per_active_hour,
- suffix='per_active_hour',
- )
- self.min_uptime = self._fit_coords(f'{self.prefix}|min_uptime', self.min_uptime)
- self.max_uptime = self._fit_coords(f'{self.prefix}|max_uptime', self.max_uptime)
- self.min_downtime = self._fit_coords(f'{self.prefix}|min_downtime', self.min_downtime)
- self.max_downtime = self._fit_coords(f'{self.prefix}|max_downtime', self.max_downtime)
- self.active_hours_max = self._fit_coords(
- f'{self.prefix}|active_hours_max', self.active_hours_max, dims=['period', 'scenario']
- )
- self.active_hours_min = self._fit_coords(
- f'{self.prefix}|active_hours_min', self.active_hours_min, dims=['period', 'scenario']
- )
- self.startup_limit = self._fit_coords(
- f'{self.prefix}|startup_limit', self.startup_limit, dims=['period', 'scenario']
- )
+ effects_per_startup: Effect_TPS | Numeric_TPS | None = None
+ effects_per_active_hour: Effect_TPS | Numeric_TPS | None = None
+ active_hours_min: Numeric_PS | None = None
+ active_hours_max: Numeric_PS | None = None
+ min_uptime: Numeric_TPS | None = None
+ max_uptime: Numeric_TPS | None = None
+ min_downtime: Numeric_TPS | None = None
+ max_downtime: Numeric_TPS | None = None
+ startup_limit: Numeric_PS | None = None
+ force_startup_tracking: bool = False
+ cluster_mode: Literal['relaxed', 'cyclic'] = 'relaxed'
+
+ def __post_init__(self):
+ if self.effects_per_startup is None:
+ self.effects_per_startup = {}
+ if self.effects_per_active_hour is None:
+ self.effects_per_active_hour = {}
@property
def use_uptime_tracking(self) -> bool:
@@ -1536,9 +1373,12 @@ def use_startup_tracking(self) -> bool:
return True
return any(
- self._has_value(param)
+ _has_value(param)
for param in [
self.effects_per_startup,
self.startup_limit,
]
)
+
+ def __repr__(self) -> str:
+ return build_repr_from_init(self)
diff --git a/flixopt/io.py b/flixopt/io.py
index 514e22665..f8b2cd4c7 100644
--- a/flixopt/io.py
+++ b/flixopt/io.py
@@ -27,6 +27,40 @@
from .flow_system import FlowSystem
from .types import Numeric_TPS
+# Lazy imports to avoid circular dependency (structure.py imports io.py)
+# These are used at call time, not at import time.
+_resolve_ref = None
+_resolve_da_ref = None
+_create_ref = None
+
+
+def _get_resolve_reference_structure():
+ global _resolve_ref
+ if _resolve_ref is None:
+ from .structure import resolve_reference_structure
+
+ _resolve_ref = resolve_reference_structure
+ return _resolve_ref
+
+
+def _get_resolve_dataarray_reference():
+ global _resolve_da_ref
+ if _resolve_da_ref is None:
+ from .structure import _resolve_dataarray_reference
+
+ _resolve_da_ref = _resolve_dataarray_reference
+ return _resolve_da_ref
+
+
+def _get_create_reference_structure():
+ global _create_ref
+ if _create_ref is None:
+ from .structure import create_reference_structure
+
+ _create_ref = create_reference_structure
+ return _create_ref
+
+
logger = logging.getLogger('flixopt')
@@ -1707,12 +1741,21 @@ def _create_flow_system(
cls: type[FlowSystem],
) -> FlowSystem:
"""Create FlowSystem instance with constructor parameters."""
+ _resolve_da = _get_resolve_dataarray_reference()
+
+ def _resolve(key, default=None):
+ """Resolve a reference_structure value, unwrapping ::: refs via _resolve_da."""
+ val = reference_structure.get(key, default)
+ if isinstance(val, str) and val.startswith(':::'):
+ val = _resolve_da(val, arrays_dict)
+ return val
+
# Extract cluster index if present (clustered FlowSystem)
clusters = ds.indexes.get('cluster')
# Resolve cluster_weight if present in reference structure
cluster_weight_for_constructor = (
- cls._resolve_dataarray_reference(reference_structure['cluster_weight'], arrays_dict)
+ _resolve_da(reference_structure['cluster_weight'], arrays_dict)
if 'cluster_weight' in reference_structure
else None
)
@@ -1720,14 +1763,7 @@ def _create_flow_system(
# Resolve scenario_weights only if scenario dimension exists
scenario_weights = None
if ds.indexes.get('scenario') is not None and 'scenario_weights' in reference_structure:
- scenario_weights = cls._resolve_dataarray_reference(reference_structure['scenario_weights'], arrays_dict)
-
- # Resolve timestep_duration if present as DataArray reference
- timestep_duration = None
- if 'timestep_duration' in reference_structure:
- ref_value = reference_structure['timestep_duration']
- if isinstance(ref_value, str) and ref_value.startswith(':::'):
- timestep_duration = cls._resolve_dataarray_reference(ref_value, arrays_dict)
+ scenario_weights = _resolve_da(reference_structure['scenario_weights'], arrays_dict)
# Get timesteps - convert integer index to RangeIndex for segmented systems
time_index = ds.indexes['time']
@@ -1739,15 +1775,15 @@ def _create_flow_system(
periods=ds.indexes.get('period'),
scenarios=ds.indexes.get('scenario'),
clusters=clusters,
- hours_of_last_timestep=reference_structure.get('hours_of_last_timestep'),
- hours_of_previous_timesteps=reference_structure.get('hours_of_previous_timesteps'),
- weight_of_last_period=reference_structure.get('weight_of_last_period'),
+ hours_of_last_timestep=_resolve('hours_of_last_timestep'),
+ hours_of_previous_timesteps=_resolve('hours_of_previous_timesteps'),
+ weight_of_last_period=_resolve('weight_of_last_period'),
scenario_weights=scenario_weights,
cluster_weight=cluster_weight_for_constructor,
- scenario_independent_sizes=reference_structure.get('scenario_independent_sizes', True),
- scenario_independent_flow_rates=reference_structure.get('scenario_independent_flow_rates', False),
- name=reference_structure.get('name'),
- timestep_duration=timestep_duration,
+ scenario_independent_sizes=_resolve('scenario_independent_sizes', True),
+ scenario_independent_flow_rates=_resolve('scenario_independent_flow_rates', False),
+ name=_resolve('name'),
+ timestep_duration=_resolve('timestep_duration'),
)
@staticmethod
@@ -1758,26 +1794,35 @@ def _restore_elements(
cls: type[FlowSystem],
) -> None:
"""Restore components, buses, and effects to FlowSystem."""
+ from .components import Converter, Port, Storage
from .effects import Effect
from .elements import Bus, Component
- # Restore components
+ _resolve = _get_resolve_reference_structure()
+
+ # Restore components (new format: separate container keys)
+ for container_key in ('converters', 'ports', 'storages', 'transmissions'):
+ for _comp_label, comp_data in reference_structure.get(container_key, {}).items():
+ component = _resolve(comp_data, arrays_dict)
+ flow_system._add_components(component)
+
+ # Legacy format: all components under single 'components' key
for comp_label, comp_data in reference_structure.get('components', {}).items():
- component = cls._resolve_reference_structure(comp_data, arrays_dict)
- if not isinstance(component, Component):
+ component = _resolve(comp_data, arrays_dict)
+ if not isinstance(component, (Component, Converter, Port, Storage)):
logger.critical(f'Restoring component {comp_label} failed.')
flow_system._add_components(component)
# Restore buses
for bus_label, bus_data in reference_structure.get('buses', {}).items():
- bus = cls._resolve_reference_structure(bus_data, arrays_dict)
+ bus = _resolve(bus_data, arrays_dict)
if not isinstance(bus, Bus):
logger.critical(f'Restoring bus {bus_label} failed.')
flow_system._add_buses(bus)
# Restore effects
for effect_label, effect_data in reference_structure.get('effects', {}).items():
- effect = cls._resolve_reference_structure(effect_data, arrays_dict)
+ effect = _resolve(effect_data, arrays_dict)
if not isinstance(effect, Effect):
logger.critical(f'Restoring effect {effect_label} failed.')
flow_system._add_effects(effect)
@@ -1845,7 +1890,7 @@ def _restore_clustering(
else:
main_var_names.append(name)
- clustering = fs_cls._resolve_reference_structure(clustering_structure, clustering_arrays)
+ clustering = _get_resolve_reference_structure()(clustering_structure, clustering_arrays)
flow_system.clustering = clustering
# Reconstruct aggregated_data from FlowSystem's main data arrays
@@ -1866,11 +1911,12 @@ def _restore_metadata(
cls: type[FlowSystem],
) -> None:
"""Restore carriers from reference structure."""
+ _resolve = _get_resolve_reference_structure()
# Restore carriers if present
if 'carriers' in reference_structure:
carriers_structure = json.loads(reference_structure['carriers'])
for carrier_data in carriers_structure.values():
- carrier = cls._resolve_reference_structure(carrier_data, {})
+ carrier = _resolve(carrier_data, {})
flow_system.carriers.add(carrier)
# --- Serialization (FlowSystem -> Dataset) ---
@@ -1962,9 +2008,10 @@ def _add_solution_to_dataset(
def _add_carriers_to_dataset(ds: xr.Dataset, carriers: Any) -> xr.Dataset:
"""Add carrier definitions to dataset attributes."""
if carriers:
+ _create_ref_fn = _get_create_reference_structure()
carriers_structure = {}
for name, carrier in carriers.items():
- carrier_ref, _ = carrier._create_reference_structure()
+ carrier_ref, _ = _create_ref_fn(carrier)
carriers_structure[name] = carrier_ref
ds.attrs['carriers'] = json.dumps(carriers_structure, ensure_ascii=False)
diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py
index 0212e73e4..e14a94453 100644
--- a/flixopt/linear_converters.py
+++ b/flixopt/linear_converters.py
@@ -5,6 +5,7 @@
from __future__ import annotations
import logging
+import warnings
from typing import TYPE_CHECKING
import numpy as np
@@ -84,6 +85,11 @@ def __init__(
color: str | None = None,
**kwargs,
):
+ warnings.warn(
+ 'Boiler is deprecated. Use Converter.boiler() instead. Will be removed in a future release.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
# Validate required parameters
if fuel_flow is None:
raise ValueError(f"'{id}': fuel_flow is required and cannot be None")
@@ -183,6 +189,11 @@ def __init__(
color: str | None = None,
**kwargs,
):
+ warnings.warn(
+ 'Power2Heat is deprecated. Use Converter.power2heat() instead. Will be removed in a future release.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
# Validate required parameters
if electrical_flow is None:
raise ValueError(f"'{id}': electrical_flow is required and cannot be None")
@@ -282,6 +293,11 @@ def __init__(
color: str | None = None,
**kwargs,
):
+ warnings.warn(
+ 'HeatPump is deprecated. Use Converter.heat_pump() instead. Will be removed in a future release.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
# Validate required parameters
if electrical_flow is None:
raise ValueError(f"'{id}': electrical_flow is required and cannot be None")
@@ -383,6 +399,11 @@ def __init__(
color: str | None = None,
**kwargs,
):
+ warnings.warn(
+ 'CoolingTower is deprecated. Use Converter.cooling_tower() instead. Will be removed in a future release.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
# Validate required parameters
if electrical_flow is None:
raise ValueError(f"'{id}': electrical_flow is required and cannot be None")
@@ -491,6 +512,11 @@ def __init__(
color: str | None = None,
**kwargs,
):
+ warnings.warn(
+ 'CHP is deprecated. Use Converter.chp() instead. Will be removed in a future release.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
# Validate required parameters
if fuel_flow is None:
raise ValueError(f"'{id}': fuel_flow is required and cannot be None")
@@ -625,6 +651,12 @@ def __init__(
color: str | None = None,
**kwargs,
):
+ warnings.warn(
+ 'HeatPumpWithSource is deprecated. Use Converter.heat_pump_with_source() instead. '
+ 'Will be removed in a future release.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
# Validate required parameters
if electrical_flow is None:
raise ValueError(f"'{id}': electrical_flow is required and cannot be None")
diff --git a/flixopt/network_app.py b/flixopt/network_app.py
index db8ed612f..8ed32e76e 100644
--- a/flixopt/network_app.py
+++ b/flixopt/network_app.py
@@ -18,7 +18,7 @@
DASH_CYTOSCAPE_AVAILABLE = False
VISUALIZATION_ERROR = str(e)
-from .components import LinearConverter, Sink, Source, SourceAndSink, Storage
+from .components import Converter, LinearConverter, Port, Sink, Source, SourceAndSink, Storage
from .config import SUCCESS_LEVEL
from .elements import Bus
@@ -131,13 +131,20 @@ def get_element_type(element):
"""Determine element type for coloring"""
if isinstance(element, Bus):
return 'Bus'
+ elif isinstance(element, Port):
+ if element.exports and not element.imports:
+ return 'Sink'
+ elif element.imports and not element.exports:
+ return 'Source'
+ else:
+ return 'Sink'
elif isinstance(element, Source):
return 'Source'
elif isinstance(element, (Sink, SourceAndSink)):
return 'Sink'
elif isinstance(element, Storage):
return 'Storage'
- elif isinstance(element, LinearConverter):
+ elif isinstance(element, (Converter, LinearConverter)):
return 'Converter'
else:
return 'Other'
diff --git a/flixopt/optimization.py b/flixopt/optimization.py
index e95eb15b8..a49782706 100644
--- a/flixopt/optimization.py
+++ b/flixopt/optimization.py
@@ -576,11 +576,7 @@ def __init__(
# Storing all original start values
self._original_start_values = {
**{flow.id: flow.previous_flow_rate for flow in self.flow_system.flows.values()},
- **{
- comp.id: comp.initial_charge_state
- for comp in self.flow_system.components.values()
- if isinstance(comp, Storage)
- },
+ **{comp.id: comp.initial_charge_state for comp in self.flow_system.storages.values()},
}
self._transfered_start_values: list[dict[str, Any]] = []
@@ -748,8 +744,8 @@ def _transfer_start_values(self, i: int):
# Get previous charge state from type-level model
storages_model = current_model._storages_model
- for current_comp in current_flow_system.components.values():
- next_comp = next_flow_system.components[current_comp.id]
+ for current_comp in current_flow_system.storages.values():
+ next_comp = next_flow_system.storages[current_comp.id]
if isinstance(next_comp, Storage):
if storages_model is not None:
charge_state = storages_model.get_variable(StorageVarName.CHARGE, current_comp.id)
diff --git a/flixopt/optimize_accessor.py b/flixopt/optimize_accessor.py
index 3328219b0..1bf334f39 100644
--- a/flixopt/optimize_accessor.py
+++ b/flixopt/optimize_accessor.py
@@ -339,7 +339,6 @@ def _transfer_state(
- Flow previous_flow_rate: Last nr_of_previous_values from non-overlap portion
- Storage initial_charge_state: Charge state at end of non-overlap portion
"""
- from .components import Storage
solution = source_fs.solution
time_slice = slice(horizon - nr_of_previous_values, horizon)
@@ -352,11 +351,10 @@ def _transfer_state(
target_flow.previous_flow_rate = values.item() if values.size == 1 else values
# Transfer storage charge states
- for label, target_comp in target_fs.components.items():
- if isinstance(target_comp, Storage):
- var_name = f'{label}|charge_state'
- if var_name in solution:
- target_comp.initial_charge_state = solution[var_name].isel(time=horizon).item()
+ for label, target_comp in target_fs.storages.items():
+ var_name = f'{label}|charge_state'
+ if var_name in solution:
+ target_comp.initial_charge_state = solution[var_name].isel(time=horizon).item()
def _check_no_investments(self, segment_fs: FlowSystem) -> None:
"""Check that no InvestParameters are used (not supported in rolling horizon)."""
@@ -369,10 +367,8 @@ def _check_no_investments(self, segment_fs: FlowSystem) -> None:
invest_elements.append(flow.id)
# Check storages for InvestParameters
- from .components import Storage
-
- for comp in segment_fs.components.values():
- if isinstance(comp, Storage) and isinstance(comp.capacity, InvestParameters):
+ for comp in segment_fs.storages.values():
+ if isinstance(comp.capacity, InvestParameters):
invest_elements.append(comp.id)
if invest_elements:
diff --git a/flixopt/statistics_accessor.py b/flixopt/statistics_accessor.py
index 7f4b1ed35..c81a1fe67 100644
--- a/flixopt/statistics_accessor.py
+++ b/flixopt/statistics_accessor.py
@@ -2383,8 +2383,8 @@ def storage(
raise ValueError(f"'{storage}' is not a storage (no charge_state variable found)")
# Get flow data
- input_labels = [f.id for f in component.inputs.values()]
- output_labels = [f.id for f in component.outputs.values()]
+ input_labels = [f.id for f in component.flows.values() if f.is_input_in_component]
+ output_labels = [f.id for f in component.flows.values() if not f.is_input_in_component]
all_labels = input_labels + output_labels
source_da = self._stats.flow_rates if unit == 'flow_rate' else self._stats.flow_hours
diff --git a/flixopt/structure.py b/flixopt/structure.py
index 8f26746d1..14a5095dc 100644
--- a/flixopt/structure.py
+++ b/flixopt/structure.py
@@ -5,10 +5,10 @@
from __future__ import annotations
+import dataclasses
import inspect
import json
import logging
-import pathlib
import re
import warnings
from abc import ABC, abstractmethod
@@ -17,7 +17,6 @@
from typing import (
TYPE_CHECKING,
Any,
- ClassVar,
Generic,
Literal,
TypeVar,
@@ -30,7 +29,7 @@
from . import io as fx_io
from .config import DEPRECATION_REMOVAL_VERSION
-from .core import FlowSystemDimensions, TimeSeriesData, get_dataarray_stats
+from .core import TimeSeriesData, align_to_coords, get_dataarray_stats
from .id_list import IdList
if TYPE_CHECKING: # for type checking and preventing circular imports
@@ -38,7 +37,6 @@
from .effects import EffectsModel
from .flow_system import FlowSystem
- from .types import Effect_TPS, Numeric_TPS, NumericOrBool
logger = logging.getLogger('flixopt')
@@ -677,6 +675,343 @@ def register_class_for_io(cls):
return cls
+# =============================================================================
+# Standalone Serialization Functions (path-based DataArray naming)
+# =============================================================================
+
+
+def _is_numeric(obj: Any) -> bool:
+ """Check if an object is a numeric value that should be stored as a DataArray.
+
+ Matches arrays (np.ndarray, pd.Series, pd.DataFrame) and scalars
+ (int, float, np.integer, np.floating). Excludes bool (subclass of int).
+
+ Storing numerics as DataArrays enables:
+ - Dataset operations (resampling, selection, etc.)
+ - Efficient binary storage in NetCDF
+ - Dtype preservation
+ """
+ if isinstance(obj, bool):
+ return False
+ return isinstance(obj, (np.ndarray, pd.Series, pd.DataFrame, int, float, np.integer, np.floating))
+
+
+def create_reference_structure(
+ obj, path_prefix: str = '', coords: dict[str, pd.Index] | None = None
+) -> tuple[dict, dict[str, xr.DataArray]]:
+ """Extract DataArrays from any registered object, using path-based keys.
+
+    Works with any object whose class is in CLASS_REGISTRY,
+    any dataclass, or any object
+ with an inspectable ``__init__``.
+
+    DataArray keys are deterministic paths built from the object hierarchy:
+    ``element_id|param_name`` for top-level, ``element_id|param|sub_param`` for nested.
+
+ Args:
+ obj: Object to serialize.
+ path_prefix: Path prefix for DataArray keys (e.g., ``'components.Boiler'``).
+ coords: Model coordinates for aligning numeric arrays. When provided,
+ numpy arrays / pandas objects are converted to properly-dimensioned
+ DataArrays via ``align_to_coords``, ensuring they participate in
+ dataset operations (resampling, selection) and avoid dimension conflicts.
+
+ Returns:
+ Tuple of (reference_structure dict, extracted_arrays dict).
+ """
+ structure: dict[str, Any] = {'__class__': obj.__class__.__name__}
+ all_arrays: dict[str, xr.DataArray] = {}
+
+ params = _get_serializable_params(obj)
+
+ for name, value in params.items():
+ if value is None:
+ continue
+ if isinstance(value, pd.Index):
+ logger.debug(f'Skipping {name=} because it is an Index')
+ continue
+
+ param_path = f'{path_prefix}|{name}' if path_prefix else name
+ processed, arrays = _extract_recursive(value, param_path, coords)
+ all_arrays.update(arrays)
+ if processed is not None:
+ structure[name] = processed
+
+ return structure, all_arrays
+
+
+def _extract_recursive(
+ obj: Any, path: str, coords: dict[str, pd.Index] | None = None
+) -> tuple[Any, dict[str, xr.DataArray]]:
+ """Recursively extract DataArrays, using *path* as the array key.
+
+ Handles DataArrays, numeric arrays (np.ndarray, pd.Series, pd.DataFrame),
+ registered classes, plain dataclasses, dicts, lists, tuples, sets, IdList,
+ and scalar/basic types.
+
+ When *coords* is provided, numeric arrays are aligned to model dimensions
+ via ``align_to_coords`` to get proper dimension names.
+ """
+ arrays: dict[str, xr.DataArray] = {}
+
+ if isinstance(obj, xr.DataArray):
+ # Align DataArrays with generic dims (dim_0, dim_1, ...) to model coords
+ # so they are stored with proper dimension names in the dataset.
+ if coords is not None and any(d.startswith('dim_') for d in obj.dims):
+ obj = align_to_coords(obj, coords, name=path)
+ arrays[path] = obj.rename(path)
+ return f':::{path}', arrays
+
+ # Numeric values → DataArray for dataset operations and binary NetCDF storage.
+ if coords is not None and _is_numeric(obj):
+ da = align_to_coords(obj, coords, name=path)
+ arrays[path] = da.rename(path)
+ return f':::{path}', arrays
+
+ if obj.__class__.__name__ in CLASS_REGISTRY:
+ return create_reference_structure(obj, path_prefix=path, coords=coords)
+
+ if dataclasses.is_dataclass(obj) and not isinstance(obj, type):
+ structure: dict[str, Any] = {'__class__': obj.__class__.__name__}
+ for field in dataclasses.fields(obj):
+ value = getattr(obj, field.name)
+ if value is None:
+ continue
+ processed, field_arrays = _extract_recursive(value, f'{path}|{field.name}', coords)
+ arrays.update(field_arrays)
+ if processed is not None and not _is_empty(processed):
+ structure[field.name] = processed
+ return structure, arrays
+
+ if isinstance(obj, IdList):
+ processed_list: list[Any] = []
+ for key, item in obj.items():
+ p, a = _extract_recursive(item, f'{path}|{key}', coords)
+ arrays.update(a)
+ processed_list.append(p)
+ return processed_list, arrays
+
+ if isinstance(obj, dict):
+ processed_dict = {}
+ for key, value in obj.items():
+ p, a = _extract_recursive(value, f'{path}|{key}', coords)
+ arrays.update(a)
+ processed_dict[key] = p
+ return processed_dict, arrays
+
+ if isinstance(obj, (list, tuple)):
+ processed_list: list[Any] = []
+ for i, item in enumerate(obj):
+ p, a = _extract_recursive(item, f'{path}|{i}', coords)
+ arrays.update(a)
+ processed_list.append(p)
+ return processed_list, arrays
+
+ if isinstance(obj, set):
+ processed_list = []
+ for i, item in enumerate(obj):
+ p, a = _extract_recursive(item, f'{path}|{i}', coords)
+ arrays.update(a)
+ processed_list.append(p)
+ return processed_list, arrays
+
+ # Scalar / basic type
+ return _to_basic_type(obj), arrays
+
+
+def _has_dataclass_init(cls: type) -> bool:
+ """Check if a class uses a dataclass-generated __init__ (not a custom override).
+
+ Returns True only when @dataclass was applied directly to ``cls`` with init=True.
+ Classes that merely inherit from a dataclass (e.g. Boiler(LinearConverter))
+ but define their own __init__ return False.
+ """
+ params = cls.__dict__.get('__dataclass_params__')
+ return params is not None and params.init
+
+
+def _get_serializable_params(obj) -> dict[str, Any]:
+ """Get name->value pairs for serialization from ``__init__`` parameters."""
+ _skip = {'self', 'label', 'label_as_positional', 'args', 'kwargs'}
+
+ # Class-level exclusion set for IO serialization
+ io_exclude = getattr(obj.__class__, '_io_exclude', set())
+ _skip |= io_exclude
+
+ # Prefer dataclass fields when class uses dataclass-generated __init__
+ if _has_dataclass_init(obj.__class__):
+ return {f.name: getattr(obj, f.name, None) for f in dataclasses.fields(obj) if f.name not in _skip and f.init}
+
+ # Fallback for non-dataclass or custom-__init__ classes
+ sig = inspect.signature(obj.__init__)
+ return {name: getattr(obj, name, None) for name in sig.parameters if name not in _skip}
+
+
+def _to_basic_type(obj: Any) -> Any:
+ """Convert a single value to a JSON-compatible basic Python type."""
+ if obj is None or isinstance(obj, (str, int, float, bool)):
+ return obj
+ if isinstance(obj, np.integer):
+ return int(obj)
+ if isinstance(obj, np.floating):
+ return float(obj)
+ if isinstance(obj, np.bool_):
+ return bool(obj)
+ if isinstance(obj, (np.ndarray, pd.Series, pd.DataFrame)):
+ return obj.tolist() if hasattr(obj, 'tolist') else list(obj)
+ if isinstance(obj, dict):
+ return {k: _to_basic_type(v) for k, v in obj.items()}
+ if isinstance(obj, (list, tuple)):
+ return [_to_basic_type(item) for item in obj]
+ if isinstance(obj, set):
+ return [_to_basic_type(item) for item in obj]
+ if hasattr(obj, 'isoformat'):
+ return obj.isoformat()
+ if hasattr(obj, '__dict__'):
+ logger.warning(f'Converting custom object {type(obj)} to dict representation: {obj}')
+ return {str(k): _to_basic_type(v) for k, v in obj.__dict__.items()}
+ logger.error(f'Converting unknown type {type(obj)} to string: {obj}')
+ return str(obj)
+
+
+def _is_empty(obj: Any) -> bool:
+ """Check if object is an empty container (dict, list, tuple, set)."""
+ return isinstance(obj, (dict, list, tuple, set)) and len(obj) == 0
+
+
def resolve_reference_structure(structure: Any, arrays_dict: 'dict[str, xr.DataArray]') -> Any:
    """Resolve a reference structure back to actual objects.

    Resolves ``:::path`` DataArray references and reconstructs registered
    classes from ``__class__`` markers. Lists and dicts are resolved
    recursively: ``None`` results are dropped from lists, while dict keys
    whose original value was ``None`` are kept.

    Args:
        structure: Structure containing ``:::path`` references or ``__class__`` markers.
        arrays_dict: Dictionary mapping path keys to DataArrays.

    Returns:
        Resolved structure with DataArrays and reconstructed objects.

    Raises:
        ValueError: If a ``__class__`` marker names a class missing from
            ``CLASS_REGISTRY``, or instantiating a registered class fails.
    """
    # Handle DataArray references
    if isinstance(structure, str) and structure.startswith(':::'):
        return _resolve_dataarray_reference(structure, arrays_dict)

    if isinstance(structure, list):
        resolved_list = []
        for item in structure:
            resolved_item = resolve_reference_structure(item, arrays_dict)
            if resolved_item is not None:  # drop entries that resolved to nothing
                resolved_list.append(resolved_item)
        return resolved_list

    if isinstance(structure, dict):
        if not structure.get('__class__'):
            # Regular dictionary: resolve values, keeping keys whose
            # original value was None.
            resolved_dict = {}
            for key, value in structure.items():
                resolved_value = resolve_reference_structure(value, arrays_dict)
                if resolved_value is not None or value is None:
                    resolved_dict[key] = resolved_value
            return resolved_dict

        class_name = structure['__class__']
        if class_name not in CLASS_REGISTRY:
            raise ValueError(
                f"Class '{class_name}' not found in CLASS_REGISTRY. "
                f'Available classes: {list(CLASS_REGISTRY.keys())}'
            )

        nested_class = CLASS_REGISTRY[class_name]
        nested_data = {k: v for k, v in structure.items() if k != '__class__'}
        resolved_nested_data = resolve_reference_structure(nested_data, arrays_dict)

        try:
            # Discover init parameters — prefer dataclass fields
            if _has_dataclass_init(nested_class):
                init_params = {f.name for f in dataclasses.fields(nested_class) if f.init} | {'self'}
            else:
                init_params = set(inspect.signature(nested_class.__init__).parameters.keys())

            # Filter out legacy runtime attrs from old serialized files
            _legacy_deferred = {'_variable_names', '_constraint_names'}
            constructor_data = {k: v for k, v in resolved_nested_data.items() if k not in _legacy_deferred}

            # Handle renamed parameters from old serialized data:
            # label -> flow_id for Flow, label -> id for other elements.
            if 'label' in constructor_data and 'label' not in init_params:
                new_key = 'flow_id' if 'flow_id' in init_params else 'id'
                constructor_data[new_key] = constructor_data.pop('label')
            if 'id' in constructor_data and 'id' not in init_params and 'flow_id' in init_params:
                constructor_data['flow_id'] = constructor_data.pop('id')

            # Check for unknown parameters — likely typos or renamed params
            unknown_params = set(constructor_data.keys()) - init_params
            if unknown_params:
                raise TypeError(
                    f'{class_name}.__init__() got unexpected keyword arguments: {unknown_params}. '
                    f'This may indicate renamed parameters that need conversion. '
                    f'Valid parameters are: {init_params - {"self"}}'
                )

            return nested_class(**constructor_data)
        except Exception as e:
            # A separate `except TypeError` clause would be redundant:
            # every failure is reported uniformly with the class name.
            raise ValueError(f'Failed to create instance of {class_name}: {e}') from e

    return structure
+
+
def _resolve_dataarray_reference(
    reference: str, arrays_dict: 'dict[str, xr.DataArray]'
) -> 'xr.DataArray | TimeSeriesData | Any':
    """Resolve a single ``:::path`` DataArray reference.

    Args:
        reference: Reference string starting with ``:::``.
        arrays_dict: Dictionary of available DataArrays.

    Returns:
        The resolved DataArray, a TimeSeriesData object when the array was
        serialized from one, or a plain Python scalar for 0-d arrays.

    Raises:
        ValueError: If the referenced array is not present in ``arrays_dict``.
    """
    array_name = reference[3:]  # strip the ':::' prefix
    if array_name not in arrays_dict:
        raise ValueError(f"Referenced DataArray '{array_name}' not found in dataset")

    array = arrays_dict[array_name]

    # Null values are unexpected here: log at error level, then drop
    # all-null time steps. Plain numpy checks are used instead of
    # xarray's isnull for performance.
    has_nulls = (np.issubdtype(array.dtype, np.floating) and np.any(np.isnan(array.values))) or (
        array.dtype == object and pd.isna(array.values).any()
    )
    if has_nulls:
        logger.error(f"DataArray '{array_name}' contains null values. Dropping all-null along present dims.")
        if 'time' in array.dims:
            array = array.dropna(dim='time', how='all')

    if TimeSeriesData.is_timeseries_data(array):
        return TimeSeriesData.from_dataarray(array)

    # Unwrap 0-d DataArrays back to Python scalars
    if array.ndim == 0:
        return array.item()

    return array
+
+
def replace_references_with_stats(structure, arrays_dict: dict[str, xr.DataArray]):
    """Replace ``:::path`` DataArray references with statistical summaries.

    Dicts and lists are traversed recursively; any other value (including
    references to arrays absent from ``arrays_dict``) passes through unchanged.
    """
    if isinstance(structure, dict):
        return {key: replace_references_with_stats(val, arrays_dict) for key, val in structure.items()}
    if isinstance(structure, list):
        return [replace_references_with_stats(entry, arrays_dict) for entry in structure]
    if isinstance(structure, str) and structure.startswith(':::'):
        name = structure[3:]
        if name in arrays_dict:
            return get_dataarray_stats(arrays_dict[name])
    return structure
+
+
class _BuildTimer:
"""Simple timing helper for build_model profiling."""
@@ -755,7 +1090,9 @@ def _populate_element_variable_names(self):
self._populate_names_from_type_level_models()
def _populate_names_from_type_level_models(self):
- """Populate element variable/constraint names from type-level models."""
+ """Populate element variable/constraint names in FlowSystem registry."""
+ var_names = self.flow_system._element_variable_names
+ con_names = self.flow_system._element_constraint_names
# Helper to find batched variables that contain a specific element ID in a dimension
def _find_vars_for_element(element_id: str, dim_name: str) -> list[str]:
@@ -763,73 +1100,79 @@ def _find_vars_for_element(element_id: str, dim_name: str) -> list[str]:
Returns the batched variable names (e.g., 'flow|rate', 'storage|charge').
"""
- var_names = []
+ result = []
for var_name in self.variables:
var = self.variables[var_name]
if dim_name in var.dims:
try:
if element_id in var.coords[dim_name].values:
- var_names.append(var_name)
+ result.append(var_name)
except (KeyError, AttributeError):
pass
- return var_names
+ return result
def _find_constraints_for_element(element_id: str, dim_name: str) -> list[str]:
"""Find all constraint names that have this element in their dimension."""
- con_names = []
+ result = []
for con_name in self.constraints:
con = self.constraints[con_name]
if dim_name in con.dims:
try:
if element_id in con.coords[dim_name].values:
- con_names.append(con_name)
+ result.append(con_name)
except (KeyError, AttributeError):
pass
# Also check for element-specific constraints (e.g., bus|BusLabel|balance)
elif element_id in con_name.split('|'):
- con_names.append(con_name)
- return con_names
+ result.append(con_name)
+ return result
# Populate flows
for flow in self.flow_system.flows.values():
- flow._variable_names = _find_vars_for_element(flow.id, 'flow')
- flow._constraint_names = _find_constraints_for_element(flow.id, 'flow')
+ var_names[flow.id] = _find_vars_for_element(flow.id, 'flow')
+ con_names[flow.id] = _find_constraints_for_element(flow.id, 'flow')
# Populate buses
for bus in self.flow_system.buses.values():
- bus._variable_names = _find_vars_for_element(bus.id, 'bus')
- bus._constraint_names = _find_constraints_for_element(bus.id, 'bus')
+ var_names[bus.id] = _find_vars_for_element(bus.id, 'bus')
+ con_names[bus.id] = _find_constraints_for_element(bus.id, 'bus')
# Populate storages
from .components import Storage
for comp in self.flow_system.components.values():
if isinstance(comp, Storage):
- comp._variable_names = _find_vars_for_element(comp.id, 'storage')
- comp._constraint_names = _find_constraints_for_element(comp.id, 'storage')
+ comp_vars = _find_vars_for_element(comp.id, 'storage')
+ comp_cons = _find_constraints_for_element(comp.id, 'storage')
# Also add flow variables (storages have charging/discharging flows)
for flow in comp.flows.values():
- comp._variable_names.extend(flow._variable_names)
- comp._constraint_names.extend(flow._constraint_names)
+ comp_vars.extend(var_names[flow.id])
+ comp_cons.extend(con_names[flow.id])
+ var_names[comp.id] = comp_vars
+ con_names[comp.id] = comp_cons
else:
# Generic component - collect from child flows
- comp._variable_names = []
- comp._constraint_names = []
+ comp_vars = []
+ comp_cons = []
# Add component-level variables (status, etc.)
- comp._variable_names.extend(_find_vars_for_element(comp.id, 'component'))
- comp._constraint_names.extend(_find_constraints_for_element(comp.id, 'component'))
+ comp_vars.extend(_find_vars_for_element(comp.id, 'component'))
+ comp_cons.extend(_find_constraints_for_element(comp.id, 'component'))
# Add flow variables
for flow in comp.flows.values():
- comp._variable_names.extend(flow._variable_names)
- comp._constraint_names.extend(flow._constraint_names)
+ comp_vars.extend(var_names[flow.id])
+ comp_cons.extend(con_names[flow.id])
+ var_names[comp.id] = comp_vars
+ con_names[comp.id] = comp_cons
# Populate effects
for effect in self.flow_system.effects.values():
- effect._variable_names = _find_vars_for_element(effect.id, 'effect')
- effect._constraint_names = _find_constraints_for_element(effect.id, 'effect')
+ var_names[effect.id] = _find_vars_for_element(effect.id, 'effect')
+ con_names[effect.id] = _find_constraints_for_element(effect.id, 'effect')
def _build_results_structure(self) -> dict[str, dict]:
"""Build results structure for all elements using type-level models."""
+ var_names = self.flow_system._element_variable_names
+ con_names = self.flow_system._element_constraint_names
results = {
'Components': {},
@@ -840,13 +1183,16 @@ def _build_results_structure(self) -> dict[str, dict]:
# Components
for comp in sorted(self.flow_system.components.values(), key=lambda c: c.id.upper()):
- flow_ids = [f.id for f in comp.flows.values()]
+ flows_list = list(comp.flows.values())
+ flow_ids = [f.id for f in flows_list]
+ inputs_count = sum(1 for f in flows_list if f.is_input_in_component)
+ outputs_count = len(flows_list) - inputs_count
results['Components'][comp.id] = {
'id': comp.id,
- 'variables': comp._variable_names,
- 'constraints': comp._constraint_names,
- 'inputs': ['flow|rate'] * len(comp.inputs),
- 'outputs': ['flow|rate'] * len(comp.outputs),
+ 'variables': var_names.get(comp.id, []),
+ 'constraints': con_names.get(comp.id, []),
+ 'inputs': ['flow|rate'] * inputs_count,
+ 'outputs': ['flow|rate'] * outputs_count,
'flows': flow_ids,
}
@@ -859,8 +1205,8 @@ def _build_results_structure(self) -> dict[str, dict]:
output_vars.append('bus|virtual_demand')
results['Buses'][bus.id] = {
'id': bus.id,
- 'variables': bus._variable_names,
- 'constraints': bus._constraint_names,
+ 'variables': var_names.get(bus.id, []),
+ 'constraints': con_names.get(bus.id, []),
'inputs': input_vars,
'outputs': output_vars,
'flows': [f.id for f in bus.flows.values()],
@@ -870,16 +1216,16 @@ def _build_results_structure(self) -> dict[str, dict]:
for effect in sorted(self.flow_system.effects.values(), key=lambda e: e.id.upper()):
results['Effects'][effect.id] = {
'id': effect.id,
- 'variables': effect._variable_names,
- 'constraints': effect._constraint_names,
+ 'variables': var_names.get(effect.id, []),
+ 'constraints': con_names.get(effect.id, []),
}
# Flows
for flow in sorted(self.flow_system.flows.values(), key=lambda f: f.id.upper()):
results['Flows'][flow.id] = {
'id': flow.id,
- 'variables': flow._variable_names,
- 'constraints': flow._constraint_names,
+ 'variables': var_names.get(flow.id, []),
+ 'constraints': con_names.get(flow.id, []),
'start': flow.bus if flow.is_input_in_component else flow.component,
'end': flow.component if flow.is_input_in_component else flow.bus,
'component': flow.component,
@@ -1127,7 +1473,7 @@ def objective_weights(self) -> xr.DataArray:
elif default_weights is not None:
period_weights = default_weights
else:
- period_weights = obj_effect._fit_coords(name='period_weights', data=1, dims=['period'])
+ period_weights = align_to_coords(1, self.flow_system.indexes, name='period_weights', dims=['period'])
scenario_weights = self.scenario_weights
return period_weights * scenario_weights
@@ -1189,857 +1535,33 @@ def __repr__(self) -> str:
return f'{title}\n{"=" * len(title)}\n\n{all_sections}'
-class Interface:
- """
- Base class for all Elements and Models in flixopt that provides serialization capabilities.
-
- This class enables automatic serialization/deserialization of objects containing xarray DataArrays
- and nested Interface objects to/from xarray Datasets and NetCDF files. It uses introspection
- of constructor parameters to automatically handle most serialization scenarios.
-
- Key Features:
- - Automatic extraction and restoration of xarray DataArrays
- - Support for nested Interface objects
- - NetCDF and JSON export/import
- - Recursive handling of complex nested structures
+def valid_id(id: str) -> str:
+ """Check if the id is valid and return it (possibly stripped).
- Subclasses must implement:
- transform_data(): Transform data to match FlowSystem dimensions
+ Raises:
+ ValueError: If the id contains forbidden characters.
"""
-
- # Class-level defaults for attributes set by link_to_flow_system()
- # These provide type hints and default values without requiring __init__ in subclasses
- _flow_system: FlowSystem | None = None
- _prefix: str = ''
-
- def transform_data(self) -> None:
- """Transform the data of the interface to match the FlowSystem's dimensions.
-
- Uses `self._prefix` (set during `link_to_flow_system()`) to name transformed data.
-
- Raises:
- NotImplementedError: Must be implemented by subclasses
-
- Note:
- The FlowSystem reference is available via self._flow_system (for Interface objects)
- or self.flow_system property (for Element objects). Elements must be registered
- to a FlowSystem before calling this method.
- """
- raise NotImplementedError('Every Interface subclass needs a transform_data() method')
-
- @property
- def prefix(self) -> str:
- """The prefix used for naming transformed data (e.g., 'Boiler(Q_th)|status_parameters')."""
- return self._prefix
-
- def _sub_prefix(self, name: str) -> str:
- """Build a prefix for a nested interface by appending name to current prefix."""
- return f'{self._prefix}|{name}' if self._prefix else name
-
- def link_to_flow_system(self, flow_system: FlowSystem, prefix: str = '') -> None:
- """Link this interface and all nested interfaces to a FlowSystem.
-
- This method is called automatically during element registration to enable
- elements to access FlowSystem properties without passing the reference
- through every method call. It also sets the prefix used for naming
- transformed data.
-
- Subclasses with nested Interface objects should override this method
- to propagate the link to their nested interfaces by calling
- `super().link_to_flow_system(flow_system, prefix)` first, then linking
- nested objects with appropriate prefixes.
-
- Args:
- flow_system: The FlowSystem to link to
- prefix: The prefix for naming transformed data (e.g., 'Boiler(Q_th)')
-
- Examples:
- Override in a subclass with nested interfaces:
-
- ```python
- def link_to_flow_system(self, flow_system, prefix: str = '') -> None:
- super().link_to_flow_system(flow_system, prefix)
- if self.nested_interface is not None:
- self.nested_interface.link_to_flow_system(flow_system, f'{prefix}|nested' if prefix else 'nested')
- ```
-
- Creating an Interface dynamically during modeling:
-
- ```python
- # In a Model class
- if flow.status_parameters is None:
- flow.status_parameters = StatusParameters()
- flow.status_parameters.link_to_flow_system(self._model.flow_system, f'{flow.id}')
- ```
- """
- self._flow_system = flow_system
- self._prefix = prefix
-
- @property
- def flow_system(self) -> FlowSystem:
- """Access the FlowSystem this interface is linked to.
-
- Returns:
- The FlowSystem instance this interface belongs to.
-
- Raises:
- RuntimeError: If interface has not been linked to a FlowSystem yet.
-
- Note:
- For Elements, this is set during add_elements().
- For parameter classes, this is set recursively when the parent Element is registered.
- """
- if self._flow_system is None:
- raise RuntimeError(
- f'{self.__class__.__name__} is not linked to a FlowSystem. '
- f'Ensure the parent element is registered via flow_system.add_elements() first.'
- )
- return self._flow_system
-
- def _fit_coords(
- self, name: str, data: NumericOrBool | None, dims: Collection[FlowSystemDimensions] | None = None
- ) -> xr.DataArray | None:
- """Convenience wrapper for FlowSystem.fit_to_model_coords().
-
- Args:
- name: The name for the data variable
- data: The data to transform
- dims: Optional dimension names
-
- Returns:
- Transformed data aligned to FlowSystem coordinates
- """
- return self.flow_system.fit_to_model_coords(name, data, dims=dims)
-
- def _fit_effect_coords(
- self,
- prefix: str | None,
- effect_values: Effect_TPS | Numeric_TPS | None,
- suffix: str | None = None,
- dims: Collection[FlowSystemDimensions] | None = None,
- ) -> Effect_TPS | None:
- """Convenience wrapper for FlowSystem.fit_effects_to_model_coords().
-
- Args:
- prefix: Label prefix for effect names
- effect_values: The effect values to transform
- suffix: Optional label suffix
- dims: Optional dimension names
-
- Returns:
- Transformed effect values aligned to FlowSystem coordinates
- """
- return self.flow_system.fit_effects_to_model_coords(prefix, effect_values, suffix, dims=dims)
-
- def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]:
- """
- Convert all DataArrays to references and extract them.
- This is the core method that both to_dict() and to_dataset() build upon.
-
- Returns:
- Tuple of (reference_structure, extracted_arrays_dict)
-
- Raises:
- ValueError: If DataArrays don't have unique names or are duplicated
- """
- # Get constructor parameters using caching for performance
- if not hasattr(self, '_cached_init_params'):
- self._cached_init_params = list(inspect.signature(self.__init__).parameters.keys())
-
- # Process all constructor parameters
- reference_structure = {'__class__': self.__class__.__name__}
- all_extracted_arrays = {}
-
- # Deprecated init params that should not be serialized (they alias other params)
- _deprecated_init_params = {'label', 'label_as_positional'}
- # On Flow, 'id' is deprecated in favor of 'flow_id'
- if 'flow_id' in self._cached_init_params:
- _deprecated_init_params.add('id')
-
- for name in self._cached_init_params:
- if name == 'self' or name in _deprecated_init_params:
- continue
-
- # For 'id' or 'flow_id' param, use _short_id to get the raw constructor value
- # (Flow.id property returns qualified name, but constructor expects short name)
- if name in ('id', 'flow_id') and hasattr(self, '_short_id'):
- value = self._short_id
- else:
- value = getattr(self, name, None)
-
- if value is None:
- continue
- if isinstance(value, pd.Index):
- logger.debug(f'Skipping {name=} because it is an Index')
- continue
-
- # Extract arrays and get reference structure
- processed_value, extracted_arrays = self._extract_dataarrays_recursive(value, name)
-
- # Check for array name conflicts
- conflicts = set(all_extracted_arrays.keys()) & set(extracted_arrays.keys())
- if conflicts:
- raise ValueError(
- f'DataArray name conflicts detected: {conflicts}. '
- f'Each DataArray must have a unique name for serialization.'
- )
-
- # Add extracted arrays to the collection
- all_extracted_arrays.update(extracted_arrays)
-
- # Only store in structure if it's not None/empty after processing
- if processed_value is not None and not self._is_empty_container(processed_value):
- reference_structure[name] = processed_value
-
- return reference_structure, all_extracted_arrays
-
- @staticmethod
- def _is_empty_container(obj) -> bool:
- """Check if object is an empty container (dict, list, tuple, set)."""
- return isinstance(obj, (dict, list, tuple, set)) and len(obj) == 0
-
- def _extract_dataarrays_recursive(self, obj, context_name: str = '') -> tuple[Any, dict[str, xr.DataArray]]:
- """
- Recursively extract DataArrays from nested structures.
-
- Args:
- obj: Object to process
- context_name: Name context for better error messages
-
- Returns:
- Tuple of (processed_object_with_references, extracted_arrays_dict)
-
- Raises:
- ValueError: If DataArrays don't have unique names
- """
- extracted_arrays = {}
-
- # Handle DataArrays directly - use their unique name
- if isinstance(obj, xr.DataArray):
- if not obj.name:
- raise ValueError(
- f'DataArrays must have a unique name for serialization. '
- f'Unnamed DataArray found in {context_name}. Please set array.name = "unique_name"'
- )
-
- array_name = str(obj.name) # Ensure string type
- if array_name in extracted_arrays:
- raise ValueError(
- f'DataArray name "{array_name}" is duplicated in {context_name}. '
- f'Each DataArray must have a unique name for serialization.'
- )
-
- extracted_arrays[array_name] = obj
- return f':::{array_name}', extracted_arrays
-
- # Handle Interface objects - extract their DataArrays too
- elif isinstance(obj, Interface):
- try:
- interface_structure, interface_arrays = obj._create_reference_structure()
- extracted_arrays.update(interface_arrays)
- return interface_structure, extracted_arrays
- except Exception as e:
- raise ValueError(f'Failed to process nested Interface object in {context_name}: {e}') from e
-
- # Handle sequences (lists, tuples)
- elif isinstance(obj, (list, tuple)):
- processed_items = []
- for i, item in enumerate(obj):
- item_context = f'{context_name}[{i}]' if context_name else f'item[{i}]'
- processed_item, nested_arrays = self._extract_dataarrays_recursive(item, item_context)
- extracted_arrays.update(nested_arrays)
- processed_items.append(processed_item)
- return processed_items, extracted_arrays
-
- # Handle IdList containers (treat as dict for serialization)
- elif isinstance(obj, IdList):
- processed_dict = {}
- for key, value in obj.items():
- key_context = f'{context_name}.{key}' if context_name else str(key)
- processed_value, nested_arrays = self._extract_dataarrays_recursive(value, key_context)
- extracted_arrays.update(nested_arrays)
- processed_dict[key] = processed_value
- return processed_dict, extracted_arrays
-
- # Handle dictionaries
- elif isinstance(obj, dict):
- processed_dict = {}
- for key, value in obj.items():
- key_context = f'{context_name}.{key}' if context_name else str(key)
- processed_value, nested_arrays = self._extract_dataarrays_recursive(value, key_context)
- extracted_arrays.update(nested_arrays)
- processed_dict[key] = processed_value
- return processed_dict, extracted_arrays
-
- # Handle sets (convert to list for JSON compatibility)
- elif isinstance(obj, set):
- processed_items = []
- for i, item in enumerate(obj):
- item_context = f'{context_name}.set_item[{i}]' if context_name else f'set_item[{i}]'
- processed_item, nested_arrays = self._extract_dataarrays_recursive(item, item_context)
- extracted_arrays.update(nested_arrays)
- processed_items.append(processed_item)
- return processed_items, extracted_arrays
-
- # For all other types, serialize to basic types
- else:
- return self._serialize_to_basic_types(obj), extracted_arrays
-
- def _handle_deprecated_kwarg(
- self,
- kwargs: dict,
- old_name: str,
- new_name: str,
- current_value: Any = None,
- transform: callable = None,
- check_conflict: bool = True,
- additional_warning_message: str = '',
- ) -> Any:
- """
- Handle a deprecated keyword argument by issuing a warning and returning the appropriate value.
-
- This centralizes the deprecation pattern used across multiple classes (Source, Sink, InvestParameters, etc.).
-
- Args:
- kwargs: Dictionary of keyword arguments to check and modify
- old_name: Name of the deprecated parameter
- new_name: Name of the replacement parameter
- current_value: Current value of the new parameter (if already set)
- transform: Optional callable to transform the old value before returning (e.g., lambda x: [x] to wrap in list)
- check_conflict: Whether to check if both old and new parameters are specified (default: True).
- Note: For parameters with non-None default values (e.g., bool parameters with default=False),
- set check_conflict=False since we cannot distinguish between an explicit value and the default.
- additional_warning_message: Add a custom message which gets appended with a line break to the default warning.
-
- Returns:
- The value to use (either from old parameter or current_value)
-
- Raises:
- ValueError: If both old and new parameters are specified and check_conflict is True
-
- Example:
- # For parameters where None is the default (conflict checking works):
- value = self._handle_deprecated_kwarg(kwargs, 'old_param', 'new_param', current_value)
-
- # For parameters with non-None defaults (disable conflict checking):
- mandatory = self._handle_deprecated_kwarg(
- kwargs, 'optional', 'mandatory', mandatory,
- transform=lambda x: not x,
- check_conflict=False # Cannot detect if mandatory was explicitly passed
- )
- """
- import warnings
-
- old_value = kwargs.pop(old_name, None)
- if old_value is not None:
- # Build base warning message
- base_warning = f'The use of the "{old_name}" argument is deprecated. Use the "{new_name}" argument instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.'
-
- # Append additional message on a new line if provided
- if additional_warning_message:
- # Normalize whitespace: strip leading/trailing whitespace
- extra_msg = additional_warning_message.strip()
- if extra_msg:
- base_warning += '\n' + extra_msg
-
- warnings.warn(
- base_warning,
- DeprecationWarning,
- stacklevel=3, # Stack: this method -> __init__ -> caller
- )
- # Check for conflicts: only raise error if both were explicitly provided
- if check_conflict and current_value is not None:
- raise ValueError(f'Either {old_name} or {new_name} can be specified, but not both.')
-
- # Apply transformation if provided
- if transform is not None:
- return transform(old_value)
- return old_value
-
- return current_value
-
- def _validate_kwargs(self, kwargs: dict, class_name: str = None) -> None:
- """
- Validate that no unexpected keyword arguments are present in kwargs.
-
- This method uses inspect to get the actual function signature and filters out
- any parameters that are not defined in the __init__ method, while also
- handling the special case of 'kwargs' itself which can appear during deserialization.
-
- Args:
- kwargs: Dictionary of keyword arguments to validate
- class_name: Optional class name for error messages. If None, uses self.__class__.__name__
-
- Raises:
- TypeError: If unexpected keyword arguments are found
- """
- if not kwargs:
- return
-
- import inspect
-
- sig = inspect.signature(self.__init__)
- known_params = set(sig.parameters.keys()) - {'self', 'kwargs'}
- # Also filter out 'kwargs' itself which can appear during deserialization
- extra_kwargs = {k: v for k, v in kwargs.items() if k not in known_params and k != 'kwargs'}
-
- if extra_kwargs:
- class_name = class_name or self.__class__.__name__
- unexpected_params = ', '.join(f"'{param}'" for param in extra_kwargs.keys())
- raise TypeError(f'{class_name}.__init__() got unexpected keyword argument(s): {unexpected_params}')
-
- @staticmethod
- def _has_value(param: Any) -> bool:
- """Check if a parameter has a meaningful value.
-
- Args:
- param: The parameter to check.
-
- Returns:
- False for:
- - None
- - Empty collections (dict, list, tuple, set, frozenset)
-
- True for all other values, including:
- - Non-empty collections
- - xarray DataArrays (even if they contain NaN/empty data)
- - Scalar values (0, False, empty strings, etc.)
- - NumPy arrays (even if empty - use .size to check those explicitly)
- """
- if param is None:
- return False
-
- # Check for empty collections (but not strings, arrays, or DataArrays)
- if isinstance(param, (dict, list, tuple, set, frozenset)) and len(param) == 0:
- return False
-
- return True
-
- @classmethod
- def _resolve_dataarray_reference(
- cls, reference: str, arrays_dict: dict[str, xr.DataArray]
- ) -> xr.DataArray | TimeSeriesData:
- """
- Resolve a single DataArray reference (:::name) to actual DataArray or TimeSeriesData.
-
- Args:
- reference: Reference string starting with ":::"
- arrays_dict: Dictionary of available DataArrays
-
- Returns:
- Resolved DataArray or TimeSeriesData object
-
- Raises:
- ValueError: If referenced array is not found
- """
- array_name = reference[3:] # Remove ":::" prefix
- if array_name not in arrays_dict:
- raise ValueError(f"Referenced DataArray '{array_name}' not found in dataset")
-
- array = arrays_dict[array_name]
-
- # Handle null values with warning (use numpy for performance - 200x faster than xarray)
- has_nulls = (np.issubdtype(array.dtype, np.floating) and np.any(np.isnan(array.values))) or (
- array.dtype == object and pd.isna(array.values).any()
+ not_allowed = ['(', ')', '|', '->', '\\', '-slash-'] # \\ is needed to check for \
+ if any([sign in id for sign in not_allowed]):
+ raise ValueError(
+ f'Id "{id}" is not valid. Ids cannot contain the following characters: {not_allowed}. '
+ f'Use any other symbol instead'
)
- if has_nulls:
- logger.error(f"DataArray '{array_name}' contains null values. Dropping all-null along present dims.")
- if 'time' in array.dims:
- array = array.dropna(dim='time', how='all')
-
- # Check if this should be restored as TimeSeriesData
- if TimeSeriesData.is_timeseries_data(array):
- return TimeSeriesData.from_dataarray(array)
-
- return array
-
- @classmethod
- def _resolve_reference_structure(cls, structure, arrays_dict: dict[str, xr.DataArray]):
- """
- Convert reference structure back to actual objects using provided arrays.
-
- Args:
- structure: Structure containing references (:::name) or special type markers
- arrays_dict: Dictionary of available DataArrays
-
- Returns:
- Structure with references resolved to actual DataArrays or objects
-
- Raises:
- ValueError: If referenced arrays are not found or class is not registered
- """
- # Handle DataArray references
- if isinstance(structure, str) and structure.startswith(':::'):
- return cls._resolve_dataarray_reference(structure, arrays_dict)
-
- elif isinstance(structure, list):
- resolved_list = []
- for item in structure:
- resolved_item = cls._resolve_reference_structure(item, arrays_dict)
- if resolved_item is not None: # Filter out None values from missing references
- resolved_list.append(resolved_item)
- return resolved_list
-
- elif isinstance(structure, dict):
- if structure.get('__class__'):
- class_name = structure['__class__']
- if class_name not in CLASS_REGISTRY:
- raise ValueError(
- f"Class '{class_name}' not found in CLASS_REGISTRY. "
- f'Available classes: {list(CLASS_REGISTRY.keys())}'
- )
-
- # This is a nested Interface object - restore it recursively
- nested_class = CLASS_REGISTRY[class_name]
- # Remove the __class__ key and process the rest
- nested_data = {k: v for k, v in structure.items() if k != '__class__'}
- # Resolve references in the nested data
- resolved_nested_data = cls._resolve_reference_structure(nested_data, arrays_dict)
-
- try:
- # Get valid constructor parameters for this class
- init_params = set(inspect.signature(nested_class.__init__).parameters.keys())
+ if id.endswith(' '):
+ logger.error(f'Id "{id}" ends with a space. This will be removed.')
+ return id.rstrip()
+ return id
- # Check for deferred init attributes (defined as class attribute on Element subclasses)
- # These are serialized but set after construction, not passed to child __init__
- deferred_attr_names = getattr(nested_class, '_deferred_init_attrs', set())
- deferred_attrs = {k: v for k, v in resolved_nested_data.items() if k in deferred_attr_names}
- constructor_data = {k: v for k, v in resolved_nested_data.items() if k not in deferred_attr_names}
-
- # Handle renamed parameters from old serialized data
- if 'label' in constructor_data and 'label' not in init_params:
- # label → id for most elements, label → flow_id for Flow
- new_key = 'flow_id' if 'flow_id' in init_params else 'id'
- constructor_data[new_key] = constructor_data.pop('label')
- if 'id' in constructor_data and 'id' not in init_params and 'flow_id' in init_params:
- # id → flow_id for Flow (from recently serialized data)
- constructor_data['flow_id'] = constructor_data.pop('id')
-
- # Check for unknown parameters - these could be typos or renamed params
- unknown_params = set(constructor_data.keys()) - init_params
- if unknown_params:
- raise TypeError(
- f'{class_name}.__init__() got unexpected keyword arguments: {unknown_params}. '
- f'This may indicate renamed parameters that need conversion. '
- f'Valid parameters are: {init_params - {"self"}}'
- )
-
- # Create instance with constructor parameters
- instance = nested_class(**constructor_data)
-
- # Set internal attributes after construction
- for attr_name, attr_value in deferred_attrs.items():
- setattr(instance, attr_name, attr_value)
-
- return instance
- except TypeError as e:
- raise ValueError(f'Failed to create instance of {class_name}: {e}') from e
- except Exception as e:
- raise ValueError(f'Failed to create instance of {class_name}: {e}') from e
- else:
- # Regular dictionary - resolve references in values
- resolved_dict = {}
- for key, value in structure.items():
- resolved_value = cls._resolve_reference_structure(value, arrays_dict)
- if resolved_value is not None or value is None: # Keep None values if they were originally None
- resolved_dict[key] = resolved_value
- return resolved_dict
-
- else:
- return structure
-
- def _serialize_to_basic_types(self, obj):
- """
- Convert object to basic Python types only (no DataArrays, no custom objects).
-
- Args:
- obj: Object to serialize
-
- Returns:
- Object converted to basic Python types (str, int, float, bool, list, dict)
- """
- if obj is None or isinstance(obj, (str, int, float, bool)):
- return obj
- elif isinstance(obj, np.integer):
- return int(obj)
- elif isinstance(obj, np.floating):
- return float(obj)
- elif isinstance(obj, np.bool_):
- return bool(obj)
- elif isinstance(obj, (np.ndarray, pd.Series, pd.DataFrame)):
- return obj.tolist() if hasattr(obj, 'tolist') else list(obj)
- elif isinstance(obj, dict):
- return {k: self._serialize_to_basic_types(v) for k, v in obj.items()}
- elif isinstance(obj, (list, tuple)):
- return [self._serialize_to_basic_types(item) for item in obj]
- elif isinstance(obj, set):
- return [self._serialize_to_basic_types(item) for item in obj]
- elif hasattr(obj, 'isoformat'): # datetime objects
- return obj.isoformat()
- elif hasattr(obj, '__dict__'): # Custom objects with attributes
- logger.warning(f'Converting custom object {type(obj)} to dict representation: {obj}')
- return {str(k): self._serialize_to_basic_types(v) for k, v in obj.__dict__.items()}
- else:
- # For any other object, try to convert to string as fallback
- logger.error(f'Converting unknown type {type(obj)} to string: {obj}')
- return str(obj)
-
- def to_dataset(self) -> xr.Dataset:
- """
- Convert the object to an xarray Dataset representation.
- All DataArrays become dataset variables, everything else goes to attrs.
-
- Its recommended to only call this method on Interfaces with all numeric data stored as xr.DataArrays.
- Interfaces inside a FlowSystem are automatically converted this form after connecting and transforming the FlowSystem.
-
- Returns:
- xr.Dataset: Dataset containing all DataArrays with basic objects only in attributes
-
- Raises:
- ValueError: If serialization fails due to naming conflicts or invalid data
- """
- try:
- reference_structure, extracted_arrays = self._create_reference_structure()
- # Create the dataset with extracted arrays as variables and structure as attrs
- return xr.Dataset(extracted_arrays, attrs=reference_structure)
- except Exception as e:
- raise ValueError(
- f'Failed to convert {self.__class__.__name__} to dataset. Its recommended to only call this method on '
- f'a fully connected and transformed FlowSystem, or Interfaces inside such a FlowSystem.'
- f'Original Error: {e}'
- ) from e
-
- def to_netcdf(self, path: str | pathlib.Path, compression: int = 5, overwrite: bool = False):
- """
- Save the object to a NetCDF file.
-
- Args:
- path: Path to save the NetCDF file. Parent directories are created if they don't exist.
- compression: Compression level (0-9)
- overwrite: If True, overwrite existing file. If False, raise error if file exists.
-
- Raises:
- FileExistsError: If overwrite=False and file already exists.
- ValueError: If serialization fails
- IOError: If file cannot be written
- """
- path = pathlib.Path(path)
-
- # Check if file exists (unless overwrite is True)
- if not overwrite and path.exists():
- raise FileExistsError(f'File already exists: {path}. Use overwrite=True to overwrite existing file.')
-
- # Create parent directories if they don't exist
- path.parent.mkdir(parents=True, exist_ok=True)
-
- try:
- ds = self.to_dataset()
- fx_io.save_dataset_to_netcdf(ds, path, compression=compression)
- except Exception as e:
- raise OSError(f'Failed to save {self.__class__.__name__} to NetCDF file {path}: {e}') from e
-
- @classmethod
- def from_dataset(cls, ds: xr.Dataset) -> Interface:
- """
- Create an instance from an xarray Dataset.
-
- Args:
- ds: Dataset containing the object data
-
- Returns:
- Interface instance
-
- Raises:
- ValueError: If dataset format is invalid or class mismatch
- """
- try:
- # Get class name and verify it matches
- class_name = ds.attrs.get('__class__')
- if class_name and class_name != cls.__name__:
- logger.warning(f"Dataset class '{class_name}' doesn't match target class '{cls.__name__}'")
-
- # Get the reference structure from attrs
- reference_structure = dict(ds.attrs)
-
- # Remove the class name since it's not a constructor parameter
- reference_structure.pop('__class__', None)
-
- # Create arrays dictionary from dataset variables
- # Use ds.variables with coord_cache for faster DataArray construction
- variables = ds.variables
- coord_cache = {k: ds.coords[k] for k in ds.coords}
- arrays_dict = {
- name: xr.DataArray(
- variables[name],
- coords={k: coord_cache[k] for k in variables[name].dims if k in coord_cache},
- name=name,
- )
- for name in ds.data_vars
- }
-
- # Resolve all references using the centralized method
- resolved_params = cls._resolve_reference_structure(reference_structure, arrays_dict)
-
- return cls(**resolved_params)
- except Exception as e:
- raise ValueError(f'Failed to create {cls.__name__} from dataset: {e}') from e
-
- @classmethod
- def from_netcdf(cls, path: str | pathlib.Path) -> Interface:
- """
- Load an instance from a NetCDF file.
-
- Args:
- path: Path to the NetCDF file
-
- Returns:
- Interface instance
-
- Raises:
- IOError: If file cannot be read
- ValueError: If file format is invalid
- """
- try:
- ds = fx_io.load_dataset_from_netcdf(path)
- return cls.from_dataset(ds)
- except Exception as e:
- raise OSError(f'Failed to load {cls.__name__} from NetCDF file {path}: {e}') from e
-
- def get_structure(self, clean: bool = False, stats: bool = False) -> dict:
- """
- Get object structure as a dictionary.
-
- Args:
- clean: If True, remove None and empty dicts and lists.
- stats: If True, replace DataArray references with statistics
-
- Returns:
- Dictionary representation of the object structure
- """
- reference_structure, extracted_arrays = self._create_reference_structure()
-
- if stats:
- # Replace references with statistics
- reference_structure = self._replace_references_with_stats(reference_structure, extracted_arrays)
-
- if clean:
- return fx_io.remove_none_and_empty(reference_structure)
- return reference_structure
-
- def _replace_references_with_stats(self, structure, arrays_dict: dict[str, xr.DataArray]):
- """Replace DataArray references with statistical summaries."""
- if isinstance(structure, str) and structure.startswith(':::'):
- array_name = structure[3:]
- if array_name in arrays_dict:
- return get_dataarray_stats(arrays_dict[array_name])
- return structure
-
- elif isinstance(structure, dict):
- return {k: self._replace_references_with_stats(v, arrays_dict) for k, v in structure.items()}
-
- elif isinstance(structure, list):
- return [self._replace_references_with_stats(item, arrays_dict) for item in structure]
-
- return structure
-
- def to_json(self, path: str | pathlib.Path):
- """
- Save the object to a JSON file.
- This is meant for documentation and comparison, not for reloading.
-
- Args:
- path: The path to the JSON file.
-
- Raises:
- IOError: If file cannot be written
- """
- try:
- # Use the stats mode for JSON export (cleaner output)
- data = self.get_structure(clean=True, stats=True)
- fx_io.save_json(data, path)
- except Exception as e:
- raise OSError(f'Failed to save {self.__class__.__name__} to JSON file {path}: {e}') from e
-
- def __repr__(self):
- """Return a detailed string representation for debugging."""
- return fx_io.build_repr_from_init(self, excluded_params={'self', 'id', 'label', 'kwargs'})
-
- def copy(self) -> Interface:
- """
- Create a copy of the Interface object.
-
- Uses the existing serialization infrastructure to ensure proper copying
- of all DataArrays and nested objects.
-
- Returns:
- A new instance of the same class with copied data.
- """
- # Convert to dataset, copy it, and convert back
- dataset = self.to_dataset().copy(deep=True)
- return self.__class__.from_dataset(dataset)
-
- def __copy__(self):
- """Support for copy.copy()."""
- return self.copy()
-
- def __deepcopy__(self, memo):
- """Support for copy.deepcopy()."""
- return self.copy()
-
-
-class Element(Interface):
- """This class is the basic Element of flixopt. Every Element has an id."""
-
- # Attributes that are serialized but set after construction (not passed to child __init__)
- # These are internal state populated during modeling, not user-facing parameters
- _deferred_init_attrs: ClassVar[set[str]] = {'_variable_names', '_constraint_names'}
-
- def __init__(
- self,
- id: str | None = None,
- meta_data: dict | None = None,
- color: str | None = None,
- _variable_names: list[str] | None = None,
- _constraint_names: list[str] | None = None,
- **kwargs,
- ):
- """
- Args:
- id: The id of the element
- meta_data: used to store more information about the Element. Is not used internally, but saved in the results. Only use python native types.
- color: Optional color for visualizations (e.g., '#FF6B6B'). If not provided, a color will be automatically assigned during FlowSystem.connect_and_transform().
- _variable_names: Internal. Variable names for this element (populated after modeling).
- _constraint_names: Internal. Constraint names for this element (populated after modeling).
- """
- id = self._handle_deprecated_kwarg(kwargs, 'label', 'id', id)
- if id is None:
- raise TypeError(f'{self.__class__.__name__}.__init__() requires an "id" argument.')
- self._validate_kwargs(kwargs)
- self._short_id: str = Element._valid_id(id)
- self.meta_data = meta_data if meta_data is not None else {}
- self.color = color
- self._flow_system: FlowSystem | None = None
- # Variable/constraint names - populated after modeling, serialized for results
- self._variable_names: list[str] = _variable_names if _variable_names is not None else []
- self._constraint_names: list[str] = _constraint_names if _constraint_names is not None else []
-
- def _plausibility_checks(self) -> None:
- """This function is used to do some basic plausibility checks for each Element during initialization.
- This is run after all data is transformed to the correct format/type"""
- raise NotImplementedError('Every Element needs a _plausibility_checks() method')
- @property
- def id(self) -> str:
- """The unique identifier of this element.
+class Element:
+ """Mixin for all elements in flixopt. Provides deprecated label properties.
- For most elements this is the name passed to the constructor.
- For flows this returns the qualified form: ``component(short_id)``.
- """
- return self._short_id
+ Subclasses (Effect, Bus, Flow, Component) are @dataclass classes that declare
+ their own ``id`` field. Element does NOT define ``id`` — each subclass owns it.
- @id.setter
- def id(self, value: str) -> None:
- self._short_id = value
+ Runtime state (variable names, constraint names) is stored in FlowSystem registries,
+ not on the element objects themselves.
+ """
@property
def label(self) -> str:
@@ -2049,7 +1571,7 @@ def label(self) -> str:
DeprecationWarning,
stacklevel=2,
)
- return self._short_id
+ return self.id
@label.setter
def label(self, value: str) -> None:
@@ -2058,7 +1580,7 @@ def label(self, value: str) -> None:
DeprecationWarning,
stacklevel=2,
)
- self._short_id = value
+ self.id = value
@property
def label_full(self) -> str:
@@ -2080,87 +1602,6 @@ def id_full(self) -> str:
)
return self.id
- @property
- def solution(self) -> xr.Dataset:
- """Solution data for this element's variables.
-
- Returns a Dataset built by selecting this element from batched variables
- in FlowSystem.solution.
-
- Raises:
- ValueError: If no solution is available (optimization not run or not solved).
- """
- if self._flow_system is None:
- raise ValueError(f'Element "{self.id}" is not linked to a FlowSystem.')
- if self._flow_system.solution is None:
- raise ValueError(f'No solution available for "{self.id}". Run optimization first or load results.')
- if not self._variable_names:
- raise ValueError(f'No variable names available for "{self.id}". Element may not have been modeled yet.')
- full_solution = self._flow_system.solution
- data_vars = {}
- for var_name in self._variable_names:
- if var_name not in full_solution:
- continue
- var = full_solution[var_name]
- # Select this element from the appropriate dimension
- for dim in var.dims:
- if dim in ('time', 'period', 'scenario', 'cluster'):
- continue
- if self.id in var.coords[dim].values:
- var = var.sel({dim: self.id}, drop=True)
- break
- data_vars[var_name] = var
- return xr.Dataset(data_vars)
-
- def _create_reference_structure(self) -> tuple[dict, dict[str, xr.DataArray]]:
- """
- Override to include _variable_names and _constraint_names in serialization.
-
- These attributes are defined in Element but may not be in subclass constructors,
- so we need to add them explicitly.
- """
- reference_structure, all_extracted_arrays = super()._create_reference_structure()
-
- # Always include variable/constraint names for solution access after loading
- if self._variable_names:
- reference_structure['_variable_names'] = self._variable_names
- if self._constraint_names:
- reference_structure['_constraint_names'] = self._constraint_names
-
- return reference_structure, all_extracted_arrays
-
- def __repr__(self) -> str:
- """Return string representation."""
- return fx_io.build_repr_from_init(self, excluded_params={'self', 'id', 'kwargs'}, skip_default_size=True)
-
- @staticmethod
- def _valid_id(id: str) -> str:
- """Checks if the id is valid.
-
- Raises:
- ValueError: If the id is not valid.
- """
- not_allowed = ['(', ')', '|', '->', '\\', '-slash-'] # \\ is needed to check for \
- if any([sign in id for sign in not_allowed]):
- raise ValueError(
- f'Id "{id}" is not valid. Ids cannot contain the following characters: {not_allowed}. '
- f'Use any other symbol instead'
- )
- if id.endswith(' '):
- logger.error(f'Id "{id}" ends with a space. This will be removed.')
- return id.rstrip()
- return id
-
- @staticmethod
- def _valid_label(label: str) -> str:
- """Deprecated: Use ``_valid_id`` instead."""
- warnings.warn(
- f'_valid_label is deprecated. Use _valid_id instead. Will be removed in v{DEPRECATION_REMOVAL_VERSION}.',
- DeprecationWarning,
- stacklevel=2,
- )
- return Element._valid_id(label)
-
# Precompiled regex pattern for natural sorting
_NATURAL_SPLIT = re.compile(r'(\d+)')
@@ -2171,257 +1612,6 @@ def _natural_sort_key(text):
return [int(c) if c.isdigit() else c.lower() for c in _NATURAL_SPLIT.split(text)]
-# Type variable for containers
-T = TypeVar('T')
-
-
-class ContainerMixin(dict[str, T]):
- """
- Mixin providing shared container functionality with nice repr and error messages.
-
- Subclasses must implement _get_label() to extract the label from elements.
- """
-
- def __init__(
- self,
- elements: list[T] | dict[str, T] | None = None,
- element_type_name: str = 'elements',
- truncate_repr: int | None = None,
- item_name: str | None = None,
- ):
- """
- Args:
- elements: Initial elements to add (list or dict)
- element_type_name: Name for display (e.g., 'components', 'buses')
- truncate_repr: Maximum number of items to show in repr. If None, show all items. Default: None
- item_name: Singular name for error messages (e.g., 'Component', 'Carrier').
- If None, inferred from first added item's class name.
- """
- super().__init__()
- self._element_type_name = element_type_name
- self._truncate_repr = truncate_repr
- self._item_name = item_name
-
- if elements is not None:
- if isinstance(elements, dict):
- for element in elements.values():
- self.add(element)
- else:
- for element in elements:
- self.add(element)
-
- def _get_label(self, element: T) -> str:
- """
- Extract label from element. Must be implemented by subclasses.
-
- Args:
- element: Element to get label from
-
- Returns:
- Label string
- """
- raise NotImplementedError('Subclasses must implement _get_label()')
-
- def _get_item_name(self) -> str:
- """Get the singular item name for error messages.
-
- Returns the explicitly set item_name, or infers from the first item's class name.
- Falls back to 'Item' if container is empty and no name was set.
- """
- if self._item_name is not None:
- return self._item_name
- # Infer from first item's class name
- if self:
- first_item = next(iter(self.values()))
- return first_item.__class__.__name__
- return 'Item'
-
- def add(self, element: T) -> None:
- """Add an element to the container."""
- label = self._get_label(element)
- if label in self:
- item_name = element.__class__.__name__
- raise ValueError(
- f'{item_name} with label "{label}" already exists in {self._element_type_name}. '
- f'Each {item_name.lower()} must have a unique label.'
- )
- self[label] = element
-
- def __setitem__(self, label: str, element: T) -> None:
- """Set element with validation."""
- element_label = self._get_label(element)
- if label != element_label:
- raise ValueError(
- f'Key "{label}" does not match element label "{element_label}". '
- f'Use the correct label as key or use .add() method.'
- )
- super().__setitem__(label, element)
-
- def __getitem__(self, label: str) -> T:
- """
- Get element by label with helpful error messages.
-
- Args:
- label: Label of the element to retrieve
-
- Returns:
- The element with the given label
-
- Raises:
- KeyError: If element is not found, with suggestions for similar labels
- """
- try:
- return super().__getitem__(label)
- except KeyError:
- # Provide helpful error with close matches suggestions
- item_name = self._get_item_name()
- suggestions = get_close_matches(label, self.keys(), n=3, cutoff=0.6)
- error_msg = f'{item_name} "{label}" not found in {self._element_type_name}.'
- if suggestions:
- error_msg += f' Did you mean: {", ".join(suggestions)}?'
- else:
- available = list(self.keys())
- if len(available) <= 5:
- error_msg += f' Available: {", ".join(available)}'
- else:
- error_msg += f' Available: {", ".join(available[:5])} ... (+{len(available) - 5} more)'
- raise KeyError(error_msg) from None
-
- def _get_repr(self, max_items: int | None = None) -> str:
- """
- Get string representation with optional truncation.
-
- Args:
- max_items: Maximum number of items to show. If None, uses instance default (self._truncate_repr).
- If still None, shows all items.
-
- Returns:
- Formatted string representation
- """
- # Use provided max_items, or fall back to instance default
- limit = max_items if max_items is not None else self._truncate_repr
-
- count = len(self)
- title = f'{self._element_type_name.capitalize()} ({count} item{"s" if count != 1 else ""})'
-
- if not self:
- r = fx_io.format_title_with_underline(title)
- r += '\n'
- else:
- r = fx_io.format_title_with_underline(title)
- sorted_names = sorted(self.keys(), key=_natural_sort_key)
-
- if limit is not None and limit > 0 and len(sorted_names) > limit:
- # Show truncated list
- for name in sorted_names[:limit]:
- r += f' * {name}\n'
- r += f' ... (+{len(sorted_names) - limit} more)\n'
- else:
- # Show all items
- for name in sorted_names:
- r += f' * {name}\n'
-
- return r
-
- def __add__(self, other: ContainerMixin[T]) -> ContainerMixin[T]:
- """Concatenate two containers."""
- result = self.__class__(element_type_name=self._element_type_name)
- for element in self.values():
- result.add(element)
- for element in other.values():
- result.add(element)
- return result
-
- def __repr__(self) -> str:
- """Return a string representation using the instance's truncate_repr setting."""
- return self._get_repr()
-
-
-class FlowContainer(ContainerMixin[T]):
- """Container for Flow objects with dual access: by index or by id.
-
- Supports:
- - container['Boiler(Q_th)'] # id-based access
- - container['Q_th'] # short-id access (when all flows share same component)
- - container[0] # index-based access
- - container.add(flow)
- - for flow in container.values()
- - container1 + container2 # concatenation
-
- Examples:
- >>> boiler = Boiler(id='Boiler', inputs=[Flow('heat_bus')])
- >>> boiler.inputs[0] # Index access
- >>> boiler.inputs['Boiler(heat_bus)'] # Full id access
- >>> boiler.inputs['heat_bus'] # Short id access (same component)
- >>> for flow in boiler.inputs.values():
- ... print(flow.id)
- """
-
- def _get_label(self, flow: T) -> str:
- """Extract id from Flow."""
- return flow.id
-
- def __getitem__(self, key: str | int) -> T:
- """Get flow by id, short id, or index."""
- if isinstance(key, int):
- try:
- return list(self.values())[key]
- except IndexError:
- raise IndexError(f'Flow index {key} out of range (container has {len(self)} flows)') from None
-
- if dict.__contains__(self, key):
- return super().__getitem__(key)
-
- # Try short-id match if all flows share the same component
- if len(self) > 0:
- components = {flow.component for flow in self.values()}
- if len(components) == 1:
- component = next(iter(components))
- full_key = f'{component}({key})'
- if dict.__contains__(self, full_key):
- return super().__getitem__(full_key)
-
- raise KeyError(f"'{key}' not found in {self._element_type_name}")
-
- def __contains__(self, key: object) -> bool:
- """Check if key exists (supports id or short id)."""
- if not isinstance(key, str):
- return False
- if dict.__contains__(self, key):
- return True
- if len(self) > 0:
- components = {flow.component for flow in self.values()}
- if len(components) == 1:
- component = next(iter(components))
- full_key = f'{component}({key})'
- return dict.__contains__(self, full_key)
- return False
-
-
-class ElementContainer(ContainerMixin[T]):
- """
- Container for Element objects (Component, Bus, Flow, Effect).
-
- Uses element.id for keying.
- """
-
- def _get_label(self, element: T) -> str:
- """Extract id from Element."""
- return element.id
-
-
-class ResultsContainer(ContainerMixin[T]):
- """
- Container for Results objects (ComponentResults, BusResults, etc).
-
- Uses element.id for keying.
- """
-
- def _get_label(self, element: T) -> str:
- """Extract id from Results object."""
- return element.id
-
-
T_element = TypeVar('T_element')
diff --git a/pyproject.toml b/pyproject.toml
index 1509b26b5..0756fa281 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -202,19 +202,29 @@ filterwarnings = [
# === Default behavior: show all warnings ===
"default",
+ # === Treat most flixopt warnings as errors (strict mode for our code) ===
+ # This ensures we catch deprecations, future changes, and user warnings in our own code
+ "error::DeprecationWarning:flixopt",
+ "error::FutureWarning:flixopt",
+ "error::UserWarning:flixopt",
+
# === Ignore specific deprecation warnings for backward compatibility tests ===
- # These are raised by deprecated classes (Optimization, Results) used in tests/deprecated/
+ # These must come AFTER the "error" filter to take precedence (Python warnings: later = higher priority)
"ignore:Results is deprecated:DeprecationWarning:flixopt",
"ignore:Optimization is deprecated:DeprecationWarning:flixopt",
"ignore:SegmentedOptimization is deprecated:DeprecationWarning:flixopt",
"ignore:SegmentedResults is deprecated:DeprecationWarning:flixopt",
"ignore:ClusteredOptimization is deprecated:DeprecationWarning:flixopt",
-
- # === Treat most flixopt warnings as errors (strict mode for our code) ===
- # This ensures we catch deprecations, future changes, and user warnings in our own code
- "error::DeprecationWarning:flixopt",
- "error::FutureWarning:flixopt",
- "error::UserWarning:flixopt",
+ "ignore:Boiler is deprecated:DeprecationWarning",
+ "ignore:CHP is deprecated:DeprecationWarning",
+ "ignore:HeatPump is deprecated:DeprecationWarning",
+ "ignore:CoolingTower is deprecated:DeprecationWarning",
+ "ignore:Power2Heat is deprecated:DeprecationWarning",
+ "ignore:HeatPumpWithSource is deprecated:DeprecationWarning",
+ "ignore:Source is deprecated:DeprecationWarning",
+ "ignore:Sink is deprecated:DeprecationWarning",
+ "ignore:SourceAndSink is deprecated:DeprecationWarning",
+ "ignore:add_elements\\(\\) is deprecated:DeprecationWarning",
"ignore:.*network visualization is still experimental.*:UserWarning:flixopt",
]
diff --git a/tests/conftest.py b/tests/conftest.py
index 970b8f285..82217676e 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -147,29 +147,29 @@ class Boilers:
@staticmethod
def simple():
"""Simple boiler from simple_flow_system"""
- return fx.linear_converters.Boiler(
+ return fx.Converter.boiler(
'Boiler',
thermal_efficiency=0.5,
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=50,
relative_minimum=5 / 50,
relative_maximum=1,
status_parameters=fx.StatusParameters(),
),
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
@staticmethod
def complex():
"""Complex boiler with investment parameters from flow_system_complex"""
- return fx.linear_converters.Boiler(
+ return fx.Converter.boiler(
'Kessel',
thermal_efficiency=0.5,
status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 0, 'CO2': 1000}),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
load_factor_max=1.0,
load_factor_min=0.1,
@@ -193,49 +193,53 @@ def complex():
),
flow_hours_max=1e6,
),
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu', size=200, relative_minimum=0, relative_maximum=1),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu', size=200, relative_minimum=0, relative_maximum=1),
)
class CHPs:
@staticmethod
def simple():
"""Simple CHP from simple_flow_system"""
- return fx.linear_converters.CHP(
+ return fx.Converter.chp(
'CHP_unit',
thermal_efficiency=0.5,
electrical_efficiency=0.4,
electrical_flow=fx.Flow(
- 'Strom', flow_id='P_el', size=60, relative_minimum=5 / 60, status_parameters=fx.StatusParameters()
+ bus='Strom',
+ flow_id='P_el',
+ size=60,
+ relative_minimum=5 / 60,
+ status_parameters=fx.StatusParameters(),
),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th'),
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
@staticmethod
def base():
"""CHP from flow_system_base"""
- return fx.linear_converters.CHP(
+ return fx.Converter.chp(
'KWK',
thermal_efficiency=0.5,
electrical_efficiency=0.4,
status_parameters=fx.StatusParameters(effects_per_startup=0.01),
electrical_flow=fx.Flow(
- 'Strom', flow_id='P_el', size=60, relative_minimum=5 / 60, previous_flow_rate=10
+ bus='Strom', flow_id='P_el', size=60, relative_minimum=5 / 60, previous_flow_rate=10
),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=1e3),
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu', size=1e3),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th', size=1e3),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu', size=1e3),
)
class LinearConverters:
@staticmethod
def piecewise():
"""Piecewise converter from flow_system_piecewise_conversion"""
- return fx.LinearConverter(
+ return fx.Converter(
'KWK',
- inputs=[fx.Flow('Gas', flow_id='Q_fu', size=200)],
+ inputs=[fx.Flow(bus='Gas', flow_id='Q_fu', size=200)],
outputs=[
- fx.Flow('Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10),
- fx.Flow('Fernwärme', flow_id='Q_th', size=100),
+ fx.Flow(bus='Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10),
+ fx.Flow(bus='Fernwärme', flow_id='Q_th', size=100),
],
piecewise_conversion=fx.PiecewiseConversion(
{
@@ -250,12 +254,12 @@ def piecewise():
@staticmethod
def segments(timesteps_length):
"""Segments converter with time-varying piecewise conversion"""
- return fx.LinearConverter(
+ return fx.Converter(
'KWK',
- inputs=[fx.Flow('Gas', flow_id='Q_fu', size=200)],
+ inputs=[fx.Flow(bus='Gas', flow_id='Q_fu', size=200)],
outputs=[
- fx.Flow('Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10),
- fx.Flow('Fernwärme', flow_id='Q_th', size=100),
+ fx.Flow(bus='Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10),
+ fx.Flow(bus='Fernwärme', flow_id='Q_th', size=100),
],
piecewise_conversion=fx.PiecewiseConversion(
{
@@ -286,11 +290,11 @@ def simple(timesteps_length=9):
return fx.Storage(
'Speicher',
charging=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th_load',
size=fx.InvestParameters(fixed_size=1e4, mandatory=True), # Investment for testing sizes
),
- discharging=fx.Flow('Fernwärme', flow_id='Q_th_unload', size=1e4),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_unload', size=1e4),
capacity_in_flow_hours=fx.InvestParameters(effects_of_investment=20, fixed_size=30, mandatory=True),
initial_charge_state=0,
relative_maximum_charge_state=1 / 100 * np.array(charge_state_values),
@@ -320,8 +324,8 @@ def complex():
)
return fx.Storage(
'Speicher',
- charging=fx.Flow('Fernwärme', flow_id='Q_th_load', size=1e4),
- discharging=fx.Flow('Fernwärme', flow_id='Q_th_unload', size=1e4),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_load', size=1e4),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_unload', size=1e4),
capacity_in_flow_hours=invest_speicher,
initial_charge_state=0,
maximal_final_charge_state=10,
@@ -377,25 +381,25 @@ class Sinks:
@staticmethod
def heat_load(thermal_profile):
"""Create thermal heat load sink"""
- return fx.Sink(
+ return fx.Port(
'Wärmelast',
- inputs=[fx.Flow('Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_profile)],
+ exports=[fx.Flow(bus='Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_profile)],
)
@staticmethod
def electricity_feed_in(electrical_price_profile):
"""Create electricity feed-in sink"""
- return fx.Sink(
+ return fx.Port(
'Einspeisung',
- inputs=[fx.Flow('Strom', flow_id='P_el', effects_per_flow_hour=-1 * electrical_price_profile)],
+ exports=[fx.Flow(bus='Strom', flow_id='P_el', effects_per_flow_hour=-1 * electrical_price_profile)],
)
@staticmethod
def electricity_load(electrical_profile):
"""Create electrical load sink (for flow_system_long)"""
- return fx.Sink(
+ return fx.Port(
'Stromlast',
- inputs=[fx.Flow('Strom', flow_id='P_el_Last', size=1, fixed_relative_profile=electrical_profile)],
+ exports=[fx.Flow(bus='Strom', flow_id='P_el_Last', size=1, fixed_relative_profile=electrical_profile)],
)
@@ -406,14 +410,14 @@ class Sources:
def gas_with_costs_and_co2():
"""Standard gas tariff with CO2 emissions"""
source = Sources.gas_with_costs()
- source.outputs[0].effects_per_flow_hour = {'costs': 0.04, 'CO2': 0.3}
+ source.imports[0].effects_per_flow_hour = {'costs': 0.04, 'CO2': 0.3}
return source
@staticmethod
def gas_with_costs():
"""Simple gas tariff without CO2"""
- return fx.Source(
- 'Gastarif', outputs=[fx.Flow('Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': 0.04})]
+ return fx.Port(
+ 'Gastarif', imports=[fx.Flow(bus='Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': 0.04})]
)
@@ -444,8 +448,8 @@ def build_simple_flow_system() -> fx.FlowSystem:
# Create flow system
flow_system = fx.FlowSystem(base_timesteps)
- flow_system.add_elements(*Buses.defaults())
- flow_system.add_elements(storage, costs, co2, boiler, heat_load, gas_tariff, electricity_feed_in, chp)
+ flow_system.add(*Buses.defaults())
+ flow_system.add(storage, costs, co2, boiler, heat_load, gas_tariff, electricity_feed_in, chp)
return flow_system
@@ -483,8 +487,8 @@ def simple_flow_system_scenarios() -> fx.FlowSystem:
flow_system = fx.FlowSystem(
base_timesteps, scenarios=pd.Index(['A', 'B', 'C']), scenario_weights=np.array([0.5, 0.25, 0.25])
)
- flow_system.add_elements(*Buses.defaults())
- flow_system.add_elements(storage, costs, co2, boiler, heat_load, gas_tariff, electricity_feed_in, chp)
+ flow_system.add(*Buses.defaults())
+ flow_system.add(storage, costs, co2, boiler, heat_load, gas_tariff, electricity_feed_in, chp)
return flow_system
@@ -502,8 +506,8 @@ def basic_flow_system() -> fx.FlowSystem:
gas_source = Sources.gas_with_costs()
electricity_sink = Sinks.electricity_feed_in(p_el)
- flow_system.add_elements(*Buses.defaults())
- flow_system.add_elements(costs, heat_load, gas_source, electricity_sink)
+ flow_system.add(*Buses.defaults())
+ flow_system.add(costs, heat_load, gas_source, electricity_sink)
return flow_system
@@ -528,13 +532,13 @@ def flow_system_complex() -> fx.FlowSystem:
gas_tariff = Sources.gas_with_costs_and_co2()
electricity_feed_in = Sinks.electricity_feed_in(electrical_load)
- flow_system.add_elements(*Buses.defaults())
- flow_system.add_elements(costs, co2, pe, heat_load, gas_tariff, electricity_feed_in)
+ flow_system.add(*Buses.defaults())
+ flow_system.add(costs, co2, pe, heat_load, gas_tariff, electricity_feed_in)
boiler = Converters.Boilers.complex()
speicher = Storage.complex()
- flow_system.add_elements(boiler, speicher)
+ flow_system.add(boiler, speicher)
return flow_system
@@ -546,7 +550,7 @@ def flow_system_base(flow_system_complex) -> fx.FlowSystem:
"""
flow_system = flow_system_complex
chp = Converters.CHPs.base()
- flow_system.add_elements(chp)
+ flow_system.add(chp)
return flow_system
@@ -554,7 +558,7 @@ def flow_system_base(flow_system_complex) -> fx.FlowSystem:
def flow_system_piecewise_conversion(flow_system_complex) -> fx.FlowSystem:
flow_system = flow_system_complex
converter = Converters.LinearConverters.piecewise()
- flow_system.add_elements(converter)
+ flow_system.add(converter)
return flow_system
@@ -565,7 +569,7 @@ def flow_system_segments_of_flows_2(flow_system_complex) -> fx.FlowSystem:
"""
flow_system = flow_system_complex
converter = Converters.LinearConverters.segments(len(flow_system.timesteps))
- flow_system.add_elements(converter)
+ flow_system.add(converter)
return flow_system
@@ -596,44 +600,50 @@ def flow_system_long():
)
flow_system = fx.FlowSystem(pd.DatetimeIndex(data.index))
- flow_system.add_elements(
+ flow_system.add(
*Buses.defaults(),
Buses.coal(),
Effects.costs(),
Effects.co2(),
Effects.primary_energy(),
- fx.Sink(
+ fx.Port(
'Wärmelast',
- inputs=[fx.Flow('Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_load_ts)],
+ exports=[fx.Flow(bus='Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_load_ts)],
),
- fx.Sink(
+ fx.Port(
'Stromlast',
- inputs=[fx.Flow('Strom', flow_id='P_el_Last', size=1, fixed_relative_profile=electrical_load_ts)],
+ exports=[fx.Flow(bus='Strom', flow_id='P_el_Last', size=1, fixed_relative_profile=electrical_load_ts)],
),
- fx.Source(
+ fx.Port(
'Kohletarif',
- outputs=[fx.Flow('Kohle', flow_id='Q_Kohle', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})],
+ imports=[
+ fx.Flow(bus='Kohle', flow_id='Q_Kohle', size=1000, effects_per_flow_hour={'costs': 4.6, 'CO2': 0.3})
+ ],
),
- fx.Source(
+ fx.Port(
'Gastarif',
- outputs=[
- fx.Flow('Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})
+ imports=[
+ fx.Flow(bus='Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': gas_price, 'CO2': 0.3})
],
),
- fx.Sink('Einspeisung', inputs=[fx.Flow('Strom', flow_id='P_el', size=1000, effects_per_flow_hour=p_feed_in)]),
- fx.Source(
+ fx.Port(
+ 'Einspeisung', exports=[fx.Flow(bus='Strom', flow_id='P_el', size=1000, effects_per_flow_hour=p_feed_in)]
+ ),
+ fx.Port(
'Stromtarif',
- outputs=[fx.Flow('Strom', flow_id='P_el', size=1000, effects_per_flow_hour={'costs': p_sell, 'CO2': 0.3})],
+ imports=[
+ fx.Flow(bus='Strom', flow_id='P_el', size=1000, effects_per_flow_hour={'costs': p_sell, 'CO2': 0.3})
+ ],
),
)
- flow_system.add_elements(
- fx.linear_converters.Boiler(
+ flow_system.add(
+ fx.Converter.boiler(
'Kessel',
thermal_efficiency=0.85,
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th'),
fuel_flow=fx.Flow(
- 'Gas',
+ bus='Gas',
flow_id='Q_fu',
size=95,
relative_minimum=12 / 95,
@@ -641,19 +651,19 @@ def flow_system_long():
status_parameters=fx.StatusParameters(effects_per_startup=1000),
),
),
- fx.linear_converters.CHP(
+ fx.Converter.chp(
'BHKW2',
thermal_efficiency=(eta_th := 0.58),
electrical_efficiency=(eta_el := 0.22),
status_parameters=fx.StatusParameters(effects_per_startup=24000),
- fuel_flow=fx.Flow('Kohle', flow_id='Q_fu', size=(fuel_size := 288), relative_minimum=87 / fuel_size),
- electrical_flow=fx.Flow('Strom', flow_id='P_el', size=fuel_size * eta_el),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=fuel_size * eta_th),
+ fuel_flow=fx.Flow(bus='Kohle', flow_id='Q_fu', size=(fuel_size := 288), relative_minimum=87 / fuel_size),
+ electrical_flow=fx.Flow(bus='Strom', flow_id='P_el', size=fuel_size * eta_el),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th', size=fuel_size * eta_th),
),
fx.Storage(
'Speicher',
- charging=fx.Flow('Fernwärme', flow_id='Q_th_load', size=137),
- discharging=fx.Flow('Fernwärme', flow_id='Q_th_unload', size=158),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_load', size=137),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_unload', size=158),
capacity_in_flow_hours=684,
initial_charge_state=137,
minimal_final_charge_state=137,
@@ -691,8 +701,8 @@ def basic_flow_system_linopy(timesteps_linopy) -> fx.FlowSystem:
gas_source = Sources.gas_with_costs()
electricity_sink = Sinks.electricity_feed_in(p_el)
- flow_system.add_elements(*Buses.defaults())
- flow_system.add_elements(costs, heat_load, gas_source, electricity_sink)
+ flow_system.add(*Buses.defaults())
+ flow_system.add(costs, heat_load, gas_source, electricity_sink)
return flow_system
@@ -710,8 +720,8 @@ def basic_flow_system_linopy_coords(coords_config) -> fx.FlowSystem:
gas_source = Sources.gas_with_costs()
electricity_sink = Sinks.electricity_feed_in(p_el)
- flow_system.add_elements(*Buses.defaults())
- flow_system.add_elements(costs, heat_load, gas_source, electricity_sink)
+ flow_system.add(*Buses.defaults())
+ flow_system.add(costs, heat_load, gas_source, electricity_sink)
return flow_system
diff --git a/tests/flow_system/test_flow_system_locking.py b/tests/flow_system/test_flow_system_locking.py
index 83d931c87..f8c94e038 100644
--- a/tests/flow_system/test_flow_system_locking.py
+++ b/tests/flow_system/test_flow_system_locking.py
@@ -47,7 +47,7 @@ class TestAddElementsLocking:
def test_add_elements_before_optimization(self, simple_flow_system):
"""Should be able to add elements before optimization."""
new_bus = fx.Bus('NewBus')
- simple_flow_system.add_elements(new_bus)
+ simple_flow_system.add(new_bus)
assert 'NewBus' in simple_flow_system.buses
def test_add_elements_raises_when_locked(self, simple_flow_system, highs_solver):
@@ -56,7 +56,7 @@ def test_add_elements_raises_when_locked(self, simple_flow_system, highs_solver)
new_bus = fx.Bus('NewBus')
with pytest.raises(RuntimeError, match='Cannot add elements.*reset\\(\\)'):
- simple_flow_system.add_elements(new_bus)
+ simple_flow_system.add(new_bus)
def test_add_elements_after_reset(self, simple_flow_system, highs_solver):
"""Should be able to add elements after reset."""
@@ -64,7 +64,7 @@ def test_add_elements_after_reset(self, simple_flow_system, highs_solver):
simple_flow_system.reset()
new_bus = fx.Bus('NewBus')
- simple_flow_system.add_elements(new_bus)
+ simple_flow_system.add(new_bus)
assert 'NewBus' in simple_flow_system.buses
def test_add_elements_invalidates_model(self, simple_flow_system):
@@ -75,7 +75,7 @@ def test_add_elements_invalidates_model(self, simple_flow_system):
new_bus = fx.Bus('NewBus')
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
- simple_flow_system.add_elements(new_bus)
+ simple_flow_system.add(new_bus)
assert len(w) == 1
assert 'model will be invalidated' in str(w[0].message)
@@ -143,17 +143,17 @@ def test_reset_clears_model(self, simple_flow_system, highs_solver):
assert simple_flow_system.model is None
def test_reset_clears_element_variable_names(self, simple_flow_system, highs_solver):
- """Reset should clear element variable names."""
+ """Reset should clear element variable name registries."""
simple_flow_system.optimize(highs_solver)
- # Check that elements have variable names after optimization
+ # Check that registry has variable names after optimization
boiler = simple_flow_system.components['Boiler']
- assert len(boiler._variable_names) > 0
+ assert len(simple_flow_system._element_variable_names.get(boiler.id, [])) > 0
simple_flow_system.reset()
- # Check that variable names are cleared
- assert len(boiler._variable_names) == 0
+ # Check that variable name registry is cleared
+ assert len(simple_flow_system._element_variable_names) == 0
def test_reset_returns_self(self, simple_flow_system, highs_solver):
"""Reset should return self for method chaining."""
@@ -222,7 +222,7 @@ def test_copy_can_be_modified(self, optimized_flow_system):
"""Copy should be modifiable even if original is locked."""
copy_fs = optimized_flow_system.copy()
new_bus = fx.Bus('NewBus')
- copy_fs.add_elements(new_bus) # Should not raise
+ copy_fs.add(new_bus) # Should not raise
assert 'NewBus' in copy_fs.buses
def test_copy_can_be_optimized_independently(self, optimized_flow_system):
@@ -276,7 +276,7 @@ def test_loaded_fs_can_be_reset(self, simple_flow_system, highs_solver, tmp_path
assert loaded_fs.is_locked is False
new_bus = fx.Bus('NewBus')
- loaded_fs.add_elements(new_bus) # Should not raise
+ loaded_fs.add(new_bus) # Should not raise
class TestInvalidate:
@@ -334,9 +334,9 @@ def test_modify_element_and_invalidate(self, simple_flow_system, highs_solver):
# Modify an element attribute (increase gas price, which should increase costs)
gas_tariff = simple_flow_system.components['Gastarif']
- original_effects = gas_tariff.outputs[0].effects_per_flow_hour
+ original_effects = gas_tariff.imports[0].effects_per_flow_hour
# Double the cost effect
- gas_tariff.outputs[0].effects_per_flow_hour = {effect: value * 2 for effect, value in original_effects.items()}
+ gas_tariff.imports[0].effects_per_flow_hour = {effect: value * 2 for effect, value in original_effects.items()}
# Invalidate to trigger re-transformation
simple_flow_system.invalidate()
@@ -355,8 +355,8 @@ def test_invalidate_needed_after_transform_before_optimize(self, simple_flow_sys
# Modify an attribute - double the gas costs
gas_tariff = simple_flow_system.components['Gastarif']
- original_effects = gas_tariff.outputs[0].effects_per_flow_hour
- gas_tariff.outputs[0].effects_per_flow_hour = {effect: value * 2 for effect, value in original_effects.items()}
+ original_effects = gas_tariff.imports[0].effects_per_flow_hour
+ gas_tariff.imports[0].effects_per_flow_hour = {effect: value * 2 for effect, value in original_effects.items()}
# Call invalidate to ensure re-transformation
simple_flow_system.invalidate()
@@ -368,8 +368,8 @@ def test_invalidate_needed_after_transform_before_optimize(self, simple_flow_sys
# Reset and use original values
simple_flow_system.reset()
- gas_tariff.outputs[0].effects_per_flow_hour = {
- effect: value / 2 for effect, value in gas_tariff.outputs[0].effects_per_flow_hour.items()
+ gas_tariff.imports[0].effects_per_flow_hour = {
+ effect: value / 2 for effect, value in gas_tariff.imports[0].effects_per_flow_hour.items()
}
simple_flow_system.optimize(highs_solver)
cost_with_original = simple_flow_system.solution['effect|total'].sel(effect='costs').item()
@@ -389,8 +389,8 @@ def test_reset_already_invalidates(self, simple_flow_system, highs_solver):
# Modify an element attribute
gas_tariff = simple_flow_system.components['Gastarif']
- original_effects = gas_tariff.outputs[0].effects_per_flow_hour
- gas_tariff.outputs[0].effects_per_flow_hour = {effect: value * 2 for effect, value in original_effects.items()}
+ original_effects = gas_tariff.imports[0].effects_per_flow_hour
+ gas_tariff.imports[0].effects_per_flow_hour = {effect: value * 2 for effect, value in original_effects.items()}
# Re-optimize - changes take effect because reset already invalidated
simple_flow_system.optimize(highs_solver)
diff --git a/tests/flow_system/test_flow_system_resample.py b/tests/flow_system/test_flow_system_resample.py
index 360b1bfc1..adb8cdb6b 100644
--- a/tests/flow_system/test_flow_system_resample.py
+++ b/tests/flow_system/test_flow_system_resample.py
@@ -13,15 +13,13 @@ def simple_fs():
"""Simple FlowSystem with basic components."""
timesteps = pd.date_range('2023-01-01', periods=24, freq='h')
fs = fx.FlowSystem(timesteps)
- fs.add_elements(
- fx.Bus('heat'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True)
- )
- fs.add_elements(
- fx.Sink(
+ fs.add(fx.Bus('heat'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add(
+ fx.Port(
'demand',
- inputs=[fx.Flow('heat', flow_id='in', fixed_relative_profile=np.linspace(10, 20, 24), size=1)],
+ exports=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=np.linspace(10, 20, 24), size=1)],
),
- fx.Source('source', outputs=[fx.Flow('heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Port('source', imports=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]),
)
return fs
@@ -32,36 +30,38 @@ def complex_fs():
timesteps = pd.date_range('2023-01-01', periods=48, freq='h')
fs = fx.FlowSystem(timesteps)
- fs.add_elements(
+ fs.add(
fx.Bus('heat'),
fx.Bus('elec'),
fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True),
)
# Storage
- fs.add_elements(
+ fs.add(
fx.Storage(
'battery',
- charging=fx.Flow('elec', flow_id='charge', size=10),
- discharging=fx.Flow('elec', flow_id='discharge', size=10),
+ charging=fx.Flow(bus='elec', size=10),
+ discharging=fx.Flow(bus='elec', size=10),
capacity_in_flow_hours=fx.InvestParameters(fixed_size=100),
)
)
# Piecewise converter
- converter = fx.linear_converters.Boiler(
- 'boiler', thermal_efficiency=0.9, fuel_flow=fx.Flow('elec', flow_id='gas'), thermal_flow=fx.Flow('heat')
+ converter = fx.Converter.boiler(
+ 'boiler',
+ thermal_efficiency=0.9,
+ fuel_flow=fx.Flow(bus='elec', flow_id='gas'),
+ thermal_flow=fx.Flow(bus='heat', size=100),
)
- converter.thermal_flow.size = 100
- fs.add_elements(converter)
+ fs.add(converter)
# Component with investment
- fs.add_elements(
- fx.Source(
+ fs.add(
+ fx.Port(
'pv',
- outputs=[
+ imports=[
fx.Flow(
- 'elec',
+ bus='elec',
flow_id='gen',
size=fx.InvestParameters(maximum_size=1000, effects_of_investment_per_size={'costs': 100}),
)
@@ -97,11 +97,11 @@ def test_resample_methods(method, expected):
"""Test different resampling methods."""
ts = pd.date_range('2023-01-01', periods=4, freq='h')
fs = fx.FlowSystem(ts)
- fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
- fs.add_elements(
- fx.Sink(
+ fs.add(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add(
+ fx.Port(
's',
- inputs=[fx.Flow('b', flow_id='in', fixed_relative_profile=np.array([10.0, 20.0, 30.0, 40.0]), size=1)],
+ exports=[fx.Flow(bus='b', flow_id='in', fixed_relative_profile=np.array([10.0, 20.0, 30.0, 40.0]), size=1)],
)
)
@@ -143,8 +143,8 @@ def test_time_metadata_updated(simple_fs):
def test_with_dimensions(simple_fs, dim_name, dim_value):
"""Test resampling preserves period/scenario dimensions."""
fs = fx.FlowSystem(simple_fs.timesteps, **{dim_name: dim_value})
- fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
- fs.add_elements(fx.Sink('d', inputs=[fx.Flow('h', flow_id='in', fixed_relative_profile=np.ones(24), size=1)]))
+ fs.add(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add(fx.Port('d', exports=[fx.Flow(bus='h', flow_id='in', fixed_relative_profile=np.ones(24), size=1)]))
fs_r = fs.resample('2h', method='mean')
assert getattr(fs_r, dim_name) is not None
@@ -159,8 +159,8 @@ def test_storage_resample(complex_fs):
fs_r = complex_fs.resample('4h', method='mean')
assert 'battery' in fs_r.components
storage = fs_r.components['battery']
- assert storage.charging.label == 'charge'
- assert storage.discharging.label == 'discharge'
+ assert storage.charging.flow_id == 'charging'
+ assert storage.discharging.flow_id == 'discharging'
def test_converter_resample(complex_fs):
@@ -168,7 +168,7 @@ def test_converter_resample(complex_fs):
fs_r = complex_fs.resample('4h', method='mean')
assert 'boiler' in fs_r.components
boiler = fs_r.components['boiler']
- assert hasattr(boiler, 'thermal_efficiency')
+ assert hasattr(boiler, 'conversion_factors')
def test_invest_resample(complex_fs):
@@ -193,10 +193,10 @@ def test_modeling(with_dim):
kwargs['scenarios'] = pd.Index(['base', 'high'], name='scenario')
fs = fx.FlowSystem(ts, **kwargs)
- fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
- fs.add_elements(
- fx.Sink('d', inputs=[fx.Flow('h', flow_id='in', fixed_relative_profile=np.linspace(10, 30, 48), size=1)]),
- fx.Source('s', outputs=[fx.Flow('h', flow_id='out', size=100, effects_per_flow_hour={'costs': 0.05})]),
+ fs.add(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add(
+ fx.Port('d', exports=[fx.Flow(bus='h', flow_id='in', fixed_relative_profile=np.linspace(10, 30, 48), size=1)]),
+ fx.Port('s', imports=[fx.Flow(bus='h', flow_id='out', size=100, effects_per_flow_hour={'costs': 0.05})]),
)
fs_r = fs.resample('4h', method='mean')
@@ -210,10 +210,10 @@ def test_model_structure_preserved():
"""Test model structure (var/constraint types) preserved."""
ts = pd.date_range('2023-01-01', periods=48, freq='h')
fs = fx.FlowSystem(ts)
- fs.add_elements(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
- fs.add_elements(
- fx.Sink('d', inputs=[fx.Flow('h', flow_id='in', fixed_relative_profile=np.linspace(10, 30, 48), size=1)]),
- fx.Source('s', outputs=[fx.Flow('h', flow_id='out', size=100, effects_per_flow_hour={'costs': 0.05})]),
+ fs.add(fx.Bus('h'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add(
+ fx.Port('d', exports=[fx.Flow(bus='h', flow_id='in', fixed_relative_profile=np.linspace(10, 30, 48), size=1)]),
+ fx.Port('s', imports=[fx.Flow(bus='h', flow_id='out', size=100, effects_per_flow_hour={'costs': 0.05})]),
)
fs.build_model()
@@ -255,8 +255,8 @@ def test_frequencies(freq, exp_len):
"""Test various frequencies."""
ts = pd.date_range('2023-01-01', periods=168, freq='h')
fs = fx.FlowSystem(ts)
- fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
- fs.add_elements(fx.Sink('s', inputs=[fx.Flow('b', flow_id='in', fixed_relative_profile=np.ones(168), size=1)]))
+ fs.add(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add(fx.Port('s', exports=[fx.Flow(bus='b', flow_id='in', fixed_relative_profile=np.ones(168), size=1)]))
assert len(fs.resample(freq, method='mean').timesteps) == exp_len
@@ -265,8 +265,8 @@ def test_irregular_timesteps_error():
"""Test that resampling irregular timesteps to finer resolution raises error without fill_gaps."""
ts = pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time')
fs = fx.FlowSystem(ts)
- fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
- fs.add_elements(fx.Sink('s', inputs=[fx.Flow('b', flow_id='in', fixed_relative_profile=np.ones(3), size=1)]))
+ fs.add(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add(fx.Port('s', exports=[fx.Flow(bus='b', flow_id='in', fixed_relative_profile=np.ones(3), size=1)]))
with pytest.raises(ValueError, match='Resampling created gaps'):
fs.transform.resample('1h', method='mean')
@@ -276,9 +276,9 @@ def test_irregular_timesteps_with_fill_gaps():
"""Test that resampling irregular timesteps works with explicit fill_gaps strategy."""
ts = pd.DatetimeIndex(['2023-01-01 00:00', '2023-01-01 01:00', '2023-01-01 03:00'], name='time')
fs = fx.FlowSystem(ts)
- fs.add_elements(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
- fs.add_elements(
- fx.Sink('s', inputs=[fx.Flow('b', flow_id='in', fixed_relative_profile=np.array([1.0, 2.0, 4.0]), size=1)])
+ fs.add(fx.Bus('b'), fx.Effect('costs', unit='€', description='costs', is_objective=True, is_standard=True))
+ fs.add(
+ fx.Port('s', exports=[fx.Flow(bus='b', flow_id='in', fixed_relative_profile=np.array([1.0, 2.0, 4.0]), size=1)])
)
# Test with ffill
diff --git a/tests/flow_system/test_sel_isel_single_selection.py b/tests/flow_system/test_sel_isel_single_selection.py
index 4d84ced51..ddf9deaaf 100644
--- a/tests/flow_system/test_sel_isel_single_selection.py
+++ b/tests/flow_system/test_sel_isel_single_selection.py
@@ -15,13 +15,13 @@ def fs_with_scenarios():
scenario_weights = np.array([0.5, 0.3, 0.2])
fs = fx.FlowSystem(timesteps, scenarios=scenarios, scenario_weights=scenario_weights)
- fs.add_elements(
+ fs.add(
fx.Bus('heat'),
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
- fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=np.ones(24), size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fs.add(
+ fx.Port('demand', exports=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=np.ones(24), size=10)]),
+ fx.Port('source', imports=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]),
)
return fs
@@ -33,13 +33,13 @@ def fs_with_periods():
periods = pd.Index([2020, 2030, 2040], name='period')
fs = fx.FlowSystem(timesteps, periods=periods, weight_of_last_period=10)
- fs.add_elements(
+ fs.add(
fx.Bus('heat'),
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
- fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=np.ones(24), size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fs.add(
+ fx.Port('demand', exports=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=np.ones(24), size=10)]),
+ fx.Port('source', imports=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]),
)
return fs
@@ -52,13 +52,13 @@ def fs_with_periods_and_scenarios():
scenarios = pd.Index(['Low', 'High'], name='scenario')
fs = fx.FlowSystem(timesteps, periods=periods, scenarios=scenarios, weight_of_last_period=10)
- fs.add_elements(
+ fs.add(
fx.Bus('heat'),
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
- fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=np.ones(24), size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fs.add(
+ fx.Port('demand', exports=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=np.ones(24), size=10)]),
+ fx.Port('source', imports=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]),
)
return fs
diff --git a/tests/io/test_io.py b/tests/io/test_io.py
index 404f514ec..18cdd2262 100644
--- a/tests/io/test_io.py
+++ b/tests/io/test_io.py
@@ -242,13 +242,13 @@ def test_netcdf_roundtrip_preserves_periods(self, tmp_path):
periods = pd.Index([2020, 2030, 2040], name='period')
fs = fx.FlowSystem(timesteps=timesteps, periods=periods)
- fs.add_elements(
+ fs.add(
fx.Bus('heat'),
fx.Effect('costs', unit='EUR', is_objective=True),
)
- fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50)]),
+ fs.add(
+ fx.Port('demand', exports=[fx.Flow(bus='heat', flow_id='in', size=10)]),
+ fx.Port('source', imports=[fx.Flow(bus='heat', flow_id='out', size=50)]),
)
path = tmp_path / 'test_periods.nc'
@@ -266,13 +266,13 @@ def test_netcdf_roundtrip_preserves_scenarios(self, tmp_path):
scenarios = pd.Index(['A', 'B'], name='scenario')
fs = fx.FlowSystem(timesteps=timesteps, scenarios=scenarios)
- fs.add_elements(
+ fs.add(
fx.Bus('heat'),
fx.Effect('costs', unit='EUR', is_objective=True),
)
- fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50)]),
+ fs.add(
+ fx.Port('demand', exports=[fx.Flow(bus='heat', flow_id='in', size=10)]),
+ fx.Port('source', imports=[fx.Flow(bus='heat', flow_id='out', size=50)]),
)
path = tmp_path / 'test_scenarios.nc'
@@ -295,13 +295,17 @@ def test_netcdf_roundtrip_with_clustering(self, tmp_path):
demand_profile = np.sin(np.linspace(0, 4 * np.pi, 48)) * 0.4 + 0.6
fs = fx.FlowSystem(timesteps)
- fs.add_elements(
+ fs.add(
fx.Bus('heat'),
fx.Effect('costs', unit='EUR', is_objective=True),
)
- fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=demand_profile, size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fs.add(
+ fx.Port(
+ 'demand', exports=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=demand_profile, size=10)]
+ ),
+ fx.Port(
+ 'source', imports=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
fs_clustered = fs.transform.cluster(n_clusters=2, cluster_duration='1D')
diff --git a/tests/plotting/test_solution_and_plotting.py b/tests/plotting/test_solution_and_plotting.py
index 50cc2ec59..4fcb0d070 100644
--- a/tests/plotting/test_solution_and_plotting.py
+++ b/tests/plotting/test_solution_and_plotting.py
@@ -14,7 +14,6 @@
import pytest
import xarray as xr
-import flixopt as fx
from flixopt import plotting
# ============================================================================
@@ -114,53 +113,6 @@ def test_solution_none_before_optimization(self, simple_flow_system):
assert simple_flow_system.solution is None
-class TestElementSolution:
- """Tests for element.solution API (filtered view of flow_system.solution)."""
-
- def test_element_solution_is_filtered_dataset(self, simple_flow_system, highs_solver):
- """Verify element.solution returns filtered Dataset."""
- simple_flow_system.optimize(highs_solver)
-
- boiler = simple_flow_system.components['Boiler']
- element_solution = boiler.solution
-
- assert isinstance(element_solution, xr.Dataset)
-
- def test_element_solution_contains_only_element_variables(self, simple_flow_system, highs_solver):
- """Verify element.solution only contains variables for that element."""
- simple_flow_system.optimize(highs_solver)
-
- boiler = simple_flow_system.components['Boiler']
- element_solution = boiler.solution
-
- # Variables should be batched names from _variable_names
- assert len(list(element_solution.data_vars)) > 0
- # Element solution should contain flow|rate (Boiler has flows)
- assert 'flow|rate' in element_solution
-
- def test_storage_element_solution(self, simple_flow_system, highs_solver):
- """Verify storage element solution contains charge state."""
- simple_flow_system.optimize(highs_solver)
-
- storage = simple_flow_system.components['Speicher']
- element_solution = storage.solution
-
- # Should contain storage charge variable
- charge_vars = [v for v in element_solution.data_vars if 'charge' in v]
- assert len(charge_vars) > 0
-
- def test_element_solution_raises_for_unlinked_element(self):
- """Verify accessing solution for unlinked element raises error."""
- boiler = fx.linear_converters.Boiler(
- 'TestBoiler',
- thermal_efficiency=0.9,
- thermal_flow=fx.Flow('Heat', flow_id='Q_th'),
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
- )
- with pytest.raises(ValueError, match='not linked to a FlowSystem'):
- _ = boiler.solution
-
-
# ============================================================================
# STATISTICS ACCESSOR TESTS
# ============================================================================
diff --git a/tests/superseded/math/test_bus.py b/tests/superseded/math/test_bus.py
index 62bce1cb2..4c71e99a6 100644
--- a/tests/superseded/math/test_bus.py
+++ b/tests/superseded/math/test_bus.py
@@ -14,8 +14,8 @@ def test_bus(self, basic_flow_system_linopy_coords, coords_config):
bus = fx.Bus('TestBus', imbalance_penalty_per_flow_hour=None)
flow_system.add_elements(
bus,
- fx.Sink('WärmelastTest', inputs=[fx.Flow('Q_th_Last', 'TestBus')]),
- fx.Source('GastarifTest', outputs=[fx.Flow('Q_Gas', 'TestBus')]),
+ fx.Sink('WärmelastTest', inputs=[fx.Flow(bus='TestBus', flow_id='Q_th_Last')]),
+ fx.Source('GastarifTest', outputs=[fx.Flow(bus='TestBus', flow_id='Q_Gas')]),
)
model = create_linopy_model(flow_system)
@@ -39,8 +39,8 @@ def test_bus_penalty(self, basic_flow_system_linopy_coords, coords_config):
bus = fx.Bus('TestBus', imbalance_penalty_per_flow_hour=1e5)
flow_system.add_elements(
bus,
- fx.Sink('WärmelastTest', inputs=[fx.Flow('Q_th_Last', 'TestBus')]),
- fx.Source('GastarifTest', outputs=[fx.Flow('Q_Gas', 'TestBus')]),
+ fx.Sink('WärmelastTest', inputs=[fx.Flow(bus='TestBus', flow_id='Q_th_Last')]),
+ fx.Source('GastarifTest', outputs=[fx.Flow(bus='TestBus', flow_id='Q_Gas')]),
)
model = create_linopy_model(flow_system)
@@ -70,8 +70,8 @@ def test_bus_with_coords(self, basic_flow_system_linopy_coords, coords_config):
bus = fx.Bus('TestBus', imbalance_penalty_per_flow_hour=None)
flow_system.add_elements(
bus,
- fx.Sink('WärmelastTest', inputs=[fx.Flow('Q_th_Last', 'TestBus')]),
- fx.Source('GastarifTest', outputs=[fx.Flow('Q_Gas', 'TestBus')]),
+ fx.Sink('WärmelastTest', inputs=[fx.Flow(bus='TestBus', flow_id='Q_th_Last')]),
+ fx.Source('GastarifTest', outputs=[fx.Flow(bus='TestBus', flow_id='Q_Gas')]),
)
model = create_linopy_model(flow_system)
diff --git a/tests/superseded/math/test_component.py b/tests/superseded/math/test_component.py
index 41d2bcf5e..54151732b 100644
--- a/tests/superseded/math/test_component.py
+++ b/tests/superseded/math/test_component.py
@@ -14,12 +14,12 @@ class TestComponentModel:
def test_flow_label_check(self):
"""Test that flow model constraints are correctly generated."""
inputs = [
- fx.Flow('Q_th_Last', 'Fernwärme', relative_minimum=np.ones(10) * 0.1),
- fx.Flow('Q_Gas', 'Fernwärme', relative_minimum=np.ones(10) * 0.1),
+ fx.Flow(bus='Fernwärme', flow_id='Q_th_Last', relative_minimum=np.ones(10) * 0.1),
+ fx.Flow(bus='Fernwärme', flow_id='Q_Gas', relative_minimum=np.ones(10) * 0.1),
]
outputs = [
- fx.Flow('Q_th_Last', 'Gas', relative_minimum=np.ones(10) * 0.01),
- fx.Flow('Q_Gas', 'Gas', relative_minimum=np.ones(10) * 0.01),
+ fx.Flow(bus='Gas', flow_id='Q_th_Last', relative_minimum=np.ones(10) * 0.01),
+ fx.Flow(bus='Gas', flow_id='Q_Gas', relative_minimum=np.ones(10) * 0.01),
]
with pytest.raises(ValueError, match='Flow names must be unique!'):
_ = flixopt.elements.Component('TestComponent', inputs=inputs, outputs=outputs)
@@ -28,12 +28,12 @@ def test_component(self, basic_flow_system_linopy_coords, coords_config):
"""Test that flow model constraints are correctly generated."""
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
inputs = [
- fx.Flow('In1', 'Fernwärme', size=100, relative_minimum=np.ones(10) * 0.1),
- fx.Flow('In2', 'Fernwärme', size=100, relative_minimum=np.ones(10) * 0.1),
+ fx.Flow(bus='Fernwärme', flow_id='In1', size=100, relative_minimum=np.ones(10) * 0.1),
+ fx.Flow(bus='Fernwärme', flow_id='In2', size=100, relative_minimum=np.ones(10) * 0.1),
]
outputs = [
- fx.Flow('Out1', 'Gas', size=100, relative_minimum=np.ones(10) * 0.01),
- fx.Flow('Out2', 'Gas', size=100, relative_minimum=np.ones(10) * 0.01),
+ fx.Flow(bus='Gas', flow_id='Out1', size=100, relative_minimum=np.ones(10) * 0.01),
+ fx.Flow(bus='Gas', flow_id='Out2', size=100, relative_minimum=np.ones(10) * 0.01),
]
comp = flixopt.elements.Component('TestComponent', inputs=inputs, outputs=outputs)
flow_system.add_elements(comp)
@@ -55,11 +55,11 @@ def test_on_with_multiple_flows(self, basic_flow_system_linopy_coords, coords_co
ub_out2 = np.linspace(1, 1.5, 10).round(2)
inputs = [
- fx.Flow('In1', 'Fernwärme', relative_minimum=np.ones(10) * 0.1, size=100),
+ fx.Flow(bus='Fernwärme', flow_id='In1', relative_minimum=np.ones(10) * 0.1, size=100),
]
outputs = [
- fx.Flow('Out1', 'Gas', relative_minimum=np.ones(10) * 0.2, size=200),
- fx.Flow('Out2', 'Gas', relative_minimum=np.ones(10) * 0.3, relative_maximum=ub_out2, size=300),
+ fx.Flow(bus='Gas', flow_id='Out1', relative_minimum=np.ones(10) * 0.2, size=200),
+ fx.Flow(bus='Gas', flow_id='Out2', relative_minimum=np.ones(10) * 0.3, relative_maximum=ub_out2, size=300),
]
comp = flixopt.elements.Component(
'TestComponent', inputs=inputs, outputs=outputs, status_parameters=fx.StatusParameters()
@@ -102,7 +102,7 @@ def test_on_with_single_flow(self, basic_flow_system_linopy_coords, coords_confi
"""Test that component with status and single flow is correctly generated."""
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
inputs = [
- fx.Flow('In1', 'Fernwärme', relative_minimum=np.ones(10) * 0.1, size=100),
+ fx.Flow(bus='Fernwärme', flow_id='In1', relative_minimum=np.ones(10) * 0.1, size=100),
]
outputs = []
comp = flixopt.elements.Component(
@@ -137,18 +137,20 @@ def test_previous_states_with_multiple_flows(self, basic_flow_system_linopy_coor
ub_out2 = np.linspace(1, 1.5, 10).round(2)
inputs = [
fx.Flow(
- 'In1',
- 'Fernwärme',
+ bus='Fernwärme',
+ flow_id='In1',
relative_minimum=np.ones(10) * 0.1,
size=100,
previous_flow_rate=np.array([0, 0, 1e-6, 1e-5, 1e-4, 3, 4]),
),
]
outputs = [
- fx.Flow('Out1', 'Gas', relative_minimum=np.ones(10) * 0.2, size=200, previous_flow_rate=[3, 4, 5]),
fx.Flow(
- 'Out2',
- 'Gas',
+ bus='Gas', flow_id='Out1', relative_minimum=np.ones(10) * 0.2, size=200, previous_flow_rate=[3, 4, 5]
+ ),
+ fx.Flow(
+ bus='Gas',
+ flow_id='Out2',
relative_minimum=np.ones(10) * 0.3,
relative_maximum=ub_out2,
size=300,
@@ -200,8 +202,8 @@ def test_previous_states_with_multiple_flows_parameterized(
ub_out2 = np.linspace(1, 1.5, 10).round(2)
inputs = [
fx.Flow(
- 'In1',
- 'Fernwärme',
+ bus='Fernwärme',
+ flow_id='In1',
relative_minimum=np.ones(10) * 0.1,
size=100,
previous_flow_rate=in1_previous_flow_rate,
@@ -210,11 +212,15 @@ def test_previous_states_with_multiple_flows_parameterized(
]
outputs = [
fx.Flow(
- 'Out1', 'Gas', relative_minimum=np.ones(10) * 0.2, size=200, previous_flow_rate=out1_previous_flow_rate
+ bus='Gas',
+ flow_id='Out1',
+ relative_minimum=np.ones(10) * 0.2,
+ size=200,
+ previous_flow_rate=out1_previous_flow_rate,
),
fx.Flow(
- 'Out2',
- 'Gas',
+ bus='Gas',
+ flow_id='Out2',
relative_minimum=np.ones(10) * 0.3,
relative_maximum=ub_out2,
size=300,
@@ -260,8 +266,8 @@ def test_transmission_basic(self, basic_flow_system, highs_solver):
boiler = fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- thermal_flow=fx.Flow('Q_th', bus='Wärme lokal'),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Wärme lokal', flow_id='Q_th'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
transmission = fx.Transmission(
@@ -269,9 +275,11 @@ def test_transmission_basic(self, basic_flow_system, highs_solver):
relative_losses=0.2,
absolute_losses=20,
in1=fx.Flow(
- 'Rohr1', 'Wärme lokal', size=fx.InvestParameters(effects_of_investment_per_size=5, maximum_size=1e6)
+ bus='Wärme lokal',
+ flow_id='Rohr1',
+ size=fx.InvestParameters(effects_of_investment_per_size=5, maximum_size=1e6),
),
- out1=fx.Flow('Rohr2', 'Fernwärme', size=1000),
+ out1=fx.Flow(bus='Fernwärme', flow_id='Rohr2', size=1000),
)
flow_system.add_elements(transmission, boiler)
@@ -300,24 +308,24 @@ def test_transmission_balanced(self, basic_flow_system, highs_solver):
'Boiler_Standard',
thermal_efficiency=0.9,
thermal_flow=fx.Flow(
- 'Q_th', bus='Fernwärme', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
+ bus='Fernwärme', flow_id='Q_th', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
boiler2 = fx.linear_converters.Boiler(
'Boiler_backup',
thermal_efficiency=0.4,
- thermal_flow=fx.Flow('Q_th', bus='Wärme lokal'),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Wärme lokal', flow_id='Q_th'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
last2 = fx.Sink(
'Wärmelast2',
inputs=[
fx.Flow(
- 'Q_th_Last',
bus='Wärme lokal',
+ flow_id='Q_th_Last',
size=1,
fixed_relative_profile=flow_system.components['Wärmelast'].inputs[0].fixed_relative_profile
* np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1]),
@@ -330,13 +338,13 @@ def test_transmission_balanced(self, basic_flow_system, highs_solver):
relative_losses=0.2,
absolute_losses=20,
in1=fx.Flow(
- 'Rohr1a',
bus='Wärme lokal',
+ flow_id='Rohr1a',
size=fx.InvestParameters(effects_of_investment_per_size=5, maximum_size=1000),
),
- out1=fx.Flow('Rohr1b', 'Fernwärme', size=1000),
- in2=fx.Flow('Rohr2a', 'Fernwärme', size=fx.InvestParameters(maximum_size=1000)),
- out2=fx.Flow('Rohr2b', bus='Wärme lokal', size=1000),
+ out1=fx.Flow(bus='Fernwärme', flow_id='Rohr1b', size=1000),
+ in2=fx.Flow(bus='Fernwärme', flow_id='Rohr2a', size=fx.InvestParameters(maximum_size=1000)),
+ out2=fx.Flow(bus='Wärme lokal', flow_id='Rohr2b', size=1000),
balanced=True,
)
@@ -375,24 +383,24 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver):
'Boiler_Standard',
thermal_efficiency=0.9,
thermal_flow=fx.Flow(
- 'Q_th', bus='Fernwärme', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
+ bus='Fernwärme', flow_id='Q_th', size=1000, relative_maximum=np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1])
),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
boiler2 = fx.linear_converters.Boiler(
'Boiler_backup',
thermal_efficiency=0.4,
- thermal_flow=fx.Flow('Q_th', bus='Wärme lokal'),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Wärme lokal', flow_id='Q_th'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
)
last2 = fx.Sink(
'Wärmelast2',
inputs=[
fx.Flow(
- 'Q_th_Last',
bus='Wärme lokal',
+ flow_id='Q_th_Last',
size=1,
fixed_relative_profile=flow_system.components['Wärmelast'].inputs[0].fixed_relative_profile
* np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1]),
@@ -405,19 +413,19 @@ def test_transmission_unbalanced(self, basic_flow_system, highs_solver):
relative_losses=0.2,
absolute_losses=20,
in1=fx.Flow(
- 'Rohr1a',
bus='Wärme lokal',
+ flow_id='Rohr1a',
size=fx.InvestParameters(effects_of_investment_per_size=50, maximum_size=1000),
),
- out1=fx.Flow('Rohr1b', 'Fernwärme', size=1000),
+ out1=fx.Flow(bus='Fernwärme', flow_id='Rohr1b', size=1000),
in2=fx.Flow(
- 'Rohr2a',
- 'Fernwärme',
+ bus='Fernwärme',
+ flow_id='Rohr2a',
size=fx.InvestParameters(
effects_of_investment_per_size=100, minimum_size=10, maximum_size=1000, mandatory=True
),
),
- out2=fx.Flow('Rohr2b', bus='Wärme lokal', size=1000),
+ out2=fx.Flow(bus='Wärme lokal', flow_id='Rohr2b', size=1000),
balanced=False,
)
diff --git a/tests/superseded/math/test_effect.py b/tests/superseded/math/test_effect.py
index 103eb385a..102e1abee 100644
--- a/tests/superseded/math/test_effect.py
+++ b/tests/superseded/math/test_effect.py
@@ -143,13 +143,13 @@ def test_shares(self, basic_flow_system_linopy_coords, coords_config, highs_solv
'Boiler',
thermal_efficiency=0.5,
thermal_flow=fx.Flow(
- 'Q_th',
bus='Fernwärme',
+ flow_id='Q_th',
size=fx.InvestParameters(
effects_of_investment_per_size=10, minimum_size=20, maximum_size=200, mandatory=True
),
),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
),
)
diff --git a/tests/superseded/math/test_flow.py b/tests/superseded/math/test_flow.py
index fa9d558cb..a4294b2d8 100644
--- a/tests/superseded/math/test_flow.py
+++ b/tests/superseded/math/test_flow.py
@@ -13,7 +13,7 @@ def test_flow_minimal(self, basic_flow_system_linopy_coords, coords_config):
"""Test that flow model constraints are correctly generated."""
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
- flow = fx.Flow('Fernwärme', flow_id='Wärme', size=100)
+ flow = fx.Flow(bus='Fernwärme', flow_id='Wärme', size=100)
flow_system.add_elements(fx.Sink('Sink', inputs=[flow]))
@@ -34,7 +34,7 @@ def test_flow(self, basic_flow_system_linopy_coords, coords_config):
timesteps = flow_system.timesteps
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
relative_minimum=np.linspace(0, 0.5, timesteps.size),
@@ -69,7 +69,9 @@ def test_effects_per_flow_hour(self, basic_flow_system_linopy_coords, coords_con
co2_per_flow_hour = np.linspace(4, 5, timesteps.size)
flow = fx.Flow(
- 'Fernwärme', flow_id='Wärme', effects_per_flow_hour={'costs': costs_per_flow_hour, 'CO2': co2_per_flow_hour}
+ bus='Fernwärme',
+ flow_id='Wärme',
+ effects_per_flow_hour={'costs': costs_per_flow_hour, 'CO2': co2_per_flow_hour},
)
flow_system.add_elements(fx.Sink('Sink', inputs=[flow]), fx.Effect('CO2', 't', ''))
model = create_linopy_model(flow_system)
@@ -93,7 +95,7 @@ def test_flow_invest(self, basic_flow_system_linopy_coords, coords_config):
timesteps = flow_system.timesteps
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(minimum_size=20, maximum_size=100, mandatory=True),
relative_minimum=np.linspace(0.1, 0.5, timesteps.size),
@@ -127,7 +129,7 @@ def test_flow_invest_optional(self, basic_flow_system_linopy_coords, coords_conf
timesteps = flow_system.timesteps
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(minimum_size=20, maximum_size=100, mandatory=False),
relative_minimum=np.linspace(0.1, 0.5, timesteps.size),
@@ -159,7 +161,7 @@ def test_flow_invest_optional_wo_min_size(self, basic_flow_system_linopy_coords,
timesteps = flow_system.timesteps
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(maximum_size=100, mandatory=False),
relative_minimum=np.linspace(0.1, 0.5, timesteps.size),
@@ -182,7 +184,7 @@ def test_flow_invest_wo_min_size_non_optional(self, basic_flow_system_linopy_coo
timesteps = flow_system.timesteps
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(maximum_size=100, mandatory=True),
relative_minimum=np.linspace(0.1, 0.5, timesteps.size),
@@ -207,7 +209,7 @@ def test_flow_invest_fixed_size(self, basic_flow_system_linopy_coords, coords_co
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(fixed_size=75, mandatory=True),
relative_minimum=0.2,
@@ -240,7 +242,7 @@ def test_flow_invest_with_effects(self, basic_flow_system_linopy_coords, coords_
co2 = fx.Effect('CO2', unit='ton', description='CO2 emissions')
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(
minimum_size=20,
@@ -264,7 +266,7 @@ def test_flow_invest_divest_effects(self, basic_flow_system_linopy_coords, coord
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(
minimum_size=20,
@@ -289,7 +291,7 @@ def test_flow_on(self, basic_flow_system_linopy_coords, coords_config):
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
relative_minimum=0.2,
@@ -329,7 +331,7 @@ def test_effects_per_active_hour(self, basic_flow_system_linopy_coords, coords_c
co2_per_running_hour = np.linspace(4, 5, timesteps.size)
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
status_parameters=fx.StatusParameters(
@@ -353,7 +355,7 @@ def test_consecutive_on_hours(self, basic_flow_system_linopy_coords, coords_conf
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
previous_flow_rate=0, # Required to get initial constraint
@@ -387,7 +389,7 @@ def test_consecutive_on_hours_previous(self, basic_flow_system_linopy_coords, co
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
status_parameters=fx.StatusParameters(
@@ -414,7 +416,7 @@ def test_consecutive_off_hours(self, basic_flow_system_linopy_coords, coords_con
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
previous_flow_rate=0, # Required to get initial constraint (was OFF for 1h, so previous_downtime=1)
@@ -448,7 +450,7 @@ def test_consecutive_off_hours_previous(self, basic_flow_system_linopy_coords, c
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
status_parameters=fx.StatusParameters(
@@ -475,7 +477,7 @@ def test_switch_on_constraints(self, basic_flow_system_linopy_coords, coords_con
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
previous_flow_rate=0, # Required to get initial constraint
@@ -513,7 +515,7 @@ def test_on_hours_limits(self, basic_flow_system_linopy_coords, coords_config):
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
status_parameters=fx.StatusParameters(
@@ -544,7 +546,7 @@ class TestFlowOnInvestModel:
def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_config):
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(minimum_size=20, maximum_size=200, mandatory=False),
relative_minimum=0.2,
@@ -574,7 +576,7 @@ def test_flow_on_invest_optional(self, basic_flow_system_linopy_coords, coords_c
def test_flow_on_invest_non_optional(self, basic_flow_system_linopy_coords, coords_config):
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(minimum_size=20, maximum_size=200, mandatory=True),
relative_minimum=0.2,
@@ -613,7 +615,7 @@ def test_fixed_relative_profile(self, basic_flow_system_linopy_coords, coords_co
profile = np.sin(np.linspace(0, 2 * np.pi, len(timesteps))) * 0.5 + 0.5 # Values between 0 and 1
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=100,
fixed_relative_profile=profile,
@@ -638,7 +640,7 @@ def test_fixed_profile_with_investment(self, basic_flow_system_linopy_coords, co
profile = np.sin(np.linspace(0, 2 * np.pi, len(timesteps))) * 0.5 + 0.5
flow = fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Wärme',
size=fx.InvestParameters(minimum_size=50, maximum_size=200, mandatory=False),
fixed_relative_profile=profile,
diff --git a/tests/superseded/math/test_linear_converter.py b/tests/superseded/math/test_linear_converter.py
index 2057581e4..69cb905cf 100644
--- a/tests/superseded/math/test_linear_converter.py
+++ b/tests/superseded/math/test_linear_converter.py
@@ -14,8 +14,8 @@ def test_basic_linear_converter(self, basic_flow_system_linopy_coords, coords_co
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
# Create input and output flows
- input_flow = fx.Flow('input_bus', flow_id='input', size=100)
- output_flow = fx.Flow('output_bus', flow_id='output', size=100)
+ input_flow = fx.Flow(bus='input_bus', flow_id='input', size=100)
+ output_flow = fx.Flow(bus='output_bus', flow_id='output', size=100)
# Create a simple linear converter with constant conversion factor
converter = fx.LinearConverter(
@@ -48,8 +48,8 @@ def test_linear_converter_time_varying(self, basic_flow_system_linopy_coords, co
varying_efficiency = np.linspace(0.7, 0.9, len(timesteps))
# Create input and output flows
- input_flow = fx.Flow('input_bus', flow_id='input', size=100)
- output_flow = fx.Flow('output_bus', flow_id='output', size=100)
+ input_flow = fx.Flow(bus='input_bus', flow_id='input', size=100)
+ output_flow = fx.Flow(bus='output_bus', flow_id='output', size=100)
# Create a linear converter with time-varying conversion factor
converter = fx.LinearConverter(
@@ -78,10 +78,10 @@ def test_linear_converter_multiple_factors(self, basic_flow_system_linopy_coords
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
# Create flows
- input_flow1 = fx.Flow('input_bus1', flow_id='input1', size=100)
- input_flow2 = fx.Flow('input_bus2', flow_id='input2', size=100)
- output_flow1 = fx.Flow('output_bus1', flow_id='output1', size=100)
- output_flow2 = fx.Flow('output_bus2', flow_id='output2', size=100)
+ input_flow1 = fx.Flow(bus='input_bus1', flow_id='input1', size=100)
+ input_flow2 = fx.Flow(bus='input_bus2', flow_id='input2', size=100)
+ output_flow1 = fx.Flow(bus='output_bus1', flow_id='output1', size=100)
+ output_flow2 = fx.Flow(bus='output_bus2', flow_id='output2', size=100)
# Create a linear converter with multiple inputs/outputs and conversion factors
converter = fx.LinearConverter(
@@ -111,8 +111,8 @@ def test_linear_converter_with_status(self, basic_flow_system_linopy_coords, coo
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
# Create input and output flows
- input_flow = fx.Flow('input_bus', flow_id='input', size=100)
- output_flow = fx.Flow('output_bus', flow_id='output', size=100)
+ input_flow = fx.Flow(bus='input_bus', flow_id='input', size=100)
+ output_flow = fx.Flow(bus='output_bus', flow_id='output', size=100)
# Create StatusParameters
status_params = fx.StatusParameters(
@@ -158,10 +158,10 @@ def test_linear_converter_multidimensional(self, basic_flow_system_linopy_coords
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
# Create a more complex setup with multiple flows
- input_flow1 = fx.Flow('fuel_bus', flow_id='fuel', size=100)
- input_flow2 = fx.Flow('electricity_bus', flow_id='electricity', size=50)
- output_flow1 = fx.Flow('heat_bus', flow_id='heat', size=70)
- output_flow2 = fx.Flow('cooling_bus', flow_id='cooling', size=30)
+ input_flow1 = fx.Flow(bus='fuel_bus', flow_id='fuel', size=100)
+ input_flow2 = fx.Flow(bus='electricity_bus', flow_id='electricity', size=50)
+ output_flow1 = fx.Flow(bus='heat_bus', flow_id='heat', size=70)
+ output_flow2 = fx.Flow(bus='cooling_bus', flow_id='cooling', size=30)
# Create a CHP-like converter with more complex connections
converter = fx.LinearConverter(
@@ -205,8 +205,8 @@ def test_edge_case_time_varying_conversion(self, basic_flow_system_linopy_coords
)
# Create input and output flows
- input_flow = fx.Flow('electricity_bus', flow_id='electricity', size=100)
- output_flow = fx.Flow('heat_bus', flow_id='heat', size=500) # Higher maximum to allow for COP of 5
+ input_flow = fx.Flow(bus='electricity_bus', flow_id='electricity', size=100)
+ output_flow = fx.Flow(bus='heat_bus', flow_id='heat', size=500) # Higher maximum to allow for COP of 5
conversion_factors = [{input_flow.label: fluctuating_cop, output_flow.label: np.ones(len(timesteps))}]
@@ -229,8 +229,8 @@ def test_piecewise_conversion(self, basic_flow_system_linopy_coords, coords_conf
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
# Create input and output flows
- input_flow = fx.Flow('input_bus', flow_id='input', size=100)
- output_flow = fx.Flow('output_bus', flow_id='output', size=100)
+ input_flow = fx.Flow(bus='input_bus', flow_id='input', size=100)
+ output_flow = fx.Flow(bus='output_bus', flow_id='output', size=100)
# Create pieces for piecewise conversion
# For input flow: two pieces from 0-50 and 50-100
@@ -269,8 +269,8 @@ def test_piecewise_conversion_with_status(self, basic_flow_system_linopy_coords,
flow_system, coords_config = basic_flow_system_linopy_coords, coords_config
# Create input and output flows
- input_flow = fx.Flow('input_bus', flow_id='input', size=100)
- output_flow = fx.Flow('output_bus', flow_id='output', size=100)
+ input_flow = fx.Flow(bus='input_bus', flow_id='input', size=100)
+ output_flow = fx.Flow(bus='output_bus', flow_id='output', size=100)
# Create pieces for piecewise conversion
input_pieces = [fx.Piece(start=0, end=50), fx.Piece(start=50, end=100)]
diff --git a/tests/superseded/math/test_storage.py b/tests/superseded/math/test_storage.py
index 3e3e23f15..efcb19694 100644
--- a/tests/superseded/math/test_storage.py
+++ b/tests/superseded/math/test_storage.py
@@ -16,8 +16,8 @@ def test_basic_storage(self, basic_flow_system_linopy_coords, coords_config):
# Create a simple storage
storage = fx.Storage(
'TestStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=30, # 30 kWh storage capacity
initial_charge_state=0, # Start empty
prevent_simultaneous_charge_and_discharge=True,
@@ -64,8 +64,8 @@ def test_lossy_storage(self, basic_flow_system_linopy_coords, coords_config):
# Create a simple storage
storage = fx.Storage(
'TestStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=30, # 30 kWh storage capacity
initial_charge_state=0, # Start empty
eta_charge=0.9, # Charging efficiency
@@ -111,8 +111,8 @@ def test_charge_state_bounds(self, basic_flow_system_linopy_coords, coords_confi
# Create a simple storage with time-varying bounds
storage = fx.Storage(
'TestStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=30, # 30 kWh storage capacity
initial_charge_state=3,
prevent_simultaneous_charge_and_discharge=True,
@@ -159,8 +159,8 @@ def test_storage_with_investment(self, basic_flow_system_linopy_coords, coords_c
# Create storage with investment parameters
storage = fx.Storage(
'InvestStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=fx.InvestParameters(
effects_of_investment={'costs': 100},
effects_of_investment_per_size={'costs': 10},
@@ -207,8 +207,8 @@ def test_storage_with_final_state_constraints(self, basic_flow_system_linopy_coo
# Create storage with final state constraints
storage = fx.Storage(
'FinalStateStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=30,
initial_charge_state=10, # Start with 10 kWh
minimal_final_charge_state=15, # End with at least 15 kWh
@@ -235,8 +235,8 @@ def test_storage_cyclic_initialization(self, basic_flow_system_linopy_coords, co
# Create storage with cyclic initialization
storage = fx.Storage(
'CyclicStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=30,
initial_charge_state='equals_final', # Cyclic initialization
eta_charge=0.9,
@@ -261,8 +261,8 @@ def test_simultaneous_charge_discharge(self, basic_flow_system_linopy_coords, co
# Create storage with or without simultaneous charge/discharge prevention
storage = fx.Storage(
'SimultaneousStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=30,
initial_charge_state=0,
eta_charge=0.9,
@@ -317,8 +317,8 @@ def test_investment_parameters(
# Create storage with specified investment parameters
storage = fx.Storage(
'InvestStorage',
- charging=fx.Flow('Q_th_in', bus='Fernwärme', size=20),
- discharging=fx.Flow('Q_th_out', bus='Fernwärme', size=20),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_in', size=20),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_out', size=20),
capacity_in_flow_hours=fx.InvestParameters(**invest_params),
initial_charge_state=0,
eta_charge=0.9,
diff --git a/tests/superseded/test_functional.py b/tests/superseded/test_functional.py
index 2826379b5..d9cfa9d54 100644
--- a/tests/superseded/test_functional.py
+++ b/tests/superseded/test_functional.py
@@ -71,9 +71,9 @@ def flow_system_base(timesteps: pd.DatetimeIndex) -> fx.FlowSystem:
flow_system.add_elements(
fx.Sink(
'Wärmelast',
- inputs=[fx.Flow('Fernwärme', flow_id='Wärme', fixed_relative_profile=data.thermal_demand, size=1)],
+ inputs=[fx.Flow(bus='Fernwärme', flow_id='Wärme', fixed_relative_profile=data.thermal_demand, size=1)],
),
- fx.Source('Gastarif', outputs=[fx.Flow('Gas', flow_id='Gas', effects_per_flow_hour=1)]),
+ fx.Source('Gastarif', outputs=[fx.Flow(bus='Gas', flow_id='Gas', effects_per_flow_hour=1)]),
)
return flow_system
@@ -84,8 +84,8 @@ def flow_system_minimal(timesteps) -> fx.FlowSystem:
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th'),
)
)
return flow_system
@@ -140,9 +140,9 @@ def test_fixed_size(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=fx.InvestParameters(fixed_size=1000, effects_of_investment=10, effects_of_investment_per_size=1),
),
@@ -179,9 +179,9 @@ def test_optimize_size(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=fx.InvestParameters(effects_of_investment=10, effects_of_investment_per_size=1, maximum_size=100),
),
@@ -218,9 +218,9 @@ def test_size_bounds(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=fx.InvestParameters(
minimum_size=40, maximum_size=100, effects_of_investment=10, effects_of_investment_per_size=1
@@ -259,9 +259,9 @@ def test_optional_invest(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=fx.InvestParameters(
mandatory=False,
@@ -275,9 +275,9 @@ def test_optional_invest(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler_optional',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=fx.InvestParameters(
mandatory=False,
@@ -336,8 +336,8 @@ def test_on(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=100, status_parameters=fx.StatusParameters()),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th', size=100, status_parameters=fx.StatusParameters()),
)
)
@@ -373,9 +373,9 @@ def test_off(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
status_parameters=fx.StatusParameters(max_downtime=100),
@@ -422,9 +422,9 @@ def test_startup_shutdown(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
status_parameters=fx.StatusParameters(force_startup_tracking=True),
@@ -478,9 +478,9 @@ def test_on_total_max(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
status_parameters=fx.StatusParameters(active_hours_max=1),
@@ -489,8 +489,8 @@ def test_on_total_max(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler_backup',
thermal_efficiency=0.2,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th', size=100),
),
)
@@ -526,9 +526,9 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
status_parameters=fx.StatusParameters(active_hours_max=2),
@@ -537,9 +537,9 @@ def test_on_total_bounds(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler_backup',
thermal_efficiency=0.2,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
status_parameters=fx.StatusParameters(active_hours_min=3),
@@ -597,9 +597,9 @@ def test_consecutive_uptime_downtime(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
previous_flow_rate=0, # Required for initial uptime constraint
@@ -609,8 +609,8 @@ def test_consecutive_uptime_downtime(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler_backup',
thermal_efficiency=0.2,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th', size=100),
),
)
flow_system['Wärmelast'].inputs[0].fixed_relative_profile = np.array([5, 10, 20, 18, 12])
@@ -656,15 +656,15 @@ def test_consecutive_off(solver_fixture, time_steps_fixture):
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
- thermal_flow=fx.Flow('Fernwärme', flow_id='Q_th'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Fernwärme', flow_id='Q_th'),
),
fx.linear_converters.Boiler(
'Boiler_backup',
thermal_efficiency=0.2,
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
size=100,
previous_flow_rate=np.array([20]), # Otherwise its Off before the start
diff --git a/tests/test_align_to_coords.py b/tests/test_align_to_coords.py
new file mode 100644
index 000000000..88a9a0dc6
--- /dev/null
+++ b/tests/test_align_to_coords.py
@@ -0,0 +1,179 @@
+"""Tests for align_to_coords() and align_effects_to_coords()."""
+
+import numpy as np
+import pandas as pd
+import pytest
+import xarray as xr
+
+from flixopt.core import ConversionError, TimeSeriesData, align_effects_to_coords, align_to_coords
+
+
+@pytest.fixture
+def time_coords():
+ """Standard time-only coordinates."""
+ return {'time': pd.date_range('2020-01-01', periods=5, freq='h', name='time')}
+
+
+@pytest.fixture
+def full_coords():
+ """Time + period + scenario coordinates."""
+ return {
+ 'time': pd.date_range('2020-01-01', periods=5, freq='h', name='time'),
+ 'period': pd.Index([2020, 2030], name='period'),
+ 'scenario': pd.Index(['A', 'B', 'C'], name='scenario'),
+ }
+
+
+class TestAlignNone:
+ def test_none_returns_none(self, time_coords):
+ assert align_to_coords(None, time_coords) is None
+
+ def test_none_with_name(self, time_coords):
+ assert align_to_coords(None, time_coords, name='test') is None
+
+
+class TestAlignScalar:
+ def test_int(self, time_coords):
+ result = align_to_coords(42, time_coords, name='val')
+ assert isinstance(result, xr.DataArray)
+ assert result.ndim == 0
+ assert float(result) == 42.0
+
+ def test_float(self, time_coords):
+ result = align_to_coords(0.5, time_coords)
+ assert result.ndim == 0
+ assert float(result) == 0.5
+
+ def test_bool(self, time_coords):
+ result = align_to_coords(True, time_coords)
+ assert result.ndim == 0
+
+ def test_np_float(self, time_coords):
+ result = align_to_coords(np.float64(3.14), time_coords)
+ assert result.ndim == 0
+ assert float(result) == pytest.approx(3.14)
+
+
+class TestAlign1DArray:
+ def test_numpy_array_matches_time(self, time_coords):
+ data = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
+ result = align_to_coords(data, time_coords, name='profile')
+ assert result.dims == ('time',)
+ assert len(result) == 5
+ np.testing.assert_array_equal(result.values, data)
+
+ def test_wrong_length_raises(self, time_coords):
+ data = np.array([1.0, 2.0, 3.0]) # length 3, time has 5
+ with pytest.raises(ConversionError):
+ align_to_coords(data, time_coords)
+
+ def test_matches_period_dim(self, full_coords):
+ data = np.array([10.0, 20.0]) # length 2 matches period
+ result = align_to_coords(data, full_coords, dims=['period', 'scenario'])
+ assert result.dims == ('period',)
+
+ def test_matches_scenario_dim(self, full_coords):
+ data = np.array([1.0, 2.0, 3.0]) # length 3 matches scenario
+ result = align_to_coords(data, full_coords, dims=['period', 'scenario'])
+ assert result.dims == ('scenario',)
+
+
+class TestAlignSeries:
+ def test_series_with_datetime_index(self, time_coords):
+ idx = time_coords['time']
+ data = pd.Series([10, 20, 30, 40, 50], index=idx)
+ result = align_to_coords(data, time_coords)
+ assert result.dims == ('time',)
+ np.testing.assert_array_equal(result.values, [10, 20, 30, 40, 50])
+
+ def test_series_wrong_index_raises(self, time_coords):
+ wrong_idx = pd.date_range('2021-01-01', periods=5, freq='h')
+ data = pd.Series([1, 2, 3, 4, 5], index=wrong_idx)
+ with pytest.raises(ConversionError):
+ align_to_coords(data, time_coords)
+
+
+class TestAlignTimeSeriesData:
+ def test_basic_timeseries(self, time_coords):
+ data = TimeSeriesData([1, 2, 3, 4, 5])
+ result = align_to_coords(data, time_coords, name='ts')
+ assert isinstance(result, TimeSeriesData)
+ assert result.dims == ('time',)
+
+ def test_clustering_metadata_preserved(self, time_coords):
+ data = TimeSeriesData([1, 2, 3, 4, 5], clustering_group='heat')
+ result = align_to_coords(data, time_coords, name='ts')
+ assert result.clustering_group == 'heat'
+
+ def test_clustering_weight_preserved(self, time_coords):
+ data = TimeSeriesData([1, 2, 3, 4, 5], clustering_weight=0.7)
+ result = align_to_coords(data, time_coords, name='ts')
+ assert result.clustering_weight == 0.7
+
+
+class TestAlignDataArray:
+ def test_already_aligned_passthrough(self, time_coords):
+ idx = time_coords['time']
+ da = xr.DataArray([1, 2, 3, 4, 5], dims=['time'], coords={'time': idx})
+ result = align_to_coords(da, time_coords)
+ xr.testing.assert_equal(result, da)
+
+ def test_scalar_dataarray(self, time_coords):
+ da = xr.DataArray(42.0)
+ result = align_to_coords(da, time_coords)
+ assert result.ndim == 0
+ assert float(result) == 42.0
+
+ def test_incompatible_dims_raises(self, time_coords):
+ da = xr.DataArray([1, 2, 3], dims=['foo'])
+ with pytest.raises(ConversionError):
+ align_to_coords(da, time_coords)
+
+
+class TestAlignDimsFilter:
+ def test_dims_restricts_alignment(self, full_coords):
+ data = np.array([10.0, 20.0]) # length 2 matches period
+ result = align_to_coords(data, full_coords, dims=['period'])
+ assert result.dims == ('period',)
+
+ def test_dims_none_uses_all(self, time_coords):
+ data = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
+ result = align_to_coords(data, time_coords, dims=None)
+ assert result.dims == ('time',)
+
+
+class TestAlignName:
+ def test_name_assigned(self, time_coords):
+ result = align_to_coords(42, time_coords, name='my_param')
+ assert result.name == 'my_param'
+
+ def test_no_name(self, time_coords):
+ result = align_to_coords(42, time_coords)
+ # Should not error, name may be None
+ assert result is not None
+
+
+class TestAlignEffects:
+ def test_none_returns_none(self, time_coords):
+ assert align_effects_to_coords(None, time_coords) is None
+
+ def test_scalar_effects(self, time_coords):
+ effects = {'costs': 0.04, 'CO2': 0.3}
+ result = align_effects_to_coords(effects, time_coords, prefix='flow')
+ assert set(result.keys()) == {'costs', 'CO2'}
+ assert float(result['costs']) == pytest.approx(0.04)
+ assert result['costs'].name == 'flow|costs'
+
+ def test_array_effects(self, time_coords):
+ effects = {'costs': np.array([1, 2, 3, 4, 5])}
+ result = align_effects_to_coords(effects, time_coords)
+ assert result['costs'].dims == ('time',)
+
+ def test_prefix_suffix(self, time_coords):
+ effects = {'costs': 42}
+ result = align_effects_to_coords(effects, time_coords, prefix='Boiler', suffix='per_hour')
+ assert result['costs'].name == 'Boiler|costs|per_hour'
+
+ def test_empty_dict(self, time_coords):
+ result = align_effects_to_coords({}, time_coords)
+ assert result == {}
diff --git a/tests/test_clustering/test_cluster_reduce_expand.py b/tests/test_clustering/test_cluster_reduce_expand.py
index fe61144ea..06426e3c5 100644
--- a/tests/test_clustering/test_cluster_reduce_expand.py
+++ b/tests/test_clustering/test_cluster_reduce_expand.py
@@ -20,13 +20,13 @@ def create_simple_system(timesteps: pd.DatetimeIndex) -> fx.FlowSystem:
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -249,14 +249,14 @@ def create_system_with_scenarios(timesteps: pd.DatetimeIndex, scenarios: pd.Inde
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand_df, size=1)],
+ inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand_df, size=1)],
),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -392,12 +392,12 @@ def create_system_with_storage(
flow_system.add_elements(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Grid', outputs=[fx.Flow('P', bus='Elec', size=100, effects_per_flow_hour=0.1)]),
- fx.Sink('Load', inputs=[fx.Flow('P', bus='Elec', fixed_relative_profile=demand, size=1)]),
+ fx.Source('Grid', outputs=[fx.Flow(bus='Elec', flow_id='P', size=100, effects_per_flow_hour=0.1)]),
+ fx.Sink('Load', inputs=[fx.Flow(bus='Elec', flow_id='P', fixed_relative_profile=demand, size=1)]),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=30),
- discharging=fx.Flow('discharge', bus='Elec', size=30),
+ charging=fx.Flow(bus='Elec', size=30),
+ discharging=fx.Flow(bus='Elec', size=30),
capacity_in_flow_hours=100,
relative_loss_per_hour=relative_loss_per_hour,
cluster_mode=cluster_mode,
@@ -579,13 +579,13 @@ def create_system_with_periods(timesteps: pd.DatetimeIndex, periods: pd.Index) -
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -620,14 +620,14 @@ def create_system_with_periods_and_scenarios(
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand_da, size=1)],
+ inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand_da, size=1)],
),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -753,13 +753,13 @@ def create_system_with_peak_demand(timesteps: pd.DatetimeIndex) -> fx.FlowSystem
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -948,13 +948,13 @@ def test_cluster_with_data_vars_subset(self, timesteps_8_days):
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=price)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=price)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
@@ -992,13 +992,13 @@ def test_data_vars_preserves_all_flowsystem_data(self, timesteps_8_days):
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=price)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=price)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
@@ -1025,13 +1025,13 @@ def test_data_vars_optimization_works(self, solver_fixture, timesteps_8_days):
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=price)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=price)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
@@ -1057,13 +1057,13 @@ def test_data_vars_with_multiple_variables(self, timesteps_8_days):
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=price)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=price)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
@@ -1291,12 +1291,12 @@ def test_segmented_total_effects_match_solution(self, solver_fixture, freq):
fs.add_elements(
fx.Source(
'Boiler',
- outputs=[fx.Flow('Q', bus='Heat', size=100, effects_per_flow_hour={'Cost': 50})],
+ outputs=[fx.Flow(bus='Heat', flow_id='Q', size=100, effects_per_flow_hour={'Cost': 50})],
)
)
demand_profile = np.tile([0.5, 1], n_timesteps // 2)
fs.add_elements(
- fx.Sink('Demand', inputs=[fx.Flow('Q', bus='Heat', size=50, fixed_relative_profile=demand_profile)])
+ fx.Sink('Demand', inputs=[fx.Flow(bus='Heat', flow_id='Q', size=50, fixed_relative_profile=demand_profile)])
)
# Cluster with segments -> solve -> expand
@@ -1547,8 +1547,8 @@ def test_startup_shutdown_first_timestep_only(self, solver_fixture, timesteps_8_
'Boiler',
outputs=[
fx.Flow(
- 'Q',
bus='Heat',
+ flow_id='Q',
size=100,
status_parameters=fx.StatusParameters(effects_per_startup={'Cost': 10}),
effects_per_flow_hour={'Cost': 50},
@@ -1561,7 +1561,7 @@ def test_startup_shutdown_first_timestep_only(self, solver_fixture, timesteps_8_
demand_pattern = np.array([0.8] * 12 + [0.0] * 12) # On/off pattern per day (0-1 range)
demand_profile = np.tile(demand_pattern, 8)
fs.add_elements(
- fx.Sink('Demand', inputs=[fx.Flow('Q', bus='Heat', size=50, fixed_relative_profile=demand_profile)])
+ fx.Sink('Demand', inputs=[fx.Flow(bus='Heat', flow_id='Q', size=50, fixed_relative_profile=demand_profile)])
)
# Cluster with segments
@@ -1616,8 +1616,8 @@ def test_startup_timing_preserved_non_segmented(self, solver_fixture, timesteps_
'Boiler',
outputs=[
fx.Flow(
- 'Q',
bus='Heat',
+ flow_id='Q',
size=100,
status_parameters=fx.StatusParameters(effects_per_startup={'Cost': 10}),
effects_per_flow_hour={'Cost': 50},
@@ -1629,7 +1629,7 @@ def test_startup_timing_preserved_non_segmented(self, solver_fixture, timesteps_
demand_pattern = np.array([0.8] * 12 + [0.0] * 12) # On/off pattern per day (0-1 range)
demand_profile = np.tile(demand_pattern, 8)
fs.add_elements(
- fx.Sink('Demand', inputs=[fx.Flow('Q', bus='Heat', size=50, fixed_relative_profile=demand_profile)])
+ fx.Sink('Demand', inputs=[fx.Flow(bus='Heat', flow_id='Q', size=50, fixed_relative_profile=demand_profile)])
)
# Cluster WITHOUT segments
diff --git a/tests/test_clustering/test_clustering_io.py b/tests/test_clustering/test_clustering_io.py
index 527ea645c..92e6842e4 100644
--- a/tests/test_clustering/test_clustering_io.py
+++ b/tests/test_clustering/test_clustering_io.py
@@ -19,8 +19,10 @@ def simple_system_24h():
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=np.ones(24), size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink('demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=np.ones(24), size=10)]),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
@@ -54,8 +56,10 @@ def simple_system_8_days():
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=demand_profile, size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink('demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=demand_profile, size=10)]),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
@@ -224,8 +228,12 @@ def system_with_scenarios(self):
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=demand_profile, size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink(
+ 'demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=demand_profile, size=10)]
+ ),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
@@ -349,8 +357,12 @@ def system_with_periods(self):
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=demand_profile, size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink(
+ 'demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=demand_profile, size=10)]
+ ),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
@@ -434,12 +446,16 @@ def system_with_intercluster_storage(self):
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
)
fs.add_elements(
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=demand_profile, size=10)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=50, effects_per_flow_hour={'costs': 0.1})]),
+ fx.Sink(
+ 'demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=demand_profile, size=10)]
+ ),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=50, effects_per_flow_hour={'costs': 0.1})]
+ ),
fx.Storage(
'storage',
- charging=fx.Flow('in', bus='heat', size=20),
- discharging=fx.Flow('out', bus='heat', size=20),
+ charging=fx.Flow(bus='heat', flow_id='in', size=20),
+ discharging=fx.Flow(bus='heat', flow_id='out', size=20),
capacity_in_flow_hours=100,
cluster_mode='intercluster', # Key: intercluster mode
),
@@ -576,8 +592,10 @@ def system_with_periods_and_scenarios(self):
fs.add_elements(
fx.Bus('heat'),
fx.Effect('costs', unit='EUR', description='costs', is_objective=True, is_standard=True),
- fx.Sink('demand', inputs=[fx.Flow('in', bus='heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('source', outputs=[fx.Flow('out', bus='heat', size=200, effects_per_flow_hour={'costs': 0.05})]),
+ fx.Sink('demand', inputs=[fx.Flow(bus='heat', flow_id='in', fixed_relative_profile=demand, size=1)]),
+ fx.Source(
+ 'source', outputs=[fx.Flow(bus='heat', flow_id='out', size=200, effects_per_flow_hour={'costs': 0.05})]
+ ),
)
return fs
diff --git a/tests/test_clustering/test_integration.py b/tests/test_clustering/test_integration.py
index fcec081aa..d91bdb6e6 100644
--- a/tests/test_clustering/test_integration.py
+++ b/tests/test_clustering/test_integration.py
@@ -142,9 +142,10 @@ def test_clustering_data_returns_dataset(self):
# Add components with time-varying data
demand_data = np.sin(np.linspace(0, 4 * np.pi, n_hours)) + 2
bus = Bus('electricity')
- source = Source('grid', outputs=[Flow('grid_in', bus='electricity', size=100)])
+ source = Source('grid', outputs=[Flow(bus='electricity', flow_id='grid_in', size=100)])
sink = Sink(
- 'demand', inputs=[Flow('demand_out', bus='electricity', size=100, fixed_relative_profile=demand_data)]
+ 'demand',
+ inputs=[Flow(bus='electricity', flow_id='demand_out', size=100, fixed_relative_profile=demand_data)],
)
fs.add_elements(source, sink, bus)
@@ -162,9 +163,10 @@ def test_clustering_data_contains_only_time_varying(self):
# Add components with time-varying and constant data
demand_data = np.sin(np.linspace(0, 4 * np.pi, n_hours)) + 2
bus = Bus('electricity')
- source = Source('grid', outputs=[Flow('grid_in', bus='electricity', size=100)])
+ source = Source('grid', outputs=[Flow(bus='electricity', flow_id='grid_in', size=100)])
sink = Sink(
- 'demand', inputs=[Flow('demand_out', bus='electricity', size=100, fixed_relative_profile=demand_data)]
+ 'demand',
+ inputs=[Flow(bus='electricity', flow_id='demand_out', size=100, fixed_relative_profile=demand_data)],
)
fs.add_elements(source, sink, bus)
@@ -196,9 +198,10 @@ def test_clustering_data_with_periods(self):
)
bus = Bus('electricity')
effect = Effect('costs', '€', is_objective=True)
- source = Source('grid', outputs=[Flow('grid_in', bus='electricity', size=100)])
+ source = Source('grid', outputs=[Flow(bus='electricity', flow_id='grid_in', size=100)])
sink = Sink(
- 'demand', inputs=[Flow('demand_out', bus='electricity', size=100, fixed_relative_profile=demand_data)]
+ 'demand',
+ inputs=[Flow(bus='electricity', flow_id='demand_out', size=100, fixed_relative_profile=demand_data)],
)
fs.add_elements(source, sink, bus, effect)
@@ -238,9 +241,9 @@ def test_cluster_reduces_timesteps(self):
demand_data = np.sin(np.linspace(0, 14 * np.pi, n_hours)) + 2 # Varying demand over 7 days
bus = Bus('electricity')
# Bus label is passed as string to Flow
- grid_flow = Flow('grid_in', bus='electricity', size=100)
+ grid_flow = Flow(bus='electricity', flow_id='grid_in', size=100)
demand_flow = Flow(
- 'demand_out', bus='electricity', size=100, fixed_relative_profile=TimeSeriesData(demand_data / 100)
+ bus='electricity', flow_id='demand_out', size=100, fixed_relative_profile=TimeSeriesData(demand_data / 100)
)
source = Source('grid', outputs=[grid_flow])
sink = Sink('demand', inputs=[demand_flow])
@@ -276,9 +279,9 @@ def basic_flow_system(self):
demand_data = np.sin(np.linspace(0, 14 * np.pi, n_hours)) + 2
bus = Bus('electricity')
- grid_flow = Flow('grid_in', bus='electricity', size=100)
+ grid_flow = Flow(bus='electricity', flow_id='grid_in', size=100)
demand_flow = Flow(
- 'demand_out', bus='electricity', size=100, fixed_relative_profile=TimeSeriesData(demand_data / 100)
+ bus='electricity', flow_id='demand_out', size=100, fixed_relative_profile=TimeSeriesData(demand_data / 100)
)
source = Source('grid', outputs=[grid_flow])
sink = Sink('demand', inputs=[demand_flow])
@@ -349,9 +352,9 @@ def test_metrics_with_periods(self):
demand_data = np.sin(np.linspace(0, 14 * np.pi, n_hours)) + 2
bus = Bus('electricity')
- grid_flow = Flow('grid_in', bus='electricity', size=100)
+ grid_flow = Flow(bus='electricity', flow_id='grid_in', size=100)
demand_flow = Flow(
- 'demand_out', bus='electricity', size=100, fixed_relative_profile=TimeSeriesData(demand_data / 100)
+ bus='electricity', flow_id='demand_out', size=100, fixed_relative_profile=TimeSeriesData(demand_data / 100)
)
source = Source('grid', outputs=[grid_flow])
sink = Sink('demand', inputs=[demand_flow])
diff --git a/tests/test_clustering/test_multiperiod_extremes.py b/tests/test_clustering/test_multiperiod_extremes.py
index 55720f3a0..356866399 100644
--- a/tests/test_clustering/test_multiperiod_extremes.py
+++ b/tests/test_clustering/test_multiperiod_extremes.py
@@ -109,14 +109,14 @@ def create_multiperiod_system_with_different_profiles(
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand_da, size=1)],
+ inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand_da, size=1)],
),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -195,14 +195,14 @@ def create_system_with_extreme_peaks(
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand_input, size=1)],
+ inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand_input, size=1)],
),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -250,14 +250,14 @@ def create_multiperiod_multiscenario_system(
fx.Effect('costs', '€', is_standard=True, is_objective=True),
fx.Sink(
'HeatDemand',
- inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand_da, size=1)],
+ inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand_da, size=1)],
),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
return flow_system
@@ -435,13 +435,13 @@ def test_new_cluster_with_min_value(self, solver_fixture, timesteps_8_days):
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
@@ -737,13 +737,13 @@ def test_cluster_with_scenarios(self, solver_fixture, timesteps_8_days, scenario
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink('HeatDemand', inputs=[fx.Flow('Q', bus='Heat', fixed_relative_profile=demand_da, size=1)]),
- fx.Source('GasSource', outputs=[fx.Flow('Gas', bus='Gas', effects_per_flow_hour=0.05)]),
+ fx.Sink('HeatDemand', inputs=[fx.Flow(bus='Heat', flow_id='Q', fixed_relative_profile=demand_da, size=1)]),
+ fx.Source('GasSource', outputs=[fx.Flow(bus='Gas', effects_per_flow_hour=0.05)]),
fx.linear_converters.Boiler(
'Boiler',
thermal_efficiency=0.9,
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
- thermal_flow=fx.Flow('Q_th', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th'),
),
)
diff --git a/tests/test_comparison.py b/tests/test_comparison.py
index b37b1ca44..844f371b7 100644
--- a/tests/test_comparison.py
+++ b/tests/test_comparison.py
@@ -25,36 +25,36 @@
def _build_base_flow_system():
"""Factory: base flow system with boiler and storage."""
fs = fx.FlowSystem(_TIMESTEPS, name='Base')
- fs.add_elements(
+ fs.add(
fx.Effect('costs', '€', 'Costs', is_standard=True, is_objective=True),
fx.Effect('CO2', 'kg', 'CO2 Emissions'),
fx.Bus('Electricity'),
fx.Bus('Heat'),
fx.Bus('Gas'),
)
- fs.add_elements(
- fx.Source(
+ fs.add(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('P_el', bus='Electricity', size=100, effects_per_flow_hour={'costs': 0.3})],
+ imports=[fx.Flow(bus='Electricity', flow_id='P_el', size=100, effects_per_flow_hour={'costs': 0.3})],
),
- fx.Source(
+ fx.Port(
'GasSupply',
- outputs=[fx.Flow('Q_gas', bus='Gas', size=200, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})],
+ imports=[fx.Flow(bus='Gas', flow_id='Q_gas', size=200, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})],
),
- fx.Sink(
+ fx.Port(
'HeatDemand',
- inputs=[fx.Flow('Q_demand', bus='Heat', size=50, fixed_relative_profile=0.6)],
+ exports=[fx.Flow(bus='Heat', flow_id='Q_demand', size=50, fixed_relative_profile=0.6)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=0.9,
- thermal_flow=fx.Flow('Q_th', bus='Heat', size=60),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th', size=60),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
),
fx.Storage(
'ThermalStorage',
- charging=fx.Flow('Q_charge', bus='Heat', size=20),
- discharging=fx.Flow('Q_discharge', bus='Heat', size=20),
+ charging=fx.Flow(bus='Heat', flow_id='Q_charge', size=20),
+ discharging=fx.Flow(bus='Heat', flow_id='Q_discharge', size=20),
capacity_in_flow_hours=40,
initial_charge_state=0.5,
),
@@ -65,48 +65,48 @@ def _build_base_flow_system():
def _build_flow_system_with_chp():
"""Factory: flow system with additional CHP component."""
fs = fx.FlowSystem(_TIMESTEPS, name='WithCHP')
- fs.add_elements(
+ fs.add(
fx.Effect('costs', '€', 'Costs', is_standard=True, is_objective=True),
fx.Effect('CO2', 'kg', 'CO2 Emissions'),
fx.Bus('Electricity'),
fx.Bus('Heat'),
fx.Bus('Gas'),
)
- fs.add_elements(
- fx.Source(
+ fs.add(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('P_el', bus='Electricity', size=100, effects_per_flow_hour={'costs': 0.3})],
+ imports=[fx.Flow(bus='Electricity', flow_id='P_el', size=100, effects_per_flow_hour={'costs': 0.3})],
),
- fx.Source(
+ fx.Port(
'GasSupply',
- outputs=[fx.Flow('Q_gas', bus='Gas', size=200, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})],
+ imports=[fx.Flow(bus='Gas', flow_id='Q_gas', size=200, effects_per_flow_hour={'costs': 0.05, 'CO2': 0.2})],
),
- fx.Sink(
+ fx.Port(
'HeatDemand',
- inputs=[fx.Flow('Q_demand', bus='Heat', size=50, fixed_relative_profile=0.6)],
+ exports=[fx.Flow(bus='Heat', flow_id='Q_demand', size=50, fixed_relative_profile=0.6)],
),
- fx.Sink(
+ fx.Port(
'ElectricitySink',
- inputs=[fx.Flow('P_sink', bus='Electricity', size=100)],
+ exports=[fx.Flow(bus='Electricity', flow_id='P_sink', size=100)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=0.9,
- thermal_flow=fx.Flow('Q_th', bus='Heat', size=60),
- fuel_flow=fx.Flow('Q_fu', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th', size=60),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu'),
),
- fx.linear_converters.CHP(
+ fx.Converter.chp(
'CHP',
thermal_efficiency=0.5,
electrical_efficiency=0.3,
- thermal_flow=fx.Flow('Q_th_chp', bus='Heat', size=30),
- electrical_flow=fx.Flow('P_el_chp', bus='Electricity', size=18),
- fuel_flow=fx.Flow('Q_fu_chp', bus='Gas'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='Q_th_chp', size=30),
+ electrical_flow=fx.Flow(bus='Electricity', flow_id='P_el_chp', size=18),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu_chp'),
),
fx.Storage(
'ThermalStorage',
- charging=fx.Flow('Q_charge', bus='Heat', size=20),
- discharging=fx.Flow('Q_discharge', bus='Heat', size=20),
+ charging=fx.Flow(bus='Heat', flow_id='Q_charge', size=20),
+ discharging=fx.Flow(bus='Heat', flow_id='Q_discharge', size=20),
capacity_in_flow_hours=40,
initial_charge_state=0.5,
),
diff --git a/tests/test_legacy_solution_access.py b/tests/test_legacy_solution_access.py
index 74bcfe917..f494af043 100644
--- a/tests/test_legacy_solution_access.py
+++ b/tests/test_legacy_solution_access.py
@@ -46,11 +46,13 @@ class TestLegacySolutionAccess:
def test_effect_access(self, optimize):
"""Test legacy effect access: solution['costs'] -> solution['effect|total'].sel(effect='costs')."""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10, effects_per_flow_hour=1)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Port('Src', imports=[fx.Flow(bus='Heat', flow_id='heat', size=10, effects_per_flow_hour=1)]),
+ fx.Port(
+ 'Snk', exports=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))]
+ ),
)
fs = optimize(fs)
@@ -65,11 +67,13 @@ def test_effect_access(self, optimize):
def test_flow_rate_access(self, optimize):
"""Test legacy flow rate access: solution['Src(heat)|flow_rate'] -> solution['flow|rate'].sel(flow='Src(heat)')."""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Port('Src', imports=[fx.Flow(bus='Heat', flow_id='heat', size=10)]),
+ fx.Port(
+ 'Snk', exports=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))]
+ ),
)
fs = optimize(fs)
@@ -84,14 +88,20 @@ def test_flow_rate_access(self, optimize):
def test_flow_size_access(self, optimize):
"""Test legacy flow size access: solution['Src(heat)|size'] -> solution['flow|size'].sel(flow='Src(heat)')."""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source(
+ fx.Port(
'Src',
- outputs=[fx.Flow('heat', bus='Heat', size=fx.InvestParameters(fixed_size=50), effects_per_flow_hour=1)],
+ imports=[
+ fx.Flow(
+ bus='Heat', flow_id='heat', size=fx.InvestParameters(fixed_size=50), effects_per_flow_hour=1
+ )
+ ],
+ ),
+ fx.Port(
+ 'Snk', exports=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([5, 5]))]
),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([5, 5]))]),
)
fs = optimize(fs)
@@ -106,18 +116,21 @@ def test_flow_size_access(self, optimize):
def test_storage_charge_state_access(self, optimize):
"""Test legacy storage charge state access: solution['Battery|charge_state'] -> solution['storage|charge'].sel(storage='Battery')."""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Grid', outputs=[fx.Flow('elec', bus='Elec', size=100, effects_per_flow_hour=1)]),
+ fx.Port('Grid', imports=[fx.Flow(bus='Elec', flow_id='elec', size=100, effects_per_flow_hour=1)]),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=10),
- discharging=fx.Flow('discharge', bus='Elec', size=10),
+ charging=fx.Flow(bus='Elec', size=10),
+ discharging=fx.Flow(bus='Elec', size=10),
capacity_in_flow_hours=50,
initial_charge_state=25,
),
- fx.Sink('Load', inputs=[fx.Flow('elec', bus='Elec', size=10, fixed_relative_profile=np.array([1, 1, 1]))]),
+ fx.Port(
+ 'Load',
+ exports=[fx.Flow(bus='Elec', flow_id='elec', size=10, fixed_relative_profile=np.array([1, 1, 1]))],
+ ),
)
fs = optimize(fs)
@@ -140,11 +153,14 @@ def test_legacy_access_disabled_by_default(self):
fx.CONFIG.Legacy.solution_access = False
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10, effects_per_flow_hour=1)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Port('Src', imports=[fx.Flow(bus='Heat', flow_id='heat', size=10, effects_per_flow_hour=1)]),
+ fx.Port(
+ 'Snk',
+ exports=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))],
+ ),
)
solver = fx.solvers.HighsSolver(log_to_console=False)
fs.optimize(solver)
@@ -164,11 +180,13 @@ def test_legacy_access_disabled_by_default(self):
def test_legacy_access_emits_deprecation_warning(self, optimize):
"""Test that legacy access emits DeprecationWarning."""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10, effects_per_flow_hour=1)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Port('Src', imports=[fx.Flow(bus='Heat', flow_id='heat', size=10, effects_per_flow_hour=1)]),
+ fx.Port(
+ 'Snk', exports=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))]
+ ),
)
fs = optimize(fs)
diff --git a/tests/test_math/test_bus.py b/tests/test_math/test_bus.py
index 121b4c747..b646f5c14 100644
--- a/tests/test_math/test_bus.py
+++ b/tests/test_math/test_bus.py
@@ -21,25 +21,25 @@ def test_merit_order_dispatch(self, optimize):
with merit order yields cost=80 and the exact flow split [20,10].
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=None),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
- fx.Source(
+ fx.Port(
'Src1',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=1, size=20),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=1, size=20),
],
),
- fx.Source(
+ fx.Port(
'Src2',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=2, size=20),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=2, size=20),
],
),
)
@@ -64,20 +64,24 @@ def test_imbalance_penalty(self, optimize):
tracked in a separate 'Penalty' effect, not in 'costs'.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=100),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'Src',
- outputs=[
+ imports=[
fx.Flow(
- 'heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20]), effects_per_flow_hour=1
+ bus='Heat',
+ flow_id='heat',
+ size=1,
+ fixed_relative_profile=np.array([20, 20]),
+ effects_per_flow_hour=1,
),
],
),
@@ -102,40 +106,40 @@ def test_prevent_simultaneous_flow_rates(self, optimize):
Sensitivity: Without prevent_simultaneous, cost=40. With it, cost=2*(10+50)=120.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat1'),
fx.Bus('Heat2'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand1',
- inputs=[
- fx.Flow('heat', bus='Heat1', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat1', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Sink(
+ fx.Port(
'Demand2',
- inputs=[
- fx.Flow('heat', bus='Heat2', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat2', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'DualSrc',
- outputs=[
- fx.Flow('heat1', bus='Heat1', effects_per_flow_hour=1, size=100),
- fx.Flow('heat2', bus='Heat2', effects_per_flow_hour=1, size=100),
+ imports=[
+ fx.Flow(bus='Heat1', flow_id='heat1', effects_per_flow_hour=1, size=100),
+ fx.Flow(bus='Heat2', flow_id='heat2', effects_per_flow_hour=1, size=100),
],
prevent_simultaneous_flow_rates=True,
),
- fx.Source(
+ fx.Port(
'Backup1',
- outputs=[
- fx.Flow('heat', bus='Heat1', effects_per_flow_hour=5),
+ imports=[
+ fx.Flow(bus='Heat1', flow_id='heat', effects_per_flow_hour=5),
],
),
- fx.Source(
+ fx.Port(
'Backup2',
- outputs=[
- fx.Flow('heat', bus='Heat2', effects_per_flow_hour=5),
+ imports=[
+ fx.Flow(bus='Heat2', flow_id='heat', effects_per_flow_hour=5),
],
),
)
diff --git a/tests/test_math/test_clustering.py b/tests/test_math/test_clustering.py
index d56366508..9b95beaed 100644
--- a/tests/test_math/test_clustering.py
+++ b/tests/test_math/test_clustering.py
@@ -36,16 +36,16 @@ def test_clustering_basic_objective(self):
# Full model
fs_full = fx.FlowSystem(ts)
- fs_full.add_elements(
+ fs_full.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=demand)],
+ exports=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=demand)],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs_full.optimize(_SOLVER)
@@ -65,16 +65,16 @@ def test_clustering_basic_objective(self):
demand_day1 = demand[:24]
demand_day2 = demand[24:]
demand_avg = (demand_day1 + demand_day2) / 2
- fs_clust.add_elements(
+ fs_clust.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=demand_avg)],
+ exports=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=demand_avg)],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs_clust.optimize(_SOLVER)
@@ -95,21 +95,23 @@ def test_storage_cluster_mode_cyclic(self):
ts = pd.date_range('2020-01-01', periods=4, freq='h')
clusters = pd.Index([0, 1], name='cluster')
fs = fx.FlowSystem(ts, clusters=clusters, cluster_weight=np.array([1.0, 1.0]))
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 20, 30, 10]))],
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 20, 30, 10]))
+ ],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 10, 1, 10]))],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 10, 1, 10]))],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=100),
- discharging=fx.Flow('discharge', bus='Elec', size=100),
+ charging=fx.Flow(bus='Elec', size=100),
+ discharging=fx.Flow(bus='Elec', size=100),
capacity_in_flow_hours=100,
initial_charge_state=0,
eta_charge=1,
@@ -133,21 +135,23 @@ def test_storage_cluster_mode_intercluster(self):
def _build(mode):
fs = fx.FlowSystem(ts, clusters=clusters, cluster_weight=np.array([1.0, 1.0]))
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 20, 30, 10]))],
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 20, 30, 10]))
+ ],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 10, 1, 10]))],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 10, 1, 10]))],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=100),
- discharging=fx.Flow('discharge', bus='Elec', size=100),
+ charging=fx.Flow(bus='Elec', size=100),
+ discharging=fx.Flow(bus='Elec', size=100),
capacity_in_flow_hours=100,
initial_charge_state=0,
eta_charge=1,
@@ -176,32 +180,32 @@ def test_status_cluster_mode_cyclic(self):
ts = pd.date_range('2020-01-01', periods=4, freq='h')
clusters = pd.Index([0, 1], name='cluster')
fs = fx.FlowSystem(ts, clusters=clusters, cluster_weight=np.array([1.0, 1.0]))
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
+ exports=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([10, 10, 10, 10]),
),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
status_parameters=fx.StatusParameters(
effects_per_startup=10,
@@ -237,16 +241,18 @@ def test_flow_rates_match_demand_per_cluster(self, optimize):
objective = (10+20+30+40) × (1+2) = 300.
"""
fs = _make_clustered_flow_system(4, [1.0, 2.0])
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 20, 30, 40]))],
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 20, 30, 40]))
+ ],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs = optimize(fs)
@@ -264,16 +270,18 @@ def test_per_timestep_effects_with_varying_price(self, optimize):
objective = (10+20+30+40) × (1+3) = 400.
"""
fs = _make_clustered_flow_system(4, [1.0, 3.0])
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10, 10]))],
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10, 10]))
+ ],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 2, 3, 4]))],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 2, 3, 4]))],
),
)
fs = optimize(fs)
@@ -300,21 +308,21 @@ def test_storage_cyclic_charge_discharge_pattern(self, optimize):
objective = 50 × 1 × 2 clusters = 100.
"""
fs = _make_clustered_flow_system(4, [1.0, 1.0])
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 50, 0, 50]))],
+ exports=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 50, 0, 50]))],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100, 1, 100]))],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100, 1, 100]))],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=100),
- discharging=fx.Flow('discharge', bus='Elec', size=100),
+ charging=fx.Flow(bus='Elec', size=100),
+ discharging=fx.Flow(bus='Elec', size=100),
capacity_in_flow_hours=100,
initial_charge_state=0,
eta_charge=1,
@@ -331,7 +339,7 @@ def test_storage_cyclic_charge_discharge_pattern(self, optimize):
assert_allclose(grid_fr.sum(axis=1), 50.0, atol=1e-5) # Total purchase per cluster = 50
# Discharge at expensive timesteps (indices 1, 3)
- discharge_fr = fs.solution['Battery(discharge)|flow_rate'].values[:, :4]
+ discharge_fr = fs.solution['Battery(discharging)|flow_rate'].values[:, :4]
assert_allclose(discharge_fr[:, [1, 3]], [[50, 50], [50, 50]], atol=1e-5)
# Charge state: dims=(cluster, time), 5 entries per cluster (incl. final)
diff --git a/tests/test_math/test_combinations.py b/tests/test_math/test_combinations.py
index 915d4b4c2..e83f1c14f 100644
--- a/tests/test_math/test_combinations.py
+++ b/tests/test_math/test_combinations.py
@@ -33,29 +33,29 @@ def test_piecewise_conversion_with_investment_sizing(self, optimize):
proves both mechanisms cooperate.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([40, 40])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([40, 40])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas', size=fx.InvestParameters(maximum_size=100))],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=fx.InvestParameters(maximum_size=100))],
outputs=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=100,
effects_of_investment_per_size=1,
@@ -92,27 +92,27 @@ def test_piecewise_invest_cost_with_optional_skip(self, optimize):
If piecewise cost correctly applied and expensive, backup cheaper.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'InvestBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=100,
piecewise_effects_of_investment=fx.PiecewiseEffects(
@@ -124,11 +124,11 @@ def test_piecewise_invest_cost_with_optional_skip(self, optimize):
),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -158,37 +158,37 @@ def test_piecewise_nonlinear_conversion_with_startup_cost(self, optimize):
The 290 is unique to BOTH features being correct.
"""
fs = make_flow_system(4)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
+ exports=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([0, 40, 0, 40]),
),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
inputs=[
fx.Flow(
- 'fuel',
bus='Gas',
+ flow_id='fuel',
size=100,
previous_flow_rate=0,
status_parameters=fx.StatusParameters(effects_per_startup=100),
)
],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
piecewise_conversion=fx.PiecewiseConversion(
{
# Non-1:1 ratio in operating range!
@@ -219,33 +219,33 @@ def test_piecewise_minimum_load_with_status(self, optimize):
- With piecewise gap (min load 20): converter OFF at t=0, backup=75, conv=40, cost=115.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
+ exports=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([15, 40]),
),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.Source(
+ fx.Port(
'Backup',
- outputs=[fx.Flow('heat', bus='Heat', effects_per_flow_hour=5)],
+ imports=[fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=5)],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
piecewise_conversion=fx.PiecewiseConversion(
{
'fuel': fx.Piecewise([fx.Piece(0, 0), fx.Piece(20, 50)]),
@@ -284,40 +284,40 @@ def test_piecewise_no_zero_point_with_status(self, optimize):
- If piecewise conversion ignored (1:1): fuel at t=1 would be 35 instead of 53.3.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
+ exports=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([5, 35]),
),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.Source(
+ fx.Port(
'Backup',
- outputs=[fx.Flow('heat', bus='Heat', effects_per_flow_hour=5)],
+ imports=[fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=5)],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
inputs=[
fx.Flow(
- 'fuel',
bus='Gas',
+ flow_id='fuel',
size=100,
status_parameters=fx.StatusParameters(),
)
],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
piecewise_conversion=fx.PiecewiseConversion(
{
# NO off-state piece — operating range only
@@ -352,41 +352,41 @@ def test_piecewise_no_zero_point_startup_cost(self, optimize):
The 510 is unique to BOTH features.
"""
fs = make_flow_system(4)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
+ exports=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([0, 40, 0, 40]),
),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.Source(
+ fx.Port(
'Backup',
- outputs=[fx.Flow('heat', bus='Heat', effects_per_flow_hour=100)],
+ imports=[fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=100)],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
inputs=[
fx.Flow(
- 'fuel',
bus='Gas',
+ flow_id='fuel',
size=100,
previous_flow_rate=0,
status_parameters=fx.StatusParameters(effects_per_startup=200),
)
],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
piecewise_conversion=fx.PiecewiseConversion(
{
# NO off-state piece
@@ -424,24 +424,24 @@ def test_three_segment_piecewise(self, optimize):
fuel would differ. Only correct 3-segment handling gives the right fuel value.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([40, 40])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([40, 40])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
'fuel': fx.Piecewise([fx.Piece(0, 10), fx.Piece(10, 30), fx.Piece(30, 60)]),
@@ -465,24 +465,24 @@ def test_three_segment_low_load_selection(self, optimize):
Sensitivity: If segment 2 or 3 were incorrectly selected, fuel would differ.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([5, 5])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([5, 5])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
'fuel': fx.Piecewise([fx.Piece(0, 10), fx.Piece(10, 30), fx.Piece(30, 60)]),
@@ -506,24 +506,24 @@ def test_three_segment_mid_load_selection(self, optimize):
This value is unique to segment 2.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([18, 18])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([18, 18])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
'fuel': fx.Piecewise([fx.Piece(0, 10), fx.Piece(10, 30), fx.Piece(30, 60)]),
@@ -560,33 +560,33 @@ def test_startup_cost_on_co2_effect(self, optimize):
fs = make_flow_system(4)
co2 = fx.Effect('CO2', 'kg', maximum_total=60)
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=0),
fx.Bus('Gas'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
+ exports=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([0, 20, 0, 20]),
),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=0,
@@ -620,28 +620,28 @@ def test_effects_per_active_hour_on_multiple_effects(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg')
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
status_parameters=fx.StatusParameters(
effects_per_active_hour={'costs': 10, 'CO2': 5},
@@ -678,31 +678,31 @@ def test_invest_sizing_respects_relative_minimum(self, optimize):
(strict bus can't absorb min_load=25 excess when demand=5).
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([5, 50])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([5, 50])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.Source(
+ fx.Port(
'Backup',
- outputs=[fx.Flow('heat', bus='Heat', effects_per_flow_hour=10)],
+ imports=[fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=10)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
relative_minimum=0.5,
size=fx.InvestParameters(
maximum_size=100,
@@ -742,27 +742,27 @@ def test_time_varying_effects_per_flow_hour(self, optimize):
If mean(2) were used: cost=120. Only per-timestep gives 100.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=np.array([1, 3])),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=np.array([1, 3])),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -783,37 +783,37 @@ def test_effects_per_flow_hour_with_dual_output_conversion(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg')
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Elec'),
fx.Bus('Gas'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'HeatDemand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([50, 50])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([50, 50])),
],
),
- fx.Sink(
+ fx.Port(
'ElecGrid',
- inputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour={'costs': -2, 'CO2': -0.3}),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour={'costs': -2, 'CO2': -0.3}),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour={'costs': 1, 'CO2': 0.5}),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour={'costs': 1, 'CO2': 0.5}),
],
),
- fx.linear_converters.CHP(
+ fx.Converter.chp(
'CHP',
thermal_efficiency=0.5,
electrical_efficiency=0.4,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
- electrical_flow=fx.Flow('elec', bus='Elec'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
),
)
fs = optimize(fs)
@@ -844,32 +844,32 @@ def test_piecewise_invest_with_startup_cost(self, optimize):
- Correct: invest(130) + fuel(160) + startups(100) = 390. Unique.
"""
fs = make_flow_system(4)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
+ exports=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([0, 80, 0, 80]),
),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
relative_minimum=0.5,
previous_flow_rate=0,
size=fx.InvestParameters(
@@ -914,32 +914,32 @@ def test_startup_limit_with_max_downtime(self, optimize):
Without max_downtime, can stay off indefinitely.
"""
fs = make_flow_system(6)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
+ exports=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([10, 10, 10, 10, 10, 10]),
),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=20,
relative_minimum=0.5,
previous_flow_rate=10,
@@ -949,11 +949,11 @@ def test_startup_limit_with_max_downtime(self, optimize):
),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -987,43 +987,43 @@ def test_min_uptime_with_min_downtime(self, optimize):
With constraints, forced into block pattern → backup needed for off blocks.
"""
fs = make_flow_system(6)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
+ exports=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([20, 20, 20, 20, 20, 20]),
),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=0,
status_parameters=fx.StatusParameters(min_uptime=2, min_downtime=2),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -1082,28 +1082,28 @@ def test_effect_share_with_investment(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg')
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True, share_from_periodic={'CO2': 20})
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
fixed_size=50,
effects_of_investment={'costs': 50, 'CO2': 10},
@@ -1133,35 +1133,35 @@ def test_effect_maximum_with_status_contribution(self, optimize):
fs = make_flow_system(4)
co2 = fx.Effect('CO2', 'kg', maximum_total=20)
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=0),
fx.Bus('Gas'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
+ exports=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([0, 10, 0, 10]),
),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour={'costs': 1, 'CO2': 0.1}),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour={'costs': 1, 'CO2': 0.1}),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=0,
@@ -1194,28 +1194,28 @@ def test_invest_per_size_on_non_cost_effect(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg', maximum_periodic=50)
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'InvestBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=100,
mandatory=True,
@@ -1223,11 +1223,11 @@ def test_invest_per_size_on_non_cost_effect(self, optimize):
),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
diff --git a/tests/test_math/test_components.py b/tests/test_math/test_components.py
index 38730b5b3..358dd0e3f 100644
--- a/tests/test_math/test_components.py
+++ b/tests/test_math/test_components.py
@@ -28,26 +28,26 @@ def test_component_status_startup_cost(self, optimize):
With 100€/startup × 2, cost=240.
"""
fs = make_flow_system(4)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 20, 0, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20, 0, 20])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'Boiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)], # Size required for component status
- outputs=[fx.Flow('heat', bus='Heat', size=100)], # Size required for component status
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)], # Size required for component status
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)], # Size required for component status
conversion_factors=[{'fuel': 1, 'heat': 1}],
status_parameters=fx.StatusParameters(effects_per_startup=100),
),
@@ -67,26 +67,26 @@ def test_component_status_min_uptime(self, optimize):
With min_uptime=2, status is forced into 2-hour blocks.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'), # Strict balance
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 10, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 10, 20])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'Boiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)], # Size required
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)], # Size required
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[{'fuel': 1, 'heat': 1}],
status_parameters=fx.StatusParameters(min_uptime=2),
),
@@ -108,34 +108,34 @@ def test_component_status_active_hours_max(self, optimize):
With limit=2, backup covers 2 hours → cost=60.
"""
fs = make_flow_system(4)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'CheapBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)], # Size required
- outputs=[fx.Flow('heat', bus='Heat', size=100)], # Size required
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)], # Size required
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)], # Size required
conversion_factors=[{'fuel': 1, 'heat': 1}],
status_parameters=fx.StatusParameters(active_hours_max=2),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'ExpensiveBackup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -153,26 +153,26 @@ def test_component_status_effects_per_active_hour(self, optimize):
With 50€/hour × 2, cost=120.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'Boiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[{'fuel': 1, 'heat': 1}],
status_parameters=fx.StatusParameters(effects_per_active_hour=50),
),
@@ -192,33 +192,33 @@ def test_component_status_active_hours_min(self, optimize):
With floor=2, expensive component runs → status must be [1,1].
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'ExpensiveBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[{'fuel': 1, 'heat': 2}], # eta=0.5 (fuel:heat = 1:2 → eta = 1/2)
status_parameters=fx.StatusParameters(active_hours_min=2),
),
- fx.LinearConverter(
+ fx.Converter(
'CheapBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[{'fuel': 1, 'heat': 1}],
),
)
@@ -238,33 +238,33 @@ def test_component_status_max_uptime(self, optimize):
With max_uptime=2 and 1 hour carry-over, pattern forces backup use.
"""
fs = make_flow_system(5)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'CheapBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100, previous_flow_rate=10)],
- outputs=[fx.Flow('heat', bus='Heat', size=100, previous_flow_rate=10)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100, previous_flow_rate=10)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100, previous_flow_rate=10)],
conversion_factors=[{'fuel': 1, 'heat': 1}],
status_parameters=fx.StatusParameters(max_uptime=2, min_uptime=2),
),
- fx.LinearConverter(
+ fx.Converter(
'ExpensiveBackup',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[{'fuel': 1, 'heat': 2}], # eta=0.5 (fuel:heat = 1:2 → eta = 1/2)
),
)
@@ -296,33 +296,33 @@ def test_component_status_min_downtime(self, optimize):
With min_downtime=3, backup needed at t=2 → cost=60.
"""
fs = make_flow_system(4)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 0, 20, 0])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 0, 20, 0])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'CheapBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=100, previous_flow_rate=20, relative_minimum=0.1)],
- outputs=[fx.Flow('heat', bus='Heat', size=100, previous_flow_rate=20, relative_minimum=0.1)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100, previous_flow_rate=20, relative_minimum=0.1)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100, previous_flow_rate=20, relative_minimum=0.1)],
conversion_factors=[{'fuel': 1, 'heat': 1}],
status_parameters=fx.StatusParameters(min_downtime=3),
),
- fx.LinearConverter(
+ fx.Converter(
'ExpensiveBackup',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[
{'fuel': 1, 'heat': 2}
], # eta=0.5 (fuel:heat = 1:2 → eta = 1/2) (1 fuel → 0.5 heat)
@@ -349,35 +349,35 @@ def test_component_status_max_downtime(self, optimize):
With max_downtime=1, ExpensiveBoiler forced on ≥2 of 4 hours → cost > 40.
"""
fs = make_flow_system(4)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'ExpensiveBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=40, previous_flow_rate=20)],
- outputs=[fx.Flow('heat', bus='Heat', size=20, relative_minimum=0.5, previous_flow_rate=10)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=40, previous_flow_rate=20)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=20, relative_minimum=0.5, previous_flow_rate=10)],
conversion_factors=[
{'fuel': 1, 'heat': 2}
], # eta=0.5 (fuel:heat = 1:2 → eta = 1/2) (1 fuel → 0.5 heat)
status_parameters=fx.StatusParameters(max_downtime=1),
),
- fx.LinearConverter(
+ fx.Converter(
'CheapBackup',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[{'fuel': 1, 'heat': 1}],
),
)
@@ -402,33 +402,33 @@ def test_component_status_startup_limit(self, optimize):
With startup_limit=1, backup serves one peak → cost=30.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 0, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 0, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'CheapBoiler',
- inputs=[fx.Flow('fuel', bus='Gas', size=20, previous_flow_rate=0, relative_minimum=0.5)],
- outputs=[fx.Flow('heat', bus='Heat', size=20, previous_flow_rate=0, relative_minimum=0.5)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=20, previous_flow_rate=0, relative_minimum=0.5)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=20, previous_flow_rate=0, relative_minimum=0.5)],
conversion_factors=[{'fuel': 1, 'heat': 1}], # eta=1.0
status_parameters=fx.StatusParameters(startup_limit=1),
),
- fx.LinearConverter(
+ fx.Converter(
'ExpensiveBackup',
- inputs=[fx.Flow('fuel', bus='Gas', size=100)],
- outputs=[fx.Flow('heat', bus='Heat', size=100)],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel', size=100)],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat', size=100)],
conversion_factors=[
{'fuel': 1, 'heat': 2}
], # eta=0.5 (fuel:heat = 1:2 → eta = 1/2) (1 fuel → 0.5 heat)
@@ -458,26 +458,26 @@ def test_transmission_relative_losses(self, optimize):
With 10% loss, source≈111.11 for demand=100.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Source'),
fx.Bus('Sink'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Sink', size=1, fixed_relative_profile=np.array([50, 50])),
+ exports=[
+ fx.Flow(bus='Sink', flow_id='heat', size=1, fixed_relative_profile=np.array([50, 50])),
],
),
- fx.Source(
+ fx.Port(
'CheapSource',
- outputs=[
- fx.Flow('heat', bus='Source', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Source', flow_id='heat', effects_per_flow_hour=1),
],
),
fx.Transmission(
'Pipe',
- in1=fx.Flow('in', bus='Source', size=200),
- out1=fx.Flow('out', bus='Sink', size=200),
+ in1=fx.Flow(bus='Source', flow_id='in', size=200),
+ out1=fx.Flow(bus='Sink', flow_id='out', size=200),
relative_losses=0.1,
),
)
@@ -497,26 +497,26 @@ def test_transmission_absolute_losses(self, optimize):
With absolute_losses=5, source=50 (40 + 2×5).
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Source'),
fx.Bus('Sink'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Sink', size=1, fixed_relative_profile=np.array([20, 20])),
+ exports=[
+ fx.Flow(bus='Sink', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20])),
],
),
- fx.Source(
+ fx.Port(
'CheapSource',
- outputs=[
- fx.Flow('heat', bus='Source', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Source', flow_id='heat', effects_per_flow_hour=1),
],
),
fx.Transmission(
'Pipe',
- in1=fx.Flow('in', bus='Source', size=200),
- out1=fx.Flow('out', bus='Sink', size=200),
+ in1=fx.Flow(bus='Source', flow_id='in', size=200),
+ out1=fx.Flow(bus='Sink', flow_id='out', size=200),
absolute_losses=5,
),
)
@@ -535,40 +535,40 @@ def test_transmission_bidirectional(self, optimize):
With bidirectional, cheap source can serve both sides.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Left'),
fx.Bus('Right'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'LeftDemand',
- inputs=[
- fx.Flow('heat', bus='Left', size=1, fixed_relative_profile=np.array([20, 0])),
+ exports=[
+ fx.Flow(bus='Left', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 0])),
],
),
- fx.Sink(
+ fx.Port(
'RightDemand',
- inputs=[
- fx.Flow('heat', bus='Right', size=1, fixed_relative_profile=np.array([0, 20])),
+ exports=[
+ fx.Flow(bus='Right', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
- fx.Source(
+ fx.Port(
'LeftSource',
- outputs=[
- fx.Flow('heat', bus='Left', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Left', flow_id='heat', effects_per_flow_hour=1),
],
),
- fx.Source(
+ fx.Port(
'RightSource',
- outputs=[
- fx.Flow('heat', bus='Right', effects_per_flow_hour=10), # Expensive
+ imports=[
+ fx.Flow(bus='Right', flow_id='heat', effects_per_flow_hour=10), # Expensive
],
),
fx.Transmission(
'Link',
- in1=fx.Flow('left', bus='Left', size=100),
- out1=fx.Flow('right', bus='Right', size=100),
- in2=fx.Flow('right_in', bus='Right', size=100),
- out2=fx.Flow('left_out', bus='Left', size=100),
+ in1=fx.Flow(bus='Left', flow_id='left', size=100),
+ out1=fx.Flow(bus='Right', flow_id='right', size=100),
+ in2=fx.Flow(bus='Right', flow_id='right_in', size=100),
+ out2=fx.Flow(bus='Left', flow_id='left_out', size=100),
),
)
fs = optimize(fs)
@@ -587,32 +587,32 @@ def test_transmission_prevent_simultaneous_bidirectional(self, optimize):
Sensitivity: Constraint is structural. Cost = 40 (same as unrestricted).
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Left'),
fx.Bus('Right'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'LeftDemand',
- inputs=[
- fx.Flow('heat', bus='Left', size=1, fixed_relative_profile=np.array([20, 0])),
+ exports=[
+ fx.Flow(bus='Left', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 0])),
],
),
- fx.Sink(
+ fx.Port(
'RightDemand',
- inputs=[
- fx.Flow('heat', bus='Right', size=1, fixed_relative_profile=np.array([0, 20])),
+ exports=[
+ fx.Flow(bus='Right', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
- fx.Source(
+ fx.Port(
'LeftSource',
- outputs=[fx.Flow('heat', bus='Left', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Left', flow_id='heat', effects_per_flow_hour=1)],
),
fx.Transmission(
'Link',
- in1=fx.Flow('left', bus='Left', size=100),
- out1=fx.Flow('right', bus='Right', size=100),
- in2=fx.Flow('right_in', bus='Right', size=100),
- out2=fx.Flow('left_out', bus='Left', size=100),
+ in1=fx.Flow(bus='Left', flow_id='left', size=100),
+ out1=fx.Flow(bus='Right', flow_id='right', size=100),
+ in2=fx.Flow(bus='Right', flow_id='right_in', size=100),
+ out2=fx.Flow(bus='Left', flow_id='left_out', size=100),
prevent_simultaneous_flows_in_both_directions=True,
),
)
@@ -636,24 +636,24 @@ def test_transmission_status_startup_cost(self, optimize):
With 50€/startup × 2, cost=140.
"""
fs = make_flow_system(4)
- fs.add_elements(
+ fs.add(
fx.Bus('Source'),
fx.Bus('Sink'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Sink', size=1, fixed_relative_profile=np.array([20, 0, 20, 0])),
+ exports=[
+ fx.Flow(bus='Sink', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 0, 20, 0])),
],
),
- fx.Source(
+ fx.Port(
'CheapSource',
- outputs=[fx.Flow('heat', bus='Source', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Source', flow_id='heat', effects_per_flow_hour=1)],
),
fx.Transmission(
'Pipe',
- in1=fx.Flow('in', bus='Source', size=200, previous_flow_rate=0, relative_minimum=0.1),
- out1=fx.Flow('out', bus='Sink', size=200, previous_flow_rate=0, relative_minimum=0.1),
+ in1=fx.Flow(bus='Source', flow_id='in', size=200, previous_flow_rate=0, relative_minimum=0.1),
+ out1=fx.Flow(bus='Sink', flow_id='out', size=200, previous_flow_rate=0, relative_minimum=0.1),
status_parameters=fx.StatusParameters(effects_per_startup=50),
),
)
@@ -674,27 +674,27 @@ def test_heatpump_cop(self, optimize):
With cop=3, elec=10 → cost=10.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1),
],
),
- fx.linear_converters.HeatPump(
+ fx.Converter.heat_pump(
'HP',
cop=3.0,
- electrical_flow=fx.Flow('elec', bus='Elec'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -709,27 +709,27 @@ def test_heatpump_variable_cop(self, optimize):
Sensitivity: If scalar cop=3 used, elec=13.33. Only time-varying gives 15.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1),
],
),
- fx.linear_converters.HeatPump(
+ fx.Converter.heat_pump(
'HP',
cop=np.array([2.0, 4.0]),
- electrical_flow=fx.Flow('elec', bus='Elec'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -750,27 +750,27 @@ def test_cooling_tower_specific_electricity(self, optimize):
With specific_electricity_demand=0.1, cost=20 for 200 kWth.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source(
+ fx.Port(
'HeatSource',
- outputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([100, 100])),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([100, 100])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1),
],
),
- fx.linear_converters.CoolingTower(
+ fx.Converter.cooling_tower(
'CT',
specific_electricity_demand=0.1, # 0.1 kWel per kWth
- thermal_flow=fx.Flow('heat', bus='Heat'),
- electrical_flow=fx.Flow('elec', bus='Elec'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
),
)
fs = optimize(fs)
@@ -791,25 +791,25 @@ def test_power2heat_efficiency(self, optimize):
With eta=0.9, elec=44.44 → cost≈44.44.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
- fx.linear_converters.Power2Heat(
+ fx.Converter.power2heat(
'P2H',
thermal_efficiency=0.9,
- electrical_flow=fx.Flow('elec', bus='Elec'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -830,31 +830,31 @@ def test_heatpump_with_source_cop(self, optimize):
Sensitivity: If cop=1, elec=60 → cost=60. With cop=3, cost=20.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Elec'),
fx.Bus('HeatSource'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
- fx.Source(
+ fx.Port(
'FreeHeat',
- outputs=[fx.Flow('heat', bus='HeatSource')],
+ imports=[fx.Flow(bus='HeatSource', flow_id='heat')],
),
- fx.linear_converters.HeatPumpWithSource(
+ fx.Converter.heat_pump_with_source(
'HP',
cop=3.0,
- electrical_flow=fx.Flow('elec', bus='Elec'),
- heat_source_flow=fx.Flow('source', bus='HeatSource'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
+ heat_source_flow=fx.Flow(bus='HeatSource', flow_id='source'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -875,25 +875,25 @@ def test_source_and_sink_prevent_simultaneous(self, optimize):
Sensitivity: Cost = 50 - 40 = 10.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
- fx.Source(
+ fx.Port(
'Solar',
- outputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([30, 30, 0])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([30, 30, 0])),
],
),
- fx.SourceAndSink(
+ fx.Port(
'GridConnection',
- outputs=[fx.Flow('buy', bus='Elec', size=100, effects_per_flow_hour=5)],
- inputs=[fx.Flow('sell', bus='Elec', size=100, effects_per_flow_hour=-1)],
+ imports=[fx.Flow(bus='Elec', flow_id='buy', size=100, effects_per_flow_hour=5)],
+ exports=[fx.Flow(bus='Elec', flow_id='sell', size=100, effects_per_flow_hour=-1)],
prevent_simultaneous_flow_rates=True,
),
)
diff --git a/tests/test_math/test_conversion.py b/tests/test_math/test_conversion.py
index 6a527a338..05c72def9 100644
--- a/tests/test_math/test_conversion.py
+++ b/tests/test_math/test_conversion.py
@@ -15,27 +15,27 @@ def test_boiler_efficiency(self, optimize):
Sensitivity: If eta were ignored (treated as 1.0), cost would be 40 instead of 50.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 20, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 20, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=0.8,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -49,27 +49,27 @@ def test_variable_efficiency(self, optimize):
value (0.5) were broadcast, cost=40. Only per-timestep application yields 30.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=np.array([0.5, 1.0]),
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
),
)
fs = optimize(fs)
@@ -84,36 +84,36 @@ def test_chp_dual_output(self, optimize):
If eta_th were wrong (e.g. 1.0), fuel=100 and cost changes to −60.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Elec'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'HeatDemand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([50, 50])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([50, 50])),
],
),
- fx.Sink(
+ fx.Port(
'ElecGrid',
- inputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=-2),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=-2),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.CHP(
+ fx.Converter.chp(
'CHP',
thermal_efficiency=0.5,
electrical_efficiency=0.4,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat'),
- electrical_flow=fx.Flow('elec', bus='Elec'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat'),
+ electrical_flow=fx.Flow(bus='Elec', flow_id='elec'),
),
)
fs = optimize(fs)
diff --git a/tests/test_math/test_effects.py b/tests/test_math/test_effects.py
index a69172bbd..53f1bdc9d 100644
--- a/tests/test_math/test_effects.py
+++ b/tests/test_math/test_effects.py
@@ -22,20 +22,20 @@ def test_effects_per_flow_hour(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg')
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 20])),
],
),
- fx.Source(
+ fx.Port(
'HeatSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 2, 'CO2': 0.5}),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 2, 'CO2': 0.5}),
],
),
)
@@ -57,20 +57,20 @@ def test_share_from_temporal(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg')
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True, share_from_temporal={'CO2': 0.5})
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'HeatSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 10}),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 10}),
],
),
)
@@ -94,26 +94,26 @@ def test_effect_maximum_total(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg', maximum_total=15)
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'Dirty',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
- fx.Source(
+ fx.Port(
'Clean',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 10, 'CO2': 0}),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 10, 'CO2': 0}),
],
),
)
@@ -140,26 +140,26 @@ def test_effect_minimum_total(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg', minimum_total=25)
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=0),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'Dirty',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
- fx.Source(
+ fx.Port(
'Clean',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 0}),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 0}),
],
),
)
@@ -184,26 +184,26 @@ def test_effect_maximum_per_hour(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg', maximum_per_hour=8)
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([15, 5])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([15, 5])),
],
),
- fx.Source(
+ fx.Port(
'Dirty',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
- fx.Source(
+ fx.Port(
'Clean',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 5, 'CO2': 0}),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 5, 'CO2': 0}),
],
),
)
@@ -225,20 +225,20 @@ def test_effect_minimum_per_hour(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg', minimum_per_hour=10)
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=0),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([5, 5])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([5, 5])),
],
),
- fx.Source(
+ fx.Port(
'Dirty',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
)
@@ -260,26 +260,26 @@ def test_effect_maximum_temporal(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg', maximum_temporal=12)
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'Dirty',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
- fx.Source(
+ fx.Port(
'Clean',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 5, 'CO2': 0}),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 5, 'CO2': 0}),
],
),
)
@@ -302,20 +302,20 @@ def test_effect_minimum_temporal(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg', minimum_temporal=25)
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=0),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'Dirty',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
)
@@ -336,30 +336,30 @@ def test_share_from_periodic(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg')
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True, share_from_periodic={'CO2': 10})
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
fixed_size=50,
effects_of_investment={'costs': 100, 'CO2': 5},
@@ -389,43 +389,43 @@ def test_effect_maximum_periodic(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg', maximum_periodic=50)
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
fixed_size=50,
effects_of_investment={'costs': 10, 'CO2': 100},
),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'ExpensiveBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
fixed_size=50,
effects_of_investment={'costs': 50, 'CO2': 10},
@@ -453,41 +453,41 @@ def test_effect_minimum_periodic(self, optimize):
fs = make_flow_system(2)
co2 = fx.Effect('CO2', 'kg', minimum_periodic=40)
costs = fx.Effect('costs', '€', is_standard=True, is_objective=True)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
costs,
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'InvestBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
fixed_size=50,
effects_of_investment={'costs': 100, 'CO2': 50},
),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
diff --git a/tests/test_math/test_flow.py b/tests/test_math/test_flow.py
index 940dcdc48..8e5914451 100644
--- a/tests/test_math/test_flow.py
+++ b/tests/test_math/test_flow.py
@@ -20,27 +20,27 @@ def test_relative_minimum(self, optimize):
→ cost=60. With relative_minimum=0.4, must produce 40 → cost=80.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=0),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100, relative_minimum=0.4),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100, relative_minimum=0.4),
),
)
fs = optimize(fs)
@@ -62,25 +62,25 @@ def test_relative_maximum(self, optimize):
ExpensiveSrc covers 10 each timestep (2×10×5=100) → total cost=200.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([60, 60])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([60, 60])),
],
),
- fx.Source(
+ fx.Port(
'CheapSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', size=100, relative_maximum=0.5, effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=100, relative_maximum=0.5, effects_per_flow_hour=1),
],
),
- fx.Source(
+ fx.Port(
'ExpensiveSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=5),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=5),
],
),
)
@@ -103,25 +103,25 @@ def test_flow_hours_max(self, optimize):
With flow_hours_max=30, CheapSrc limited to 30, ExpensiveSrc covers 30 → cost=180.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20, 20])),
],
),
- fx.Source(
+ fx.Port(
'CheapSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', flow_hours_max=30, effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', flow_hours_max=30, effects_per_flow_hour=1),
],
),
- fx.Source(
+ fx.Port(
'ExpensiveSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=5),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=5),
],
),
)
@@ -144,25 +144,25 @@ def test_flow_hours_min(self, optimize):
With flow_hours_min=40, ExpensiveSrc forced to produce 40 → cost=220.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'), # Strict balance (no imbalance penalty = must balance)
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
- fx.Source(
+ fx.Port(
'CheapSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=1),
],
),
- fx.Source(
+ fx.Port(
'ExpensiveSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', flow_hours_min=40, effects_per_flow_hour=5),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', flow_hours_min=40, effects_per_flow_hour=5),
],
),
)
@@ -185,25 +185,25 @@ def test_load_factor_max(self, optimize):
With load_factor_max=0.5, CheapSrc limited to 50, ExpensiveSrc covers 30 → cost=200.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([40, 40])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([40, 40])),
],
),
- fx.Source(
+ fx.Port(
'CheapSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', size=50, load_factor_max=0.5, effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=50, load_factor_max=0.5, effects_per_flow_hour=1),
],
),
- fx.Source(
+ fx.Port(
'ExpensiveSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=5),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=5),
],
),
)
@@ -224,25 +224,25 @@ def test_load_factor_min(self, optimize):
With load_factor_min=0.3, ExpensiveSrc forced to produce 60 → cost=300.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=0),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
- fx.Source(
+ fx.Port(
'CheapSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=1),
],
),
- fx.Source(
+ fx.Port(
'ExpensiveSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', size=100, load_factor_min=0.3, effects_per_flow_hour=5),
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=100, load_factor_min=0.3, effects_per_flow_hour=5),
],
),
)
diff --git a/tests/test_math/test_flow_invest.py b/tests/test_math/test_flow_invest.py
index f9ae91078..9515c8e6d 100644
--- a/tests/test_math/test_flow_invest.py
+++ b/tests/test_math/test_flow_invest.py
@@ -22,29 +22,29 @@ def test_invest_size_optimized(self, optimize):
Only size=50 (peak demand) minimizes the sum of invest + fuel cost.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 50, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 50, 20])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=200,
effects_of_investment=10,
@@ -71,40 +71,40 @@ def test_invest_optional_not_built(self, optimize):
vs 20) proves the investment mechanism is working.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'InvestBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=100,
effects_of_investment=99999,
),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'CheapBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -124,29 +124,29 @@ def test_invest_minimum_size(self, optimize):
proves the constraint is active.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
minimum_size=100,
maximum_size=200,
@@ -175,40 +175,40 @@ def test_invest_fixed_size(self, optimize):
invested size is exactly 80, not 30.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([30, 30])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([30, 30])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'FixedBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
fixed_size=80,
effects_of_investment=10,
),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -232,29 +232,29 @@ def test_piecewise_invest_cost(self, optimize):
With piecewise (economies of scale), invest=130 → total=210.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([80, 80])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([80, 80])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=0.5),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=0.5),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=200,
piecewise_effects_of_investment=fx.PiecewiseEffects(
@@ -286,29 +286,29 @@ def test_invest_mandatory_forces_investment(self, optimize):
mandatory is enforced.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'ExpensiveBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
minimum_size=10,
maximum_size=100,
@@ -318,11 +318,11 @@ def test_invest_mandatory_forces_investment(self, optimize):
),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'CheapBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -346,29 +346,29 @@ def test_invest_not_mandatory_skips_when_uneconomical(self, optimize):
cost=40 here vs cost=1030 with mandatory=True proves the flag works.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'ExpensiveBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
minimum_size=10,
maximum_size=100,
@@ -377,11 +377,11 @@ def test_invest_not_mandatory_skips_when_uneconomical(self, optimize):
),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'CheapBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -403,29 +403,29 @@ def test_invest_effects_of_retirement(self, optimize):
With retirement=500, investing becomes cheaper. Cost difference proves feature.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'NewBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
minimum_size=10,
maximum_size=100,
@@ -434,11 +434,11 @@ def test_invest_effects_of_retirement(self, optimize):
),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -461,29 +461,29 @@ def test_invest_retirement_triggers_when_not_investing(self, optimize):
The 50€ difference proves retirement cost is applied.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'ExpensiveBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
minimum_size=10,
maximum_size=100,
@@ -492,11 +492,11 @@ def test_invest_retirement_triggers_when_not_investing(self, optimize):
),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -520,29 +520,29 @@ def test_invest_with_startup_cost(self, optimize):
With startup_cost=50 × 2, cost increases by 100.
"""
fs = make_flow_system(4)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 20, 0, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20, 0, 20])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
relative_minimum=0.5,
size=fx.InvestParameters(
maximum_size=100,
@@ -571,29 +571,29 @@ def test_invest_with_min_uptime(self, optimize):
Sensitivity: The cost changes due to min_uptime forcing operation patterns.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'), # Strict balance (demand must be met)
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 10, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 10, 20])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'InvestBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
relative_minimum=0.1,
size=fx.InvestParameters(
maximum_size=100,
@@ -602,11 +602,11 @@ def test_invest_with_min_uptime(self, optimize):
status_parameters=fx.StatusParameters(min_uptime=2),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -631,29 +631,29 @@ def test_invest_with_active_hours_max(self, optimize):
With active_hours_max=2, InvestBoiler runs 2 hours, backup runs 2 → cost higher.
"""
fs = make_flow_system(4)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'InvestBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=fx.InvestParameters(
maximum_size=100,
effects_of_investment_per_size=0.1,
@@ -661,11 +661,11 @@ def test_invest_with_active_hours_max(self, optimize):
status_parameters=fx.StatusParameters(active_hours_max=2),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
diff --git a/tests/test_math/test_flow_status.py b/tests/test_math/test_flow_status.py
index 66f4de269..17e6ca3d6 100644
--- a/tests/test_math/test_flow_status.py
+++ b/tests/test_math/test_flow_status.py
@@ -23,29 +23,29 @@ def test_startup_cost(self, optimize):
With 100€/startup × 2 startups, objective=240.
"""
fs = make_flow_system(5)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 10, 0, 10, 0])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 10, 0, 10, 0])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
status_parameters=fx.StatusParameters(effects_per_startup=100),
),
@@ -65,38 +65,38 @@ def test_active_hours_max(self, optimize):
With limit=1, forced to use expensive backup for 2 hours → cost=60.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 20, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 20, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
status_parameters=fx.StatusParameters(active_hours_max=1),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'ExpensiveBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -118,39 +118,39 @@ def test_min_uptime_forces_operation(self, optimize):
a different cost and status pattern. The constraint forces status=[1,1,0,1,1].
"""
fs = fx.FlowSystem(pd.date_range('2020-01-01', periods=5, freq='h'))
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([5, 10, 20, 18, 12])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([5, 10, 20, 18, 12])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
previous_flow_rate=0,
status_parameters=fx.StatusParameters(min_uptime=2, max_uptime=2),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.2,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -176,39 +176,39 @@ def test_min_downtime_prevents_restart(self, optimize):
With min_downtime=3, backup needed at t=2 → cost=60.
"""
fs = make_flow_system(4)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 0, 20, 0])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 0, 20, 0])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
previous_flow_rate=20,
status_parameters=fx.StatusParameters(min_downtime=3),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -230,29 +230,29 @@ def test_effects_per_active_hour(self, optimize):
With 50€/h × 2h, cost = 20 + 100 = 120.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
status_parameters=fx.StatusParameters(effects_per_active_hour=50),
),
@@ -274,38 +274,38 @@ def test_active_hours_min(self, optimize):
With floor=2, expensive boiler runs both hours → cost=40.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'ExpBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
status_parameters=fx.StatusParameters(active_hours_min=2),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -337,40 +337,40 @@ def test_max_downtime(self, optimize):
With max_downtime=1, ExpBoiler forced on ≥2 hours → cost > 40.
"""
fs = make_flow_system(4)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 10, 10, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'ExpBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=20,
relative_minimum=0.5,
previous_flow_rate=10,
status_parameters=fx.StatusParameters(max_downtime=1),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -395,40 +395,40 @@ def test_startup_limit(self, optimize):
backup serves other (fuel=10/0.5=20). Total=32.5.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([10, 0, 10])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([10, 0, 10])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=0.8,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=20,
relative_minimum=0.5,
previous_flow_rate=0,
status_parameters=fx.StatusParameters(startup_limit=1),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Backup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -450,42 +450,42 @@ def test_max_uptime_standalone(self, optimize):
With max_uptime=2, backup covers 1 hour at eta=0.5 → cost=70.
"""
fs = make_flow_system(5)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
+ exports=[
fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=1,
fixed_relative_profile=np.array([10, 10, 10, 10, 10]),
),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[fx.Flow('gas', bus='Gas', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1)],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
previous_flow_rate=0,
status_parameters=fx.StatusParameters(max_uptime=2),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'ExpensiveBackup',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -526,29 +526,29 @@ def test_previous_flow_rate_scalar_on_forces_min_uptime(self, optimize):
With previous_flow_rate=10 (was on), cost=10 (forced on at t=0).
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=0),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=10, # Was ON for 1 hour before t=0
@@ -570,29 +570,29 @@ def test_previous_flow_rate_scalar_off_no_carry_over(self, optimize):
Sensitivity: Cost=0 here vs cost=10 with previous_flow_rate>0.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=0),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=0, # Was OFF before t=0
@@ -616,29 +616,29 @@ def test_previous_flow_rate_array_uptime_satisfied_vs_partial(self, optimize):
This test uses Scenario A (satisfied). See test_scalar_on for Scenario B equivalent.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=0),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=[10, 20], # Was ON for 2 hours → min_uptime=2 satisfied
@@ -662,29 +662,29 @@ def test_previous_flow_rate_array_partial_uptime_forces_continuation(self, optim
With previous_flow_rate=[0, 10] (1h uptime), cost=20 (forced on 2 more hours).
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=0),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 0, 0])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 0, 0])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=[0, 10], # Off at t=-2, ON at t=-1 (1 hour uptime)
@@ -709,39 +709,39 @@ def test_previous_flow_rate_array_min_downtime_carry_over(self, optimize):
With previous_flow_rate=[10, 0] (1h downtime), forced off 2 more hours, cost=100.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20, 20])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'CheapBoiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
previous_flow_rate=[10, 0], # ON at t=-2, OFF at t=-1 (1 hour downtime)
status_parameters=fx.StatusParameters(min_downtime=3),
),
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'ExpensiveBoiler',
thermal_efficiency=0.5,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
- thermal_flow=fx.Flow('heat', bus='Heat', size=100),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
+ thermal_flow=fx.Flow(bus='Heat', flow_id='heat', size=100),
),
)
fs = optimize(fs)
@@ -762,29 +762,29 @@ def test_previous_flow_rate_array_longer_history(self, optimize):
With previous_flow_rate=[0, 10, 20, 30] (3 hours on), cost=10.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat', imbalance_penalty_per_flow_hour=0),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([0, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.linear_converters.Boiler(
+ fx.Converter.boiler(
'Boiler',
thermal_efficiency=1.0,
- fuel_flow=fx.Flow('fuel', bus='Gas'),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='fuel'),
thermal_flow=fx.Flow(
- 'heat',
bus='Heat',
+ flow_id='heat',
size=100,
relative_minimum=0.1,
previous_flow_rate=[0, 10, 20, 30], # Off, then ON for 3 hours
diff --git a/tests/test_math/test_legacy_solution_access.py b/tests/test_math/test_legacy_solution_access.py
index 3686d1aac..f8050dfcc 100644
--- a/tests/test_math/test_legacy_solution_access.py
+++ b/tests/test_math/test_legacy_solution_access.py
@@ -19,11 +19,13 @@ class TestLegacySolutionAccess:
def test_effect_access(self, optimize):
"""Test legacy effect access: solution['costs'] -> solution['effect|total'].sel(effect='costs')."""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10, effects_per_flow_hour=1)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Port('Src', imports=[fx.Flow(bus='Heat', flow_id='heat', size=10, effects_per_flow_hour=1)]),
+ fx.Port(
+ 'Snk', exports=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))]
+ ),
)
fs = optimize(fs)
@@ -38,11 +40,13 @@ def test_effect_access(self, optimize):
def test_flow_rate_access(self, optimize):
"""Test legacy flow rate access: solution['Src(heat)|flow_rate'] -> solution['flow|rate'].sel(flow='Src(heat)')."""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Port('Src', imports=[fx.Flow(bus='Heat', flow_id='heat', size=10)]),
+ fx.Port(
+ 'Snk', exports=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))]
+ ),
)
fs = optimize(fs)
@@ -57,14 +61,20 @@ def test_flow_rate_access(self, optimize):
def test_flow_size_access(self, optimize):
"""Test legacy flow size access: solution['Src(heat)|size'] -> solution['flow|size'].sel(flow='Src(heat)')."""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source(
+ fx.Port(
'Src',
- outputs=[fx.Flow('heat', bus='Heat', size=fx.InvestParameters(fixed_size=50), effects_per_flow_hour=1)],
+ imports=[
+ fx.Flow(
+ bus='Heat', flow_id='heat', size=fx.InvestParameters(fixed_size=50), effects_per_flow_hour=1
+ )
+ ],
+ ),
+ fx.Port(
+ 'Snk', exports=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([5, 5]))]
),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([5, 5]))]),
)
fs = optimize(fs)
@@ -79,18 +89,21 @@ def test_flow_size_access(self, optimize):
def test_storage_charge_state_access(self, optimize):
"""Test legacy storage charge state access: solution['Battery|charge_state'] -> solution['storage|charge'].sel(storage='Battery')."""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Grid', outputs=[fx.Flow('elec', bus='Elec', size=100, effects_per_flow_hour=1)]),
+ fx.Port('Grid', imports=[fx.Flow(bus='Elec', flow_id='elec', size=100, effects_per_flow_hour=1)]),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=10),
- discharging=fx.Flow('discharge', bus='Elec', size=10),
+ charging=fx.Flow(bus='Elec', size=10),
+ discharging=fx.Flow(bus='Elec', size=10),
capacity_in_flow_hours=50,
initial_charge_state=25,
),
- fx.Sink('Load', inputs=[fx.Flow('elec', bus='Elec', size=10, fixed_relative_profile=np.array([1, 1, 1]))]),
+ fx.Port(
+ 'Load',
+ exports=[fx.Flow(bus='Elec', flow_id='elec', size=10, fixed_relative_profile=np.array([1, 1, 1]))],
+ ),
)
fs = optimize(fs)
@@ -113,11 +126,14 @@ def test_legacy_access_disabled_by_default(self):
fx.CONFIG.Legacy.solution_access = False
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10, effects_per_flow_hour=1)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Port('Src', imports=[fx.Flow(bus='Heat', flow_id='heat', size=10, effects_per_flow_hour=1)]),
+ fx.Port(
+ 'Snk',
+ exports=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))],
+ ),
)
solver = fx.solvers.HighsSolver(log_to_console=False)
fs.optimize(solver)
@@ -137,11 +153,13 @@ def test_legacy_access_disabled_by_default(self):
def test_legacy_access_emits_deprecation_warning(self, optimize):
"""Test that legacy access emits DeprecationWarning."""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Source('Src', outputs=[fx.Flow('heat', bus='Heat', size=10, effects_per_flow_hour=1)]),
- fx.Sink('Snk', inputs=[fx.Flow('heat', bus='Heat', size=10, fixed_relative_profile=np.array([1, 1]))]),
+ fx.Port('Src', imports=[fx.Flow(bus='Heat', flow_id='heat', size=10, effects_per_flow_hour=1)]),
+ fx.Port(
+ 'Snk', exports=[fx.Flow(bus='Heat', flow_id='heat', size=10, fixed_relative_profile=np.array([1, 1]))]
+ ),
)
fs = optimize(fs)
diff --git a/tests/test_math/test_multi_period.py b/tests/test_math/test_multi_period.py
index d39b0e02f..3115e0b4e 100644
--- a/tests/test_math/test_multi_period.py
+++ b/tests/test_math/test_multi_period.py
@@ -25,18 +25,18 @@ def test_period_weights_affect_objective(self, optimize):
With weights [5, 5], objective=300.
"""
fs = make_multi_period_flow_system(n_timesteps=3, periods=[2020, 2025], weight_of_last_period=5)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs = optimize(fs)
@@ -56,29 +56,29 @@ def test_flow_hours_max_over_periods(self, optimize):
With constraint, objective > 300.
"""
fs = make_multi_period_flow_system(n_timesteps=3, periods=[2020, 2025], weight_of_last_period=5)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
- fx.Source(
+ fx.Port(
'DirtySource',
- outputs=[
+ imports=[
fx.Flow(
- 'elec',
bus='Elec',
+ flow_id='elec',
effects_per_flow_hour=1,
flow_hours_max_over_periods=50,
),
],
),
- fx.Source(
+ fx.Port(
'CleanSource',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=10)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=10)],
),
)
fs = optimize(fs)
@@ -98,25 +98,25 @@ def test_flow_hours_min_over_periods(self, optimize):
With constraint, must use expensive → objective > 300.
"""
fs = make_multi_period_flow_system(n_timesteps=3, periods=[2020, 2025], weight_of_last_period=5)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
- fx.Source(
+ fx.Port(
'CheapSource',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
- fx.Source(
+ fx.Port(
'ExpensiveSource',
- outputs=[
+ imports=[
fx.Flow(
- 'elec',
bus='Elec',
+ flow_id='elec',
effects_per_flow_hour=10,
flow_hours_min_over_periods=100,
),
@@ -139,25 +139,25 @@ def test_effect_maximum_over_periods(self, optimize):
"""
fs = make_multi_period_flow_system(n_timesteps=3, periods=[2020, 2025], weight_of_last_period=5)
co2 = fx.Effect('CO2', 'kg', maximum_over_periods=50)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
- fx.Source(
+ fx.Port(
'DirtySource',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
- fx.Source(
+ fx.Port(
'CleanSource',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=10)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=10)],
),
)
fs = optimize(fs)
@@ -177,25 +177,25 @@ def test_effect_minimum_over_periods(self, optimize):
"""
fs = make_multi_period_flow_system(n_timesteps=3, periods=[2020, 2025], weight_of_last_period=5)
co2 = fx.Effect('CO2', 'kg', minimum_over_periods=100)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec', imbalance_penalty_per_flow_hour=0),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
co2,
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([2, 2, 2])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([2, 2, 2])),
],
),
- fx.Source(
+ fx.Port(
'DirtySource',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour={'costs': 1, 'CO2': 1}),
],
),
- fx.Source(
+ fx.Port(
'CheapSource',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs = optimize(fs)
@@ -216,21 +216,21 @@ def test_invest_linked_periods(self, optimize):
periods=[2020, 2025],
weight_of_last_period=5,
)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
+ imports=[
fx.Flow(
- 'elec',
bus='Elec',
+ flow_id='elec',
size=fx.InvestParameters(
maximum_size=100,
effects_of_investment_per_size=1,
@@ -261,7 +261,7 @@ def test_effect_period_weights(self, optimize):
With custom [1, 10], objective=330.
"""
fs = make_multi_period_flow_system(n_timesteps=3, periods=[2020, 2025], weight_of_last_period=5)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect(
'costs',
@@ -270,15 +270,15 @@ def test_effect_period_weights(self, optimize):
is_objective=True,
period_weights=xr.DataArray([1, 10], dims='period', coords={'period': [2020, 2025]}),
),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 10, 10])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs = optimize(fs)
@@ -299,25 +299,25 @@ def test_storage_relative_minimum_final_charge_state_scalar(self, optimize):
Per-period cost=3050. Objective = 5*3050 + 5*3050 = 30500.
"""
fs = make_multi_period_flow_system(n_timesteps=3, periods=[2020, 2025], weight_of_last_period=5)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 0, 80])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 0, 80])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 1, 100])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=50,
relative_minimum_final_charge_state=0.5,
@@ -343,25 +343,25 @@ def test_storage_relative_maximum_final_charge_state_scalar(self, optimize):
Total objective = 5*50 + 5*50 = 500.
"""
fs = make_multi_period_flow_system(n_timesteps=3, periods=[2020, 2025], weight_of_last_period=5)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec', imbalance_penalty_per_flow_hour=5),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([50, 0, 0])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([50, 0, 0])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([100, 1, 1])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([100, 1, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=80,
relative_maximum_final_charge_state=0.2,
diff --git a/tests/test_math/test_piecewise.py b/tests/test_math/test_piecewise.py
index e9da8a1ba..6791e0d87 100644
--- a/tests/test_math/test_piecewise.py
+++ b/tests/test_math/test_piecewise.py
@@ -22,26 +22,26 @@ def test_piecewise_selects_cheap_segment(self, optimize):
If the wrong segment were selected, the interpolation would be incorrect.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([45, 45])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([45, 45])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
'fuel': fx.Piecewise([fx.Piece(10, 30), fx.Piece(30, 100)]),
@@ -67,26 +67,26 @@ def test_piecewise_conversion_at_breakpoint(self, optimize):
error or infeasibility at the boundary).
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([15, 15])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([15, 15])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
'fuel': fx.Piecewise([fx.Piece(10, 30), fx.Piece(30, 100)]),
@@ -114,32 +114,32 @@ def test_piecewise_with_gap_forces_minimum_load(self, optimize):
50 is valid (within 40-100 range). Verify the piecewise constraint is active.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([50, 50])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([50, 50])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.Source(
+ fx.Port(
'CheapSrc',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=10), # More expensive backup
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=10), # More expensive backup
],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
# Gap between 0 and 40: forbidden region (minimum load requirement)
@@ -173,32 +173,32 @@ def test_piecewise_gap_allows_off_state(self, optimize):
The optimizer should choose backup (off state for converter).
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([20, 20])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([20, 20])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=10), # Expensive gas
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=10), # Expensive gas
],
),
- fx.Source(
+ fx.Port(
'Backup',
- outputs=[
- fx.Flow('heat', bus='Heat', effects_per_flow_hour=1), # Cheap backup
+ imports=[
+ fx.Flow(bus='Heat', flow_id='heat', effects_per_flow_hour=1), # Cheap backup
],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
# Off state (0,0) + operating range with minimum load
@@ -229,26 +229,26 @@ def test_piecewise_varying_efficiency_across_segments(self, optimize):
If constant efficiency 1.33:1 from seg1 end were used, fuel≈46.67.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Heat'),
fx.Bus('Gas'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('heat', bus='Heat', size=1, fixed_relative_profile=np.array([35, 35])),
+ exports=[
+ fx.Flow(bus='Heat', flow_id='heat', size=1, fixed_relative_profile=np.array([35, 35])),
],
),
- fx.Source(
+ fx.Port(
'GasSrc',
- outputs=[
- fx.Flow('gas', bus='Gas', effects_per_flow_hour=1),
+ imports=[
+ fx.Flow(bus='Gas', flow_id='gas', effects_per_flow_hour=1),
],
),
- fx.LinearConverter(
+ fx.Converter(
'Converter',
- inputs=[fx.Flow('fuel', bus='Gas')],
- outputs=[fx.Flow('heat', bus='Heat')],
+ inputs=[fx.Flow(bus='Gas', flow_id='fuel')],
+ outputs=[fx.Flow(bus='Heat', flow_id='heat')],
piecewise_conversion=fx.PiecewiseConversion(
{
# Low load: less efficient. High load: more efficient.
diff --git a/tests/test_math/test_scenarios.py b/tests/test_math/test_scenarios.py
index 5656681ee..0b458795e 100644
--- a/tests/test_math/test_scenarios.py
+++ b/tests/test_math/test_scenarios.py
@@ -39,16 +39,16 @@ def test_scenario_weights_affect_objective(self, optimize):
scenario_weights=[0.3, 0.7],
)
demand = _scenario_demand(fs, [10, 10], [30, 30])
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=demand)],
+ exports=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=demand)],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs = optimize(fs)
@@ -71,19 +71,19 @@ def test_scenario_independent_sizes(self, optimize):
scenario_weights=[0.5, 0.5],
)
demand = _scenario_demand(fs, [10, 10], [30, 30])
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=demand)],
+ exports=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=demand)],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
+ imports=[
fx.Flow(
- 'elec',
bus='Elec',
+ flow_id='elec',
size=fx.InvestParameters(maximum_size=100, effects_of_investment_per_size=1),
effects_per_flow_hour=1,
),
@@ -118,20 +118,20 @@ def test_scenario_independent_flow_rates(self, optimize):
)
fs.scenario_independent_flow_rates = ['Grid(elec)']
demand = _scenario_demand(fs, [10, 10], [30, 30])
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=demand)],
+ exports=[fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=demand)],
),
- fx.Sink(
+ fx.Port(
'Dump',
- inputs=[fx.Flow('elec', bus='Elec')],
+ exports=[fx.Flow(bus='Elec', flow_id='elec')],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[fx.Flow('elec', bus='Elec', effects_per_flow_hour=1)],
+ imports=[fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=1)],
),
)
fs = optimize(fs)
@@ -156,25 +156,25 @@ def test_storage_relative_minimum_final_charge_state_scalar(self, optimize):
scenarios=['low', 'high'],
scenario_weights=[0.5, 0.5],
)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 0, 80])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 0, 80])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 1, 100])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=50,
relative_minimum_final_charge_state=0.5,
@@ -203,25 +203,25 @@ def test_storage_relative_maximum_final_charge_state_scalar(self, optimize):
scenarios=['low', 'high'],
scenario_weights=[0.5, 0.5],
)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec', imbalance_penalty_per_flow_hour=5),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([50, 0, 0])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([50, 0, 0])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([100, 1, 1])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([100, 1, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=80,
relative_maximum_final_charge_state=0.2,
diff --git a/tests/test_math/test_storage.py b/tests/test_math/test_storage.py
index faab0c391..685e3efe5 100644
--- a/tests/test_math/test_storage.py
+++ b/tests/test_math/test_storage.py
@@ -17,25 +17,25 @@ def test_storage_shift_saves_money(self, optimize):
With working storage, buy at t=1 for 1€/kWh → cost=20. A 10× difference.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 0, 20])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 0, 20])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([10, 1, 10])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([10, 1, 10])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=100),
- discharging=fx.Flow('discharge', bus='Elec', size=100),
+ charging=fx.Flow(bus='Elec', size=100),
+ discharging=fx.Flow(bus='Elec', size=100),
capacity_in_flow_hours=100,
initial_charge_state=0,
eta_charge=1,
@@ -54,25 +54,25 @@ def test_storage_losses(self, optimize):
With 10% loss, must charge 100 to have 90 after 1h → cost=100.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 90])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 90])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 1000])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 1000])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=200,
initial_charge_state=0,
eta_charge=1,
@@ -93,25 +93,25 @@ def test_storage_eta_charge_discharge(self, optimize):
cost=80. If both broken, cost=72. Only both correct yields cost=100.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 72])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 72])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 1000])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 1000])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=200,
initial_charge_state=0,
eta_charge=0.9,
@@ -135,25 +135,25 @@ def test_storage_soc_bounds(self, optimize):
With the bound enforced, cost=1050 (50×1 + 10×100).
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 60])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 60])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=0,
relative_maximum_charge_state=0.5,
@@ -178,25 +178,25 @@ def test_storage_cyclic_charge_state(self, optimize):
With cyclic, must buy 50 at some point to replenish → cost=50.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 50])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 50])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state='equals_final',
eta_charge=1,
@@ -220,25 +220,25 @@ def test_storage_minimal_final_charge_state(self, optimize):
With minimal_final=60, charge 80 → cost=80.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 20])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 20])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=0,
minimal_final_charge_state=60,
@@ -262,25 +262,25 @@ def test_storage_invest_capacity(self, optimize):
At 1€/kWh, storage built → cost=50*1 (buy) + 50*1 (invest) = 100.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 50])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 50])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 10])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 10])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=fx.InvestParameters(
maximum_size=200,
effects_of_investment_per_size=1,
@@ -314,25 +314,25 @@ def test_prevent_simultaneous_charge_and_discharge(self, optimize):
could charge and discharge simultaneously, which is physically nonsensical.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([10, 20, 10])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([10, 20, 10])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 10, 1])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 10, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=100),
- discharging=fx.Flow('discharge', bus='Elec', size=100),
+ charging=fx.Flow(bus='Elec', size=100),
+ discharging=fx.Flow(bus='Elec', size=100),
capacity_in_flow_hours=100,
initial_charge_state=0,
eta_charge=0.9,
@@ -342,8 +342,8 @@ def test_prevent_simultaneous_charge_and_discharge(self, optimize):
),
)
fs = optimize(fs)
- charge = fs.solution['Battery(charge)|flow_rate'].values[:-1]
- discharge = fs.solution['Battery(discharge)|flow_rate'].values[:-1]
+ charge = fs.solution['Battery(charging)|flow_rate'].values[:-1]
+ discharge = fs.solution['Battery(discharging)|flow_rate'].values[:-1]
# At no timestep should both be > 0
for t in range(len(charge)):
assert not (charge[t] > 1e-5 and discharge[t] > 1e-5), (
@@ -363,25 +363,25 @@ def test_storage_relative_minimum_charge_state(self, optimize):
With min SOC=0.3, max discharge=70 → grid covers 10 @100€ → cost=1050.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 80, 0])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 80, 0])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100, 1])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=50,
relative_minimum_charge_state=0.3,
@@ -408,25 +408,25 @@ def test_storage_maximal_final_charge_state(self, optimize):
Sensitivity: Without max final, objective=0. With max final=20, objective=50.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec', imbalance_penalty_per_flow_hour=5),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([50, 0])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([50, 0])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([100, 1])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([100, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=80,
maximal_final_charge_state=20,
@@ -452,25 +452,25 @@ def test_storage_relative_minimum_final_charge_state(self, optimize):
Sensitivity: Without constraint, cost=30. With min final=0.5, cost=3050.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 80])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 80])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=50,
relative_minimum_charge_state=np.array([0, 0]),
@@ -499,25 +499,25 @@ def test_storage_relative_maximum_final_charge_state(self, optimize):
With relative_max_final=0.2 (=20 abs), must discharge 60 → excess 10 * 5€ = 50€.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec', imbalance_penalty_per_flow_hour=5),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([50, 0])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([50, 0])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([100, 1])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([100, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=80,
relative_maximum_charge_state=np.array([1.0, 1.0]),
@@ -540,25 +540,25 @@ def test_storage_relative_minimum_final_charge_state_scalar(self, optimize):
branch ignored the final override entirely.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 80])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 80])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=50,
relative_minimum_final_charge_state=0.5,
@@ -579,25 +579,25 @@ def test_storage_relative_maximum_final_charge_state_scalar(self, optimize):
branch ignored the final override entirely.
"""
fs = make_flow_system(2)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec', imbalance_penalty_per_flow_hour=5),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([50, 0])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([50, 0])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([100, 1])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([100, 1])),
],
),
fx.Storage(
'Battery',
- charging=fx.Flow('charge', bus='Elec', size=200),
- discharging=fx.Flow('discharge', bus='Elec', size=200),
+ charging=fx.Flow(bus='Elec', size=200),
+ discharging=fx.Flow(bus='Elec', size=200),
capacity_in_flow_hours=100,
initial_charge_state=80,
relative_maximum_final_charge_state=0.2,
@@ -621,30 +621,28 @@ def test_storage_balanced_invest(self, optimize):
With balanced, invest=160+160=320, ops=160.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0, 80, 80])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0, 80, 80])),
],
),
- fx.Source(
+ fx.Port(
'Grid',
- outputs=[
- fx.Flow('elec', bus='Elec', effects_per_flow_hour=np.array([1, 100, 100])),
+ imports=[
+ fx.Flow(bus='Elec', flow_id='elec', effects_per_flow_hour=np.array([1, 100, 100])),
],
),
fx.Storage(
'Battery',
charging=fx.Flow(
- 'charge',
bus='Elec',
size=InvestParameters(maximum_size=200, effects_of_investment_per_size=1),
),
discharging=fx.Flow(
- 'discharge',
bus='Elec',
size=InvestParameters(maximum_size=200, effects_of_investment_per_size=1),
),
@@ -664,8 +662,8 @@ def test_storage_balanced_invest(self, optimize):
# Invest: charge_size=160 @1€ = 160€. discharge_size=160 @1€ = 160€. Total invest=320€.
# Ops: 160 @1€ = 160€. Total = 480€.
# Without balanced: charge_size=160, discharge_size=80 → invest 240, ops 160 → 400€.
- charge_size = fs.solution['Battery(charge)|size'].item()
- discharge_size = fs.solution['Battery(discharge)|size'].item()
+ charge_size = fs.solution['Battery(charging)|size'].item()
+ discharge_size = fs.solution['Battery(discharging)|size'].item()
assert_allclose(charge_size, discharge_size, rtol=1e-5)
# With balanced, total cost is higher than without
assert fs.solution['costs'].item() > 400.0 - 1e-5
diff --git a/tests/test_math/test_validation.py b/tests/test_math/test_validation.py
index 5e1e90344..26b13aacc 100644
--- a/tests/test_math/test_validation.py
+++ b/tests/test_math/test_validation.py
@@ -23,19 +23,19 @@ def test_source_and_sink_requires_size_with_prevent_simultaneous(self):
should raise PlausibilityError during model building.
"""
fs = make_flow_system(3)
- fs.add_elements(
+ fs.add(
fx.Bus('Elec'),
fx.Effect('costs', '€', is_standard=True, is_objective=True),
- fx.Sink(
+ fx.Port(
'Demand',
- inputs=[
- fx.Flow('elec', bus='Elec', size=1, fixed_relative_profile=np.array([0.1, 0.1, 0.1])),
+ exports=[
+ fx.Flow(bus='Elec', flow_id='elec', size=1, fixed_relative_profile=np.array([0.1, 0.1, 0.1])),
],
),
- fx.SourceAndSink(
+ fx.Port(
'GridConnection',
- outputs=[fx.Flow('buy', bus='Elec', effects_per_flow_hour=5)],
- inputs=[fx.Flow('sell', bus='Elec', effects_per_flow_hour=-1)],
+ imports=[fx.Flow(bus='Elec', flow_id='buy', effects_per_flow_hour=5)],
+ exports=[fx.Flow(bus='Elec', flow_id='sell', effects_per_flow_hour=-1)],
prevent_simultaneous_flow_rates=True,
),
)
diff --git a/tests/test_scenarios.py b/tests/test_scenarios.py
index f4b07f9f5..fffd2505f 100644
--- a/tests/test_scenarios.py
+++ b/tests/test_scenarios.py
@@ -6,7 +6,7 @@
import xarray as xr
import flixopt as fx
-from flixopt import Effect, InvestParameters, Sink, Source, Storage
+from flixopt import Effect, InvestParameters, Port, Storage
from flixopt.elements import Bus, Flow
from flixopt.flow_system import FlowSystem
@@ -60,7 +60,7 @@ def test_system():
# Create a demand sink with scenario-dependent profiles
demand = Flow(electricity_bus.label_full, flow_id='Demand', fixed_relative_profile=demand_profiles)
- demand_sink = Sink('Demand', inputs=[demand])
+ demand_sink = Port('Demand', exports=[demand])
# Create a power source with investment option
power_gen = Flow(
@@ -73,11 +73,11 @@ def test_system():
),
effects_per_flow_hour={'costs': 20}, # €/MWh
)
- generator = Source('Generator', outputs=[power_gen])
+ generator = Port('Generator', imports=[power_gen])
# Create a storage for electricity
- storage_charge = Flow(electricity_bus.label_full, flow_id='Charge', size=10)
- storage_discharge = Flow(electricity_bus.label_full, flow_id='Discharge', size=10)
+ storage_charge = Flow(electricity_bus.label_full, size=10)
+ storage_discharge = Flow(electricity_bus.label_full, size=10)
storage = Storage(
'Battery',
charging=storage_charge,
@@ -96,7 +96,7 @@ def test_system():
cost_effect = Effect('costs', unit='€', description='Total costs', is_standard=True, is_objective=True)
# Add all elements to the flow system
- flow_system.add_elements(electricity_bus, generator, demand_sink, storage, cost_effect)
+ flow_system.add(electricity_bus, generator, demand_sink, storage, cost_effect)
# Return the created system and its components
return {
@@ -127,29 +127,32 @@ def flow_system_complex_scenarios() -> fx.FlowSystem:
scenarios=pd.Index(['A', 'B', 'C'], name='scenario'),
)
# Define the components and flow_system
- flow_system.add_elements(
+ flow_system.add(
fx.Effect('costs', '€', 'Kosten', is_standard=True, is_objective=True, share_from_temporal={'CO2': 0.2}),
fx.Effect('CO2', 'kg', 'CO2_e-Emissionen'),
fx.Effect('PE', 'kWh_PE', 'Primärenergie', maximum_total=3.5e3),
fx.Bus('Strom'),
fx.Bus('Fernwärme'),
fx.Bus('Gas'),
- fx.Sink(
- 'Wärmelast', inputs=[fx.Flow('Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_load)]
+ fx.Port(
+ 'Wärmelast',
+ exports=[fx.Flow(bus='Fernwärme', flow_id='Q_th_Last', size=1, fixed_relative_profile=thermal_load)],
),
- fx.Source(
+ fx.Port(
'Gastarif',
- outputs=[fx.Flow('Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': 0.04, 'CO2': 0.3})],
+ imports=[fx.Flow(bus='Gas', flow_id='Q_Gas', size=1000, effects_per_flow_hour={'costs': 0.04, 'CO2': 0.3})],
+ ),
+ fx.Port(
+ 'Einspeisung', exports=[fx.Flow(bus='Strom', flow_id='P_el', effects_per_flow_hour=-1 * electrical_load)]
),
- fx.Sink('Einspeisung', inputs=[fx.Flow('Strom', flow_id='P_el', effects_per_flow_hour=-1 * electrical_load)]),
)
- boiler = fx.linear_converters.Boiler(
+ boiler = fx.Converter.boiler(
'Kessel',
thermal_efficiency=0.5,
status_parameters=fx.StatusParameters(effects_per_active_hour={'costs': 0, 'CO2': 1000}),
thermal_flow=fx.Flow(
- 'Fernwärme',
+ bus='Fernwärme',
flow_id='Q_th',
load_factor_max=1.0,
load_factor_min=0.1,
@@ -173,7 +176,7 @@ def flow_system_complex_scenarios() -> fx.FlowSystem:
),
flow_hours_max=1e6,
),
- fuel_flow=fx.Flow('Gas', flow_id='Q_fu', size=200, relative_minimum=0, relative_maximum=1),
+ fuel_flow=fx.Flow(bus='Gas', flow_id='Q_fu', size=200, relative_minimum=0, relative_maximum=1),
)
invest_speicher = fx.InvestParameters(
@@ -192,8 +195,8 @@ def flow_system_complex_scenarios() -> fx.FlowSystem:
)
speicher = fx.Storage(
'Speicher',
- charging=fx.Flow('Fernwärme', flow_id='Q_th_load', size=1e4),
- discharging=fx.Flow('Fernwärme', flow_id='Q_th_unload', size=1e4),
+ charging=fx.Flow(bus='Fernwärme', flow_id='Q_th_load', size=1e4),
+ discharging=fx.Flow(bus='Fernwärme', flow_id='Q_th_unload', size=1e4),
capacity_in_flow_hours=invest_speicher,
initial_charge_state=0,
maximal_final_charge_state=10,
@@ -203,7 +206,7 @@ def flow_system_complex_scenarios() -> fx.FlowSystem:
prevent_simultaneous_charge_and_discharge=True,
)
- flow_system.add_elements(boiler, speicher)
+ flow_system.add(boiler, speicher)
return flow_system
@@ -215,13 +218,13 @@ def flow_system_piecewise_conversion_scenarios(flow_system_complex_scenarios) ->
"""
flow_system = flow_system_complex_scenarios
- flow_system.add_elements(
- fx.LinearConverter(
+ flow_system.add(
+ fx.Converter(
'KWK',
- inputs=[fx.Flow('Gas', flow_id='Q_fu', size=200)],
+ inputs=[fx.Flow(bus='Gas', flow_id='Q_fu', size=200)],
outputs=[
- fx.Flow('Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10),
- fx.Flow('Fernwärme', flow_id='Q_th', size=100),
+ fx.Flow(bus='Strom', flow_id='P_el', size=60, relative_maximum=55, previous_flow_rate=10),
+ fx.Flow(bus='Fernwärme', flow_id='Q_th', size=100),
],
piecewise_conversion=fx.PiecewiseConversion(
{
@@ -508,11 +511,11 @@ def test_size_equality_constraints():
)
bus = fx.Bus('grid')
- source = fx.Source(
+ source = fx.Port(
'solar',
- outputs=[
+ imports=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=fx.InvestParameters(
minimum_size=10,
@@ -523,7 +526,7 @@ def test_size_equality_constraints():
],
)
- fs.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))
+ fs.add(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))
fs.build_model()
@@ -547,11 +550,11 @@ def test_flow_rate_equality_constraints():
)
bus = fx.Bus('grid')
- source = fx.Source(
+ source = fx.Port(
'solar',
- outputs=[
+ imports=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=fx.InvestParameters(
minimum_size=10,
@@ -562,7 +565,7 @@ def test_flow_rate_equality_constraints():
],
)
- fs.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))
+ fs.add(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))
fs.build_model()
@@ -586,11 +589,11 @@ def test_selective_scenario_independence():
)
bus = fx.Bus('grid')
- source = fx.Source(
+ source = fx.Port(
'solar',
- outputs=[
+ imports=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=fx.InvestParameters(
minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100}
@@ -598,12 +601,12 @@ def test_selective_scenario_independence():
)
],
)
- sink = fx.Sink(
+ sink = fx.Port(
'demand',
- inputs=[fx.Flow('grid', flow_id='in', size=50)],
+ exports=[fx.Flow(bus='grid', flow_id='in', size=50)],
)
- fs.add_elements(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True))
+ fs.add(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True))
fs.build_model()
@@ -645,11 +648,11 @@ def test_scenario_parameters_io_persistence():
)
bus = fx.Bus('grid')
- source = fx.Source(
+ source = fx.Port(
'solar',
- outputs=[
+ imports=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=fx.InvestParameters(
minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100}
@@ -658,7 +661,7 @@ def test_scenario_parameters_io_persistence():
],
)
- fs_original.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))
+ fs_original.add(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))
# Save to dataset
fs_original.connect_and_transform()
@@ -685,11 +688,11 @@ def test_scenario_parameters_io_with_calculation(tmp_path):
)
bus = fx.Bus('grid')
- source = fx.Source(
+ source = fx.Port(
'solar',
- outputs=[
+ imports=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=fx.InvestParameters(
minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100}
@@ -697,12 +700,12 @@ def test_scenario_parameters_io_with_calculation(tmp_path):
)
],
)
- sink = fx.Sink(
+ sink = fx.Port(
'demand',
- inputs=[fx.Flow('grid', flow_id='in', size=50)],
+ exports=[fx.Flow(bus='grid', flow_id='in', size=50)],
)
- fs.add_elements(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True))
+ fs.add(bus, source, sink, fx.Effect('cost', 'Total cost', '€', is_objective=True))
# Solve using new API
fs.optimize(fx.solvers.HighsSolver(mip_gap=0.01, time_limit_seconds=60))
@@ -743,11 +746,11 @@ def test_weights_io_persistence():
)
bus = fx.Bus('grid')
- source = fx.Source(
+ source = fx.Port(
'solar',
- outputs=[
+ imports=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=fx.InvestParameters(
minimum_size=10, maximum_size=100, effects_of_investment_per_size={'cost': 100}
@@ -756,7 +759,7 @@ def test_weights_io_persistence():
],
)
- fs_original.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))
+ fs_original.add(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))
# Save to dataset
fs_original.connect_and_transform()
@@ -784,18 +787,18 @@ def test_weights_selection():
)
bus = fx.Bus('grid')
- source = fx.Source(
+ source = fx.Port(
'solar',
- outputs=[
+ imports=[
fx.Flow(
- 'grid',
+ bus='grid',
flow_id='out',
size=10,
)
],
)
- fs_full.add_elements(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))
+ fs_full.add(bus, source, fx.Effect('cost', 'Total cost', '€', is_objective=True))
# Select a subset of scenarios
fs_subset = fs_full.sel(scenario=['base', 'high'])