Publications
Links to publications and associated source code.
Nempy Technical Brief
The soon-to-be-published technical brief will be available here.
Source code for Figure 1
# Notice: this script downloads large volumes of historical market data from AEMO's nemweb portal.
import sqlite3
import pandas as pd
import random
from datetime import datetime, timedelta
from nempy import markets
from nempy.historical_inputs import loaders, mms_db, \
    xml_cache, units, demand, interconnectors, \
    constraints
# The historical data files for a full year of 5 min dispatch are very
# large, approximately 800 GB, so in this example the data is stored
# on an external SSD.
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_db_manager = mms_db.DBManager(connection=con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
# The second time this example is run on a machine, this flag can
# be set to False to avoid downloading the data again.
download_inputs = False
if download_inputs:
    mms_db_manager.populate(start_year=2019, start_month=1,
                            end_year=2019, end_month=12)
    xml_cache_manager.populate(start_year=2019, start_month=1,
                               end_year=2019, end_month=12)
raw_inputs_loader = loaders.RawInputsLoader(
    nemde_xml_cache_manager=xml_cache_manager,
    market_management_system_database=mms_db_manager)
# Define a function for creating a list of randomly selected dispatch
# intervals
def get_test_intervals(number):
    start_time = datetime(year=2019, month=1, day=1, hour=0, minute=0)
    end_time = datetime(year=2019, month=12, day=31, hour=0, minute=0)
    difference = end_time - start_time
    difference_in_5_min_intervals = difference.days * 12 * 24
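    # Seed the random number generator so the same sample of intervals
    # is drawn on every run of this script.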
    random.seed(1)
    intervals = random.sample(range(1, difference_in_5_min_intervals), number)
    times = [start_time + timedelta(minutes=5 * i) for i in intervals]
    times_formatted = [t.isoformat().replace('T', ' ').replace('-', '/') for t in times]
    return times_formatted
# List for saving outputs to.
outputs = []
# Create and dispatch the spot market for each dispatch interval.
for interval in get_test_intervals(number=1000):
    raw_inputs_loader.set_interval(interval)
    unit_inputs = units.UnitData(raw_inputs_loader)
    interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
    constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
    demand_inputs = demand.DemandData(raw_inputs_loader)
    unit_info = unit_inputs.get_unit_info()
    market = markets.SpotMarket(market_regions=['QLD1', 'NSW1', 'VIC1',
                                                'SA1', 'TAS1'],
                                unit_info=unit_info)
    # By default the CBC open source solver is used, but GUROBI is
    # also supported.
    market.solver_name = 'CBC'  # or could be 'GUROBI'
    # Set bids.
    volume_bids, price_bids = unit_inputs.get_processed_bids()
    market.set_unit_volume_bids(volume_bids)
    market.set_unit_price_bids(price_bids)
    # Set bid in capacity limits.
    unit_bid_limit = unit_inputs.get_unit_bid_availability()
    market.set_unit_bid_capacity_constraints(unit_bid_limit)
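    # Making a constraint elastic allows the dispatch solution to
    # violate it at a specified cost, here the constraint violation
    # prices used in historical dispatch.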
    cost = constraint_inputs.get_constraint_violation_prices()['unit_capacity']
    market.make_constraints_elastic('unit_bid_capacity', violation_cost=cost)
    # Set limits provided by the unconstrained intermittent generation
    # forecasts. Primarily for wind and solar.
    unit_uigf_limit = unit_inputs.get_unit_uigf_limits()
    market.set_unconstrained_intermitent_generation_forecast_constraint(
        unit_uigf_limit)
    cost = constraint_inputs.get_constraint_violation_prices()['uigf']
    market.make_constraints_elastic('uigf_capacity', violation_cost=cost)
    # Set unit ramp rates.
    ramp_rates = unit_inputs.get_ramp_rates_used_for_energy_dispatch()
    market.set_unit_ramp_up_constraints(
        ramp_rates.loc[:, ['unit', 'initial_output', 'ramp_up_rate']])
    market.set_unit_ramp_down_constraints(
        ramp_rates.loc[:, ['unit', 'initial_output', 'ramp_down_rate']])
    cost = constraint_inputs.get_constraint_violation_prices()['ramp_rate']
    market.make_constraints_elastic('ramp_up', violation_cost=cost)
    market.make_constraints_elastic('ramp_down', violation_cost=cost)
    # Set unit FCAS trapezium constraints.
    unit_inputs.add_fcas_trapezium_constraints()
    cost = constraint_inputs.get_constraint_violation_prices()['fcas_max_avail']
    fcas_availability = unit_inputs.get_fcas_max_availability()
    market.set_fcas_max_availability(fcas_availability)
    market.make_constraints_elastic('fcas_max_availability', cost)
    cost = constraint_inputs.get_constraint_violation_prices()['fcas_profile']
    regulation_trapeziums = unit_inputs.get_fcas_regulation_trapeziums()
    market.set_energy_and_regulation_capacity_constraints(regulation_trapeziums)
    market.make_constraints_elastic('energy_and_regulation_capacity', cost)
    scada_ramp_down_rates = unit_inputs.get_scada_ramp_down_rates_of_lower_reg_units()
    market.set_joint_ramping_constraints_lower_reg(scada_ramp_down_rates)
    market.make_constraints_elastic('joint_ramping_lower_reg', cost)
    scada_ramp_up_rates = unit_inputs.get_scada_ramp_up_rates_of_raise_reg_units()
    market.set_joint_ramping_constraints_raise_reg(scada_ramp_up_rates)
    market.make_constraints_elastic('joint_ramping_raise_reg', cost)
    contingency_trapeziums = unit_inputs.get_contingency_services()
    market.set_joint_capacity_constraints(contingency_trapeziums)
    market.make_constraints_elastic('joint_capacity', cost)
    # Set interconnector definitions, limits and loss models.
    interconnectors_definitions = \
        interconnector_inputs.get_interconnector_definitions()
    loss_functions, interpolation_break_points = \
        interconnector_inputs.get_interconnector_loss_model()
    market.set_interconnectors(interconnectors_definitions)
    market.set_interconnector_losses(loss_functions,
                                     interpolation_break_points)
    # Add generic constraints and FCAS market constraints.
    fcas_requirements = constraint_inputs.get_fcas_requirements()
    market.set_fcas_requirements_constraints(fcas_requirements)
    violation_costs = constraint_inputs.get_violation_costs()
    market.make_constraints_elastic('fcas', violation_cost=violation_costs)
    generic_rhs = constraint_inputs.get_rhs_and_type_excluding_regional_fcas_constraints()
    market.set_generic_constraints(generic_rhs)
    market.make_constraints_elastic('generic', violation_cost=violation_costs)
    unit_generic_lhs = constraint_inputs.get_unit_lhs()
    market.link_units_to_generic_constraints(unit_generic_lhs)
    interconnector_generic_lhs = constraint_inputs.get_interconnector_lhs()
    market.link_interconnectors_to_generic_constraints(
        interconnector_generic_lhs)
    # Set the operational demand to be met by dispatch.
    regional_demand = demand_inputs.get_operational_demand()
    market.set_demand_constraints(regional_demand)
    # Get unit dispatch without fast start constraints and use it to
    # make fast start unit commitment decisions.
    market.dispatch()
    dispatch = market.get_unit_dispatch()
    fast_start_profiles = unit_inputs.get_fast_start_profiles_for_dispatch(dispatch)
    market.set_fast_start_constraints(fast_start_profiles)
    if 'fast_start' in market.get_constraint_set_names():
        cost = constraint_inputs.get_constraint_violation_prices()['fast_start']
        market.make_constraints_elastic('fast_start', violation_cost=cost)
    # If the historical AEMO dispatch run used the over-constrained
    # dispatch rerun process, then allow it to be used here. This is
    # needed because sometimes the conditions for over-constrained
    # dispatch are present but the rerun process isn't used.
    if constraint_inputs.is_over_constrained_dispatch_rerun():
        market.dispatch(allow_over_constrained_dispatch_re_run=True,
                        energy_market_floor_price=-1000.0,
                        energy_market_ceiling_price=14500.0,
                        fcas_market_ceiling_price=1000.0)
    else:
        # The market price ceiling and floor are not needed here
        # because they are only used for the over constrained
        # dispatch rerun process.
        market.dispatch(allow_over_constrained_dispatch_re_run=False)
    # Save prices from this interval.
    prices = market.get_energy_prices()
    prices['time'] = interval
    # Get historical prices for comparison. Note, the ROP price, which
    # is the regional reference node price before the application of
    # any price scaling by AEMO, is used for comparison.
    historical_prices = mms_db_manager.DISPATCHPRICE.get_data(interval)
    prices = pd.merge(prices, historical_prices,
                      left_on=['time', 'region'],
                      right_on=['SETTLEMENTDATE', 'REGIONID'])
    outputs.append(
        prices.loc[:, ['time', 'region', 'price',
                       'SETTLEMENTDATE', 'REGIONID', 'ROP']])
con.close()
outputs = pd.concat(outputs)
outputs = outputs.sort_values('ROP')
outputs = outputs.reset_index(drop=True)
outputs.to_csv('energy_price_results_2019_1000_intervals.csv')
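The script above saves the price comparison data to a CSV file rather than producing a plot directly. A minimal sketch of how Figure 1 might then be generated from that file, assuming matplotlib is available (the axis labels and styling are illustrative assumptions, not taken from the published figure):

import pandas as pd
import matplotlib.pyplot as plt

# Load the price comparison results written by the script above.
results = pd.read_csv('energy_price_results_2019_1000_intervals.csv')

# Scatter modelled prices against historical prices; points on the
# one-to-one line indicate exact agreement with the historical outcome.
fig, ax = plt.subplots()
ax.scatter(results['ROP'], results['price'], s=4, alpha=0.5)
lims = [results['ROP'].min(), results['ROP'].max()]
ax.plot(lims, lims, color='black', linewidth=0.8)
ax.set_xlabel('Historical price (ROP, $/MWh)')
ax.set_ylabel('Nempy modelled price ($/MWh)')
plt.show()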
Source code for Figure 2
# Notice: this script downloads large volumes of historical market data from AEMO's nemweb portal.
import sqlite3
import pandas as pd
import random
from datetime import datetime, timedelta
from nempy import markets
from nempy.historical_inputs import loaders, mms_db, \
    xml_cache, units, demand, interconnectors, \
    constraints
# The historical data files for a full year of 5 min dispatch are very
# large, approximately 800 GB, so in this example the data is stored
# on an external SSD.
con = sqlite3.connect('/media/nickgorman/Samsung_T5/nempy_test_files/historical_mms.db')
mms_db_manager = mms_db.DBManager(connection=con)
xml_cache_manager = xml_cache.XMLCacheManager('/media/nickgorman/Samsung_T5/nempy_test_files/nemde_cache')
# The second time this example is run on a machine, this flag can
# be set to False to avoid downloading the data again.
download_inputs = False
if download_inputs:
    mms_db_manager.populate(start_year=2019, start_month=1,
                            end_year=2019, end_month=12)
    xml_cache_manager.populate(start_year=2019, start_month=1,
                               end_year=2019, end_month=12)
raw_inputs_loader = loaders.RawInputsLoader(
    nemde_xml_cache_manager=xml_cache_manager,
    market_management_system_database=mms_db_manager)
# Define a function for creating a list of randomly selected dispatch
# intervals
def get_test_intervals(number):
    start_time = datetime(year=2019, month=1, day=1, hour=0, minute=0)
    end_time = datetime(year=2019, month=12, day=31, hour=0, minute=0)
    difference = end_time - start_time
    difference_in_5_min_intervals = difference.days * 12 * 24
    random.seed(1)
    intervals = random.sample(range(1, difference_in_5_min_intervals), number)
    times = [start_time + timedelta(minutes=5 * i) for i in intervals]
    times_formatted = [t.isoformat().replace('T', ' ').replace('-', '/') for t in times]
    return times_formatted
# List for saving outputs to.
outputs = []
# Create and dispatch the spot market for each dispatch interval.
for interval in get_test_intervals(number=1000):
    raw_inputs_loader.set_interval(interval)
    unit_inputs = units.UnitData(raw_inputs_loader)
    interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
    constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
    demand_inputs = demand.DemandData(raw_inputs_loader)
    unit_info = unit_inputs.get_unit_info()
    market = markets.SpotMarket(market_regions=['QLD1', 'NSW1', 'VIC1',
                                                'SA1', 'TAS1'],
                                unit_info=unit_info)
    # By default the CBC open source solver is used, but GUROBI is
    # also supported.
    market.solver_name = 'CBC'  # or could be 'GUROBI'
    # Set bids.
    volume_bids, price_bids = unit_inputs.get_processed_bids()
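    # Unlike the Figure 1 model, FCAS markets are not modelled here,
    # so the bids are filtered down to the energy service only.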
    volume_bids = volume_bids[volume_bids['service'] == 'energy']
    price_bids = price_bids[price_bids['service'] == 'energy']
    market.set_unit_volume_bids(volume_bids)
    market.set_unit_price_bids(price_bids)
    # Set bid in capacity limits.
    unit_bid_limit = unit_inputs.get_unit_bid_availability()
    market.set_unit_bid_capacity_constraints(unit_bid_limit)
    cost = constraint_inputs.get_constraint_violation_prices()['unit_capacity']
    market.make_constraints_elastic('unit_bid_capacity', violation_cost=cost)
    # Set limits provided by the unconstrained intermittent generation
    # forecasts. Primarily for wind and solar.
    unit_uigf_limit = unit_inputs.get_unit_uigf_limits()
    market.set_unconstrained_intermitent_generation_forecast_constraint(
        unit_uigf_limit)
    cost = constraint_inputs.get_constraint_violation_prices()['uigf']
    market.make_constraints_elastic('uigf_capacity', violation_cost=cost)
    # Set unit ramp rates.
    ramp_rates = unit_inputs.get_ramp_rates_used_for_energy_dispatch()
    market.set_unit_ramp_up_constraints(
        ramp_rates.loc[:, ['unit', 'initial_output', 'ramp_up_rate']])
    market.set_unit_ramp_down_constraints(
        ramp_rates.loc[:, ['unit', 'initial_output', 'ramp_down_rate']])
    cost = constraint_inputs.get_constraint_violation_prices()['ramp_rate']
    market.make_constraints_elastic('ramp_up', violation_cost=cost)
    market.make_constraints_elastic('ramp_down', violation_cost=cost)
    # Set interconnector definitions, limits and loss models.
    interconnectors_definitions = \
        interconnector_inputs.get_interconnector_definitions()
    loss_functions, interpolation_break_points = \
        interconnector_inputs.get_interconnector_loss_model()
    market.set_interconnectors(interconnectors_definitions)
    market.set_interconnector_losses(loss_functions,
                                     interpolation_break_points)
    # Set the operational demand to be met by dispatch.
    regional_demand = demand_inputs.get_operational_demand()
    market.set_demand_constraints(regional_demand)
    market.dispatch()
    # Save prices from this interval.
    prices = market.get_energy_prices()
    prices['time'] = interval
    # Get historical prices for comparison. Note, the ROP price, which
    # is the regional reference node price before the application of
    # any price scaling by AEMO, is used for comparison.
    historical_prices = mms_db_manager.DISPATCHPRICE.get_data(interval)
    prices = pd.merge(prices, historical_prices,
                      left_on=['time', 'region'],
                      right_on=['SETTLEMENTDATE', 'REGIONID'])
    outputs.append(
        prices.loc[:, ['time', 'region', 'price',
                       'SETTLEMENTDATE', 'REGIONID', 'ROP']])
con.close()
outputs = pd.concat(outputs)
outputs = outputs.sort_values('ROP')
outputs = outputs.reset_index(drop=True)
outputs.to_csv('energy_price_results_2019_1000_intervals.csv')
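As with Figure 1, the comparison data is written to a CSV file. A minimal sketch of how summary error statistics for this energy-only model could be computed from the saved results, assuming pandas (the choice of metrics is an illustrative assumption, not taken from the technical brief):

import pandas as pd

# Load the price comparison results written by the script above.
results = pd.read_csv('energy_price_results_2019_1000_intervals.csv')

# Error of the nempy modelled price relative to the historical ROP price.
error = results['price'] - results['ROP']
print('Mean absolute error ($/MWh):', error.abs().mean())
print('Root mean squared error ($/MWh):', (error ** 2).mean() ** 0.5)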