# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

using Printf  # Formatted string printing
using JSON  # JSON parsing
using DataStructures  # OrderedDict, DefaultOrderedDict
using GZip  # Reading gzipped files
import Base: getindex, time

# Base URL from which benchmark instances are downloaded on first use.
const INSTANCES_URL = "https://axavier.org/UnitCommitment.jl/0.4/instances"

"""
    read_benchmark(name::AbstractString)::UnitCommitmentInstance

Read one of the benchmark instances included in the package. See
[Instances](guides/instances.md) for the entire list of benchmark instances
available.

# Example
```julia
instance = UnitCommitment.read_benchmark("matpower/case3375wp/2017-02-01")
```
"""
function read_benchmark(
    name::AbstractString;
    quiet::Bool = false,
)::UnitCommitmentInstance
    basedir = dirname(@__FILE__)
    # Local cache location and remote URL for this benchmark instance.
    filename = "$basedir/../../instances/$name.json.gz"
    url = "$INSTANCES_URL/$name.json.gz"
    # Download and cache the instance locally if it is not present yet.
    if !isfile(filename)
        if !quiet
            @info "Downloading: $(url)"
        end
        dpath = download(url)
        mkpath(dirname(filename))
        cp(dpath, filename)
        # Display citation information once, right after download.
        json = _read_json(filename)
        if "SOURCE" in keys(json) && !quiet
            @info "If you use this instance in your research, please cite:\n\n$(json["SOURCE"])\n"
        end
    end
    return UnitCommitment.read(filename)
end

"""
    _repair_scenario_names_and_probabilities!(scenarios, path)

Assign a default name (the filename without directory or extensions) to any
scenario whose name is empty, and normalize scenario probabilities so that
they add up to 1.0.
"""
function _repair_scenario_names_and_probabilities!(
    scenarios::Vector{UnitCommitmentScenario},
    path::Vector{String},
)::Nothing
    # Total weight used to normalize individual scenario probabilities.
    total_weight = sum(sc.probability for sc in scenarios)
    for (sc_path, sc) in zip(path, scenarios)
        # Default name: filename without directory or extensions, e.g.
        # "dir/s1.json.gz" -> "s1". `basename` handles both path separators.
        if isempty(sc.name)
            sc.name = first(split(basename(sc_path), "."))
        end
        sc.probability /= total_weight
    end
    return
end

"""
    read(path::AbstractString)::UnitCommitmentInstance

Read a deterministic test case from the given file. The file may be gzipped.

# Example
```julia
instance = UnitCommitment.read("s1.json.gz")
```
"""
function read(path::String)::UnitCommitmentInstance
    # A deterministic instance is a single scenario named "s1" with
    # probability one.
    scenario = _read_scenario(path)
    scenario.name = "s1"
    scenario.probability = 1.0
    return UnitCommitmentInstance(
        time = scenario.time,
        scenarios = [scenario],
    )
end

"""
    read(path::Vector{String})::UnitCommitmentInstance

Read a stochastic unit commitment instance from the given files. Each file
describes a scenario. The files may be gzipped.
# Example
```julia
instance = UnitCommitment.read(["s1.json.gz", "s2.json.gz"])
```
"""
function read(paths::Vector{String})::UnitCommitmentInstance
    # Read each scenario from the provided file paths.
    scenarios = UnitCommitmentScenario[]
    for p in paths
        push!(scenarios, _read_scenario(p))
    end
    # Fill in missing scenario names and normalize probabilities.
    _repair_scenario_names_and_probabilities!(scenarios, paths)
    # All scenarios share the same time horizon; use the first one.
    return UnitCommitmentInstance(
        time = scenarios[1].time,
        scenarios = scenarios,
    )
end

"""
    _read_scenario(path::String)::UnitCommitmentScenario

Read a single scenario from `path`. Supports plain `.json` files and gzipped
`.gz` files. The file handle is always closed, even if parsing fails.
"""
function _read_scenario(path::String)::UnitCommitmentScenario
    if endswith(path, ".gz")
        file = gzopen(path)
    elseif endswith(path, ".json")
        file = open(path)
    else
        error("Unsupported input format")
    end
    try
        return _read(file)
    finally
        close(file)
    end
end

"""
    _read(file::IO)::UnitCommitmentScenario

Parse a scenario from an open IO stream. Missing JSON keys map to `nothing`
(via `DefaultOrderedDict`), so absent fields can be detected uniformly.
"""
function _read(file::IO)::UnitCommitmentScenario
    return _from_json(
        JSON.parse(file, dicttype = () -> DefaultOrderedDict(nothing)),
    )
end

"""
    _read_json(path::String)::OrderedDict

Parse a JSON file (plain or gzipped, by extension) into an ordered
dictionary. The file handle is closed after parsing.
"""
function _read_json(path::String)::OrderedDict
    file = endswith(path, ".gz") ? GZip.gzopen(path) : open(path)
    try
        return JSON.parse(file, dicttype = () -> DefaultOrderedDict(nothing))
    finally
        close(file)
    end
end

"""
    _from_json(json; repair = true)::UnitCommitmentScenario

Convert parsed JSON data into a `UnitCommitmentScenario`, optionally running
`UnitCommitment.repair!` on the result.
"""
function _from_json(json; repair = true)::UnitCommitmentScenario
    # Migrate JSON data to the current format version if needed.
    _migrate(json)
    # Containers for every component type in the scenario.
    thermal_units = ThermalUnit[]
    buses = Bus[]
    contingencies = Contingency[]
    lines = TransmissionLine[]
    loads =
PriceSensitiveLoad[]
    reserves = Reserve[]
    profiled_units = ProfiledUnit[]
    storage_units = StorageUnit[]

    # Helper: return `x` unless it is missing (`nothing`), else the default.
    function scalar(x; default = nothing)
        x !== nothing || return default
        return x
    end

    # Parse time horizon from JSON parameters (preferred key is in minutes).
    time_horizon = json["Parameters"]["Time horizon (min)"]
    if time_horizon === nothing
        # Fall back to legacy hour-based keys.
        time_horizon = json["Parameters"]["Time (h)"]
        if time_horizon === nothing
            time_horizon = json["Parameters"]["Time horizon (h)"]
        end
        # Convert hours to minutes if one of the legacy keys was found.
        if time_horizon !== nothing
            time_horizon *= 60
        end
    end
    # Validate that the time horizon is present and an integer (in minutes).
    time_horizon !== nothing || error("Missing parameter: Time horizon (min)")
    isinteger(time_horizon) ||
        error("Time horizon must be an integer in minutes")
    time_horizon = Int(time_horizon)
    # Parse time step with default of 60 minutes.
    time_step = scalar(json["Parameters"]["Time step (min)"], default = 60)
    # The time step must divide 60 evenly...
    (60 % time_step == 0) ||
        error("Time step $time_step is not a divisor of 60")
    # ...and must divide the time horizon evenly.
    (time_horizon % time_step == 0) || error(
        "Time step $time_step is not a divisor of time horizon $time_horizon",
    )
    # Number of time steps per hour, and total number of time periods.
    time_multiplier = 60 ÷ time_step
    T = time_horizon ÷ time_step
    # Parse scenario probability and name, with defaults of 1 and "".
    probability = json["Parameters"]["Scenario weight"]
    probability !== nothing || (probability = 1)
    scenario_name = json["Parameters"]["Scenario name"]
    scenario_name !== nothing || (scenario_name = "")
    # Dictionaries mapping component names to their objects.
    name_to_bus = Dict{String,Bus}()
    name_to_line = Dict{String,TransmissionLine}()
    name_to_unit = Dict{String,ThermalUnit}()
    name_to_reserve = Dict{String,Reserve}()

    # Helper: expand a scalar into a time series of length T; pass arrays
    # through unchanged; return the default when the value is missing.
    function timeseries(x; default = nothing)
        x !== nothing || return default
        x isa Array || return [x for t in 1:T]
        return x
    end

    # Read power balance penalty parameter, defaulting to 1000 \$/MW.
    power_balance_penalty = timeseries(
        json["Parameters"]["Power balance penalty (\$/MW)"],
        default = [1000.0 for t in 1:T],
    )
    # Read bus data from JSON.
    for (bus_name, dict) in json["Buses"]
        bus = Bus(
            bus_name,
            length(buses),  # Bus index (0-based position in `buses`)
            timeseries(dict["Load (MW)"]),  # Load time series
            ThermalUnit[],  # Filled while reading generators
            PriceSensitiveLoad[],  # Filled while reading loads
            ProfiledUnit[],  # Filled while reading generators
            StorageUnit[],  # Filled while reading storage units
        )
        name_to_bus[bus_name] = bus
        push!(buses, bus)
    end
    # Read reserves data, if present.
    if "Reserves" in keys(json)
        for (reserve_name, dict) in json["Reserves"]
            r = Reserve(
                name = reserve_name,
                type = lowercase(dict["Type"]),
                amount = timeseries(dict["Amount (MW)"]),
                thermal_units = [],  # Filled while reading generators
                shortfall_penalty = scalar(
                    dict["Shortfall penalty (\$/MW)"],
                    default = -1,  # -1 presumably means "hard constraint" — verify downstream
                ),
            )
            name_to_reserve[reserve_name] = r
            push!(reserves, r)
        end
    end
    # Read generating units (thermal or profiled).
    for (unit_name, dict) in json["Generators"]
        unit_type = scalar(dict["Type"], default = nothing)
        unit_type !== nothing || error("unit $unit_name has no type specified")
        bus = name_to_bus[dict["Bus"]]
        if lowercase(unit_type) === "thermal"
            # Read production cost curve: K points, each expanded over time.
            K = length(dict["Production cost curve (MW)"])
            # T-by-K matrix of power levels (one column per curve point).
            curve_mw = hcat(
                [
                    timeseries(dict["Production cost curve (MW)"][k]) for
                    k in 1:K
                ]...,
            )
            # T-by-K matrix of costs at those power levels.
            curve_cost = hcat(
                [
                    timeseries(dict["Production cost curve (\$)"][k]) for
                    k in 1:K
                ]...,
            )
            # First/last columns are the minimum/maximum power levels.
            min_power = curve_mw[:, 1]
            max_power = curve_mw[:, K]
            min_power_cost = curve_cost[:, 1]  # Cost at minimum power
            # Build piecewise-linear cost segments between curve points.
            segments = CostSegment[]
            for k in 2:K
                amount = curve_mw[:, k] - curve_mw[:, k-1]
                # Marginal cost of the segment; zero-width segments give
                # NaN from the division, which is mapped to zero cost.
                cost = (curve_cost[:, k] - curve_cost[:, k-1]) ./ amount
                replace!(cost, NaN => 0.0)
                push!(segments, CostSegment(amount, cost))
            end
            # Read startup delay/cost categories.
            startup_delays = scalar(dict["Startup delays (h)"], default = [1])
            startup_costs = scalar(dict["Startup costs (\$)"], default = [0.0])
            startup_categories = StartupCategory[]
            for k in 1:length(startup_delays)
                push!(
                    startup_categories,
                    StartupCategory(
                        # Convert delay from hours to time periods.
                        startup_delays[k] .* time_multiplier,
                        startup_costs[k],
                    ),
                )
            end
            # Read reserve eligibility for this unit.
            unit_reserves = Reserve[]
            if "Reserve eligibility" in keys(dict)
                unit_reserves =
                    [name_to_reserve[n] for n in dict["Reserve eligibility"]]
            end
            # Read and validate initial conditions: power and status must be
            # provided together (or both omitted).
            initial_power =
                scalar(dict["Initial power (MW)"], default = nothing)
            initial_status =
                scalar(dict["Initial status (h)"], default = nothing)
            if initial_power === nothing
                initial_status === nothing || error(
                    "unit $unit_name has initial status but no initial power",
                )
            else
                initial_status !== nothing || error(
                    "unit $unit_name has initial power but no initial status",
                )
                # Status is hours online (positive) or offline (negative);
                # zero is ambiguous and therefore invalid.
                initial_status != 0 ||
                    error("unit $unit_name has invalid initial status")
                # Offline units must start with (near-)zero power.
                if initial_status < 0 && initial_power > 1e-3
                    error("unit $unit_name has invalid initial power")
                end
                # Convert hours to time periods.
                initial_status *= time_multiplier
            end
            # Read fixed commitment decisions; `nothing` = free to optimize.
            commitment_status = scalar(
                dict["Commitment status"],
                default = Vector{Union{Bool,Nothing}}(nothing, T),
            )
            # Assemble the thermal unit (positional constructor; argument
            # order must match the ThermalUnit struct definition).
            unit = ThermalUnit(
                unit_name,
                bus,
                max_power,
                min_power,
                timeseries(dict["Must run?"], default = [false for t in 1:T]),
                min_power_cost,
                segments,
                # Min up/down times converted from hours to periods.
                scalar(dict["Minimum uptime (h)"], default = 1) *
                time_multiplier,
                scalar(dict["Minimum downtime (h)"], default = 1) *
                time_multiplier,
                # Ramp/startup/shutdown limits default to effectively
                # unconstrained (1e6 MW).
                scalar(dict["Ramp up limit (MW)"], default = 1e6),
                scalar(dict["Ramp down limit (MW)"], default = 1e6),
                scalar(dict["Startup limit (MW)"], default = 1e6),
                scalar(dict["Shutdown limit (MW)"], default = 1e6),
                initial_status,
                initial_power,
                startup_categories,
                unit_reserves,
                commitment_status,
                timeseries(dict["Startup curve (MW)"], default = Float64[]),
                timeseries(dict["Shutdown curve (MW)"], default = Float64[]),
            )
            # Register the unit with its bus and with each eligible reserve.
            push!(bus.thermal_units, unit)
            for r in unit_reserves
                push!(r.thermal_units, unit)
            end
            name_to_unit[unit_name] = unit
            push!(thermal_units, unit)
        elseif lowercase(unit_type) === "profiled"
            # Profiled units (e.g. renewables) follow a fixed power profile.
            bus = name_to_bus[dict["Bus"]]
            pu = ProfiledUnit(
                unit_name,
                bus,
                timeseries(scalar(dict["Minimum power (MW)"], default = 0.0)),
                timeseries(dict["Maximum power (MW)"]),
                timeseries(dict["Cost (\$/MW)"]),
            )
            push!(bus.profiled_units, pu)
            push!(profiled_units, pu)
        else
            error("unit $unit_name has an invalid type")
        end
    end
    # Read transmission lines, if present.
    if "Transmission lines" in keys(json)
        for (line_name, dict) in json["Transmission lines"]
            line = TransmissionLine(
                line_name,
                length(lines) + 1,  # 1-based line index
                name_to_bus[dict["Source bus"]],
                name_to_bus[dict["Target bus"]],
                scalar(dict["Susceptance (S)"]),
                # Flow limits default to effectively unconstrained (1e8 MW).
                timeseries(
                    dict["Normal flow limit (MW)"],
                    default = [1e8 for t in 1:T],
                ),
                timeseries(
                    dict["Emergency flow limit (MW)"],
                    default = [1e8 for t in 1:T],
                ),
                timeseries(
                    dict["Flow limit penalty (\$/MW)"],
                    default = [5000.0 for t in 1:T],
                ),
            )
            name_to_line[line_name] = line
            push!(lines, line)
        end
    end
    # Read contingencies (N-1 security constraints), if present.
    if "Contingencies" in keys(json)
        for (cont_name, dict) in json["Contingencies"]
            affected_units = ThermalUnit[]
            affected_lines = TransmissionLine[]
            if "Affected lines" in keys(dict)
                affected_lines =
                    [name_to_line[l] for l in dict["Affected lines"]]
            end
            if "Affected units" in keys(dict)
                affected_units =
                    [name_to_unit[u] for u in dict["Affected units"]]
            end
            cont = Contingency(cont_name, affected_lines, affected_units)
            push!(contingencies, cont)
        end
    end
    # Read price-sensitive loads (demand response), if present.
    if "Price-sensitive loads" in keys(json)
        for (load_name, dict) in json["Price-sensitive loads"]
            bus = name_to_bus[dict["Bus"]]
            load = PriceSensitiveLoad(
                load_name,
                bus,
                timeseries(dict["Demand (MW)"]),
                timeseries(dict["Revenue (\$/MW)"]),
            )
            push!(bus.price_sensitive_loads, load)
            push!(loads, load)
        end
    end
    # Read storage units (batteries, pumped hydro, etc.), if present.
    if "Storage units" in keys(json)
        for (storage_name, dict) in json["Storage units"]
            bus = name_to_bus[dict["Bus"]]
            # Energy level bounds, expanded to time series.
            min_level =
                timeseries(scalar(dict["Minimum level (MWh)"], default = 0.0))
            max_level = timeseries(dict["Maximum level (MWh)"])
            # Positional constructor; argument order must match StorageUnit.
            storage = StorageUnit(
                storage_name,
                bus,
                min_level,
                max_level,
                timeseries(
                    scalar(
                        dict["Allow simultaneous charging and discharging"],
                        default = true,
                    ),
                ),
                timeseries(dict["Charge cost (\$/MW)"]),
                timeseries(dict["Discharge cost (\$/MW)"]),
                timeseries(scalar(dict["Charge efficiency"], default = 1.0)),
                timeseries(
                    scalar(dict["Discharge efficiency"], default = 1.0),
                ),
                timeseries(scalar(dict["Loss factor"], default = 0.0)),
                timeseries(
                    scalar(dict["Minimum charge rate (MW)"], default = 0.0),
                ),
                timeseries(dict["Maximum charge rate (MW)"]),
                timeseries(
                    scalar(dict["Minimum discharge rate (MW)"], default = 0.0),
                ),
                timeseries(dict["Maximum discharge rate (MW)"]),
                scalar(dict["Initial level (MWh)"], default = 0.0),
                # End-of-horizon level bounds default to the last-period
                # level bounds.
                scalar(
                    dict["Last period minimum level (MWh)"],
                    default = min_level[T],
                ),
                scalar(
                    dict["Last period maximum level (MWh)"],
                    default = max_level[T],
                ),
            )
            push!(bus.storage_units, storage)
            push!(storage_units, storage)
        end
    end
    # Assemble the scenario from all parsed components; sensitivity
    # matrices (ISF/LODF) start empty and are computed elsewhere.
    scenario = UnitCommitmentScenario(
        name = scenario_name,
        probability = probability,
        buses_by_name = Dict(b.name => b for b in buses),
        buses = buses,
        contingencies_by_name = Dict(c.name => c for c in contingencies),
        contingencies = contingencies,
        lines_by_name = Dict(l.name => l for l in lines),
        lines = lines,
        power_balance_penalty = power_balance_penalty,
        price_sensitive_loads_by_name = Dict(ps.name => ps for ps in loads),
        price_sensitive_loads = loads,
        reserves = reserves,
        reserves_by_name = name_to_reserve,
        time = T,
        time_step = time_step,
        thermal_units_by_name = Dict(g.name => g for g in thermal_units),
        thermal_units = thermal_units,
        profiled_units_by_name = Dict(pu.name => pu for pu in profiled_units),
        profiled_units = profiled_units,
        storage_units_by_name = Dict(su.name => su for su in storage_units),
        storage_units = storage_units,
        isf = spzeros(Float64, length(lines), length(buses) - 1),
        lodf = spzeros(Float64, length(lines), length(lines)),
    )
    # Validate and fix data inconsistencies, if requested.
    if repair
        UnitCommitment.repair!(scenario)
    end
    return scenario
end