# UnitCommitment_Trajectory/generate_dataset.jl
# Generate MPS trajectory datasets from UnitCommitment.jl MATPOWER instances.
using Distributed
using JuMP
using UnitCommitment
# Root directory containing one folder per MATPOWER case, each holding *.json.gz instances.
const DEFAULT_INPUT_ROOT = "instances/matpower"
# Destination root for the generated MPS files (a sibling directory of this repository).
const DEFAULT_OUTPUT_ROOT = "../UnitCommitment_Trajectory_Dataset"
# The four model variants written per instance: {hourly, subhourly} x {no lines, with lines}.
const VARIANTS = ("hourly_noline", "hourly_withline", "subhourly_noline", "subhourly_withline")
"""
    _build_noline_formulation()

Build a `UnitCommitment.Formulation` whose transmission component uses empty
precomputed ISF/LODF matrices, which effectively disables line-flow constraints.
"""
function _build_noline_formulation()
    transmission = UnitCommitment.ShiftFactorsFormulation(
        precomputed_isf = zeros(0, 0),
        precomputed_lodf = zeros(0, 0),
    )
    return UnitCommitment.Formulation(transmission = transmission)
end
"""
    _write_mps(model::JuMP.Model, path::String)

Write `model` to `path` in MPS format, creating parent directories as needed.
"""
function _write_mps(model::JuMP.Model, path::String)
    parent = dirname(path)
    mkpath(parent)
    JuMP.write_to_file(model, path)
    return nothing
end
"""
    _list_json_gz(case_dir::String) -> Vector{String}

Return the sorted names of all `*.json.gz` files directly inside `case_dir`.
"""
function _list_json_gz(case_dir::String)
    matches = [name for name in readdir(case_dir) if endswith(name, ".json.gz")]
    return sort(matches)
end
"""
    discover_matpower_cases(input_root = DEFAULT_INPUT_ROOT) -> Vector{Tuple{String,String}}

List `(case_name, case_dir)` pairs under `input_root`, sorted by case name,
keeping only subdirectories that contain at least one `*.json.gz` instance.
Errors when `input_root` does not exist.
"""
function discover_matpower_cases(input_root::String = DEFAULT_INPUT_ROOT)
    isdir(input_root) || error("Input directory does not exist: $input_root")
    subdirs = sort!(filter(d -> isdir(joinpath(input_root, d)), readdir(input_root)))
    pairs = [(name, joinpath(input_root, name)) for name in subdirs]
    return filter(pair -> !isempty(_list_json_gz(pair[2])), pairs)
end
"""
    _parse_case_filter() -> Union{Nothing,Set}

Parse the comma-separated `UC_CASES` environment variable into a set of
stripped case names; return `nothing` when the variable is unset or blank
(meaning "no filter").
"""
function _parse_case_filter()
    raw = strip(get(ENV, "UC_CASES", ""))
    if isempty(raw)
        return nothing
    end
    return Set(strip(token) for token in split(raw, ","))
end
"""
    _is_truthy_env(name::String) -> Bool

Return `true` when environment variable `name` is set to a truthy value
(`1`, `true`, `yes`, or `y`, case-insensitively, ignoring surrounding spaces).
"""
function _is_truthy_env(name::String)
    normalized = lowercase(strip(get(ENV, name, "")))
    return normalized == "1" || normalized == "true" ||
           normalized == "yes" || normalized == "y"
end
"""
    _parse_positive_int_env(name::String, default::Int) -> Int

Read environment variable `name` as a positive integer, returning `default`
when it is unset or blank. Errors when the value is not a positive integer.
"""
function _parse_positive_int_env(name::String, default::Int)
    text = strip(get(ENV, name, ""))
    isempty(text) && return default
    parsed = tryparse(Int, text)
    if parsed === nothing || parsed < 1
        error("$name must be a positive integer, got: $text")
    end
    return parsed
end
"""
    _default_worker_count() -> Int

Return the size of the current Distributed worker pool, or 1 when no extra
worker processes have been started.
"""
function _default_worker_count()
    nprocs() == 1 && return 1
    return length(workers())
end
# Resolve the requested worker count: the UC_WORKERS environment variable wins;
# otherwise fall back to the size of the current worker pool.
_requested_worker_count() = _parse_positive_int_env("UC_WORKERS", _default_worker_count())
"""
    _selected_cases(input_root::String) -> Vector{Tuple{String,String}}

Discover all cases under `input_root`, then keep only those whose name appears
in the `UC_CASES` filter (all cases when the filter is unset).
"""
function _selected_cases(input_root::String)
    all_cases = discover_matpower_cases(input_root)
    filter_set = _parse_case_filter()
    filter_set === nothing && return all_cases
    return [case for case in all_cases if case[1] in filter_set]
end
"""
    _ensure_worker_count!(requested_workers::Int) -> Vector{Int}

Grow the Distributed worker pool to at least `requested_workers` processes and
return the pids of the first `requested_workers` workers. Returns an empty
vector when one (or zero) workers are requested, i.e. serial execution.
New workers inherit the active project environment when one is set.
"""
function _ensure_worker_count!(requested_workers::Int)
    if requested_workers <= 1
        return Int[]
    end
    current = nprocs() > 1 ? workers() : Int[]
    missing_count = requested_workers - length(current)
    if missing_count > 0
        project_file = Base.active_project()
        if project_file === nothing
            addprocs(missing_count)
        else
            # Launch workers with the same project so package versions match.
            addprocs(missing_count; exeflags = `--project=$(dirname(project_file))`)
        end
    end
    return workers()[1:requested_workers]
end
"""
    _load_script_on_workers!(worker_ids::Vector{Int})

Make every worker `include` this script so that the generation functions are
defined on all processes before `pmap` dispatches tasks to them.
The `include` calls run concurrently; `@sync` waits for all to finish.
"""
function _load_script_on_workers!(worker_ids::Vector{Int})
    isempty(worker_ids) && return nothing
    source_file = abspath(@__FILE__)
    @sync begin
        for worker in worker_ids
            @async remotecall_wait(worker, source_file) do file
                include(file)
                return nothing
            end
        end
    end
    return nothing
end
"""
    _generate_one_instance!(case_name, date_tag, src_path, output_root, noline_formulation)

Read one `*.json.gz` instance and write four MPS files under `output_root`:
hourly/subhourly time resolution crossed with line constraints kept/removed.
Output layout: `output_root/case_name/<variant>/<case>_<date>_<h|s>_<noline|withline>.mps`.
"""
function _generate_one_instance!(
    case_name::AbstractString,
    date_tag::AbstractString,
    src_path::AbstractString,
    output_root::AbstractString,
    noline_formulation,
)
    inst_hourly = UnitCommitment.read(src_path)
    # Hourly / no-line variant: deep copy so removing lines below does not
    # affect the with-line model built from inst_hourly.
    inst_hourly_noline = deepcopy(inst_hourly)
    # NOTE(review): only scenarios[1] is cleared — assumes single-scenario
    # instances; confirm for stochastic inputs.
    empty!(inst_hourly_noline.scenarios[1].lines)
    model_hourly_noline = UnitCommitment.build_model(
        instance = inst_hourly_noline,
        formulation = noline_formulation,
        variable_names = true,
    )
    _write_mps(
        model_hourly_noline,
        joinpath(output_root, case_name, "hourly_noline", "$(case_name)_$(date_tag)_h_noline.mps"),
    )
    # Hourly / with-line variant uses the package's default formulation.
    model_hourly_withline = UnitCommitment.build_model(
        instance = inst_hourly,
        variable_names = true,
    )
    _write_mps(
        model_hourly_withline,
        joinpath(output_root, case_name, "hourly_withline", "$(case_name)_$(date_tag)_h_withline.mps"),
    )
    # NOTE(review): convert_to_subhourly receives inst_hourly as BOTH arguments;
    # the second argument looks like it should be a time-step multiplier —
    # verify against the UnitCommitment.jl API.
    inst_sub = UnitCommitment.convert_to_subhourly(inst_hourly, inst_hourly)
    # Subhourly / no-line variant (same line-removal approach as above).
    inst_sub_noline = deepcopy(inst_sub)
    empty!(inst_sub_noline.scenarios[1].lines)
    model_sub_noline = UnitCommitment.build_model(
        instance = inst_sub_noline,
        formulation = noline_formulation,
        variable_names = true,
    )
    _write_mps(
        model_sub_noline,
        joinpath(output_root, case_name, "subhourly_noline", "$(case_name)_$(date_tag)_s_noline.mps"),
    )
    # Subhourly / with-line variant.
    model_sub_withline = UnitCommitment.build_model(
        instance = inst_sub,
        variable_names = true,
    )
    _write_mps(
        model_sub_withline,
        joinpath(output_root, case_name, "subhourly_withline", "$(case_name)_$(date_tag)_s_withline.mps"),
    )
    return nothing
end
"""
    _build_generation_tasks(cases, output_root) -> Vector{NamedTuple}

Build one task descriptor per instance file across all `cases`. Each task
carries the case/instance indices and counts (for progress printing), the
date tag derived from the file name, the source path, and the output root.
"""
function _build_generation_tasks(cases, output_root::AbstractString)
    case_count = length(cases)
    tasks = NamedTuple[]
    for (case_index, (case_name, case_dir)) in enumerate(cases)
        file_names = _list_json_gz(case_dir)
        instance_count = length(file_names)
        for (instance_index, file_name) in enumerate(file_names)
            task = (;
                case_index,
                case_count,
                case_name,
                instance_index,
                instance_count,
                # "2017-01-01.json.gz" -> "2017-01-01"
                date_tag = first(split(file_name, ".")),
                src_path = joinpath(case_dir, file_name),
                output_root,
            )
            push!(tasks, task)
        end
    end
    return tasks
end
"""
    _prepare_output_dirs!(cases, output_root)

Create the per-case, per-variant output directory tree up front so workers
never race on directory creation.
"""
function _prepare_output_dirs!(cases, output_root::AbstractString)
    for (case_name, _) in cases, variant in VARIANTS
        mkpath(joinpath(output_root, case_name, variant))
    end
    return nothing
end
"""
    _print_case_plan(cases)

Print one line per case with its index, name, and instance count.
"""
function _print_case_plan(cases)
    total = length(cases)
    for (idx, (case_name, case_dir)) in enumerate(cases)
        n_instances = length(_list_json_gz(case_dir))
        println("[$idx/$total] $case_name ($n_instances instances)")
    end
    return nothing
end
"""
    _print_task_done(task, pid::Int)

Print a progress line for a finished task, including the pid that ran it,
and flush stdout so output from parallel workers appears promptly.
"""
function _print_task_done(task, pid::Int)
    message = " [$(task.case_index)/$(task.case_count) $(task.case_name)] " *
              "$(task.instance_index)/$(task.instance_count) $(task.date_tag) pid=$pid"
    println(message)
    flush(stdout)
    return nothing
end
"""
    _generate_task!(task)

Run one generation task end-to-end on the current process: build a fresh
no-line formulation, emit all four MPS variants, then report progress.
"""
function _generate_task!(task)
    formulation = _build_noline_formulation()
    _generate_one_instance!(
        task.case_name,
        task.date_tag,
        task.src_path,
        task.output_root,
        formulation,
    )
    # Models can be large; collect eagerly to keep per-worker memory bounded.
    GC.gc()
    _print_task_done(task, myid())
    return nothing
end
"""
    _generate_parallel!(tasks, worker_ids::Vector{Int})

Distribute `tasks` over the given workers with `pmap`. `batch_size = 1` keeps
load balanced since individual instances vary widely in build time.
"""
function _generate_parallel!(tasks, worker_ids::Vector{Int})
    worker_pool = CachingPool(worker_ids)
    pmap(_generate_task!, worker_pool, tasks; batch_size = 1)
    return nothing
end
"""
    generate_dataset(; input_root, output_root)

Top-level driver: select cases, print a generation plan, then write every MPS
variant for every instance — serially, or across Distributed workers when
`UC_WORKERS` requests more than one.

Environment variables: `UC_INPUT_ROOT` / `UC_OUTPUT_ROOT` (directory roots),
`UC_CASES` (comma-separated case filter), `UC_WORKERS` (worker count),
`UC_DRY_RUN` (print the plan and exit without writing files).
"""
function generate_dataset(;
    input_root::String = get(ENV, "UC_INPUT_ROOT", DEFAULT_INPUT_ROOT),
    output_root::String = get(ENV, "UC_OUTPUT_ROOT", DEFAULT_OUTPUT_ROOT),
)
    cases = _selected_cases(input_root)
    isempty(cases) && error("No cases selected under $input_root. Check UC_CASES or the input directory.")
    mkpath(output_root)
    requested_workers = _requested_worker_count()
    total_instances = sum(length(_list_json_gz(case_dir)) for (_, case_dir) in cases)
    # Summary banner before any heavy work starts.
    println("Input root: $input_root")
    println("Output root: $output_root")
    println("Cases: $(length(cases))")
    println("Instances: $total_instances")
    println("Variants: $(length(VARIANTS))")
    println("MPS files: $(total_instances * length(VARIANTS))")
    println("Workers: $requested_workers")
    # Dry-run mode: report per-case instance counts, write nothing.
    if _is_truthy_env("UC_DRY_RUN")
        println("\nDry run only. Set UC_DRY_RUN=0 or remove it to generate MPS files.")
        for (case_name, case_dir) in cases
            println(" $case_name: $(length(_list_json_gz(case_dir))) instances")
        end
        return nothing
    end
    _prepare_output_dirs!(cases, output_root)
    tasks = _build_generation_tasks(cases, output_root)
    println()
    _print_case_plan(cases)
    println()
    if requested_workers <= 1
        # Serial path: run every task in-process.
        for task in tasks
            _generate_task!(task)
        end
    else
        # Parallel path: spawn workers, load this script on each, then pmap.
        worker_ids = _ensure_worker_count!(requested_workers)
        _load_script_on_workers!(worker_ids)
        println("Parallel worker pids: $(join(worker_ids, ", "))")
        _generate_parallel!(tasks, worker_ids)
    end
    println("\nDone. output_root=$output_root")
    return nothing
end
# Run only on the primary process and only when executed as a script:
# workers include() this file to load definitions and must not re-trigger
# generation; the PROGRAM_FILE check also guards against plain `include`.
if myid() == 1 && abspath(PROGRAM_FILE) == @__FILE__
    generate_dataset()
end