diff --git a/.github/workflows/PRASReport.jl.yml b/.github/workflows/PRASReport.jl.yml new file mode 100644 index 00000000..d7c2aa57 --- /dev/null +++ b/.github/workflows/PRASReport.jl.yml @@ -0,0 +1,71 @@ +name: PRASReport.jl tests +# Run on master, tags, or any pull request +on: + schedule: + - cron: '0 2 * * *' # Daily at 2 AM UTC (8 PM CST) + push: + branches: [main] + tags: ["*"] + pull_request: +jobs: + test: + name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + version: + - "lts" # Latest LTS release, min supported + - "1" # Latest release + os: + - ubuntu-latest + - macOS-latest + - windows-latest + arch: + - x64 + - aarch64 + exclude: + - os: windows-latest + arch: aarch64 + - os: ubuntu-latest + arch: aarch64 + steps: + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@v2 + with: + version: ${{ matrix.version }} + arch: ${{ matrix.arch }} + - uses: actions/cache@v4 + env: + cache-name: cache-artifacts + with: + path: ~/.julia/artifacts + key: ${{ runner.os }}-${{ matrix.arch }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }} + restore-keys: | + ${{ runner.os }}-${{ matrix.arch }}-test-${{ env.cache-name }}- + ${{ runner.os }}-${{ matrix.arch }}-test- + ${{ runner.os }}-${{ matrix.arch }}- + ${{ runner.os }}- + - run: julia --project=PRASReport.jl -e 'import Pkg; + Pkg.develop([ + (path="PRASCore.jl",), + (path="PRASFiles.jl",), + ])' + shell: bash + - uses: julia-actions/julia-buildpkg@latest + with: + project: PRASReport.jl + - run: | + git config --global user.name Tester + git config --global user.email te@st.er + - uses: julia-actions/julia-runtest@latest + with: + project: PRASReport.jl + env: + JULIA_NUM_THREADS: 2 + - uses: julia-actions/julia-processcoverage@v1 + with: + directories: PRASReport.jl/src + - uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.gitignore b/.gitignore index 1fe04f98..83cf7b57 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,7 @@ Manifest.toml *.DS_Store *.pras *.json + +# Any html and duckdb files generated during testing +*.html +*.duckdb \ No newline at end of file diff --git a/PRASCore.jl/src/Results/Results.jl b/PRASCore.jl/src/Results/Results.jl index ad6f4028..7c231b45 100644 --- a/PRASCore.jl/src/Results/Results.jl +++ b/PRASCore.jl/src/Results/Results.jl @@ -70,6 +70,9 @@ EUE(x::AbstractShortfallResult, r::AbstractString, ::Colon) = EUE(x::AbstractShortfallResult, ::Colon, ::Colon) = EUE.(x, x.regions.names, permutedims(x.timestamps)) +NEUE(x::AbstractShortfallResult, ::Colon, t::ZonedDateTime) = + NEUE.(x, x.regions.names, t) + NEUE(x::AbstractShortfallResult, r::AbstractString, ::Colon) = NEUE.(x, r, x.timestamps) diff --git a/PRASCore.jl/src/Results/Shortfall.jl b/PRASCore.jl/src/Results/Shortfall.jl index e76c00c1..dc4b9462 100644 --- a/PRASCore.jl/src/Results/Shortfall.jl +++ b/PRASCore.jl/src/Results/Shortfall.jl @@ -267,6 +267,12 @@ function NEUE(x::ShortfallResult{N,L,T,E}, r::AbstractString) where {N,L,T,E} return NEUE(div(MeanEstimate(x[r]..., x.nsamples),(sum(x.regions.load[i_r,:])/1e6))) end +function NEUE(x::ShortfallResult{N,L,T,E}, r::AbstractString, t::ZonedDateTime) where {N,L,T,E} + i_r = findfirstunique(x.regions.names, r) + i_t = findfirstunique(x.timestamps, t) + return NEUE(div(MeanEstimate(x[r, t]..., x.nsamples),x.regions.load[i_r,i_t]/1e6)) +end + function finalize( acc::ShortfallAccumulator, system::SystemModel{N,L,T,P,E}, diff --git 
a/PRASReport.jl/LICENSE.md b/PRASReport.jl/LICENSE.md new file mode 100644 index 00000000..fc56637f --- /dev/null +++ b/PRASReport.jl/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2025 Alliance for Sustainable Energy, LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/PRASReport.jl/Project.toml b/PRASReport.jl/Project.toml new file mode 100644 index 00000000..851a3971 --- /dev/null +++ b/PRASReport.jl/Project.toml @@ -0,0 +1,29 @@ +name = "PRASReport" +uuid = "c003f3f0-f5d5-4077-8f93-207e59ecb3ff" +authors = ["Hari Sundar "] +version = "0.1.0" + +[deps] +Base64 = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" +Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" +DuckDB = "d2f5444f-75bc-4fdf-ac35-56f514c445e1" +PRASCore = "c5c32b99-e7c3-4530-a685-6f76e19f7fe2" +PRASFiles = "a2806276-6d43-4ef5-91c0-491704cd7cf1" +StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91" +Tables = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" +TimeZones = "f269a46b-ccf7-5d73-abea-4c690281aa53" + +[compat] +Dates = "1" +PRASCore = "0.8.0" +PRASFiles = "0.8.0" +StatsBase = "0.34" +TimeZones = "^1.14" +julia = "1.10" + +[extras] +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +Suppressor = "fd094767-a336-5f1f-9728-57cf17d0bbfb" + +[targets] +test = ["Test","Suppressor"] diff --git a/PRASReport.jl/README.md b/PRASReport.jl/README.md new file mode 100644 index 00000000..64ed5a0f --- /dev/null +++ b/PRASReport.jl/README.md @@ -0,0 +1,13 @@ +# PRASReport.jl + +## Usage +```julia +using PRASReport + +create_pras_report(system_path="path_to_sys.pras", # Path to your .pras file + samples=100,seed=1, # Number of samples and random seed for analysis + report_name="report", # Name of the output HTML file + report_path=pwd(), # Path for HTML file output + threshold=0, # hourly EUE threshold to be considered part of event + title="Resource Adequacy Report") # Report title +``` diff --git a/PRASReport.jl/TODOs.md b/PRASReport.jl/TODOs.md new file mode 100644 index 00000000..8dc8b416 --- /dev/null +++ b/PRASReport.jl/TODOs.md @@ -0,0 +1,16 @@ +# TODO +0. Verify timezone handling is correct. SO CONFUSING! +1. Landing page shows most important characteristics like + - Number of events with total EUE exceeds the ## number + - Same with LOLE + - Top 1 ptile events + - Event summary graph (like the ESIG report) +2. Users from the webpage can specify which regions need to be aggregated to get combined metric +3. PRASReport exports an assess function which automatically runs assess with Shortfall(), Surplus(), Flows(), and Utilization() +4. 
Event selector creates a tab for an event to show regional shortfall and flows etc +5. Does NEUE have to be reported as MWh/MWh +6. Explore how a directed graph can be stored in DuckDB, and how it can be drawn on a webpage with WASM-DuckDB +7. Julia indentation +8. Better names for Shortfall_timeseries and Flow_timeseries? +9. Update glossary with event period definitions, for example it says EUE per year, instead it should say per event period etc. +10. Add a analysis narrative section to the report. diff --git a/PRASReport.jl/examples/run_report.jl b/PRASReport.jl/examples/run_report.jl new file mode 100644 index 00000000..3cb9457d --- /dev/null +++ b/PRASReport.jl/examples/run_report.jl @@ -0,0 +1,12 @@ +using Revise +using PRAS +using PRASReport + +rts_sys = rts_gmlc(); +rts_sys.regions.load .+= 375; + +sf,flow = assess(rts_sys,SequentialMonteCarlo(samples=100),Shortfall(),Flow()); + +create_pras_report(sf,flow, report_name="rts_report", + title="RTS-GMLC (load modified) RA Report", + threshold=1) \ No newline at end of file diff --git a/PRASReport.jl/src/PRASReport.jl b/PRASReport.jl/src/PRASReport.jl new file mode 100644 index 00000000..30b5d4aa --- /dev/null +++ b/PRASReport.jl/src/PRASReport.jl @@ -0,0 +1,31 @@ +module PRASReport + +import PRASCore.Systems: SystemModel, Regions, Interfaces, + Generators, Storages, GeneratorStorages, Lines, + timeunits, powerunits, energyunits, unitsymbol, + unitsymbol_long +import PRASCore.Simulations: assess, SequentialMonteCarlo +import PRASCore.Results: EUE, LOLE, NEUE, + Shortfall, Flow, + ShortfallResult, FlowResult, + ShortfallSamplesResult, AbstractShortfallResult, + Result, MeanEstimate, findfirstunique, + val, stderror +import PRASFiles: SystemModel +import StatsBase: mean +import Dates: @dateformat_str, format, now, DateTime +import TimeZones: ZonedDateTime, @tz_str, TimeZone +import Base64: base64encode +import Tables: columntable +import DuckDB + +export + Event, get_events, event_length, + Shortfall_timeseries, Flow_timeseries, + get_db, create_pras_report + +include("events.jl") +include("writedb.jl") +include("report.jl") + +end # module PRASReport diff --git a/PRASReport.jl/src/event_db_schema.sql b/PRASReport.jl/src/event_db_schema.sql new file mode 100644 index 00000000..0db660ae --- /dev/null +++ b/PRASReport.jl/src/event_db_schema.sql @@ -0,0 +1,95 @@ +-- System and Simulation parameters +CREATE TABLE systemsiminfo ( + timesteps INTEGER, + step_size INTEGER NOT NULL, + time_unit TEXT NOT NULL, + power_unit TEXT NOT NULL, + energy_unit TEXT NOT NULL, + start_timestamp TIMESTAMP WITHOUT TIME ZONE, + end_timestamp TIMESTAMP WITHOUT TIME ZONE, + timezone TEXT, + n_samples INTEGER, + eue_mean REAL NOT NULL, + eue_stderr REAL NOT NULL, + lole_mean REAL NOT NULL, + lole_stderr REAL NOT NULL, + neue_mean REAL NOT NULL, + neue_stderr REAL NOT NULL, + eventthreshold INTEGER NOT NULL, + + -- Constraint to ensure valid ISO 8601 duration units + CONSTRAINT valid_time_unit CHECK ( + time_unit IN ('Year', 'Day', 'Hour', 'Minute', 'Second') + ) +); + +-- Regions lookup table +CREATE TABLE regions ( + id INTEGER PRIMARY KEY, + name TEXT UNIQUE NOT NULL +); + +-- Interfaces lookup table (region to region connections) +CREATE TABLE interfaces ( + id INTEGER PRIMARY KEY, + region_from_id INTEGER REFERENCES regions(id), + region_to_id INTEGER REFERENCES regions(id), + name TEXT, -- name like "Region1->Region2" + UNIQUE(region_from_id, region_to_id) +); + +-- Main events table (clean, no parameters) +CREATE SEQUENCE eventid_sequence START 1; +CREATE 
TABLE events ( + id INTEGER PRIMARY KEY DEFAULT nextval('eventid_sequence'), + name TEXT NOT NULL, + start_timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL, + end_timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL, + time_period_count INTEGER NOT NULL -- N parameter +); + +-- System-level metrics for each event (aggregated) +CREATE TABLE event_system_shortfall ( + event_id INTEGER REFERENCES events(id), + lole REAL NOT NULL, + eue REAL NOT NULL, + neue REAL NOT NULL, + PRIMARY KEY (event_id) +); + +-- Regional metrics for each event (aggregated) +CREATE TABLE event_regional_shortfall ( + id INTEGER PRIMARY KEY, + event_id INTEGER REFERENCES events(id), + region_id INTEGER REFERENCES regions(id), + lole REAL NOT NULL, + eue REAL NOT NULL, + neue REAL NOT NULL, + UNIQUE(event_id, region_id) +); + +-- Time-series metrics for each timestamp within an event (from sf_ts struct) +-- Optimized with better data types and ordering for columnar storage +CREATE TABLE event_timeseries_shortfall ( + event_id INTEGER NOT NULL, + region_id INTEGER NOT NULL, + timestamp_value TIMESTAMP WITHOUT TIME ZONE NOT NULL, + lole REAL NOT NULL, + eue REAL NOT NULL, + neue REAL NOT NULL, + FOREIGN KEY (event_id) REFERENCES events(id), + FOREIGN KEY (region_id) REFERENCES regions(id), + PRIMARY KEY (event_id, region_id, timestamp_value) +); + +-- Flow data for each timestamp within an event (from flow_ts struct) +-- Optimized with better data types and ordering for columnar storage +CREATE TABLE event_timeseries_flows ( + event_id INTEGER NOT NULL, + interface_id INTEGER NOT NULL, + timestamp_value TIMESTAMP WITHOUT TIME ZONE NOT NULL, + flow REAL NOT NULL, -- Flow value (NEUE units) + FOREIGN KEY (event_id) REFERENCES events(id), + FOREIGN KEY (interface_id) REFERENCES interfaces(id), + PRIMARY KEY (event_id, interface_id, timestamp_value) +); \ No newline at end of file diff --git a/PRASReport.jl/src/events.jl b/PRASReport.jl/src/events.jl new file mode 100644 index 00000000..43ce1c96 --- /dev/null +++ b/PRASReport.jl/src/events.jl @@ -0,0 +1,177 @@ +mutable struct Event{N,L,T,E} + name::String + timestamps::StepRange{ZonedDateTime,T} + system_lole::LOLE + system_eue::EUE + system_neue::NEUE + lole::Vector{LOLE} + eue::Vector{EUE} + neue::Vector{NEUE} + regions::Vector{String} + + function Event{}(name::String, timestamps::StepRange{ZonedDateTime,T}, + system_lole::LOLE{N,L,T}, system_eue::EUE{N,L,T,E}, + system_neue::NEUE, + lole::Vector{LOLE{N,L,T}}, eue::Vector{EUE{N,L,T,E}}, + neue::Vector{NEUE}, + regions::Vector{String} + ) where {N,L,T,E} + + length(lole) != length(eue) != length(neue) != length(regions) && + error("Length of lole, eue, neue, and region names vectors must be equal") + + length(timestamps) != N && + error("Number of timesteps should match metrics event length") + + length(regions) > 0 && !isapprox(val(system_eue),sum(val.(eue))) && + error("System EUE should be approximately the sum of EUE of all the regions") + + new{N,L,T,E}(name, timestamps, + system_lole, system_eue, system_neue, + lole, eue, neue, regions) + end +end + +event_length(event::Event{N,L,T}) where {N,L,T} = T(N*L) + +function Event(sfresult::ShortfallResult{N,L,T,E}, + event_timestamps::StepRange{ZonedDateTime,T}, + name::String=nothing + ) where {N,L,T,E} + + if isnothing(name) + name = "Shortfall Event" + end + + event_length = length(event_timestamps) + ts_first = findfirstunique(sfresult.timestamps,first(event_timestamps)) + ts_last = findfirstunique(sfresult.timestamps,last(event_timestamps)) + + system_lole = 
LOLE{event_length,L,T}( + MeanEstimate(sum(val.(LOLE.(sfresult,event_timestamps)))) + ) + + system_eue = EUE{event_length,L,T,E}( + MeanEstimate(sum(val.(EUE.(sfresult,event_timestamps)))) + ) + + system_neue = NEUE( + div(MeanEstimate(sum(val.(EUE.(sfresult,event_timestamps)))), + sum(sfresult.regions.load[:,ts_first:ts_last])/1e6)) + + lole = LOLE{event_length,L,T}[] + eue = EUE{event_length,L,T,E}[] + neue = NEUE[] + + if length(sfresult.regions) > 1 + region_names = ["System"] + + for (r,region) in enumerate(sfresult.regions.names) + + push!(lole, + LOLE{event_length,L,T}( + MeanEstimate(sum(val.(LOLE.(sfresult,region,event_timestamps)))) + ) + ) + + push!(eue, + EUE{event_length,L,T,E}( + MeanEstimate(sum(val.(EUE.(sfresult,region,event_timestamps)))) + ) + ) + + push!(neue, + NEUE( + div(MeanEstimate(sum(val.(EUE.(sfresult,region,event_timestamps)))), + sum(sfresult.regions.load[r,ts_first:ts_last])/1e6)) + ) + + push!(region_names,region) + end + end + + return Event(name,event_timestamps, + system_lole, system_eue, system_neue, + lole,eue,neue, + sfresult.regions.names) +end + +""" + get_events(sfresult::ShortfallResult{N,L,T,E}, event_threshold=0) where {N,L,T,E} + +Extracts events from PRAS ShortfallResult objects where an event is a contiguous +period during which the system EUE exceeds a specified threshold, and +returns a vector of (@ref Event) objects. + +If the PRAS simulation is hourly and event_threshold is 0, and there are +5 consecutive hours where the system EUE exceeds the threshold, this returns a +vector with a single event. +""" +function get_events(sfresult::ShortfallResult{N,L,T,E}, event_threshold=0) where {N,L,T,E} + + event_threshold < 0 && error("Event threshold must be non-negative") + + eue_system = EUE.(sfresult,sfresult.timestamps) + system_eue_above_threshold = findall(val.(eue_system) .> event_threshold) + + isempty(system_eue_above_threshold) && error("No shortfall events in this simulation") + + event_timegroups = get_stepranges(sfresult.timestamps[system_eue_above_threshold],L,T) + + return map(ts_group -> Event(sfresult,ts_group, + format(first(ts_group),"yyyy-mm-dd HH:MM ZZZ") + ), + event_timegroups) + +end + +function get_stepranges(vec::Vector{ZonedDateTime},L,T) + groups = Vector{StepRange{ZonedDateTime,T}}() + start = vec[1] + final = vec[1] + for next in vec[2:end] + if next == final + T(L) + final = next + else + push!(groups, StepRange(start,T(L),final)) + start = next + final = next + end + end + push!(groups, StepRange(start,T(L),final)) + return groups +end + +mutable struct Shortfall_timeseries{} + name::String + timestamps::Vector{ZonedDateTime} + eue::Vector{Vector{Float64}} + lole::Vector{Vector{Float64}} + neue::Vector{Vector{Float64}} + regions::Vector{String} + + function Shortfall_timeseries(event,sfresult::ShortfallResult{N,L,T,E}) where {N,L,T,E} + name = event.name + timestamps = collect(event.timestamps) + eue = map(row->val.(row),(EUE.(sfresult,:,timestamps))) + lole = map(row->val.(row),(LOLE.(sfresult,:,timestamps))) + neue = map(row->val.(row),(NEUE.(sfresult,:,timestamps))) + regions = event.regions + new(name,timestamps,eue,lole,neue,regions) + end +end + +mutable struct Flow_timeseries{} + name::String + timestamps::Vector{ZonedDateTime} + flow::Vector{Vector{Float64}} + interfaces::Vector{Pair{String,String}} + + function Flow_timeseries(event,flresult::FlowResult{N,L,T,P}) where {N,L,T,P} + name = event.name + timestamps = collect(event.timestamps) + flow = [first.(flresult[:, ts]) for ts in timestamps] + interfaces = 
flresult.interfaces + new(name,timestamps,flow,interfaces) + end +end diff --git a/PRASReport.jl/src/report.jl b/PRASReport.jl/src/report.jl new file mode 100644 index 00000000..a04a3670 --- /dev/null +++ b/PRASReport.jl/src/report.jl @@ -0,0 +1,141 @@ +""" + create_pras_report(sf::ShortfallResult, flow::FlowResult; + report_name::String, + report_path::String, + threshold::Int, + title::String) + +Create a HTML report from PRAS simulation results from +ShortfallResult and FlowResult objects. + +# Arguments +- `sf::ShortfallResult`: Simulation ShortfallResult +- `flow::FlowResult`: Simulation FlowResult +- `report_name::String`: Base name for the generated HTML file (default: "report") +- `report_path::String`: Directory path where the report will be saved (default: pwd()) +- `threshold::Int`: Event threshold for filtering events (default: 0) +- `title::String`: Title to display in the report header (default: "Resource Adequacy Report") +""" +function create_pras_report(sf::ShortfallResult, + flow::FlowResult; + report_name::String="report", + report_path::String=pwd(), + threshold::Int=0, + title::String="Resource Adequacy Report") + + base64_db = _get_base64_db((sf,flow);threshold=threshold) + + return _html_report(base64_db, + report_name=report_name, + report_path=report_path, + title=title) +end +""" + create_pras_report(system::SystemModel; + samples,seed, + report_name::String="report", + report_path::String=pwd(), + threshold::Int=0, + title::String="Resource Adequacy Report") + +Create a HTML report when a PRAS system and simulation parameters are provided. + +# Arguments +- `system::SystemModel`: PRAS system +- `samples`: Number of Monte Carlo samples (default: 1000) +- `seed`: Random seed for MC simulation (default: 1) +""" +function create_pras_report(system::SystemModel; + samples=1000,seed=1, + report_name::String="report", + report_path::String=pwd(), + threshold::Int=0, + title::String="Resource Adequacy Report") + + base64_db = _get_base64_db((system,);threshold=threshold, + samples=samples,seed=seed) + + return _html_report(base64_db, + report_name=report_name, + report_path=report_path, + title=title) +end + +""" + create_pras_report(system_path::String; + samples,seed, + report_name::String="report", + report_path::String=pwd(), + threshold::Int=0, + title::String="Resource Adequacy Report") + +Create a HTML report when a path to the .pras system and simulation +parameters are provided. + +# Arguments +- `system_path::String`: Path to the .pras file +""" +function create_pras_report(system_path::String; + samples=1000,seed=1, + report_name::String="report", + report_path::String=pwd(), + threshold::Int=0, + title::String="Resource Adequacy Report") + + base64_db = _get_base64_db((system_path,);threshold=threshold, + samples=samples,seed=seed) + + return _html_report(base64_db, + report_name=report_name, + report_path=report_path, + title=title) +end + +""" +Internal function to get events database for different types of inputs. 
+""" +function _get_base64_db(get_db_args; + samples=1000,seed=1, + threshold::Int=0) + + tempdb_path = tempname() * ".db" + dbfile = DuckDB.open(tempdb_path) + conn = DuckDB.connect(dbfile) + conn = get_db(get_db_args...; conn, threshold=threshold, + samples=samples, seed=seed) + + DuckDB.DBInterface.close!(conn) + DuckDB.close_database(dbfile) + + # Convert temp db to base64 string and delete temp file + base64_db = base64encode(read(tempdb_path)) + rm(tempdb_path; force=true) + + return base64_db + +end + +""" +Internal function to create a HTML report from PRAS simulation results stored in a +base64-encoded DuckDB database string. +""" +function _html_report(base64_db::String; + report_name::String, + report_path::String, + title) + + report_html = read(joinpath(@__DIR__, "report_template.html"), String) + report_html = replace(report_html, + " // Placeholder for base64 database - this will be replaced by Julia" => "") + report_html = replace(report_html, + "const BASE64_DB = \"{{BASE64_DB_PLACEHOLDER}}\"" => "const BASE64_DB = \"$(base64_db)\"") + report_html = replace(report_html, + "{{REPORT_TITLE_PLACEHOLDER}}" => title) + + full_report_path = joinpath(report_path, report_name * ".html") + println("Writing report to: ", full_report_path) + write(full_report_path, report_html) + + return + +end diff --git a/PRASReport.jl/src/report_template.html b/PRASReport.jl/src/report_template.html new file mode 100644 index 00000000..00334e94 --- /dev/null +++ b/PRASReport.jl/src/report_template.html @@ -0,0 +1,1121 @@ + + + + + + PRAS Report + + + +
[report_template.html: HTML/JavaScript markup not reproduced here; the template's visible text content is summarized below.]

{{REPORT_TITLE_PLACEHOLDER}}

Simulation Summary
• Simulation Period: -
• Samples: -
• System LOLE: -
• System NEUE: -
• Events: -
• Event threshold: -

Loading data...

RA Events - system level summary

Glossary of Terms

Expected Unserved Energy (EUE): the expected (average) total energy shortfall over the study period. It may be expressed in energy units (e.g. GWh per year).

Loss-of-Load Expectation (LOLE): the expected (average) count of periods experiencing shortfall over the study period. It is expressed in terms of event-periods (e.g. event-hours per year, event-days per year). When reported in event-hours, LOLE is sometimes referred to as LOLH (loss-of-load hours).

Normalized Expected Unserved Energy (NEUE): the Expected Unserved Energy normalized by the system's total energy demand, expressed as a fraction, typically shown as a percentage or in parts per million (ppm).

Resource Adequacy (RA) Event: a set of contiguous time periods in which the system EUE exceeds the event threshold.

Event Threshold: the minimum energy shortfall required for a time period to be considered part of a resource adequacy event.
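The glossary's ppm convention matches the normalization used by the `NEUE` constructors in this diff, which divide EUE by total load scaled by 1e6. A minimal illustrative check (the `neue_ppm` helper is hypothetical, not part of the package):

```julia
# Illustrative only: NEUE in parts per million, mirroring the
# division by (total load / 1e6) used in the NEUE constructors above.
neue_ppm(eue_mwh, total_load_mwh) = eue_mwh / (total_load_mwh / 1e6)

neue_ppm(10.0, 1_000_000.0)  # 10 MWh unserved against 1,000,000 MWh of load -> 10.0 ppm
```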
+ + + + + \ No newline at end of file diff --git a/PRASReport.jl/src/writedb.jl b/PRASReport.jl/src/writedb.jl new file mode 100644 index 00000000..54887fe0 --- /dev/null +++ b/PRASReport.jl/src/writedb.jl @@ -0,0 +1,454 @@ +""" + get_db(sf::ShortfallResult{N,L,T,E}, + flow::FlowResult{N,L,T,P}=nothing; + conn::DuckDB.Connection=nothing, + threshold=0) + +Extract events from PRAS results and write them to database. +Returns the database connection if provided. +If connection is not provided, it creates and writes to a .duckdb +database in the working directory of REPL or the julia call. + +# Arguments +- `system::SystemModel`: PRAS system +- `conn::Union{DuckDB.Connection,Nothing}`: DuckDB database connection (default: nothing) +- `threshold`: Event threshold (default: 0) +""" +function get_db(sf::ShortfallResult{N,L,T,E}, + flow::Union{FlowResult{N,L,T,P},Nothing}=nothing; + conn::Union{DuckDB.Connection,Nothing}=nothing, + threshold=0, + samples=nothing, seed=nothing) where {N,L,T,P,E} + + if isnothing(conn) + timenow = format(now(tz"UTC"), @dateformat_str"yyyy-mm-dd_HHMMSSZZZ") + dbfile = DuckDB.open(joinpath(pwd(), "$(timenow).duckdb")) + conn = DuckDB.connect(dbfile) + internal_conn = true + else + internal_conn = false + end + + + # Load in DB schema + schema_file = joinpath(dirname(@__FILE__), "event_db_schema.sql") + schema_sql = read(schema_file, String) + + # Split schema into individual statements and execute each one + # Remove SQL comments (lines starting with --) + schema_sql = join(filter(line -> !startswith(strip(line), "--"), split(schema_sql, '\n')), '\n') + statements = split(schema_sql, ';') + for stmt in statements + stmt_clean = strip(stmt) + if !isempty(stmt_clean) && !startswith(stmt_clean, "--") + try + DuckDB.DBInterface.execute(conn, stmt_clean) + catch e + println("Error executing statement: $stmt_clean") + rethrow(e) + end + end + end + + # Write system & simulation parameters to database + _write_db!(sf, flow, threshold, conn) + + # Write region names to database + _write_db!(sf.regions.names, conn) + + # Extract events from shortfall results + events = get_events(sf,threshold) + + # Write events to database (events, system metrics, regional metrics) + foreach(event -> _write_db!(event,conn), events) + + # Write time-series shortfall data for each event + sf_timeseries_allevents = Shortfall_timeseries.(events, sf) + foreach(sf_ts -> _write_db!(sf_ts,conn), sf_timeseries_allevents) + + # Write flow data if provided + if !isnothing(flow) + _write_db!(flow.interfaces, conn) + flow_timeseries_allevents = Flow_timeseries.(events, flow) + foreach(flow_ts -> _write_db!(flow_ts,conn), flow_timeseries_allevents) + end + + if internal_conn + DuckDB.DBInterface.close!(conn) + DuckDB.close_database(dbfile) + return + else + return conn + end + +end + +""" + get_db(system::SystemModel; + conn::Union{DuckDB.Connection,Nothing}=nothing, + threshold=0, + samples=1000, + seed=1) + +Perform PRAS simulation on the given system and write results to database +connection if provided or to a new database in the current working directory +from which function is called. 
+ +# Arguments +- `system::SystemModel`: PRAS system +- `samples`: Number of Monte Carlo samples (default: 1000) +- `seed`: Random seed for MC simulation (default: 1) +""" +function get_db(system::SystemModel; + conn::Union{DuckDB.Connection,Nothing}=nothing, + threshold=0, + samples=1000, + seed=1) + + # Run assessment with both Shortfall and Flow specifications + sf_result,flow_result = assess(system, + SequentialMonteCarlo(samples=samples,seed=seed), + Shortfall(),Flow() + ); + + # Call the main get_db function with the assessment results + return get_db(sf_result, flow_result; conn=conn, threshold=threshold) +end + +""" + get_db(system_path::AbstractString; + conn::Union{DuckDB.Connection,Nothing}=nothing, + threshold=0, + samples=1000, + seed=1) + +Load a SystemModel from file path, perform PRAS simulation on the given system. +Write results to database connection if provided or to a new database in the +current working directory from which function is called. + +# Arguments +- `system_path::AbstractString`: Path to the .pras file +""" +function get_db(system_path::AbstractString; + conn::Union{DuckDB.Connection,Nothing}=nothing, + threshold=0, + samples=1000, + seed=1) + + # Load the system model from file + system = SystemModel(system_path) + + # Call the SystemModel dispatch version + return get_db(system; conn=conn, threshold=threshold, samples=samples, seed=seed) +end + +# ============================================================================ +# Write functions - system, simulation global info +# ============================================================================ +""" + _write_db!(::ShortfallResult{N,L,T,E}, ::FlowResult{N,L,T,P}, + threshold::Int, conn::DuckDB.Connection) + +Write system and simulation parameters to the parameters table. +""" +function _write_db!(sf::ShortfallResult{N,L,T,E}, + ::FlowResult{N,L,T,P}, + threshold::Int64, + conn::DuckDB.Connection) where {N,L,T,P,E} + + try + + appender = DuckDB.Appender(conn, "systemsiminfo") + + try + DuckDB.append(appender, N) + DuckDB.append(appender, L) + DuckDB.append(appender, unitsymbol_long(T)) + DuckDB.append(appender, unitsymbol(P)) + DuckDB.append(appender, unitsymbol(E)) + DuckDB.append(appender, DateTime(first(sf.timestamps))) + DuckDB.append(appender, DateTime(last(sf.timestamps))) + DuckDB.append(appender, string(TimeZone(last(sf.timestamps)))) + DuckDB.append(appender, sf.nsamples) + DuckDB.append(appender, val(EUE(sf))) + DuckDB.append(appender, stderror(EUE(sf))) + DuckDB.append(appender, val(LOLE(sf))) + DuckDB.append(appender, stderror(LOLE(sf))) + DuckDB.append(appender, val(NEUE(sf))) + DuckDB.append(appender, stderror(NEUE(sf))) + DuckDB.append(appender, threshold) + DuckDB.end_row(appender) + DuckDB.flush(appender) + + finally + DuckDB.close(appender) + end + catch e + rethrow(e) + end +end + +""" + _write_db!(region_names::Vector{String}, conn::DuckDB.Connection) + +Write regions to the regions table. Call this once to populate the regions table. +""" +function _write_db!(region_names::Vector{String}, conn::DuckDB.Connection) + appender = DuckDB.Appender(conn, "regions") + + try + for (idx, region_name) in enumerate(region_names) + DuckDB.append(appender, idx) + DuckDB.append(appender, region_name) + DuckDB.end_row(appender) + end + + DuckDB.flush(appender) + + finally + DuckDB.close(appender) + end +end + +""" + _write_db!(interfaces::Vector{Pair{String,String}}, conn::DuckDB.Connection) + +Write interfaces from region pairs to the interfaces table. 
+Each tuple should be (region_from, region_to). +Assumes all regions already exist in the regions table. +Call this once to populate the interfaces table. +""" +function _write_db!(interfaces::Vector{Pair{String,String}}, conn::DuckDB.Connection) + # Get all region IDs and names once at the beginning + regions_result = DuckDB.execute(conn, "SELECT id, name FROM regions") |> columntable + region_name_to_id = Dict(zip(regions_result.name, regions_result.id)) + + appender = DuckDB.Appender(conn, "interfaces") + + try + for (idx, interface_pair) in enumerate(interfaces) + region_from, region_to = interface_pair.first, interface_pair.second + + from_id = get(region_name_to_id, region_from, nothing) + to_id = get(region_name_to_id, region_to, nothing) + + # Error if regions don't exist + isnothing(from_id) && error("Region '$region_from' not found in database") + isnothing(to_id) && error("Region '$region_to' not found in database") + + interface_name = "$region_from->$region_to" + + # Append row: id, region_from_id, region_to_id, name + DuckDB.append(appender, idx) + DuckDB.append(appender, from_id) + DuckDB.append(appender, to_id) + DuckDB.append(appender, interface_name) + DuckDB.end_row(appender) + end + + DuckDB.flush(appender) + + finally + DuckDB.close(appender) + end +end + +# ============================================================================ +# Write functions - events, event metrics, event time-series +# ============================================================================ +""" + _write_db!(events::Vector{Event}, conn::DuckDB.Connection) + +Write a vector of Event objects to the database using DuckDB Appender API for efficient bulk inserts. +Writes to: events, event_system_shortfall, event_regional_shortfall tables. +""" +function _write_db!(events::Vector{Event}, conn::DuckDB.Connection) + # Write each event individually to avoid memory issues with large datasets + for event in events + _write_db!(event, conn) + end +end + +""" + _write_db!(event::Event, conn::DuckDB.Connection) + +Write a single Event object to the database. +""" +function _write_db!(event::Event{N,L,T,E}, conn::DuckDB.Connection) where {N,L,T,E} + # Get region IDs in the same order as event.regions array + region_ids = get_region_ids_ordered(event.regions, conn) + + # Extract start and end timestamps + start_ts = DateTime(first(event.timestamps)) + end_ts = DateTime(last(event.timestamps)) + time_period_count = length(event.timestamps) + + result = DuckDB.execute(conn, + "INSERT INTO events (name, start_timestamp, end_timestamp, + time_period_count) VALUES (?, ?, ?, ?) 
+ RETURNING id", + [event.name, start_ts, end_ts, time_period_count] + ) |> columntable + + event_id = first(result.id) + + # Insert system-level metrics using Appender API + appender_system = DuckDB.Appender(conn, "event_system_shortfall") + try + DuckDB.append(appender_system, event_id) + DuckDB.append(appender_system, val(event.system_lole)) + DuckDB.append(appender_system, val(event.system_eue)) + DuckDB.append(appender_system, val(event.system_neue)) + DuckDB.end_row(appender_system) + DuckDB.flush(appender_system) + finally + DuckDB.close(appender_system) + end + + # Insert regional metrics using Appender API + + appender_regions = DuckDB.Appender(conn, "event_regional_shortfall") + try + for (i, region_id) in enumerate(region_ids) + # Append row: event_id, region_id, lole, eue, neue + # Note: skipping the 'id' column since it's auto-generated + DuckDB.append(appender_regions, event_id) + DuckDB.append(appender_regions, region_id) + DuckDB.append(appender_regions, val(event.lole[i])) + DuckDB.append(appender_regions, val(event.eue[i])) + DuckDB.append(appender_regions, val(event.neue[i])) + DuckDB.end_row(appender_regions) + end + + DuckDB.flush(appender_regions) + + finally + DuckDB.close(appender_regions) + end + + return +end + +""" + _write_db!(sf_ts::Shortfall_timeseries, conn::DuckDB.Connection) + +Write event shortfall time-series data to event_timeseries_shortfall table. +Gets the event_id from the database using the event name for consistency. +""" +function _write_db!(sf_ts::Shortfall_timeseries, conn::DuckDB.Connection) + # Get event_id from database using event name + event_result = DuckDB.execute(conn, "SELECT id FROM events WHERE name = ?", + [sf_ts.name]) |> columntable + isempty(event_result) && + error("Event '$(sf_ts.name)' not found in database. Write the event first.") + event_id = first(event_result.id) + + # Get region IDs in the same order as sf_ts.regions array + region_ids = get_region_ids_ordered(sf_ts.regions, conn) + + # Use Appender for efficient bulk insert + appender = DuckDB.Appender(conn, "event_timeseries_shortfall") + try + # Iterate through timestamps and regions + for (t_idx, timestamp) in enumerate(sf_ts.timestamps) + for (r_idx, region_id) in enumerate(region_ids) + # Append row: event_id, region_id, timestamp_value, lole, eue, neue + DuckDB.append(appender, event_id) + DuckDB.append(appender, region_id) + DuckDB.append(appender, DateTime(timestamp)) + DuckDB.append(appender, sf_ts.lole[t_idx][r_idx]) + DuckDB.append(appender, sf_ts.eue[t_idx][r_idx]) + DuckDB.append(appender, sf_ts.neue[t_idx][r_idx]) + DuckDB.end_row(appender) + end + end + + DuckDB.flush(appender) + + finally + DuckDB.close(appender) + end +end + +""" + _write_db!(flow_ts::flow_ts, conn::DuckDB.Connection) + +Write event flow time-series data to event_timeseries_flows table. +""" +function _write_db!(flow_ts::Flow_timeseries, conn::DuckDB.Connection) + + # Get event_id from database using event name + event_result = DuckDB.execute(conn, "SELECT id FROM events WHERE name = ?", + [flow_ts.name]) |> columntable + isempty(event_result) && + error("Event '$(flow_ts.name)' not found in database. 
Write the event first.") + event_id = first(event_result.id) + + # Get interface IDs in the same order as flow_ts.interfaces array + interface_ids = get_interface_ids_ordered(flow_ts.interfaces, conn) + + # Use Appender for efficient bulk insert + appender = DuckDB.Appender(conn, "event_timeseries_flows") + try + # Iterate through timestamps and interfaces + for (t_idx, timestamp) in enumerate(flow_ts.timestamps) + for (i_idx, interface_id) in enumerate(interface_ids) + # Append row: event_id, interface_id, timestamp_value, flow + DuckDB.append(appender, event_id) + DuckDB.append(appender, interface_id) + DuckDB.append(appender, DateTime(timestamp)) + DuckDB.append(appender, flow_ts.flow[t_idx][i_idx]) # Extract value from NEUE + DuckDB.end_row(appender) + end + end + + DuckDB.flush(appender) + + finally + DuckDB.close(appender) + end +end + +# ============================================================================ +# Helper Functions +# ============================================================================ + +""" + get_region_ids_ordered(region_names::Vector{String}, conn::DuckDB.Connection) -> Vector{Int} + +Get region IDs in the same order as the region_names array. +Assumes all regions exist in the database. +""" +function get_region_ids_ordered(region_names::Vector{String}, conn::DuckDB.Connection) + region_ids = Vector{Int}() + + for region_name in region_names + result = DuckDB.execute(conn, "SELECT id FROM regions WHERE name = ?", [region_name]) |> columntable + isempty(result) && error("Region '$region_name' not found in database") + push!(region_ids, first(result.id)) + end + + return region_ids +end + +""" + get_interface_ids_ordered(interface_names::Vector{String}, conn::DuckDB.Connection) -> Vector{Int} + +Get interface IDs in the same order as the interface_names array. +Assumes all interfaces exist in the database. 
+""" +function get_interface_ids_ordered(interface_names::Vector{Pair{String,String}}, conn::DuckDB.Connection) + interface_ids = Vector{Int}() + + for interface_name in interface_names + iname_db = "$(interface_name.first)->$(interface_name.second)" + result = DuckDB.execute(conn, "SELECT id FROM interfaces WHERE name = ?", [iname_db]) |> columntable + isempty(result) && + error("Interface '$interface_name' not found in database") + + push!(interface_ids, first(result.id)) + end + + return interface_ids +end + diff --git a/PRASReport.jl/test/events.jl b/PRASReport.jl/test/events.jl new file mode 100644 index 00000000..7104c251 --- /dev/null +++ b/PRASReport.jl/test/events.jl @@ -0,0 +1,17 @@ +@testset "Test get_stepranges" begin + timestamps = ZonedDateTime.(DateTime(2023,1,1,1):Hour(1):DateTime(2023,1,1,11), tz"UTC") + selected_times = [timestamps[2], timestamps[3], timestamps[5], timestamps[6], timestamps[7], timestamps[9]] + step_ranges = PRASReport.get_stepranges(selected_times, 1, Hour) + + @test length(step_ranges) == 3 + @test step_ranges[1] == (timestamps[2]:Hour(1):timestamps[3]) + @test step_ranges[2] == (timestamps[5]:Hour(1):timestamps[7]) + @test step_ranges[3] == (timestamps[9]:Hour(1):timestamps[9]) +end + +@testset "Test get_events" begin + sf,flow = assess(system,SequentialMonteCarlo(samples=100),Shortfall(),Flow()); + + @test_throws "No shortfall events in this simulation" get_events(sf) + +end \ No newline at end of file diff --git a/PRASReport.jl/test/report.jl b/PRASReport.jl/test/report.jl new file mode 100644 index 00000000..d748411c --- /dev/null +++ b/PRASReport.jl/test/report.jl @@ -0,0 +1,43 @@ +@testset "String input to create_pras_report" begin + system_path = joinpath(@__DIR__, "../../PRASFiles.jl/src/Systems/rts.pras") + + string_test = @capture_out begin + create_pras_report(system_path, samples=1000, seed=1, + report_name="string_test") + end + string_test = strip(string_test) + @test startswith(string_test,"Writing report to:") + @test contains(string_test, "string_test.html") + string_test = replace(string_test, r"Writing report to: " => "") + @test isfile(string_test) + rm(string_test; force=true) +end + +@testset "SystemModel input to create_pras_report" begin + string_test = @capture_out begin + create_pras_report(system, samples=1000, seed=1, + report_name="sysmodel_test") + end + string_test = strip(string_test) + @test startswith(string_test,"Writing report to:") + @test contains(string_test, "sysmodel_test.html") + string_test = replace(string_test, r"Writing report to: " => "") + @test isfile(string_test) + rm(string_test, force=true) +end + +@testset "ShortfallResult, FlowResult input to create_pras_report" begin + sf,flow = assess(system,SequentialMonteCarlo(samples=1000,seed=1), + Shortfall(),Flow()) + + string_test = @capture_out begin + create_pras_report(sf,flow, + report_name="sfflow_test") + end + string_test = strip(string_test) + @test startswith(string_test,"Writing report to:") + @test contains(string_test, "sfflow_test.html") + string_test = replace(string_test, r"Writing report to: " => "") + @test isfile(string_test) + rm(string_test, force=true) +end diff --git a/PRASReport.jl/test/runtests.jl b/PRASReport.jl/test/runtests.jl new file mode 100644 index 00000000..abbffea9 --- /dev/null +++ b/PRASReport.jl/test/runtests.jl @@ -0,0 +1,23 @@ +using Test +using Dates +using PRASReport +using PRASCore +using PRASFiles +using TimeZones +using Suppressor + +# Test copperplate +# Test get functions from database for wrong names +system = 
rts_gmlc() + +@testset "PRASReport.jl Tests" begin + @testset "Test events" begin + include("events.jl") + end + + @testset "Test html report generation" begin + + include("report.jl") + + end +end
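
Not part of the diff: a sketch of how the standalone `.duckdb` file written by `get_db` (when no connection is passed) could be inspected interactively. The filename is an assumption (`get_db` names the file with a UTC timestamp); table and column names follow `event_db_schema.sql`, and the DuckDB calls mirror those already used in `writedb.jl`.

```julia
using DuckDB
using Tables: columntable

# Open the database produced by get_db (replace with the actual timestamped filename).
db = DuckDB.open("2025-01-01_000000UTC.duckdb")
conn = DuckDB.connect(db)

# Per-event system shortfall metrics, joined to the events table
# defined in event_db_schema.sql.
events = DuckDB.execute(conn, """
    SELECT e.name, e.start_timestamp, e.end_timestamp,
           s.lole, s.eue, s.neue
    FROM events e
    JOIN event_system_shortfall s ON s.event_id = e.id
    ORDER BY e.start_timestamp
    """) |> columntable

DuckDB.DBInterface.close!(conn)
DuckDB.close_database(db)
```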