Commit: Add files via upload

atribera authored Dec 11, 2024
1 parent 5b97cbd commit 0e25135
Showing 17 changed files with 87,629 additions and 0 deletions.
Empty file added progress/__init__.py
91 changes: 91 additions & 0 deletions progress/data_download_process.py
@@ -0,0 +1,91 @@
import os

import numpy as np
import pandas as pd
import yaml

from mod_wind import Wind
from mod_solar import Solar
from mod_kmeans import KMeans_Pipeline


class DataProcess:

def __init__(self, input_file):
# open configuration file
with open(input_file, 'r') as f:
self.config = yaml.safe_load(f)

def ProcessWindData(self):

wind_directory = self.config['data'] + '/Wind'

        # download and process wind data; the concatenated path is always a
        # non-empty string, so check that the directory actually exists
        if os.path.isdir(wind_directory):

print("Downloading and processing wind data ...")

wind_sites = wind_directory + '/wind_sites.csv'
wind_power_curves = wind_directory + '/w_power_curves.csv'
windspeed_data = wind_directory + '/windspeed_data.csv'
wind_tr_rate = wind_directory + '/t_rate.xlsx'

wind = Wind()

# download wind data
wind.DownloadWindData(wind_directory, wind_sites, self.config['api_key'], self.config['email'], self.config['affiliation'], \
self.config['year_start_w'], self.config['year_end_w'])

w_sites, farm_name, zone_no, w_classes, w_turbines, r_cap, p_class, out_curve2, out_curve3,\
start_speed = wind.WindFarmsData(wind_sites, wind_power_curves)

# calculate transition rates
wind.CalWindTrRates(wind_directory, windspeed_data, wind_sites, wind_power_curves)

tr_mats = pd.read_excel(wind_tr_rate, sheet_name=None)
tr_mats = np.array([tr_mats[sheet_name].to_numpy() for sheet_name in tr_mats])

return

def ProcessSolarData(self):

solar_directory = self.config['data'] + '/Solar'

        # download and process solar data, again checking that the directory exists
        if os.path.isdir(solar_directory):

print("Downloading and processing solar data ...")

solar_site_data = solar_directory+"/solar_sites.csv"
solar_prob_data = solar_directory+"/solar_probs.csv"

solar = Solar(solar_site_data, solar_directory)

# download weather data and calculate solar generation
solar.SolarGen(self.config['api_key'], self.config['name'], self.config['affiliation'], \
self.config['email'], self.config['year_start_s'], self.config['year_end_s'])

# process data for input into k-means code
solar.SolarGenGather(self.config['year_start_s'], self.config['year_end_s'])

# Initialize the KMeans_Pipeline class
pipeline = KMeans_Pipeline(solar_directory, solar_site_data)

# Run the pipeline before performing any other actions
pipeline.run(n_clusters = 10)

# Calculate the cluster probabilities and save them to a CSV file
pipeline.calculate_cluster_probability()

# Split the data and cluster them based on the generated labels
pipeline.split_and_cluster_data()

s_sites, s_zone_no, s_max, s_profiles, solar_prob = solar.GetSolarProfiles(solar_prob_data)

print("Solar data processing complete!")

return

if __name__ == "__main__":

data = DataProcess('input.yaml')
data.ProcessWindData()
data.ProcessSolarData()
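
Note: both this script and example_simulation.py below read their settings from input.yaml via yaml.safe_load, but the file itself is not part of this commit. The sketch below is inferred purely from the config keys the two scripts look up (data, api_key, name, email, affiliation, year_start_w/year_end_w, year_start_s/year_end_s, samples, sim_hours, load_factor, model); every value shown is a placeholder, not a shipped default.

data: /path/to/data              # root folder containing System/, Wind/, Solar/
api_key: YOUR_API_KEY            # credentials passed to the data-download calls
name: Jane Doe
email: jane.doe@example.org
affiliation: Example Lab
year_start_w: 2007               # wind data year range
year_end_w: 2012
year_start_s: 2007               # solar data year range
year_end_s: 2012
samples: 1000                    # Monte Carlo samples
sim_hours: 8760                  # hours per sample (one year)
load_factor: 1.0                 # scales the regional load profiles
model: Zonal                     # 'Zonal' or 'Copper Sheet'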
37 changes: 37 additions & 0 deletions progress/example_job.bash
@@ -0,0 +1,37 @@
#!/bin/bash
## Do not put any commands or blank lines before the #SBATCH lines
#SBATCH --nodes=8 # Number of nodes - all cores per node are allocated to the job
#SBATCH --time=00:00:00 # Wall clock time (HH:MM:SS) - once the job exceeds this time, the job will be terminated (default is 5 minutes)
#SBATCH --account=XXXXXXX # (optional, organization dependent)
#SBATCH --job-name=progress # Name of job
#SBATCH --partition=batch # partition/queue name: short or batch (optional, organization dependent)
# short: 4hrs wallclock limit
# batch: nodes reserved for > 4hrs (default)
#SBATCH --qos=normal # Quality of Service: long, large, priority or normal (optional, organization dependent)
# normal: request up to 48hrs wallclock (default)
# long: request up to 96hrs wallclock and no larger than 64nodes
# large: greater than 50% of cluster (special request)
# priority: High priority jobs (special request)

nodes=$SLURM_JOB_NUM_NODES # Number of nodes - the number of nodes you have requested (for a list of SLURM environment variables see "man sbatch")
cores=36 # Number MPI processes to run on each node (a.k.a. PPN)
# No. of cores per node will vary
# using openmpi-intel/1.10
export PATH=$PATH:$HOME/path/to/anaconda3/version/bin ## Add any required binaries to the PATH environment variable
source activate progress ## Activate the required anaconda environment (if any)
scontrol show job $SLURM_JOB_ID ### write job information to output file
# conda deactivate ## Deactivate the previously activated anaconda environment (if any)
mpiexec --bind-to core --npernode $cores --n $(($cores*$nodes)) python /path/to/mult_proc_example_simulation.py

# Note 1: This will start ($nodes * $cores) total MPI processes using $cores per node.
# If you want some other number of processes, add "-np N" after the mpiexec, where N is the total you want.
# Example: mpiexec -np 44 ......(for a 2 node job, this will load 36 processes on the first node and 8 processes on the second node)
# If you want a specific number of process to run on each node, (thus increasing the effective memory per core), use the --npernode option.
# Example: mpiexec -np 24 --npernode 12 ......(for a 2 node job, this will load 12 processes on each node)

# For openmpi 1.10: mpiexec --bind-to core --npernode 8 --n PUT_THE_TOTAL_NUMBER_OF_MPI_PROCESSES_HERE /path/to/executable [--args...]

# To submit your job, do:
# sbatch <script_name>
#
# The Slurm output file will, by default, be written to the directory you submitted your job from (slurm-JOBID.out)
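
The mpiexec line above launches mult_proc_example_simulation.py, which is not included in this commit. As a rough illustration only (an assumption, not the shipped driver), independent Monte Carlo samples could be split across MPI ranks with mpi4py along these lines:

from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()

total_samples = 1000                    # hypothetical total sample budget
my_samples = total_samples // size      # share handled by this rank

rng = np.random.default_rng(seed=rank)  # distinct random stream per rank
# stand-in for running my_samples replications of the sequential MCS;
# a real driver would call into example_simulation.py's machinery instead
my_lolp = float(np.mean(rng.uniform(size=my_samples) < 0.01))

all_lolp = comm.gather(my_lolp, root=0)
if rank == 0:
    print(f"pooled LOLP estimate: {np.mean(all_lolp):.6f}")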
248 changes: 248 additions & 0 deletions progress/example_simulation.py
@@ -0,0 +1,248 @@
import numpy as np
from time import perf_counter
from pyomo.environ import *
import copy
import pandas as pd
import os
import yaml

from mod_sysdata import RASystemData
from mod_solar import Solar
from mod_wind import Wind
from mod_utilities import RAUtilities
from mod_matrices import RAMatrices
from mod_plot import RAPlotTools
from mod_kmeans import KMeans_Pipeline

def MCS(input_file):
    '''Performs mixed time-sequential Monte Carlo simulation (MCS) using methods from the different RA modules.'''

# open configuration file
with open(input_file, 'r') as f:
config = yaml.safe_load(f)

    # data file locations
    system_directory = config['data'] + '/System'
    solar_directory = config['data'] + '/Solar'
    wind_directory = config['data'] + '/Wind'

    # the concatenated paths are always non-empty strings (hence always truthy),
    # so record whether each resource directory actually exists
    have_wind = os.path.isdir(wind_directory)
    have_solar = os.path.isdir(solar_directory)

# Monte Carlo simulation parameters
samples = config['samples']
sim_hours = config['sim_hours']

# system data
data_gen = system_directory + '/gen.csv'
data_branch = system_directory + '/branch.csv'
data_bus = system_directory + '/bus.csv'
data_load = system_directory + '/load.csv'
data_storage = system_directory + '/storage.csv'
BMva = 100

rasd = RASystemData()
genbus, ng, pmax, pmin, FOR_gen, MTTF_gen, MTTR_gen, gencost = rasd.gen(data_gen)
nl, fb, tb, cap_trans, MTTF_trans, MTTR_trans = rasd.branch(data_branch)
bus_name, bus_no, nz = rasd.bus(data_bus)
load_all_regions = rasd.load(bus_name, data_load)

essname, essbus, ness, ess_pmax, ess_pmin, ess_duration, ess_socmax, ess_socmin, ess_eff, \
disch_cost, ch_cost, MTTF_ess, MTTR_ess, ess_units = rasd.storage(data_storage)

raut = RAUtilities()
mu_tot, lambda_tot = raut.reltrates(MTTF_gen, MTTF_trans, MTTR_gen, MTTR_trans, MTTF_ess, MTTR_ess)
    cap_max, cap_min = raut.capacities(nl, pmax, pmin, ess_pmax, ess_pmin, cap_trans) # capacity bounds for gens, ESS, and transmission lines

    # download and process wind data
    if have_wind:

wind_sites = wind_directory + '/wind_sites.csv'
wind_power_curves = wind_directory + '/w_power_curves.csv'
windspeed_data = wind_directory + '/windspeed_data.csv'
wind_tr_rate = wind_directory + '/t_rate.xlsx'

wind = Wind()
w_sites, farm_name, zone_no, w_classes, w_turbines, r_cap, p_class, out_curve2, out_curve3,\
start_speed = wind.WindFarmsData(wind_sites, wind_power_curves)

# calculate transition rates
wind.CalWindTrRates(wind_directory, windspeed_data, wind_sites, wind_power_curves)

tr_mats = pd.read_excel(wind_tr_rate, sheet_name=None)
tr_mats = np.array([tr_mats[sheet_name].to_numpy() for sheet_name in tr_mats])

    # download and process solar data
    if have_solar:

solar_site_data = solar_directory+"/solar_sites.csv"
solar_prob_data = solar_directory+"/solar_probs.csv"

solar = Solar(solar_site_data, solar_directory)

s_sites, s_zone_no, s_max, s_profiles, solar_prob = solar.GetSolarProfiles(solar_prob_data)

print("Solar data processing complete!")

# matrices required for optimization
ramat = RAMatrices(nz)
gen_mat = ramat.genmat(ng, genbus, ness, essbus)
ch_mat = ramat.chmat(ness, essbus, nz)
A_inc = ramat.Ainc(nl, fb, tb)
curt_mat = ramat.curtmat(nz)

# dictionary for storing temp. index values
indices_rec = {"LOLP_rec": np.zeros(samples), "EUE_rec": np.zeros(samples), "MDT_rec": np.zeros(samples), \
"LOLF_rec": np.zeros(samples), "EPNS_rec": np.zeros(samples), "LOLP_hr": np.zeros(sim_hours), \
"LOLE_rec": np.zeros(samples),"mLOLP_rec":np.zeros(samples), "COV_rec": np.zeros(samples)}

LOL_track = np.zeros((samples, sim_hours))

tic = perf_counter()

for s in range(samples):

print(f'Sample: {s+1}')

# temp variables to be used for each sample
var_s = {"t_min": 0, "LLD": 0, "curtailment": np.zeros(sim_hours), "label_LOLF": np.zeros(sim_hours), "freq_LOLF": 0, "LOL_days": 0, \
"outage_day": np.zeros(365)}

# current states of components
current_state = np.ones(ng + nl + ness) # all gens and TLs in up state at the start of the year

        if have_wind:
current_w_class = np.floor(np.random.uniform(0, 1, w_sites)*w_classes).astype(int) # starting wind speed class for each site (random)

# record data for plotting and exporting (optional)
renewable_rec = {"wind_rec": np.zeros((nz, sim_hours)), "solar_rec": np.zeros((nz, sim_hours)), "congen_temp": 0, \
"rengen_temp": 0}

SOC_old = 0.5*(np.multiply(np.multiply(ess_pmax, ess_duration), ess_socmax))/BMva
SOC_rec = np.zeros((ness, sim_hours))
curt_rec = np.zeros(sim_hours)
# gen_rec = np.zeros((sim_hours, ng))

for n in range(sim_hours):

            # get current states (up/down) and capacities of all system components
next_state, current_cap, var_s["t_min"] = raut.NextState(var_s["t_min"], ng, ness, nl, \
lambda_tot, mu_tot, current_state, cap_max, cap_min, ess_units)
current_state = copy.deepcopy(next_state)

# update SOC based on failures in ESS
ess_smax, ess_smin, SOC_old = raut.updateSOC(ng, nl, current_cap, ess_pmax, ess_duration, ess_socmax, ess_socmin, SOC_old)

# calculate upper and lower bounds of gens and tls
gt_limits = {"g_lb": np.concatenate((current_cap["min"][0:ng]/BMva, current_cap["min"][ng + nl::]/BMva)), \
"g_ub": np.concatenate((current_cap["max"][0:ng]/BMva, current_cap["max"][ng + nl::]/BMva)), "tl": current_cap["max"][ng:ng + nl]/BMva}

            # Pyomo bound rules for generator output, line flows, ESS power, and SOC
            def fb_Pg(model, i):
                return (gt_limits["g_lb"][i], gt_limits["g_ub"][i])

            def fb_flow(model, i):
                return (-gt_limits["tl"][i], gt_limits["tl"][i])

            def fb_ess(model, i):
                return (-current_cap["max"][ng + nl::][i]/BMva, current_cap["min"][ng + nl::][i]/BMva)

            def fb_soc(model, i):
                return (ess_smin[i]/BMva, ess_smax[i]/BMva)

            # get wind power output for all zones/areas
            if have_wind:
w_zones, current_w_class = raut.WindPower(nz, w_sites, zone_no, \
w_classes, r_cap, current_w_class, tr_mats, p_class, w_turbines, out_curve2, out_curve3)

            # get solar power output for all zones/areas
            if have_solar:
s_zones = raut.SolarPower(n, nz, s_zone_no, solar_prob, s_profiles, s_sites, s_max)

            # record wind and solar profiles for plotting (optional)
            if have_wind:
                renewable_rec["wind_rec"][:, n] = w_zones

            if have_solar:
                s_zones_t = np.transpose(s_zones)
                renewable_rec["solar_rec"][:, n] = s_zones_t[:, n%24]

# recalculate net load (for distribution side resources, optional)
part_netload = config['load_factor']*load_all_regions

            if have_solar and have_wind:
                net_load = part_netload[n] - w_zones - s_zones[n%24]
            elif have_wind:
                net_load = part_netload[n] - w_zones
            elif have_solar:
                net_load = part_netload[n] - s_zones[n%24]
            else:
                net_load = part_netload[n]

            # optimize dispatch and calculate load curtailment

if config['model'] == 'Zonal':
load_curt, SOC_old = raut.OptDispatch(ng, nz, nl, ness, fb_ess, fb_soc, BMva, fb_Pg, fb_flow, A_inc, gen_mat, curt_mat, ch_mat, \
gencost, net_load, SOC_old, ess_pmax, ess_eff, disch_cost, ch_cost)
elif config['model'] == 'Copper Sheet':
load_curt, SOC_old = raut.OptDispatchLite(ng, nz, ness, fb_ess, fb_soc, BMva, fb_Pg, A_inc, \
gencost, net_load, SOC_old, ess_pmax, ess_eff, disch_cost, ch_cost)

# record values for visualization purposes
SOC_rec[:, n] = SOC_old*BMva
curt_rec[n] = load_curt*BMva

# track loss of load states
var_s, LOL_track = raut.TrackLOLStates(load_curt, BMva, var_s, LOL_track, s, n)

if (n+1)%100 == 0:
print(f'Hour {n + 1}')

# collect indices for all samples
indices_rec = raut.UpdateIndexArrays(indices_rec, var_s, sim_hours, s)

# check for convergence using LOLP and COV
indices_rec["mLOLP_rec"][s] = np.mean(indices_rec["LOLP_rec"][0:s+1])
var_LOLP = np.var(indices_rec["LOLP_rec"][0:s+1])
indices_rec["COV_rec"][s] = np.sqrt(var_LOLP)/indices_rec["mLOLP_rec"][s]

        print(f'COV after sample {s + 1}: {indices_rec["COV_rec"][s]:.4f}')

# calculate reliability indices for the MCS
indices = raut.GetReliabilityIndices(indices_rec, sim_hours, samples)

toc = perf_counter()
print(f"Codes finished in {toc-tic} seconds")

main_folder = os.path.dirname(os.path.abspath(__file__))

if not os.path.exists(f"{main_folder}/Results"):
os.makedirs(f"{main_folder}/Results")

df = pd.DataFrame([indices])
df.to_csv(f"{main_folder}/Results/indices.csv", index=False)

if sim_hours == 8760:

raut.OutageHeatMap(LOL_track, 1, samples, main_folder)

return(indices, SOC_rec, curt_rec, renewable_rec, bus_name, essname, main_folder, sim_hours, \
samples, indices_rec["mLOLP_rec"], indices_rec["COV_rec"])

# =========================================================================================
# SIMULATION
# =========================================================================================

if __name__ == "__main__":

# run MCS
indices, SOC_rec, curt_rec, renewable_rec, bus_name, essname, main_folder, sim_hours, \
samples, mLOLP_rec, COV_rec = MCS('input.yaml')

# plot results
rapt = RAPlotTools(main_folder)
rapt.PlotSolarGen(renewable_rec["solar_rec"], bus_name)
rapt.PlotWindGen(renewable_rec["wind_rec"], bus_name)
rapt.PlotSOC(SOC_rec, essname)
rapt.PlotLoadCurt(curt_rec)
rapt.PlotLOLP(mLOLP_rec, samples, 1)
rapt.PlotCOV(COV_rec, samples, 1)
if sim_hours == 8760:
rapt.OutageMap(f"{main_folder}/Results/LOL_perc_prob.csv")
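
The sample loop already tracks the running mean LOLP and its coefficient of variation (mLOLP_rec and COV_rec) but runs every requested sample regardless. If early stopping is wanted, a COV-based rule is the usual choice; the following sketch is an assumption about how one might bolt it on (the 0.05 tolerance is a conventional value, not one taken from this code):

import numpy as np

def converged(lolp_rec, s, tol=0.05):
    '''True once the COV of the running LOLP estimate drops below tol.'''
    m = np.mean(lolp_rec[:s + 1])
    if m == 0.0:
        return False                # no loss-of-load events observed yet
    cov = np.sqrt(np.var(lolp_rec[:s + 1])) / m
    return cov < tol

# inside the per-sample loop in MCS(), after indices_rec is updated:
#     if converged(indices_rec["LOLP_rec"], s):
#         break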
