Completed SensitivityAnalyzer.makeData
joseph-hellerstein committed Nov 2, 2023
1 parent 00776d0 commit 909f1bd
Showing 3 changed files with 45 additions and 28 deletions.
7 changes: 6 additions & 1 deletion src/Oscillators/constants.py
@@ -60,4 +60,9 @@
PLOT_DIR = os.path.join(PROJECT_DIR, "plots")
EVALUATION_CSV = os.path.join(DATA_DIR, "evaluation_data.csv")
EVALUATION_PLOT_PATH = os.path.join(PLOT_DIR, "evaluation_plot.pdf")
HISTOGRAM_PLOT_PATH = os.path.join(PLOT_DIR, "histogram_plot.pdf")
HISTOGRAM_PLOT_PATH = os.path.join(PLOT_DIR, "histogram_plot.pdf")

# Data fields
C_NONOSCILLATING = "nonoscillating"
C_INFEASIBLE = "infeasible"
C_SAMPLE_SIZE = "sample_size"
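
The three new field names key the one-row summary record that makeData writes for each deviation level (see other_ser in sensitivity_analyzer.py below). A minimal sketch of writing and reading back such a record, with made-up values and a hypothetical output path:

import pandas as pd

# Field names as defined above in constants.py
C_NONOSCILLATING = "nonoscillating"
C_INFEASIBLE = "infeasible"
C_SAMPLE_SIZE = "sample_size"

# Illustrative values only
other_ser = pd.Series([0.05, 0.02, 1000],
                      index=[C_NONOSCILLATING, C_INFEASIBLE, C_SAMPLE_SIZE])
other_ser.to_csv("other.csv")

# Recover the record; the saved index becomes the row labels
recovered = pd.read_csv("other.csv", index_col=0).squeeze("columns")
print(recovered[C_SAMPLE_SIZE])
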
48 changes: 30 additions & 18 deletions src/Oscillators/sensitivity_analyzer.py
@@ -17,7 +17,7 @@
SENSITIVITY_DATA_DIR = os.path.join("%s", "sensitivity_data")
DEVIATION_DIR = os.path.join(SENSITIVITY_DATA_DIR, "%s")
MEAN_PATH = os.path.join(DEVIATION_DIR, "mean.csv")
STD_PATH = os.path.join(DEVIATION_DIR, "mean.csv")
STD_PATH = os.path.join(DEVIATION_DIR, "std.csv")
OTHER_PATH = os.path.join(DEVIATION_DIR, "other.csv")
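
These module constants are %-style path templates: the first slot takes the data directory and the second the fractional deviation, as makeData does below. A small sketch of how they expand on a POSIX system, using a hypothetical data directory:

import os

SENSITIVITY_DATA_DIR = os.path.join("%s", "sensitivity_data")
DEVIATION_DIR = os.path.join(SENSITIVITY_DATA_DIR, "%s")
MEAN_PATH = os.path.join(DEVIATION_DIR, "mean.csv")

data_dir = "/tmp/oscillator_data"  # hypothetical data directory
frac = 0.1                         # fractional deviation

print(SENSITIVITY_DATA_DIR % data_dir)    # /tmp/oscillator_data/sensitivity_data
print(MEAN_PATH % (data_dir, str(frac)))  # /tmp/oscillator_data/sensitivity_data/0.1/mean.csv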


Expand All @@ -43,7 +43,7 @@ def __init__(self, baseline_parameter_dct=None):
Solver.calculateDependentParameters(self.baseline_parameter_dct)
self.solver = Solver()
self.solver.solve()
self.oc_df, self.baseline_oc_df = self.makeOscillationCharacteristicsDF()
self.oc_expression_df, self.baseline_oc_value_df = self.makeOscillationCharacteristicsDF()

def makeOscillationCharacteristicsDF(self):
"""
@@ -75,7 +75,7 @@ def makeOscillationCharacteristicsDF(self):

def _depreacatedGetRandomValues(self, x_term, parameter_name, cv, num_sample):
"""Returns a random value of the parameter"""
std = self.baseline_oc_df.loc[parameter_name, x_term]*cv
std = self.baseline_oc_value_df.loc[parameter_name, x_term]*cv
result = np.random.normal(self.baseline_parameter_df.loc[parameter_name, x_term], std, num_sample)
return result

@@ -157,7 +157,7 @@ def makeErrorStatistics(self, frac_deviation=1, num_sample=NUM_SAMPLE):
continue
for oc in cn.OSCILLATION_CHARACTERISTICS:
# Calculate the oscillation characteristic
sample_value = float(sp.N(self.oc_df.loc[oc, x_term].subs(symbol_dct)))
sample_value = float(sp.N(self.oc_expression_df.loc[oc, x_term].subs(symbol_dct)))
oc_sample_dct[x_term][oc].append(sample_value)
# Calculate instances of negative concentrations, which are infeasible
negative_arr = np.repeat(0, num_sample)
@@ -171,7 +171,7 @@
std_dct = self._initializeTwoLevelDct()
for x_term in X_TERMS:
for oc in cn.OSCILLATION_CHARACTERISTICS:
baseline_value = self.baseline_oc_df.loc[oc, x_term]
baseline_value = self.baseline_oc_value_df.loc[oc, x_term]
abs_error_arr = np.abs((np.array(oc_sample_dct[x_term][oc]) - baseline_value)/baseline_value)
mean_dct[x_term][oc] = np.mean(abs_error_arr)
std_dct[x_term][oc] = np.std(abs_error_arr)
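
The statistics collected here are the mean and standard deviation of the absolute relative error of each oscillation characteristic against its baseline value. A toy numpy sketch of the same calculation, with made-up sample values:

import numpy as np

baseline_value = 2.0                      # baseline oscillation characteristic
samples = np.array([1.8, 2.1, 2.4, 1.9])  # characteristic recomputed from perturbed parameters

abs_error_arr = np.abs((samples - baseline_value) / baseline_value)
print(np.mean(abs_error_arr), np.std(abs_error_arr))
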
@@ -189,26 +189,38 @@ def makeData(self, frac_deviations, num_sample=NUM_SAMPLE, data_dir=cn.DATA_DIR)
Args:
frac_deviations (_type_): _description_
num_sample (_type_, optional): _description_. Defaults to NUM_SAMPLE.
Notes
Retrieve data with index using: pd.read_csv(path_dir[MEAN], index_col=0)
"""
MEAN = "mean"
STD = "std"
OTHER = "other"
cur_dir = SENSITIVITY_DATA_DIR % data_dir
import pdb; pdb.set_trace()
if not os.path.isdir(cur_dir):
os.mkdir(cur_dir)
for frac in frac_deviations:
path_dir = {"mean": MEAN_PATH % (data_dir, str(frac)),
"std": STD_PATH % (data_dir, str(frac)),
"other": OTHER_PATH % (data_dir, str(frac))
sub_dir = os.path.join(cur_dir, str(frac))
if not os.path.isdir(sub_dir):
os.mkdir(sub_dir)
path_dir = {MEAN: MEAN_PATH % (data_dir, str(frac)),
STD: STD_PATH % (data_dir, str(frac)),
OTHER: OTHER_PATH % (data_dir, str(frac))
}
import pdb; pdb.set_trace()
if not os.path.isdir(deviation_dir):
os.mkdir(deviation_dir)
mean_path = MEAN_PATH % frac
std_path = STD_PATH % frac
other_path = OTHER_PATH % frac
statistics = self.makeErrorStatistics(frac_deviation=frac, num_sample=num_sample)
statistics.mean_df.to_csv(mean_path)
statistics.std_df.to_csv(std_path)
statistics.mean_df.to_csv(path_dir[MEAN])
statistics.std_df.to_csv(path_dir[STD])
other_df = pd.DataFrame({cn.C_NONOSCILLATING: [statistics.frac_nonoscillating],
cn.C_INFEASIBLE: [statistics.frac_infeasible],
cn.C_SAMPLE_SIZE: [statistics.sample_size]})
other_df.to_csv(other_path)
other_ser = pd.Series([statistics.frac_nonoscillating,
statistics.frac_infeasible,
statistics.sample_size], index=[cn.C_NONOSCILLATING,
cn.C_INFEASIBLE,
cn.C_SAMPLE_SIZE])
other_ser.to_csv(path_dir[OTHER])


if __name__ == "__main__":
analyzer = SensitivityAnalyzer()
analyzer.makeData(frac_deviations=[0.1, 0.2, 0.5], num_sample=200)
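
Per the note in the makeData docstring, each CSV can be read back with its index intact via index_col=0. A minimal read-back sketch, assuming the directory layout created above and a hypothetical data directory:

import os
import pandas as pd

data_dir = "/tmp/oscillator_data"  # hypothetical; use the data_dir passed to makeData
frac = 0.1

deviation_dir = os.path.join(data_dir, "sensitivity_data", str(frac))
mean_df = pd.read_csv(os.path.join(deviation_dir, "mean.csv"), index_col=0)
std_df = pd.read_csv(os.path.join(deviation_dir, "std.csv"), index_col=0)
other_ser = pd.read_csv(os.path.join(deviation_dir, "other.csv"), index_col=0).squeeze("columns")

print(mean_df)  # mean absolute relative error by term and oscillation characteristic
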
18 changes: 9 additions & 9 deletions tests/test_sensitivity_analyzer.py
@@ -4,12 +4,11 @@

import numpy as np
import os
import pandas as pda
import pandas as pd
import shutil
import sympy as sp
import unittest

IGNORE_TEST = True
IGNORE_TEST = False
IS_PLOT = False
ANALYZER = SensitivityAnalyzer() # Used for debugging individual tests
TEST_DIR = os.path.dirname(os.path.abspath(__file__)) # This directory
@@ -27,6 +26,9 @@ def tearDown(self):
self.remove()

def remove(self):
if IGNORE_TEST:
# Keep data if debugging
return
temp_dir = sa.SENSITIVITY_DATA_DIR % TEST_DIR
if os.path.isdir(temp_dir):
shutil.rmtree(temp_dir)
@@ -35,7 +37,7 @@ def testConstructor(self):
if IGNORE_TEST:
return
self.assertTrue(isinstance(self.analyzer.baseline_oc_value_df, pd.DataFrame))
self.assertTrue(isinstance(self.analyzer.baseline_oc_value_df.loc[cn.C_ALPHA, cn.C_X1], float))
self.assertTrue(self.analyzer.oc_expression_df.loc[cn.C_ALPHA, cn.C_X1] is not None)

def testGetRandomValues(self):
if IGNORE_TEST:
@@ -60,8 +62,8 @@ def testMakeRandomParameterDct(self):
return
dct = self.analyzer._makeRandomParameterDct()
self.assertTrue(isinstance(dct, dict))
self.assertTrue(isinstance(dct[cn.C_X1], dict))
self.assertTrue(isinstance(dct[cn.C_X1][cn.C_THETA], np.ndarray))
self.assertTrue(isinstance(dct, dict))
self.assertTrue(isinstance(dct[cn.C_K1], np.ndarray))

def testMakeDataFrameFromTwoLevelDct(self):
if IGNORE_TEST:
Expand All @@ -86,9 +88,7 @@ def testMakeData(self):
#if IGNORE_TEST:
# return
self.analyzer.makeData(frac_deviations=[0.1, 0.5], num_sample=10, data_dir=TEST_DIR)
import pdb; pdb.set_trace()


if __name__ == "__main__":
unittest.main()

unittest.main()
