Skip to content

Commit

Permalink
initial PRVI model and tests
Browse files Browse the repository at this point in the history
  • Loading branch information
kevinmilner committed Mar 14, 2024
1 parent db28506 commit c92fa11
Show file tree
Hide file tree
Showing 3 changed files with 156 additions and 39 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Date;
import java.util.HashSet;
Expand All @@ -14,6 +15,8 @@
import java.util.Set;

import org.opensha.commons.data.CSVFile;
import org.opensha.commons.data.Site;
import org.opensha.commons.geo.Location;
import org.opensha.commons.geo.Region;
import org.opensha.commons.hpc.JavaShellScriptWriter;
import org.opensha.commons.hpc.mpj.FastMPJShellScriptWriter;
Expand Down Expand Up @@ -75,6 +78,9 @@
import org.opensha.sha.earthquake.rupForecastImpl.nshm23.prior2018.NSHM18_FaultModels;
import org.opensha.sha.earthquake.rupForecastImpl.nshm23.prior2018.NSHM18_LogicTreeBranch;
import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader;
import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_LogicTreeBranch;
import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_DeformationModels;
import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_FaultModels;
import org.opensha.sha.util.NEHRP_TestCity;

import com.google.common.base.Preconditions;
Expand Down Expand Up @@ -143,8 +149,8 @@ public static void main(String[] args) throws IOException {
List<RandomlySampledLevel<?>> individualRandomLevels = new ArrayList<>();
int samplingBranchCountMultiplier = 1;

String dirName = new SimpleDateFormat("yyyy_MM_dd").format(new Date());
// String dirName = "2024_02_02";
// String dirName = new SimpleDateFormat("yyyy_MM_dd").format(new Date());
String dirName = "2024_03_11";

/*
* UCERF3 logic tree
Expand Down Expand Up @@ -238,9 +244,13 @@ public static void main(String[] args) throws IOException {
// dirName += "-nshm23_u3_hybrid_branches";
// double avgNumRups = 325000;

List<LogicTreeLevel<? extends LogicTreeNode>> levels = NSHM23_LogicTreeBranch.levelsOnFault;
dirName += "-nshm23_branches";
double avgNumRups = 600000;
// List<LogicTreeLevel<? extends LogicTreeNode>> levels = NSHM23_LogicTreeBranch.levelsOnFault;
// dirName += "-nshm23_branches";
// double avgNumRups = 600000;

List<LogicTreeLevel<? extends LogicTreeNode>> levels = PRVI25_LogicTreeBranch.levelsOnFault;
dirName += "-prvi25_branches";
double avgNumRups = 50000;

// List<LogicTreeLevel<? extends LogicTreeNode>> levels = NSHM18_LogicTreeBranch.levels;
// dirName += "-nshm18_branches-wc_94";
Expand Down Expand Up @@ -277,7 +287,7 @@ public static void main(String[] args) throws IOException {
// dirName += "-new_scale_rels";
// dirName += "-full_set";

// Class<? extends InversionConfigurationFactory> factoryClass = NSHM23_InvConfigFactory.class;
Class<? extends InversionConfigurationFactory> factoryClass = NSHM23_InvConfigFactory.class;

// Class<? extends InversionConfigurationFactory> factoryClass = NSHM23_InvConfigFactory.MFDUncert0p1.class;
// dirName += "-mfd_uncert_0p1";
Expand Down Expand Up @@ -425,9 +435,9 @@ public static void main(String[] args) throws IOException {
// dirName += "-dm_sampling";
// individualRandomLevels.add(new RandomDefModSampleLevel());

Class<? extends InversionConfigurationFactory> factoryClass = DefModSamplingEnabledInvConfig.ConnDistB0p5MidSegCorrCapSigma.class;
dirName += "-dm_sampling_cap_sigma";
individualRandomLevels.add(new RandomDefModSampleLevel());
// Class<? extends InversionConfigurationFactory> factoryClass = DefModSamplingEnabledInvConfig.ConnDistB0p5MidSegCorrCapSigma.class;
// dirName += "-dm_sampling_cap_sigma";
// individualRandomLevels.add(new RandomDefModSampleLevel());

if (!factoryClass.equals(NSHM23_InvConfigFactory.class)) {
// try instantiate it to make sure we get any static modifiers that might change branch weights
Expand Down Expand Up @@ -493,7 +503,8 @@ public static void main(String[] args) throws IOException {
// NSHM18_FaultModels.NSHM18_WUS_PlusU3_FM_3p1,
// NSHM23_FaultModels.FM_v1p4,
// NSHM23_FaultModels.FM_v2,
NSHM23_FaultModels.WUS_FM_v3,
// NSHM23_FaultModels.WUS_FM_v3,
PRVI25_FaultModels.PRVI_FM_INITIAL,

// // SINGLE STATE
// NSHM23_SingleStates.NM,
Expand Down Expand Up @@ -975,43 +986,55 @@ else if (mpjWrite instanceof FastMPJShellScriptWriter)
// site hazard job
RupSetFaultModel fm = logicTree.getBranch(0).getValue(RupSetFaultModel.class);
if (fm != null) {
Set<NEHRP_TestCity> sites;
Collection<Site> sites = null;
if (fm instanceof FaultModels) {
// CA
sites = NEHRP_TestCity.getCA();
} else {
sites = new ArrayList<>();
for (NEHRP_TestCity site : NEHRP_TestCity.getCA())
sites.add(new Site(site.location(), site.toString()));
} else if (fm instanceof NSHM23_FaultModels) {
// filter out CEUS for now
Region reg = NSHM23_RegionLoader.loadFullConterminousWUS();
sites = new HashSet<>();
sites = new ArrayList<>();
for (NEHRP_TestCity site : NEHRP_TestCity.values()) {
if (reg.contains(site.location()))
sites.add(site);
sites.add(new Site(site.location(), site.toString()));
}
} else if (fm instanceof PRVI25_FaultModels) {
CSVFile<String> csv = CSVFile.readStream(PRVI25_FaultModels.class.getResourceAsStream("/data/erf/prvi25/sites/prvi_sites.csv"), true);
sites = new ArrayList<>();
for (int row=1; row<csv.getNumRows(); row++) {
String name = csv.get(row, 0);
Location loc = new Location(csv.getDouble(row, 1), csv.getDouble(row, 2));
sites.add(new Site(loc, name));
}
}
CSVFile<String> csv = new CSVFile<>(true);
csv.addLine("Name", "Latitude", "Longitude");
for (NEHRP_TestCity site : sites)
csv.addLine(site.toString(), site.location().lat+"", site.location().lon+"");
File localSitesFile = new File(localDir, "hazard_sites.csv");
csv.writeToFile(localSitesFile);

argz = "--input-file "+resultsPath+".zip";
argz += " --output-dir "+resultsPath+"_hazard_sites";
argz += " --sites-file "+dirPath+"/"+localSitesFile.getName();
argz += " "+MPJTaskCalculator.argumentBuilder().exactDispatch(1).threads(remoteTotalThreads).build();
argz += " --gridded-seis EXCLUDE";
script = mpjWrite.buildScript(MPJ_SiteLogicTreeHazardCurveCalc.class.getName(), argz);
pbsWrite.writeScript(new File(localDir, "batch_hazard_sites.slurm"), script, mins, nodes, remoteTotalThreads, queue);

if (griddedJob) {
argz = "--input-file "+resultsPath;
argz += " --logic-tree "+dirPath+"/logic_tree_full_gridded.json";
argz += " --output-dir "+resultsPath+"_hazard_sites_full_gridded";
if (sites != null && !sites.isEmpty()) {
CSVFile<String> csv = new CSVFile<>(true);
csv.addLine("Name", "Latitude", "Longitude");
for (Site site : sites)
csv.addLine(site.getName(), site.getLocation().lat+"", site.getLocation().lon+"");
File localSitesFile = new File(localDir, "hazard_sites.csv");
csv.writeToFile(localSitesFile);

argz = "--input-file "+resultsPath+".zip";
argz += " --output-dir "+resultsPath+"_hazard_sites";
argz += " --sites-file "+dirPath+"/"+localSitesFile.getName();
argz += " --gridded-seis INCLUDE";
argz += " "+MPJTaskCalculator.argumentBuilder().minDispatch(2).maxDispatch(10).threads(remoteTotalThreads).build();
argz += " "+MPJTaskCalculator.argumentBuilder().exactDispatch(1).threads(remoteTotalThreads).build();
argz += " --gridded-seis EXCLUDE";
script = mpjWrite.buildScript(MPJ_SiteLogicTreeHazardCurveCalc.class.getName(), argz);
pbsWrite.writeScript(new File(localDir, "batch_hazard_sites_full_gridded.slurm"), script, mins, nodes, remoteTotalThreads, queue);
pbsWrite.writeScript(new File(localDir, "batch_hazard_sites.slurm"), script, mins, nodes, remoteTotalThreads, queue);

if (griddedJob) {
argz = "--input-file "+resultsPath;
argz += " --logic-tree "+dirPath+"/logic_tree_full_gridded.json";
argz += " --output-dir "+resultsPath+"_hazard_sites_full_gridded";
argz += " --sites-file "+dirPath+"/"+localSitesFile.getName();
argz += " --gridded-seis INCLUDE";
argz += " "+MPJTaskCalculator.argumentBuilder().minDispatch(2).maxDispatch(10).threads(remoteTotalThreads).build();
script = mpjWrite.buildScript(MPJ_SiteLogicTreeHazardCurveCalc.class.getName(), argz);
pbsWrite.writeScript(new File(localDir, "batch_hazard_sites_full_gridded.slurm"), script, mins, nodes, remoteTotalThreads, queue);
}
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
package scratch.kevin.nshm23.prvi;

import java.io.IOException;
import java.util.List;

import org.opensha.commons.geo.json.Feature;
import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet;
import org.opensha.sha.earthquake.faultSysSolution.RuptureSets;
import org.opensha.sha.earthquake.faultSysSolution.RuptureSets.CoulombRupSetConfig;
import org.opensha.sha.earthquake.faultSysSolution.modules.ClusterRuptures;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.ClusterRupture;
import org.opensha.sha.earthquake.faultSysSolution.util.SubSectionBuilder;
import org.opensha.sha.earthquake.rupForecastImpl.nshm23.logicTree.NSHM23_ScalingRelationships;
import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_FaultModels;
import org.opensha.sha.faultSurface.FaultSection;
import org.opensha.sha.faultSurface.GeoJSONFaultSection;

import com.google.common.base.Preconditions;

/**
 * Scratch experiment: checks whether the Coulomb rupture-set builder will connect
 * the "Bunce 5" and "Main Ridge 1" sections of the initial PRVI fault model for
 * various rake/dip assignments, and prints summary statistics about the resulting
 * ruptures (largest magnitude, largest multi-fault magnitude, longest rupture).
 */
public class BunceMainRidgeConnectionTests {

	public static void main(String[] args) throws IOException {
		PRVI25_FaultModels faultModel = PRVI25_FaultModels.PRVI_FM_INITIAL;
		List<? extends FaultSection> sections = faultModel.getFaultSections();

		// locate the two sections of interest by name
		FaultSection bunce5 = null;
		FaultSection mainRidge1 = null;
		for (FaultSection section : sections) {
			String name = section.getSectionName();
			if (name.equals("Bunce 5"))
				bunce5 = section;
			if (name.equals("Main Ridge 1"))
				mainRidge1 = section;
		}
		Preconditions.checkNotNull(bunce5);
		Preconditions.checkNotNull(mainRidge1);

		// Experiment: rake 0 on Bunce 5 alone does not produce a connection
		bunce5.setAveRake(0d);

		// Alternative that does produce a connection: rake 90 plus a 60-degree dip
//		bunce5.setAveRake(90d);
//		bunce5 = setDip(bunce5, 60);

		// This also produces a connection: oblique rake on Main Ridge 1
		mainRidge1.setAveRake(-45d);

		// build subsections for just these two sections, then a Coulomb rupture set
		List<FaultSection> subSects = SubSectionBuilder.buildSubSects(List.of(bunce5, mainRidge1));
		CoulombRupSetConfig config = new RuptureSets.CoulombRupSetConfig(
				subSects, "coulomb", NSHM23_ScalingRelationships.AVERAGE);
		FaultSystemRupSet rupSet = config.build(1);
		ClusterRuptures cRups = rupSet.requireModule(ClusterRuptures.class);

		// scan every rupture for jump/magnitude/length statistics
		boolean foundJump = false;
		double maxMag = 0d;
		double maxJumpingMag = 0d;
		int maxSubSectCount = 0;
		for (int r = 0; r < cRups.size(); r++) {
			double mag = rupSet.getMagForRup(r);
			int numSects = rupSet.getSectionsIndicesForRup(r).size();
			if (numSects > maxSubSectCount)
				maxSubSectCount = numSects;
			if (mag > maxMag)
				maxMag = mag;
			// a rupture with at least one jump spans both faults
			if (cRups.get(r).getTotalNumJumps() > 0) {
				foundJump = true;
				if (mag > maxJumpingMag)
					maxJumpingMag = mag;
			}
		}

		System.out.println("Bunce5: rake="+(int)bunce5.getAveRake()+", dip="+(int)bunce5.getAveDip()+", dipDir="+(int)bunce5.getDipDirection());
		System.out.println("MainRidge1: rake="+(int)mainRidge1.getAveRake()+", dip="+(int)mainRidge1.getAveDip()+", dipDir="+(int)mainRidge1.getDipDirection());
		System.out.println("Largest mag: "+maxMag);
		System.out.println("Largest subsect count: "+maxSubSectCount+"/"+subSects.size());
		System.out.println("Has jump? "+foundJump);
		if (foundJump)
			System.out.println("Largest jumping mag: "+maxJumpingMag);
	}

	/**
	 * Returns a copy of the given section with its dip replaced, by round-tripping
	 * through the GeoJSON feature representation (FaultSection has no dip setter).
	 */
	private static FaultSection setDip(FaultSection sect, double dip) {
		Feature feature = new GeoJSONFaultSection(sect).toFeature();
		feature.properties.set(GeoJSONFaultSection.DIP, dip);
		return GeoJSONFaultSection.fromFeature(feature);
	}

}
13 changes: 11 additions & 2 deletions src/main/java/scratch/kevin/nshm23/prvi/InitialDefModConvert.java
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,9 @@
public class InitialDefModConvert {

public static void main(String[] args) throws IOException {
File inputFile = new File("/home/kevin/OpenSHA/nshm23/prvi/2024_02-initial_dm/NSHM2025_GeoDefModel_PRVI.geojson");
File outputFile = new File("/home/kevin/OpenSHA/nshm23/prvi/2024_02-initial_dm/NSHM2025_GeoDefModel_PRVI_mod.geojson");
File dir = new File("/home/kevin/workspace/opensha/src/main/resources/data/erf/prvi25/fault_models/initial");
File inputFile = new File(dir, "NSHM2025_GeoDefModel_PRVI.geojson");
File outputFile = new File(dir, "NSHM2025_GeoDefModel_PRVI_mod.geojson");
List<Feature> features = new ArrayList<>(FeatureCollection.read(inputFile).features);

features.sort(new Comparator<Feature>() {
Expand Down Expand Up @@ -83,6 +84,14 @@ public int compare(Feature o1, Feature o2) {
props.remove("PrefSlpRat");
props.set(GeoJSONFaultSection.SLIP_RATE, slip);

double lowSlip = props.getDouble("MinSlpRat", Double.NaN);
props.remove("MinSlpRat");
props.set("LowRate", lowSlip);

double highSlip = props.getDouble("MaxSlpRat", Double.NaN);
props.remove("MaxSlpRat");
props.set("HighRate", highSlip);

features.set(i, feature);
}

Expand Down

0 comments on commit c92fa11

Please sign in to comment.