Skip to content

Commit

Permalink
updates for PRVI GMM calcs
Browse files Browse the repository at this point in the history
  • Loading branch information
Kevin Milner committed Dec 14, 2024
1 parent 36a8f33 commit 473662a
Show file tree
Hide file tree
Showing 10 changed files with 312 additions and 58 deletions.
24 changes: 21 additions & 3 deletions src/main/java/scratch/kevin/latex/LaTeXUtils.java
Original file line number Diff line number Diff line change
Expand Up @@ -98,8 +98,8 @@ public static String numberExpFormatSigFigs(Number number, int sigFigs) {
return numberExpFormat(numberToString(DataUtils.roundSigFigs(number.doubleValue(), sigFigs)));
}

public static String numberExpFormatFixedDecimal(Number number, int scale) {
return numberExpFormat(numberToString(DataUtils.roundFixed(number.doubleValue(), scale)));
public static String numberExpFormatFixedDecimal(Number number, int decimalPlaces) {
return numberExpFormat(numberToString(DataUtils.roundFixed(number.doubleValue(), decimalPlaces)));
}

private static synchronized String numberToString(Number number) {
Expand All @@ -108,7 +108,7 @@ private static synchronized String numberToString(Number number) {
str = nonExpOptionalDF.format(number);
return str;
}

private static final DecimalFormat nonExpOptionalDF = new DecimalFormat("0.###########");

public static String numberExpFormat(String number) {
Expand All @@ -130,13 +130,31 @@ public static String numberExpFormat(String number) {
// Return the formatted LaTeX string
return String.format("$%s \\times 10^{%s}$", base, exponent);
}

// Integer formatter with digit grouping (e.g. 1234567 -> "1,234,567"; separator is locale-dependent)
private static final DecimalFormat groupedIntDF = new DecimalFormat("0");
static {
// the bare "0" pattern has no grouping, so enable it explicitly with 3-digit groups
groupedIntDF.setGroupingUsed(true);
groupedIntDF.setGroupingSize(3);
}

/**
 * Formats the given number as a grouped integer string (thousands separators),
 * e.g. 1234567 -> "1,234,567".
 *
 * @param number value to format (fractional part handled per the "0" pattern, i.e. rounded)
 * @return grouped integer string representation
 */
public static String groupedIntNumber(Number number) {
return groupedIntDF.format(number);
}

// NOTE(review): percentDF is not referenced anywhere in this visible chunk —
// numberAsPercent below formats via numberExpFormatFixedDecimal instead; confirm
// this field is actually used elsewhere in the file, otherwise it is dead code
private static final DecimalFormat percentDF = new DecimalFormat("0.##########%");

/**
 * Formats a number that is ALREADY scaled to percent units (e.g. pass 50.5 for 50.5%,
 * as the caller in main() does via 100d*...), rounded to the given number of decimal
 * places, with a LaTeX-escaped percent sign appended.
 *
 * @param number value already in percent units (this method does NOT multiply by 100)
 * @param decimalPlaces number of decimal places to round to
 * @return formatted value plus the LaTeX escape for '%' (presumably "\\%" — from LATEX_ESCAPE_MAP, defined elsewhere)
 */
public static String numberAsPercent(Number number, int decimalPlaces) {
return numberExpFormatFixedDecimal(number, decimalPlaces)+LATEX_ESCAPE_MAP.get('%');
}

public static void main(String[] args) {
String raw = "Special characters: & % $ # _ { } ~ ^ \\ and \\& already escaped.";
String escaped = escapeLaTeX(raw);
System.out.println("Original: " + raw);
System.out.println("Escaped: " + escaped);

System.out.println(numberAsPercent(100d*0.505/1.3, 0));

System.out.println(defineValueCommand("SlipRateExample", "33%"));
System.out.println(defineValueCommand("SlipRateExample", "\\expnum("+numberExpFormat(3.14)+")", false));
System.out.println(defineValueCommand("SlipRateExample", "\\expnum("+numberExpFormat(3e-10)+")", false));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -230,8 +230,7 @@ else if (gridReg.getNodeCount() > 5000)
argz += " --region "+regPath;
if (noMFDs)
argz += " --no-mfds";
if (vs30 != null)
argz += " --vs30 "+vs30.floatValue();

argz += " --gridded-seis "+bgOp.name();
if (gmms != null)
for (AttenRelRef gmm : gmms)
Expand All @@ -244,6 +243,8 @@ else if (gridReg.getNodeCount() > 5000)
argz += (float)periods[p];
}
}
if (vs30 != null)
argz += " --vs30 "+vs30.floatValue();
if (supersample)
argz += " --supersample";
if (sigmaTrunc != null)
Expand Down
54 changes: 54 additions & 0 deletions src/main/java/scratch/kevin/prvi25/GMMFilterTests.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
package scratch.kevin.prvi25;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

import org.opensha.commons.logicTree.LogicTree;
import org.opensha.commons.logicTree.LogicTreeBranch;
import org.opensha.commons.logicTree.LogicTreeLevel;
import org.opensha.commons.logicTree.LogicTreeNode;
import org.opensha.sha.earthquake.faultSysSolution.hazard.mpj.MPJ_LogicTreeHazardCalc;
import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_LogicTreeBranch;
import org.opensha.sha.imr.AttenRelRef;
import org.opensha.sha.imr.ScalarIMR;
import org.opensha.sha.imr.attenRelImpl.nshmp.NSHMP_GMM_Wrapper;
import org.opensha.sha.imr.param.IntensityMeasureParams.PGA_Param;
import org.opensha.sha.util.TectonicRegionType;

import gov.usgs.earthquake.nshmp.gmm.GmmInput;
import gov.usgs.earthquake.nshmp.gmm.GroundMotion;

/**
 * Ad hoc exerciser for the PRVI25 GMM logic trees: builds exhaustive logic trees for the
 * crustal, interface, and slab GMM levels (plus one tree over all levels combined),
 * instantiates every GMM supplier on every branch, and prints the resulting nshmp
 * ground-motion tree for a default {@code GmmInput} at PGA.
 *
 * <p>Diagnostic only: all output goes to stdout; there are no assertions.
 */
class GMMFilterTests {

	public static void main(String[] args) {
		// union of all GMM levels across the three tectonic regimes, used for the combined tree
		List<LogicTreeLevel<? extends LogicTreeNode>> allLevels = new ArrayList<>();
		allLevels.addAll(PRVI25_LogicTreeBranch.levelsCrustalGMM);
		allLevels.addAll(PRVI25_LogicTreeBranch.levelsInterfaceGMM);
		allLevels.addAll(PRVI25_LogicTreeBranch.levelsSlabGMM);
		List<LogicTree<?>> logicTrees = List.of(
				LogicTree.buildExhaustive(PRVI25_LogicTreeBranch.levelsCrustalGMM, true),
				LogicTree.buildExhaustive(PRVI25_LogicTreeBranch.levelsInterfaceGMM, true),
				LogicTree.buildExhaustive(PRVI25_LogicTreeBranch.levelsSlabGMM, true),
				LogicTree.buildExhaustive(allLevels, true));

		for (LogicTree<?> tree : logicTrees) {
			// FIX: tree.size() is the number of branches (the loop below iterates branches),
			// the original message misreported it as "levels"
			System.out.println("Testing LogicTree with "+tree.size()+" branches");
			for (LogicTreeBranch<?> branch : tree) {
				System.out.println("\tBranch: "+branch);
				// one GMM supplier per tectonic regime present on this branch
				Map<TectonicRegionType, ? extends Supplier<ScalarIMR>> gmms = MPJ_LogicTreeHazardCalc.getGMM_Suppliers(branch, null);
				for (TectonicRegionType trt : gmms.keySet()) {
					// assumes PRVI25 branches always yield NSHMP wrappers — cast will throw otherwise
					NSHMP_GMM_Wrapper gmm = (NSHMP_GMM_Wrapper)gmms.get(trt).get();
					gmm.setIntensityMeasure(PGA_Param.NAME);
					System.out.println("\t\t"+trt.name()+" GMM: "+gmm.getName());
					// evaluate the underlying nshmp-lib ground motion tree for a default input
					gmm.setCurrentGmmInput(GmmInput.builder().withDefaults().build());
					gov.usgs.earthquake.nshmp.tree.LogicTree<GroundMotion> gmmTree = gmm.getGroundMotionTree();
					System.out.println("\t\t\tTree has "+gmmTree.size()+" values: "+gmmTree);
				}
			}
		}
	}

}
131 changes: 101 additions & 30 deletions src/main/java/scratch/kevin/prvi25/GMMLogicTreeWriter.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,13 @@
import org.opensha.commons.logicTree.LogicTree;
import org.opensha.commons.logicTree.LogicTreeBranch;
import org.opensha.commons.logicTree.LogicTreeLevel;
import org.opensha.commons.logicTree.LogicTreeLevel.FileBackedLevel;
import org.opensha.commons.logicTree.LogicTreeNode;
import org.opensha.commons.logicTree.LogicTreeNode.FileBackedNode;
import org.opensha.sha.earthquake.faultSysSolution.FaultSystemSolution;
import org.opensha.sha.earthquake.faultSysSolution.hazard.mpj.MPJ_LogicTreeHazardCalc;
import org.opensha.sha.earthquake.faultSysSolution.hazard.mpj.MPJ_SiteLogicTreeHazardCurveCalc;
import org.opensha.sha.earthquake.faultSysSolution.modules.SolutionLogicTree;
import org.opensha.sha.earthquake.param.IncludeBackgroundOption;
import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_CrustalFaultModels;
import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_LogicTreeBranch;
Expand All @@ -31,47 +35,60 @@

import edu.usc.kmilner.mpj.taskDispatch.MPJTaskCalculator;

import static scratch.kevin.prvi25.figures.PRVI_Paths.*;

public class GMMLogicTreeWriter {

public static void main(String[] args) throws IOException {
File baseDir = new File("/home/kevin/OpenSHA/nshm23/batch_inversions");

String jobSuffix = "";
String outputSuffix = "";
String dirSuffix = "";
String forceInputFileName = null;
String logicTreeOutputName = null;
boolean combineOnly = false;

// GriddedRegion gridReg = new GriddedRegion(PRVI25_RegionLoader.loadPRVI_ModelBroad(), 0.1, GriddedRegion.ANCHOR_0_0);
GriddedRegion gridReg = new GriddedRegion(PRVI25_RegionLoader.loadPRVI_MapExtents(), 0.025, GriddedRegion.ANCHOR_0_0);
// GriddedRegion gridReg = new GriddedRegion(PRVI25_RegionLoader.loadPRVI_MapExtents(), 0.025, GriddedRegion.ANCHOR_0_0);
GriddedRegion gridReg = new GriddedRegion(PRVI25_RegionLoader.loadPRVI_Tight(), 0.025, GriddedRegion.ANCHOR_0_0);
System.out.println("Region has "+gridReg.getNodeCount()+" nodes");

Double vs30 = null;

vs30 = 760d; dirSuffix = "-vs760";
Double sigmaTrunc = 3d;
boolean supersample = true;
double[] periods = { 0d, 0.2d, 1d, 5d };

/*
* Active crustal
*
* do supra-seis and then include
*/
List<LogicTreeLevel<? extends LogicTreeNode>> gmmLevels = PRVI25_LogicTreeBranch.levelsCrustalGMM;
File sourceDir = new File(baseDir, "2024_08_16-prvi25_crustal_branches-dmSample5x");
File outputDir = new File(baseDir, sourceDir.getName()+"-gmTreeCalcs");
// supra-seis only
// File sourceTreeFile = new File(sourceDir, "logic_tree.json");
// int mins = 1440;
// IncludeBackgroundOption bgOp = IncludeBackgroundOption.EXCLUDE;
// including gridded
int mins = 1440*5;
File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded.json");
// File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded_sampled.json"); jobSuffix = "_sampled";
IncludeBackgroundOption bgOp = IncludeBackgroundOption.INCLUDE;
// List<LogicTreeLevel<? extends LogicTreeNode>> gmmLevels = PRVI25_LogicTreeBranch.levelsCrustalGMM;
// File sourceDir = CRUSTAL_DIR;
// File outputDir = new File(sourceDir.getParentFile(), sourceDir.getName()+"-gmTreeCalcs"+dirSuffix);
// // supra-seis only
//// File sourceTreeFile = new File(sourceDir, "logic_tree.json");
//// int mins = 1440;
//// IncludeBackgroundOption bgOp = IncludeBackgroundOption.EXCLUDE;
// // including gridded
// int mins = 1440*5;
// File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded.json");
//// File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded_sampled.json"); jobSuffix = "_sampled";
// IncludeBackgroundOption bgOp = IncludeBackgroundOption.INCLUDE;

/*
* Interface
*
* do supra-seis, then gridded-only, then combine
*/
// List<LogicTreeLevel<? extends LogicTreeNode>> gmmLevels = PRVI25_LogicTreeBranch.levelsInterfaceGMM;
// File sourceDir = new File(baseDir, "2024_08_16-prvi25_subduction_branches");
// File outputDir = new File(baseDir, sourceDir.getName()+"-gmTreeCalcs");
// File sourceDir = SUBDUCTION_DIR;
// File outputDir = new File(sourceDir.getParentFile(), sourceDir.getName()+"-gmTreeCalcs"+dirSuffix);
// // supra-seis only
// File sourceTreeFile = new File(sourceDir, "logic_tree.json");
// int mins = 1440;
// IncludeBackgroundOption bgOp = IncludeBackgroundOption.EXCLUDE;
//// File sourceTreeFile = new File(sourceDir, "logic_tree.json");
//// int mins = 1440;
//// IncludeBackgroundOption bgOp = IncludeBackgroundOption.EXCLUDE;
// // interface gridded only
//// int mins = 1440;
//// File sourceTreeFile = new File(sourceDir, "logic_tree_gridded_only.json");
Expand All @@ -81,21 +98,21 @@ public static void main(String[] args) throws IOException {
//// jobSuffix = "_interface";
//// outputSuffix = jobSuffix;
// // interface both (combine only)
//// combineOnly = true;
//// int mins = 1440;
//// forceInputFileName = "results_full_gridded_interface_only.zip";
//// File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded.json");
//// logicTreeOutputName = "logic_tree_full_gridded_interface_only.json";
//// IncludeBackgroundOption bgOp = IncludeBackgroundOption.INCLUDE;
//// jobSuffix = "_interface";
//// outputSuffix = jobSuffix;
// combineOnly = true;
// int mins = 1440;
// forceInputFileName = "results_full_gridded_interface_only.zip";
// File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded.json");
// logicTreeOutputName = "logic_tree_full_gridded_interface_only.json";
// IncludeBackgroundOption bgOp = IncludeBackgroundOption.INCLUDE;
// jobSuffix = "_interface";
// outputSuffix = jobSuffix;

/*
* Slab
*/
// List<LogicTreeLevel<? extends LogicTreeNode>> gmmLevels = PRVI25_LogicTreeBranch.levelsSlabGMM;
// File sourceDir = new File(baseDir, "2024_08_16-prvi25_subduction_branches");
// File outputDir = new File(baseDir, sourceDir.getName()+"-gmTreeCalcs");
// File sourceDir = SUBDUCTION_DIR;
// File outputDir = new File(sourceDir.getParentFile(), sourceDir.getName()+"-gmTreeCalcs"+dirSuffix);
// // always slab gridded only
// int mins = 1440;
// File sourceTreeFile = new File(sourceDir, "logic_tree_gridded_only.json");
Expand All @@ -105,6 +122,32 @@ public static void main(String[] args) throws IOException {
// jobSuffix = "_slab";
// outputSuffix = jobSuffix;

/*
* Branch averaged (GMM-only)
*/
List<LogicTreeLevel<? extends LogicTreeNode>> gmmLevels = PRVI25_LogicTreeBranch.levelsCombinedGMM;
File sourceDir = COMBINED_DIR;
File outputDir = new File(sourceDir.getParentFile(), sourceDir.getName()+"-ba_only-gmTreeCalcs"+dirSuffix);
// write out a SLT that only contains that node
File sourceTreeFile = new File(outputDir, "fake_erf_logic_tree.json");
FileBackedLevel fakeLevel = new FileBackedLevel("ERF Model", "ERF",
List.of(new FileBackedNode("Branch Averaged ERF", "BranchAveragedERF", 1d, "BA_ERF")));
LogicTree<?> tempTree = LogicTree.buildExhaustive(List.of(fakeLevel), true);
Preconditions.checkState(tempTree.size() == 1);
File sourceFile = new File(outputDir, "fake_erf_slt.zip");
SolutionLogicTree.FileBuilder builder = new SolutionLogicTree.FileBuilder(sourceFile);
builder.setSerializeGridded(true);
builder.solution(FaultSystemSolution.load(COMBINED_SOL), tempTree.getBranch(0));
builder.close();
forceInputFileName = sourceFile.getName();
tempTree.write(sourceTreeFile);
logicTreeOutputName = "logic_tree.json";
sourceDir = outputDir;
int mins = 1440;
IncludeBackgroundOption bgOp = IncludeBackgroundOption.INCLUDE;


// FOR ALL
LogicTree<?> erfTree = LogicTree.read(sourceTreeFile);
System.out.println("Read "+erfTree.size()+" ERF branches");

Expand Down Expand Up @@ -208,6 +251,20 @@ else if (bgOp == IncludeBackgroundOption.EXCLUDE)
if (combineOnly)
argz += " --combine-only";
argz += " --region "+dirPath+"/"+gridRegFile.getName();
if (vs30 != null)
argz += " --vs30 "+vs30.floatValue();
if (supersample)
argz += " --supersample";
if (sigmaTrunc != null)
argz += " --gmm-sigma-trunc-one-sided "+sigmaTrunc.floatValue();
if (periods != null) {
argz += " --periods ";
for (int p=0; p<periods.length; p++) {
if (p > 0)
argz += ",";
argz += (float)periods[p];
}
}
argz += " "+MPJTaskCalculator.argumentBuilder().maxDispatch(100).threads(remoteTotalThreads).build();
List<String> script = mpjWrite.buildScript(MPJ_LogicTreeHazardCalc.class.getName(), argz);
pbsWrite.writeScript(new File(localDir, mapScriptName), script, mins, nodes, remoteTotalThreads, queue);
Expand All @@ -232,6 +289,20 @@ else if (bgOp == IncludeBackgroundOption.EXCLUDE)
argz += " --output-dir "+dirPath+"/"+outputFilePrefix+"_sites"+outputSuffix;
argz += " --sites-file "+dirPath+"/"+localSitesFile.getName();
argz += " --gridded-seis "+bgOp.name();
if (vs30 != null)
argz += " --vs30 "+vs30.floatValue();
if (supersample)
argz += " --supersample";
if (sigmaTrunc != null)
argz += " --gmm-sigma-trunc-one-sided "+sigmaTrunc.floatValue();
if (periods != null) {
argz += " --periods ";
for (int p=0; p<periods.length; p++) {
if (p > 0)
argz += ",";
argz += (float)periods[p];
}
}
argz += " "+MPJTaskCalculator.argumentBuilder().minDispatch(1).maxDispatch(10).threads(remoteTotalThreads).build();
script = mpjWrite.buildScript(MPJ_SiteLogicTreeHazardCurveCalc.class.getName(), argz);
pbsWrite.writeScript(new File(localDir, siteScriptName), script, mins, nodes, remoteTotalThreads, queue);
Expand Down
Loading

0 comments on commit 473662a

Please sign in to comment.