Skip to content

Commit

Permalink
updates for new PRVI runs
Browse files Browse the repository at this point in the history
  • Loading branch information
Kevin Milner committed Oct 28, 2024
1 parent 267d664 commit e6fc917
Show file tree
Hide file tree
Showing 4 changed files with 154 additions and 2 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
package scratch.kevin.nshm23;

import java.io.File;
import java.io.IOException;

import org.opensha.commons.data.xyz.GriddedGeoDataSet;
import org.opensha.commons.logicTree.LogicTreeBranch;
import org.opensha.sha.earthquake.faultSysSolution.modules.GridSourceProvider;
import org.opensha.sha.earthquake.faultSysSolution.modules.SolutionLogicTree;
import org.opensha.sha.earthquake.faultSysSolution.reports.plots.NucleationRatePlot;

import com.google.common.base.Preconditions;

/**
 * Scratch utility that exports gridded-seismicity nucleation moment rates for every
 * branch of a solution logic tree, writing one XYZ file per branch.
 */
public class GriddedBranchMomentRatesFileWriter {

	public static void main(String[] args) throws IOException {
		// archive containing the simplified gridded-branch solution logic tree
		File inputFile = new File("/home/kevin/OpenSHA/nshm23/batch_inversions/"
				+ "2024_02_02-nshm23_branches-WUS_FM_v3/results_gridded_branches_simplified.zip");
		SolutionLogicTree solTree = SolutionLogicTree.load(inputFile);

		// destination for the per-branch XYZ files; create it if needed
		File destDir = new File("/tmp/gridded_moment_rate_branches");
		Preconditions.checkState(destDir.exists() || destDir.mkdir());

		for (LogicTreeBranch<?> treeBranch : solTree.getLogicTree()) {
			System.out.println("Branch: "+treeBranch);
			GridSourceProvider prov = solTree.loadGridProvForBranch(treeBranch);
			GriddedGeoDataSet momentRates = NucleationRatePlot.calcGriddedNucleationMomentRates(prov);
			// file name is derived from the branch's node choices
			GriddedGeoDataSet.writeXYZFile(momentRates, new File(destDir, treeBranch.buildFileName()+".xyz"));
		}
	}

}
Original file line number Diff line number Diff line change
Expand Up @@ -630,7 +630,13 @@ public static void main(String[] args) throws IOException {

// dirName += "-proxyGriddedTests";

Class<? extends InversionConfigurationFactory> factoryClass = PRVI25_InvConfigFactory.class;
// Class<? extends InversionConfigurationFactory> factoryClass = PRVI25_InvConfigFactory.class;

Class<? extends InversionConfigurationFactory> factoryClass = PRVI25_InvConfigFactory.GriddedUseM1Bounds.class;
dirName += "-grid_bounds_m1";

// Class<? extends InversionConfigurationFactory> factoryClass = PRVI25_InvConfigFactory.GriddedUseM1toMmaxBounds.class;
// dirName += "-grid_bounds_m1_to_mmax";

if (!factoryClass.equals(PRVI25_InvConfigFactory.class)) {
// try instantiate it to make sure we get any static modifiers that might change branch weights
Expand Down
86 changes: 86 additions & 0 deletions src/main/java/scratch/kevin/prvi25/RegionalMFD_Spider.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
package scratch.kevin.prvi25;

import java.io.File;
import java.io.IOException;
import java.util.List;

import org.opensha.commons.util.ClassUtils;
import org.opensha.commons.util.modules.AverageableModule.AveragingAccumulator;
import org.opensha.commons.util.modules.ModuleArchive;
import org.opensha.commons.util.modules.OpenSHA_Module;
import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet;
import org.opensha.sha.earthquake.faultSysSolution.modules.RegionsOfInterest;
import org.opensha.sha.magdist.IncrementalMagFreqDist;
import org.opensha.sha.util.TectonicRegionType;

/**
 * Scratch utility that crawls a results directory of per-branch solutions, prints the
 * {@link RegionsOfInterest} contents (region count, MFDs, TRTs) for each solution found,
 * and then builds and prints their equal-weight average.
 *
 * <p>Expects a single argument: the results directory, containing subdirectories that each
 * hold a {@code solution.zip}.
 */
public class RegionalMFD_Spider {

	public static void main(String[] args) throws IOException {
		if (args.length != 1) {
			System.err.println("USAGE: <results-dir>");
			System.exit(1);
		}
		File dir = new File(args[0]);
		// File.listFiles() returns null (not an empty array) if the path doesn't exist
		// or isn't a directory; fail with a clear message instead of an NPE
		File[] subdirs = dir.listFiles();
		if (subdirs == null) {
			System.err.println("Not a directory: "+dir.getAbsolutePath());
			System.exit(1);
		}
		
		AveragingAccumulator<RegionsOfInterest> averager = null;
		
		ModuleArchive.VERBOSE_DEFAULT = false;
		
		for (File subdir : subdirs) {
			if (!subdir.isDirectory())
				continue;
			File solFile = new File(subdir, "solution.zip");
			if (!solFile.exists())
				continue;
			System.out.println(subdir.getName());
			ModuleArchive<OpenSHA_Module> archive = new ModuleArchive<>(solFile);
			// the ROI module is nested under the rupture set's prefix within the archive
			RegionsOfInterest roi = archive.loadUnlistedModule(RegionsOfInterest.class, FaultSystemRupSet.NESTING_PREFIX);
			archive.getInput().close();
			
			printROI(roi);
			
			if (averager == null)
				averager = roi.averagingAccumulator();
			averager.process(roi, 1d);
		}
		
		// averager is still null if no subdirectory contained a solution.zip
		if (averager == null) {
			System.err.println("No solutions found in "+dir.getAbsolutePath());
			System.exit(1);
		}
		
		System.out.println("Building average");
		printROI(averager.getAverage());
	}
	
	/**
	 * Prints the region count, each region's MFD (name and class), and each region's
	 * tectonic region type for the given module.
	 */
	private static void printROI(RegionsOfInterest roi) {
		List<IncrementalMagFreqDist> mfds = roi.getMFDs();
		System.out.println("\tRegion count: "+roi.getRegions().size());
		System.out.println("\tMFDs: "+(mfds == null ? "NULL" : mfds.size()+""));
		if (mfds != null) {
			for (int i=0; i<mfds.size(); i++) {
				IncrementalMagFreqDist mfd = mfds.get(i);
				System.out.println("\t\tMFD "+i+": "+(mfd == null ? "NULL" :
					mfd.getName()+" ("+ClassUtils.getClassNameWithoutPackage(mfd.getClass())+")"));
			}
		}
		List<TectonicRegionType> trts = roi.getTRTs();
		System.out.println("\tTRTs: "+(trts == null ? "NULL" : trts.size()+""));
		if (trts != null) {
			for (int i=0; i<trts.size(); i++) {
				TectonicRegionType trt = trts.get(i);
				System.out.println("\t\tTRT "+i+": "+(trt == null ? "NULL" : trt));
			}
		}
	}

}
30 changes: 29 additions & 1 deletion src/main/java/scratch/kevin/ucerf3/PureScratch.java
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,7 @@
import org.opensha.sha.earthquake.faultSysSolution.modules.SolutionLogicTree;
import org.opensha.sha.earthquake.faultSysSolution.modules.SolutionSlipRates;
import org.opensha.sha.earthquake.faultSysSolution.modules.TrueMeanRuptureMappings;
import org.opensha.sha.earthquake.faultSysSolution.reports.ReportMetadata;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.ClusterRupture;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.FaultSubsectionCluster;
import org.opensha.sha.earthquake.faultSysSolution.ruptures.plausibility.PlausibilityResult;
Expand Down Expand Up @@ -2979,13 +2980,40 @@ private static void test330() throws IOException {
// System.out.println("InMemory Loading took "+(float)totalLoadSecs+" s ("+(float)initialLoadSecs+" s without extra modules)");
}

/**
 * Scratch test of GeoJSON/JSON parsing paths. The active line feeds a gridded-region
 * GeoJSON file to {@code LogicTree.read(...)} — presumably to reproduce a parse error
 * on mismatched input, since a logic tree reader is given region geometry; TODO confirm
 * intent. The commented-out code exercises Feature/GriddedRegion round-tripping instead.
 */
private static void test331() throws IOException {
//		Feature feature = Feature.read(new File("/tmp/gridded_region.geojson"));
//		GriddedRegion.fromFeature(feature);

	// NOTE(review): reads a *region* geojson via LogicTree.read — verify this is deliberate
	LogicTree.read(new File("/tmp/gridded_region.geojson"));

//		FaultSystemSolution sol = FaultSystemSolution.load(new File("/home/kevin/OpenSHA/nshm23/batch_inversions/"
//				+ "2024_10_24-prvi25_crustal_branches-dmSample5x/results_PRVI_CRUSTAL_FM_V1p1_branch_averaged.zip"));
//		Region region = ReportMetadata.detectRegion(sol);
//		GriddedRegion gridReg = new GriddedRegion(region, 0.1, GriddedRegion.ANCHOR_0_0);
//		Feature feature = gridReg.toFeature();
//		String json = feature.toJSON();
//		Feature.fromJSON(json);
}

/**
 * Scratch test: builds a MeanUCERF2 forecast with the CyberShake DDW correction enabled,
 * then dumps the magnitude and rupture-surface implementation class for every rupture
 * of source 136.
 */
private static void test332() throws IOException {
	MeanUCERF2 erf = new MeanUCERF2();
	erf.setParameter(MeanUCERF2.CYBERSHAKE_DDW_CORR_PARAM_NAME, true);
	erf.updateForecast();

	ProbEqkSource src = erf.getSource(136);
	System.out.println("Source 136 is "+src.getName());
	int numRups = src.getNumRuptures();
	for (int index=0; index<numRups; index++) {
		ProbEqkRupture rupture = src.getRupture(index);
		System.out.println(index+". M="+(float)rupture.getMag()+"; surface is "+rupture.getRuptureSurface().getClass().getName());
	}
}

/**
* @param args
* @throws Exception
*/
public static void main(String[] args) throws Exception {
try {
test330();
test332();
} catch (Throwable t) {
t.printStackTrace();
System.exit(1);
Expand Down

0 comments on commit e6fc917

Please sign in to comment.