PRVI paper figures and calcs
Kevin Milner committed Dec 20, 2024
1 parent 13fbc62 commit 3a64aec
Showing 16 changed files with 949 additions and 105 deletions.
@@ -118,6 +118,7 @@ public static void main(String[] args) throws IOException {
boolean hazardGridded = false;
boolean forceRequiredNonzeroWeight = false;
Double forceHazardGridSpacing = null;
long randSeed = 12345678l;

File remoteMainDir = new File("/project/scec_608/kmilner/nshm23/batch_inversions");
int remoteTotalThreads = 20;
@@ -638,7 +639,11 @@ public static void main(String[] args) throws IOException {
levels.remove(i);
Preconditions.checkState(levels.size() == origNumLevels -1);
individualRandomLevels.add(new PRVI25_CrustalRandomlySampledDeformationModelLevel());
samplingBranchCountMultiplier = 5; // 5 for each branch
// samplingBranchCountMultiplier = 5; // 5 for each branch
// samplingBranchCountMultiplier = 10; // 10 for each branch
// samplingBranchCountMultiplier = 20; // 20 for each branch
samplingBranchCountMultiplier = 50; // 50 for each branch
randSeed *= samplingBranchCountMultiplier;
dirName += "-dmSample";
if (samplingBranchCountMultiplier > 1)
dirName += samplingBranchCountMultiplier+"x";
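Note: the new `randSeed *= samplingBranchCountMultiplier;` line ties the seed to the sample-count multiplier, presumably so that the 5x/10x/20x/50x runs each draw an independent (but still reproducible) set of deformation-model samples rather than sharing a common prefix of draws. A minimal sketch of that effect using plain `java.util.Random` (class and variable names here are illustrative, not from this repository):

```java
import java.util.Random;

public class SeedScalingSketch {
	public static void main(String[] args) {
		long baseSeed = 12345678L;
		for (int multiplier : new int[] {5, 10, 20, 50}) {
			// each run size gets its own deterministic stream; rerunning with
			// the same multiplier reproduces the same samples
			Random rand = new Random(baseSeed*multiplier);
			System.out.println(multiplier+"x -> first draw: "+rand.nextLong());
		}
	}
}
```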
@@ -756,7 +761,6 @@ public static void main(String[] args) throws IOException {
// int numSamples = nodes*5;
// int numSamples = nodes*4;
// long randSeed = System.currentTimeMillis();
long randSeed = 12345678l;
int numSamples = 0;
// int numSamples = 450;
// int numSamples = 36*10;
@@ -1562,29 +1562,29 @@ else if (hazDiff < 0 && moDiff < 0)
private static final DecimalFormat twoDigits = new DecimalFormat("0.00");
private static final DecimalFormat pDF = new DecimalFormat("0.00%");

static GriddedGeoDataSet asLog10(GriddedGeoDataSet xyz) {
public static GriddedGeoDataSet asLog10(GriddedGeoDataSet xyz) {
xyz = xyz.copy();
xyz.log10();
return xyz;
}

static GriddedGeoDataSet sumMap(GriddedGeoDataSet map1, GriddedGeoDataSet map2) {
public static GriddedGeoDataSet sumMap(GriddedGeoDataSet map1, GriddedGeoDataSet map2) {
Preconditions.checkState(map1.size() == map2.size());
GriddedGeoDataSet ret = new GriddedGeoDataSet(map1.getRegion());
for (int i=0; i<ret.size(); i++)
ret.set(i, map1.get(i)+map2.get(i));
return ret;
}

static GriddedGeoDataSet mapPDiff(GriddedGeoDataSet map1, GriddedGeoDataSet map2) {
public static GriddedGeoDataSet mapPDiff(GriddedGeoDataSet map1, GriddedGeoDataSet map2) {
Preconditions.checkState(map1.size() == map2.size());
GriddedGeoDataSet ret = new GriddedGeoDataSet(map1.getRegion());
for (int i=0; i<ret.size(); i++)
ret.set(i, 100d*(map1.get(i)-map2.get(i))/map2.get(i));
return ret;
}

static GriddedGeoDataSet mapDiff(GriddedGeoDataSet map1, GriddedGeoDataSet map2) {
public static GriddedGeoDataSet mapDiff(GriddedGeoDataSet map1, GriddedGeoDataSet map2) {
Preconditions.checkState(map1.size() == map2.size());
GriddedGeoDataSet ret = new GriddedGeoDataSet(map1.getRegion());
for (int i=0; i<ret.size(); i++)
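This hunk (truncated here by the diff view) widens `asLog10`, `sumMap`, `mapPDiff`, and `mapDiff` from package-private to `public`, presumably so the PRVI paper figure code elsewhere can reuse them. All four are element-wise operations over co-registered grids; the `Preconditions` checks enforce equal node counts. A minimal usage sketch built only from calls that appear in this diff, with illustrative constant hazard values; the import paths are my best guess at the OpenSHA layout, and the helpers' host class is not named in this excerpt:

```java
import java.io.IOException;

import org.opensha.commons.data.xyz.GriddedGeoDataSet;
import org.opensha.commons.geo.GriddedRegion;
import org.opensha.sha.earthquake.rupForecastImpl.prvi25.util.PRVI25_RegionLoader;

public class MapDiffSketch {
	public static void main(String[] args) throws IOException {
		GriddedRegion gridReg = new GriddedRegion(
				PRVI25_RegionLoader.loadPRVI_MapExtents(), 0.1, GriddedRegion.ANCHOR_0_0);
		GriddedGeoDataSet mapA = new GriddedGeoDataSet(gridReg);
		GriddedGeoDataSet mapB = new GriddedGeoDataSet(gridReg);
		for (int i=0; i<mapA.size(); i++) {
			mapA.set(i, 0.02); // illustrative hazard values
			mapB.set(i, 0.01);
		}
		// assumes a static import of the helpers from their (unnamed here) host class
		// percent difference, 100*(a-b)/b: +100% at every node here
		GriddedGeoDataSet pDiff = mapPDiff(mapA, mapB);
		// difference in log10 space: log10(0.02)-log10(0.01) = log10(2) everywhere
		GriddedGeoDataSet logRatio = mapDiff(asLog10(mapA), asLog10(mapB));
		System.out.println(pDiff.get(0)+" % / "+logRatio.get(0)+" log10 units");
	}
}
```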
@@ -15,6 +15,7 @@
import org.opensha.commons.geo.json.Feature;
import org.opensha.sha.earthquake.faultSysSolution.util.SolHazardMapCalc.ReturnPeriods;
import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader;
import org.opensha.sha.earthquake.rupForecastImpl.prvi25.util.PRVI25_RegionLoader;

import com.google.common.base.Preconditions;

@@ -26,20 +27,30 @@ public static void main(String[] args) throws IOException {
File invsDir = new File("/data/kevin/nshm23/batch_inversions");

File extCalcDir = new File("/home/kevin/OpenSHA/nshm23/nshmp-haz-models/ext_hazard_calcs/"
+ "conus-6b4-nshmp-haz-lib1415-grid_smooth_OFF-0p1-vs760-20240307-087a4937807eb5/vs30-760");
File outputDir = new File(invsDir, "2024_03_08-nshmp-haz-external-conus-6b4-ask2014-vs760-grid_smooth_OFF");
+ "prvi-t2-2003-ERF-2025-vB1-GMMs-2025conf-0p01-vs760-20241213-49f58ecb02d600/vs30-760");
File outputDir = new File(invsDir, "2024_12_13-nshmp-haz-external-prvi-2b1-prvi25gmms-vs760");

GriddedRegion gridReg = new GriddedRegion(
PRVI25_RegionLoader.loadPRVI_MapExtents(),
0.01, GriddedRegion.ANCHOR_0_0);

double[] periods = {0d, 0.2, 1d, 5d};

// File extCalcDir = new File("/home/kevin/OpenSHA/nshm23/nshmp-haz-models/ext_hazard_calcs/"
// + "conus-6b4-nshmp-haz-lib1415-grid_smooth_OFF-0p1-vs760-20240307-087a4937807eb5/vs30-760");
// File outputDir = new File(invsDir, "2024_03_08-nshmp-haz-external-conus-6b4-ask2014-vs760-grid_smooth_OFF");
// + "conus-6b4-nshmp-haz-grid_smooth_optimize_OFF-0p1-vs760-20240213-195bddb0d73730/vs30-760");
// File outputDir = new File(invsDir, "2024_02_15-nshmp-haz-external-conus-6b4-ask2014-vs760-grid_smooth_optimize_OFF");
// + "conus-6b4-nshmp-haz-0p1-vs760-20240208-afaff93cd918f5/vs30-760");
// File outputDir = new File(invsDir, "2024_02_08-nshmp-haz-external-conus-6b4-ask2014-vs760");
// + "conus-6b4-nshmp-haz-0p1-vs760-20240228-ef0f647c24c8d1/vs30-760");
// File outputDir = new File(invsDir, "2024_02_28-nshmp-haz-external-conus-6b4-ask2014-vs760");

GriddedRegion gridReg = new GriddedRegion(
NSHM23_RegionLoader.loadFullConterminousUS(),
0.1, GriddedRegion.ANCHOR_0_0);
// GriddedRegion gridReg = new GriddedRegion(
// NSHM23_RegionLoader.loadFullConterminousUS(),
// 0.1, GriddedRegion.ANCHOR_0_0);

double[] periods = {0d, 1d};
// double[] periods = {0d, 1d};
ReturnPeriods[] rps = {ReturnPeriods.TWO_IN_50, ReturnPeriods.TEN_IN_50};

Preconditions.checkState(outputDir.exists() || outputDir.mkdir());
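This hunk swaps the external-comparison setup from CONUS (0.1 degrees over the conterminous US, periods {0 s, 1 s}) to PRVI (0.01 degrees over the PRVI map extents, periods {0 s, 0.2 s, 1 s, 5 s}). Ten-times-finer spacing stays affordable because the region is far smaller; a back-of-the-envelope node count, with box extents that are purely illustrative (the real ones come from `PRVI25_RegionLoader.loadPRVI_MapExtents()` and `NSHM23_RegionLoader.loadFullConterminousUS()`):

```java
public class GridCostSketch {
	// node count for a rectangular lat/lon box at a given spacing
	static long nodeCount(double latSpan, double lonSpan, double spacingDeg) {
		return Math.round(latSpan/spacingDeg + 1)*Math.round(lonSpan/spacingDeg + 1);
	}

	public static void main(String[] args) {
		// hypothetical ~4 deg x 8 deg PRVI box at 0.01 degrees: ~321k nodes
		System.out.println("PRVI  @ 0.01: "+nodeCount(4, 8, 0.01));
		// hypothetical ~25 deg x 58 deg CONUS box at 0.1 degrees: ~146k nodes
		System.out.println("CONUS @ 0.1:  "+nodeCount(25, 58, 0.1));
	}
}
```

So under these assumed extents, the 0.01-degree PRVI grid is the same order of size as the 0.1-degree CONUS grid despite the tenfold resolution bump.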
@@ -291,7 +291,8 @@ private static void remapTree(LogicTree<?> tree, Map<LogicTreeLevel<?>, LogicTre
Map<LogicTreeNode, LogicTreeNode> nodeRemaps, String nameAdd, String shortNameAdd) {
for (LogicTreeLevel<?> level : tree.getLevels()) {
String name = level.getName();
if (name.toLowerCase().contains("crustal") || name.toLowerCase().contains("subduction")) {
if (name.toLowerCase().contains("crustal") || name.toLowerCase().contains("subduction")
|| name.toLowerCase().contains("interface") || name.toLowerCase().contains("slab")) {
// keep it as is
levelRemaps.put(level, level);
for (LogicTreeNode node : level.getNodes())
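The remap now also passes through any level whose name mentions "interface" or "slab", in addition to "crustal" and "subduction", so slab- and interface-specific levels keep their identity when the crustal and subduction trees are merged. The keep-as-is test, restated as a standalone predicate (a hypothetical helper; the repository inlines the condition as shown above):

```java
// keep a logic-tree level un-renamed if its name marks it as model-specific
private static boolean keepLevelAsIs(String levelName) {
	String lower = levelName.toLowerCase();
	return lower.contains("crustal") || lower.contains("subduction")
			|| lower.contains("interface") || lower.contains("slab");
}
```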
131 changes: 75 additions & 56 deletions src/main/java/scratch/kevin/prvi25/GMMLogicTreeWriter.java
@@ -23,6 +23,7 @@
import org.opensha.commons.logicTree.LogicTreeNode;
import org.opensha.commons.logicTree.LogicTreeNode.FileBackedNode;
import org.opensha.sha.earthquake.faultSysSolution.FaultSystemSolution;
import org.opensha.sha.earthquake.faultSysSolution.hazard.AbstractLogicTreeHazardCombiner;
import org.opensha.sha.earthquake.faultSysSolution.hazard.mpj.MPJ_LogicTreeHazardCalc;
import org.opensha.sha.earthquake.faultSysSolution.hazard.mpj.MPJ_SiteLogicTreeHazardCurveCalc;
import org.opensha.sha.earthquake.faultSysSolution.modules.SolutionLogicTree;
@@ -57,6 +58,8 @@ public static void main(String[] args) throws IOException {
vs30 = 760d; dirSuffix = "-vs760";
Double sigmaTrunc = 3d;
boolean supersample = true;
int erfSamples = -1;
int gmmSamplesPerERF = -1;
double[] periods = { 0d, 0.2d, 1d, 5d };

/*
@@ -74,30 +77,37 @@ public static void main(String[] args) throws IOException {
// // including gridded
// int mins = 1440*5;
// File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded.json");
// erfSamples = 10000; gmmSamplesPerERF = 1; jobSuffix = "_sampled"; logicTreeOutputName = "logic_tree_full_gridded_sampled.json";
//// File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded_sampled.json"); jobSuffix = "_sampled";
// IncludeBackgroundOption bgOp = IncludeBackgroundOption.INCLUDE;

/*
* Interface
* Interface: separate slab and interface
*
* do supra-seis, then gridded-only, then combine
* do supra-seis, then each gridded-only, then interface combine
*
* then need to separately combine the slab logic tree
*/
// List<LogicTreeLevel<? extends LogicTreeNode>> gmmLevels = PRVI25_LogicTreeBranch.levelsInterfaceGMM;
// File sourceDir = SUBDUCTION_DIR;
// File outputDir = new File(sourceDir.getParentFile(), sourceDir.getName()+"-gmTreeCalcs"+dirSuffix);
// // supra-seis only
//// File sourceTreeFile = new File(sourceDir, "logic_tree.json");
//// int mins = 1440;
//// IncludeBackgroundOption bgOp = IncludeBackgroundOption.EXCLUDE;
// // interface gridded only
//// int mins = 1440;
//// File sourceTreeFile = new File(sourceDir, "logic_tree_gridded_only.json");
//// logicTreeOutputName = "logic_tree_gridded_interface_only.json";
//// IncludeBackgroundOption bgOp = IncludeBackgroundOption.ONLY;
//// forceInputFileName = "results_gridded_branches_interface_only.zip";
//// jobSuffix = "_interface";
//// outputSuffix = jobSuffix;
// // interface both (combine only)
List<LogicTreeLevel<? extends LogicTreeNode>> gmmLevels = PRVI25_LogicTreeBranch.levelsInterfaceGMM;
File sourceDir = SUBDUCTION_DIR;
File outputDir = new File(sourceDir.getParentFile(), sourceDir.getName()+"-gmTreeCalcs"+dirSuffix);
// supra-seis only
// File sourceTreeFile = new File(sourceDir, "logic_tree.json");
// int mins = 1440;
// IncludeBackgroundOption bgOp = IncludeBackgroundOption.EXCLUDE;
// interface gridded only
int mins = 1440;
// File sourceTreeFile = new File(sourceDir, "logic_tree_gridded_only.json");
File sourceTreeFile = new File(sourceDir, "logic_tree_full_gridded_for_only_calc.json");
logicTreeOutputName = "logic_tree_gridded_interface_only.json";
IncludeBackgroundOption bgOp = IncludeBackgroundOption.ONLY;
// this was for when gridded-only depended only on FM, but it also depends on scale
// forceInputFileName = "results_gridded_branches_interface_only.zip";
// use this one because it has scaling relationship specific gridded models
forceInputFileName = "results_full_gridded_interface_only.zip";
jobSuffix = "_interface";
outputSuffix = jobSuffix;
// interface both (combine only)
// combineOnly = true;
// int mins = 1440;
// forceInputFileName = "results_full_gridded_interface_only.zip";
@@ -125,26 +135,26 @@ public static void main(String[] args) throws IOException {
/*
* Branch averaged (GMM-only)
*/
List<LogicTreeLevel<? extends LogicTreeNode>> gmmLevels = PRVI25_LogicTreeBranch.levelsCombinedGMM;
File sourceDir = COMBINED_DIR;
File outputDir = new File(sourceDir.getParentFile(), sourceDir.getName()+"-ba_only-gmTreeCalcs"+dirSuffix);
// write out a SLT that only contains that node
File sourceTreeFile = new File(outputDir, "fake_erf_logic_tree.json");
FileBackedLevel fakeLevel = new FileBackedLevel("ERF Model", "ERF",
List.of(new FileBackedNode("Branch Averaged ERF", "BranchAveragedERF", 1d, "BA_ERF")));
LogicTree<?> tempTree = LogicTree.buildExhaustive(List.of(fakeLevel), true);
Preconditions.checkState(tempTree.size() == 1);
File sourceFile = new File(outputDir, "fake_erf_slt.zip");
SolutionLogicTree.FileBuilder builder = new SolutionLogicTree.FileBuilder(sourceFile);
builder.setSerializeGridded(true);
builder.solution(FaultSystemSolution.load(COMBINED_SOL), tempTree.getBranch(0));
builder.close();
forceInputFileName = sourceFile.getName();
tempTree.write(sourceTreeFile);
logicTreeOutputName = "logic_tree.json";
sourceDir = outputDir;
int mins = 1440;
IncludeBackgroundOption bgOp = IncludeBackgroundOption.INCLUDE;
// List<LogicTreeLevel<? extends LogicTreeNode>> gmmLevels = PRVI25_LogicTreeBranch.levelsCombinedGMM;
// File sourceDir = COMBINED_DIR;
// File outputDir = new File(sourceDir.getParentFile(), sourceDir.getName()+"-ba_only-gmTreeCalcs"+dirSuffix);
// // write out a SLT that only contains that node
// File sourceTreeFile = new File(outputDir, "fake_erf_logic_tree.json");
// FileBackedLevel fakeLevel = new FileBackedLevel("ERF Model", "ERF",
// List.of(new FileBackedNode("Branch Averaged ERF", "BranchAveragedERF", 1d, "BA_ERF")));
// LogicTree<?> tempTree = LogicTree.buildExhaustive(List.of(fakeLevel), true);
// Preconditions.checkState(tempTree.size() == 1);
// File sourceFile = new File(outputDir, "fake_erf_slt.zip");
// SolutionLogicTree.FileBuilder builder = new SolutionLogicTree.FileBuilder(sourceFile);
// builder.setSerializeGridded(true);
// builder.solution(FaultSystemSolution.load(COMBINED_SOL), tempTree.getBranch(0));
// builder.close();
// forceInputFileName = sourceFile.getName();
// tempTree.write(sourceTreeFile);
// logicTreeOutputName = "logic_tree.json";
// sourceDir = outputDir;
// int mins = 1440;
// IncludeBackgroundOption bgOp = IncludeBackgroundOption.INCLUDE;


// FOR ALL
Expand All @@ -159,23 +169,32 @@ public static void main(String[] args) throws IOException {
File gridRegFile = new File(outputDir, "gridded_region.geojson");
Feature.write(gridReg.toFeature(), gridRegFile);

List<LogicTreeLevel<? extends LogicTreeNode>> combLevels = new ArrayList<>();
combLevels.addAll(erfTree.getLevels());
combLevels.addAll(gmmLevels);

List<LogicTreeBranch<LogicTreeNode>> combBranches = new ArrayList<>(erfTree.size()*gmmTree.size());

for (LogicTreeBranch<?> branch : erfTree) {
for (LogicTreeBranch<?> gmmBranch : gmmTree) {
LogicTreeBranch<LogicTreeNode> combBranch = new LogicTreeBranch<>(combLevels);
for (LogicTreeNode node : branch)
combBranch.setValue(node);
for (LogicTreeNode node : gmmBranch)
combBranch.setValue(node);
combBranches.add(combBranch);
LogicTree<?> logicTree;
if (gmmSamplesPerERF > 0) {
System.out.println("Pairwise sampling with pairwise="+gmmSamplesPerERF+" branches");
if (erfSamples <= 0)
erfSamples = erfTree.size();
logicTree = AbstractLogicTreeHazardCombiner.pairwiseSampleLogicTrees(erfTree, gmmTree, erfSamples, gmmSamplesPerERF);
} else {
List<LogicTreeLevel<? extends LogicTreeNode>> combLevels = new ArrayList<>();
combLevels.addAll(erfTree.getLevels());
combLevels.addAll(gmmLevels);

List<LogicTreeBranch<LogicTreeNode>> combBranches = new ArrayList<>(erfTree.size()*gmmTree.size());

for (LogicTreeBranch<?> branch : erfTree) {
for (LogicTreeBranch<?> gmmBranch : gmmTree) {
LogicTreeBranch<LogicTreeNode> combBranch = new LogicTreeBranch<>(combLevels);
for (LogicTreeNode node : branch)
combBranch.setValue(node);
for (LogicTreeNode node : gmmBranch)
combBranch.setValue(node);
combBranches.add(combBranch);
}
}

logicTree = LogicTree.fromExisting(combLevels, combBranches);
}
LogicTree<?> logicTree = LogicTree.fromExisting(combLevels, combBranches);

File localLogicTree = new File(outputDir, logicTreeOutputName == null ? sourceTreeFile.getName() : logicTreeOutputName);
logicTree.write(localLogicTree);
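When `gmmSamplesPerERF > 0`, the writer now delegates to `AbstractLogicTreeHazardCombiner.pairwiseSampleLogicTrees(...)` instead of enumerating the full Cartesian product built in the `else` branch: with `erfSamples` ERF branches and `gmmSamplesPerERF` GMM draws per ERF branch, the combined tree has `erfSamples * gmmSamplesPerERF` branches rather than `erfTree.size() * gmmTree.size()`. That method's internals are not shown in this diff; the sketch below captures only the counting idea, sampling indexes uniformly (the real implementation presumably samples in proportion to branch weights), and the tree sizes in `main` are hypothetical:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

public class PairwiseSampleSketch {
	/** draw erfSamples ERF indexes, pairing each with gmmPerERF GMM indexes */
	static List<int[]> pairwiseSample(int erfTreeSize, int gmmTreeSize,
			int erfSamples, int gmmPerERF, long seed) {
		Random rand = new Random(seed);
		List<int[]> pairs = new ArrayList<>(erfSamples*gmmPerERF);
		for (int i=0; i<erfSamples; i++) {
			int erfIndex = rand.nextInt(erfTreeSize);
			for (int j=0; j<gmmPerERF; j++)
				pairs.add(new int[] {erfIndex, rand.nextInt(gmmTreeSize)});
		}
		return pairs;
	}

	public static void main(String[] args) {
		// hypothetical tree sizes; 10000 x 1 matches the commented-out crustal
		// full-gridded configuration earlier in this file
		List<int[]> pairs = pairwiseSample(50000, 324, 10000, 1, 12345678L);
		System.out.println(pairs.size()+" sampled branch pairs"); // 10000
	}
}
```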
@@ -246,15 +265,15 @@ else if (bgOp == IncludeBackgroundOption.EXCLUDE)
argz += " --gridded-seis "+bgOp.name();
String logicTreePath = dirPath+"/"+localLogicTree.getName();
argz += " --logic-tree "+logicTreePath;
if (bgOp == IncludeBackgroundOption.ONLY || bgOp == IncludeBackgroundOption.INCLUDE)
if (!inputFileName.contains("interface_only") && (bgOp == IncludeBackgroundOption.ONLY || bgOp == IncludeBackgroundOption.INCLUDE))
argz += " --quick-grid-calc";
if (combineOnly)
argz += " --combine-only";
argz += " --region "+dirPath+"/"+gridRegFile.getName();
if (vs30 != null)
argz += " --vs30 "+vs30.floatValue();
if (supersample)
argz += " --supersample";
argz += " --supersample-quick";
if (sigmaTrunc != null)
argz += " --gmm-sigma-trunc-one-sided "+sigmaTrunc.floatValue();
if (periods != null) {
@@ -292,7 +311,7 @@ else if (bgOp == IncludeBackgroundOption.EXCLUDE)
if (vs30 != null)
argz += " --vs30 "+vs30.floatValue();
if (supersample)
argz += " --supersample";
argz += " --supersample-quick";
if (sigmaTrunc != null)
argz += " --gmm-sigma-trunc-one-sided "+sigmaTrunc.floatValue();
if (periods != null) {
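Both job-argument builders now pass `--supersample-quick` in place of `--supersample`, and both keep `--gmm-sigma-trunc-one-sided 3`, i.e. GMM ground motions truncated three sigma above the median in log space. For reference, the standard one-sided truncation renormalizes the exceedance probability below the cutoff and zeroes it above; this is the textbook formulation, not necessarily the exact one the hazard code applies:

```java
public class SigmaTruncSketch {
	// standard normal CDF via the Abramowitz & Stegun 7.1.26 erf approximation
	static double normCdf(double z) {
		double x = Math.abs(z)/Math.sqrt(2);
		double t = 1.0/(1.0 + 0.3275911*x);
		double erf = 1 - t*(0.254829592 + t*(-0.284496736 + t*(1.421413741
				+ t*(-1.453152027 + t*1.061405429))))*Math.exp(-x*x);
		return z >= 0 ? 0.5*(1 + erf) : 0.5*(1 - erf);
	}

	/** P(epsilon' > epsilon) for a normal truncated one-sided at +nSigma */
	static double truncatedExceedProb(double epsilon, double nSigma) {
		if (epsilon >= nSigma)
			return 0d; // no motions beyond the truncation level
		double pMax = normCdf(nSigma);
		return (pMax - normCdf(epsilon))/pMax; // renormalized below the cutoff
	}

	public static void main(String[] args) {
		System.out.println(truncatedExceedProb(2.0, 3.0)); // ~0.0214 vs ~0.0228 untruncated
		System.out.println(truncatedExceedProb(3.5, 3.0)); // 0.0
	}
}
```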
