Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -37,4 +37,8 @@ public BoundedUncertainty(UncertaintyBoundType type, double lowerBound, double u
/**
 * Human-readable summary of this uncertainty: bound type, [lower, upper] bounds,
 * and standard deviation, tab-separated, with values rendered at float precision.
 */
@Override
public String toString() {
	String bounds = "["+(float)lowerBound+", "+(float)upperBound+"]";
	return "type="+type.name()+"\tbounds="+bounds+"\tstdDev="+(float)stdDev;
}

/**
 * Tests whether the given value lies within these bounds, inclusive on both ends.
 * @param value value to test
 * @return true if {@code lowerBound <= value <= upperBound}
 */
public boolean contains(double value) {
	return lowerBound <= value && value <= upperBound;
}
}
34 changes: 34 additions & 0 deletions src/main/java/org/opensha/commons/util/MarkdownUtils.java
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,12 @@ public TableBuilder addColumn(Object val) {
return addColumn(val.toString());
}

/**
 * Adds each of the given values as a column on the current line, in order,
 * delegating to {@link #addColumn(Object)} for each one.
 * @param vals column values to add
 * @return this builder, for call chaining
 */
public TableBuilder addColumns(Object... vals) {
	for (int i=0; i<vals.length; i++)
		addColumn(vals[i]);
	return this;
}

public TableBuilder addColumn(String val) {
if (curLine == null)
initNewLine();
Expand Down Expand Up @@ -185,6 +191,34 @@ public List<String> build() {

return strings;
}

/**
 * Converts this table to a CSV representation, keeping any Markdown emphasis
 * markers (bold/italic) in cell values. Equivalent to {@code toCSV(false)}.
 * @return CSV version of this table
 */
public CSVFile<String> toCSV() {
return toCSV(false);
}

/**
 * Converts this table to a CSV representation.
 * @param stripFormatting if true, leading/trailing Markdown emphasis markers
 * ({@code _}, {@code *}, {@code **}) are stripped from each cell value
 * @return CSV version of this table; declared strictly tabular only if every
 * row has the same number of columns
 */
public CSVFile<String> toCSV(boolean stripFormatting) {
	boolean sameSize = true;
	// guard against an empty table; lines.get(0) would previously throw
	int len = lines.isEmpty() ? 0 : lines.get(0).length;
	for (String[] line : lines)
		sameSize = sameSize && line.length == len;
	CSVFile<String> csv = new CSVFile<>(sameSize);
	for (String[] line : lines) {
		List<String> csvLine = new ArrayList<>(line.length);
		for (String val : line) {
			if (stripFormatting)
				val = stripMarkdownEmphasis(val);
			csvLine.add(val);
		}
		csv.addLine(csvLine);
	}
	return csv;
}

/**
 * Strips one layer of leading/trailing Markdown emphasis markers from a cell value:
 * {@code _italic_}, then {@code **bold**} or {@code *italic*}.
 */
private static String stripMarkdownEmphasis(String val) {
	// length checks prevent StringIndexOutOfBoundsException for values such as
	// "_", "*", "**", or "***", where the same character(s) satisfy both the
	// startsWith and endsWith tests and the substring bounds would cross
	if (val.length() >= 2 && val.startsWith("_") && val.endsWith("_"))
		val = val.substring(1, val.length()-1);
	if (val.length() >= 4 && val.startsWith("**") && val.endsWith("**"))
		val = val.substring(2, val.length()-2);
	else if (val.length() >= 2 && val.startsWith("*") && val.endsWith("*"))
		val = val.substring(1, val.length()-1);
	return val;
}
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,12 @@
import org.opensha.sha.earthquake.faultSysSolution.inversion.constraints.ConstraintWeightingType;
import org.opensha.sha.earthquake.faultSysSolution.inversion.sa.ConstraintRange;

import cern.colt.function.tdouble.DoubleComparator;

import com.google.common.base.Preconditions;

/**
* Summary statistics for inversion misfits
* Summary statistics for inversion misfit
*
* @author kevin
*
Expand Down Expand Up @@ -103,11 +105,17 @@ public static class MisfitStats {
public final double energy;
public final double std;
public final double rmse;

private final static double epsilon = 1.0e-10;

/**
 * Creates misfit statistics for the given misfit values, wrapping them in a
 * synthetic {@link ConstraintRange} that spans all rows (0 to misfits.length).
 * @param misfits misfit value for each row
 * @param inequality true if these are inequality-constraint misfits (negative
 * values are treated as satisfied when accumulating statistics)
 * @param weight constraint weight applied when computing energy
 */
public MisfitStats(double[] misfits, boolean inequality, double weight) {
this(misfits, new ConstraintRange(inequality ? "Inequality" : "Equality",
inequality ? "Ineq" : "Eq", 0, misfits.length, inequality, weight, null));
}

/**
 * Relative-epsilon equality test: true only if u and v differ by no more than
 * {@code epsilon} scaled by the magnitude of each value (i.e., relative to both).
 */
private boolean doubleEquals(double u, double v) {
	double diff = Math.abs(u - v);
	// require the difference to be small relative to both operands' magnitudes
	return diff <= epsilon * Math.abs(u) && diff <= epsilon * Math.abs(v);
}

public MisfitStats(double[] misfits, ConstraintRange range) {
this.range = range;
Expand All @@ -123,7 +131,10 @@ public MisfitStats(double[] misfits, ConstraintRange range) {
double energy = 0d;

StandardDeviation std = new StandardDeviation();
int included = 0;
for (double val : misfits) {
if (doubleEquals(val,-1d)) // deal with edge case MFD bins where we're setting artificial rate/stddev values
continue;
if (range.inequality && val < 0d)
val = 0d;
mean += val;
Expand All @@ -133,9 +144,10 @@ public MisfitStats(double[] misfits, ConstraintRange range) {
l2Norm += val*val;
energy += (val*range.weight)*(val*range.weight);
std.increment(val);
included+=1;
}
mean /= (double)numRows;
absMean /= (double)numRows;
mean /= (double)included;
absMean /= (double)included;

this.mean = mean;
this.absMean = absMean;
Expand All @@ -149,8 +161,24 @@ public MisfitStats(double[] misfits, ConstraintRange range) {
this.std = Math.abs(misfits[0]);
else
this.std = std.getResult();
double mse = l2Norm/(double)numRows;
double mse = l2Norm/(double)included;
this.rmse = Math.sqrt(mse);

/*
if (range.shortName.equals("UncertMFDEquality")) {
System.out.println("MFD misfit array:");
System.out.println("number included: " + included);
for (double val : misfits) {
System.out.println(val);
}
}
else {
System.out.println(range.shortName);
System.out.println("numRows = " + numRows + " included = " + included);
}
*/


}

MisfitStats(List<String> csvLine) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,12 @@
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.jfree.data.Range;
import org.opensha.commons.data.CSVFile;
import org.opensha.commons.data.function.DefaultXY_DataSet;
import org.opensha.commons.data.function.HistogramFunction;
import org.opensha.commons.data.function.XY_DataSet;
Expand All @@ -35,6 +37,7 @@
import org.opensha.sha.earthquake.faultSysSolution.modules.PaleoseismicConstraintData;
import org.opensha.sha.earthquake.faultSysSolution.modules.SectSlipRates;
import org.opensha.sha.earthquake.faultSysSolution.modules.SlipAlongRuptureModel;
import org.opensha.sha.earthquake.faultSysSolution.reports.AbstractRupSetPlot;
import org.opensha.sha.earthquake.faultSysSolution.reports.AbstractSolutionPlot;
import org.opensha.sha.earthquake.faultSysSolution.reports.ReportMetadata;
import org.opensha.sha.earthquake.faultSysSolution.reports.ReportPageGen;
Expand All @@ -53,6 +56,11 @@ public List<String> plot(FaultSystemSolution sol, ReportMetadata meta, File reso
PaleoseismicConstraintData data = sol.getRupSet().requireModule(PaleoseismicConstraintData.class);

FaultSystemSolution compSol = meta.hasComparisonSol() ? meta.comparison.sol : null;
if (compSol != null) {
// make sure they use the same sections
if (!sol.getRupSet().areSectionsEquivalentTo(compSol.getRupSet()))
compSol = null;
}

boolean hasRateData = data.hasPaleoRateConstraints();
boolean hasSlipData = data.hasPaleoSlipConstraints();
Expand All @@ -73,7 +81,7 @@ public List<String> plot(FaultSystemSolution sol, ReportMetadata meta, File reso

String title = "Paleo-Rate Data Fits";

lines.addAll(compTable(resourcesDir, "paleo_rate", relPathToResources, title, meta, paleoRates, compPaleoRates).build());
lines.addAll(paleoLines(resourcesDir, "paleo_rate", relPathToResources, title, meta, paleoRates, compPaleoRates));
lines.add("");
}

Expand All @@ -90,15 +98,16 @@ public List<String> plot(FaultSystemSolution sol, ReportMetadata meta, File reso
lines.add(topLink); lines.add("");
}

lines.addAll(compTable(resourcesDir, "paleo_slip", relPathToResources, title, meta, paleoSlips, compPaleoSlips).build());
lines.addAll(paleoLines(resourcesDir, "paleo_slip", relPathToResources, title, meta, paleoSlips, compPaleoSlips));
lines.add("");
}

return lines;
}

private static TableBuilder compTable(File resourcesDir, String prefix, String relPathToResources, String title, ReportMetadata meta,
Map<SectMappedUncertainDataConstraint, Double> data, Map<SectMappedUncertainDataConstraint, Double> compData) throws IOException {
private static List<String> paleoLines(File resourcesDir, String prefix, String relPathToResources, String title,
ReportMetadata meta, Map<SectMappedUncertainDataConstraint, Double> data,
Map<SectMappedUncertainDataConstraint, Double> compData) throws IOException {
TableBuilder table = MarkdownUtils.tableBuilder();

if (compData == null) {
Expand All @@ -118,14 +127,94 @@ data, title, getTruncatedTitle(meta.primary.name)+" Recurrence Rate (/yr)").getN
compData, title, COMP_COLOR).getName()+")");
}

return table;
List<String> lines = new ArrayList<>();
lines.addAll(table.build());
lines.add("");

table = MarkdownUtils.tableBuilder();;
table.initNewLine();
table.addColumns("Site Name", "Mapped Subsection ID", "Mapped Parent Section", "Constraint Mean Rate",
"68% Bounds", "Constraint 95% Bounds",
"Std. Dev", "Solution Rate", "z-score");
if (compData != null)
table.addColumns("Comparison Solution Rate", "Comparison z-score");
table.finalizeLine();

for (SectMappedUncertainDataConstraint val : data.keySet()) {
table.initNewLine();
table.addColumn(val.name);
if (val.sectionIndex >= 0)
table.addColumn(val.sectionIndex).addColumn(meta.primary.rupSet.getFaultSectionData(val.sectionIndex).getParentSectionName());
else
table.addColumn("_N/A_").addColumn("_N/A_");
table.addColumn((float)val.bestEstimate);
BoundedUncertainty bounds68 = val.estimateUncertaintyBounds(UncertaintyBoundType.CONF_68);
BoundedUncertainty bounds95 = val.estimateUncertaintyBounds(UncertaintyBoundType.CONF_95);
table.addColumn("["+(float)bounds68.lowerBound+", "+(float)bounds68.upperBound+"]");
table.addColumn("["+(float)bounds95.lowerBound+", "+(float)bounds95.upperBound+"]");
table.addColumn((float)val.getPreferredStdDev());
Double solVal = data.get(val);
if (solVal == null) {
table.addColumn("_N/A_").addColumn("_N/A_");
} else {
if (bounds68.contains(solVal))
table.addColumn("**"+solVal.floatValue()+"**");
else if (bounds95.contains(solVal))
table.addColumn("_"+solVal.floatValue()+"_");
else
table.addColumn(solVal.floatValue());
double z = (solVal - val.bestEstimate)/val.getPreferredStdDev();
table.addColumn((float)z);
}
if (compData != null) {
Double compVal = compData.get(val);
if (compVal == null) {
// slip ones may be different, see if we have a match
for (SectMappedUncertainDataConstraint oData : compData.keySet()) {
if (oData.name.equals(val.name) && oData.sectionIndex == val.sectionIndex) {
compVal = compData.get(oData);
break;
}
}
}
if (compVal == null) {
table.addColumn("_N/A_").addColumn("_N/A_");
} else {
if (bounds68.contains(compVal))
table.addColumn("**"+compVal.floatValue()+"**");
else if (bounds95.contains(compVal))
table.addColumn("_"+compVal.floatValue()+"_");
else
table.addColumn(compVal.floatValue());
double z = (compVal - val.bestEstimate)/val.getPreferredStdDev();
table.addColumn((float)z);
}
}
table.finalizeLine();
}

File csvFile = new File(resourcesDir, prefix+".csv");
CSVFile<String> csv = table.toCSV(true);
csv.writeToFile(csvFile);

String line = "Paleo data comparison table. For the solution";
if (compData == null)
line += " column";
else
line += " and comparison columns";
line += ", text is **bold if within 68% bounds**, _italicized if within 95% bounds_, and otherwise plain text.";
line += " [Download CSV here]("+resourcesDir.getName()+"/"+csvFile.getName()+").";
lines.add(line);
lines.add("");
lines.addAll(table.build());

return lines;
}

private static Map<SectMappedUncertainDataConstraint, Double> calcSolPaleoRates(
List<? extends SectMappedUncertainDataConstraint> paleoRateData, PaleoProbabilityModel paleoProbModel,
FaultSystemSolution sol) {
Map<SectMappedUncertainDataConstraint, Double> ret = new HashMap<>();
Map<SectMappedUncertainDataConstraint, Double> ret = new LinkedHashMap<>(); // use linked implementation to maintain iteration order

FaultSystemRupSet rupSet = sol.getRupSet();

Expand All @@ -145,7 +234,7 @@ private static Map<SectMappedUncertainDataConstraint, Double> calcSolPaleoSlipRa
List<? extends SectMappedUncertainDataConstraint> paleoSlipData, PaleoSlipProbabilityModel paleoSlipProbModel,
FaultSystemSolution sol) {

Map<SectMappedUncertainDataConstraint, Double> ret = new HashMap<>();
Map<SectMappedUncertainDataConstraint, Double> ret = new LinkedHashMap<>(); // use linked implementation to maintain iteration order

FaultSystemRupSet rupSet = sol.getRupSet();
List<SectMappedUncertainDataConstraint> rateData = PaleoseismicConstraintData.inferRatesFromSlipConstraints(
Expand Down Expand Up @@ -379,11 +468,14 @@ public static void main(String[] args) throws IOException {
File outputDir = new File("/tmp/temp_report");

PaleoDataComparisonPlot plot = new PaleoDataComparisonPlot();

List<AbstractRupSetPlot> plots = List.of(plot);
// List<AbstractRupSetPlot> plots = List.of(plot, new ModulesPlot(), new NamedFaultPlot());

// ReportPageGen report = new ReportPageGen(sol.getRupSet(), sol, "Solution", outputDir, List.of(plot));
ReportMetadata meta = new ReportMetadata(new RupSetMetadata("Sol 1", sol1),
sol2 == null ? null : new RupSetMetadata("Sol 2", sol2));
ReportPageGen report = new ReportPageGen(meta, outputDir, List.of(plot, new ModulesPlot(), new NamedFaultPlot()));
ReportPageGen report = new ReportPageGen(sol1.getRupSet(), sol1, "Solution", outputDir, List.of(plot));
// ReportMetadata meta = new ReportMetadata(new RupSetMetadata("Sol 1", sol1),
// sol2 == null ? null : new RupSetMetadata("Sol 2", sol2));
// ReportPageGen report = new ReportPageGen(meta, outputDir, plots);

report.setReplot(true);
report.generatePage();
Expand Down