diff --git a/src/main/java/org/opensha/commons/data/uncertainty/BoundedUncertainty.java b/src/main/java/org/opensha/commons/data/uncertainty/BoundedUncertainty.java index 42464186d..394ef240a 100644 --- a/src/main/java/org/opensha/commons/data/uncertainty/BoundedUncertainty.java +++ b/src/main/java/org/opensha/commons/data/uncertainty/BoundedUncertainty.java @@ -37,4 +37,8 @@ public BoundedUncertainty(UncertaintyBoundType type, double lowerBound, double u public String toString() { return "type="+type.name()+"\tbounds=["+(float)lowerBound+", "+(float)upperBound+"]\tstdDev="+(float)stdDev; } + + public boolean contains(double value) { + return value >= lowerBound && value <= upperBound; + } } \ No newline at end of file diff --git a/src/main/java/org/opensha/commons/util/MarkdownUtils.java b/src/main/java/org/opensha/commons/util/MarkdownUtils.java index 04d2574a9..9d68daaaa 100644 --- a/src/main/java/org/opensha/commons/util/MarkdownUtils.java +++ b/src/main/java/org/opensha/commons/util/MarkdownUtils.java @@ -93,6 +93,12 @@ public TableBuilder addColumn(Object val) { return addColumn(val.toString()); } + public TableBuilder addColumns(Object... 
vals) { + for (Object val : vals) + addColumn(val); + return this; + } + public TableBuilder addColumn(String val) { if (curLine == null) initNewLine(); @@ -185,6 +191,34 @@ public List build() { return strings; } + + public CSVFile toCSV() { + return toCSV(false); + } + + public CSVFile toCSV(boolean stripFormatting) { + boolean sameSize = true; + int len = lines.get(0).length; + for (String[] line : lines) + sameSize = sameSize && line.length == len; + CSVFile csv = new CSVFile<>(sameSize); + for (String[] line : lines) { + List csvLine = new ArrayList<>(line.length); + for (String val : line) { + if (stripFormatting) { + if (val.startsWith("_") && val.endsWith("_")) + val = val.substring(1, val.length()-1); + if (val.startsWith("**") && val.endsWith("**")) + val = val.substring(2, val.length()-2); + else if (val.startsWith("*") && val.endsWith("*")) + val = val.substring(1, val.length()-1); + } + csvLine.add(val); + } + csv.addLine(csvLine); + } + return csv; + } } /** diff --git a/src/main/java/org/opensha/sha/earthquake/faultSysSolution/modules/InversionMisfitStats.java b/src/main/java/org/opensha/sha/earthquake/faultSysSolution/modules/InversionMisfitStats.java index 9199f12bc..df814ed95 100644 --- a/src/main/java/org/opensha/sha/earthquake/faultSysSolution/modules/InversionMisfitStats.java +++ b/src/main/java/org/opensha/sha/earthquake/faultSysSolution/modules/InversionMisfitStats.java @@ -11,10 +11,12 @@ import org.opensha.sha.earthquake.faultSysSolution.inversion.constraints.ConstraintWeightingType; import org.opensha.sha.earthquake.faultSysSolution.inversion.sa.ConstraintRange; +import cern.colt.function.tdouble.DoubleComparator; + import com.google.common.base.Preconditions; /** - * Summary statistics for inversion misfits + * Summary statistics for inversion misfit * * @author kevin * @@ -103,11 +105,17 @@ public static class MisfitStats { public final double energy; public final double std; public final double rmse; + + private final static double 
epsilon = 1.0e-10; public MisfitStats(double[] misfits, boolean inequality, double weight) { this(misfits, new ConstraintRange(inequality ? "Inequality" : "Equality", inequality ? "Ineq" : "Eq", 0, misfits.length, inequality, weight, null)); } + + private boolean doubleEquals(double u, double v) { + return (Math.abs(u - v) <= MisfitStats.epsilon * Math.abs(u) && Math.abs(u - v) <= MisfitStats.epsilon * Math.abs(v)); + } public MisfitStats(double[] misfits, ConstraintRange range) { this.range = range; @@ -123,7 +131,10 @@ public MisfitStats(double[] misfits, ConstraintRange range) { double energy = 0d; StandardDeviation std = new StandardDeviation(); + int included = 0; for (double val : misfits) { + if (doubleEquals(val,-1d)) // deal with edge case MFD bins where we're setting artificial rate/stddev values + continue; if (range.inequality && val < 0d) val = 0d; mean += val; @@ -133,9 +144,10 @@ public MisfitStats(double[] misfits, ConstraintRange range) { l2Norm += val*val; energy += (val*range.weight)*(val*range.weight); std.increment(val); + included+=1; } - mean /= (double)numRows; - absMean /= (double)numRows; + mean /= (double)included; + absMean /= (double)included; this.mean = mean; this.absMean = absMean; @@ -149,8 +161,24 @@ public MisfitStats(double[] misfits, ConstraintRange range) { this.std = Math.abs(misfits[0]); else this.std = std.getResult(); - double mse = l2Norm/(double)numRows; + double mse = l2Norm/(double)included; this.rmse = Math.sqrt(mse); + + /* + if (range.shortName.equals("UncertMFDEquality")) { + System.out.println("MFD misfit array:"); + System.out.println("number included: " + included); + for (double val : misfits) { + System.out.println(val); + } + } + else { + System.out.println(range.shortName); + System.out.println("numRows = " + numRows + " included = " + included); + } + */ + + } MisfitStats(List csvLine) { diff --git a/src/main/java/org/opensha/sha/earthquake/faultSysSolution/reports/plots/PaleoDataComparisonPlot.java 
b/src/main/java/org/opensha/sha/earthquake/faultSysSolution/reports/plots/PaleoDataComparisonPlot.java index ca86e61e0..f30c1bbac 100644 --- a/src/main/java/org/opensha/sha/earthquake/faultSysSolution/reports/plots/PaleoDataComparisonPlot.java +++ b/src/main/java/org/opensha/sha/earthquake/faultSysSolution/reports/plots/PaleoDataComparisonPlot.java @@ -7,10 +7,12 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.jfree.data.Range; +import org.opensha.commons.data.CSVFile; import org.opensha.commons.data.function.DefaultXY_DataSet; import org.opensha.commons.data.function.HistogramFunction; import org.opensha.commons.data.function.XY_DataSet; @@ -35,6 +37,7 @@ import org.opensha.sha.earthquake.faultSysSolution.modules.PaleoseismicConstraintData; import org.opensha.sha.earthquake.faultSysSolution.modules.SectSlipRates; import org.opensha.sha.earthquake.faultSysSolution.modules.SlipAlongRuptureModel; +import org.opensha.sha.earthquake.faultSysSolution.reports.AbstractRupSetPlot; import org.opensha.sha.earthquake.faultSysSolution.reports.AbstractSolutionPlot; import org.opensha.sha.earthquake.faultSysSolution.reports.ReportMetadata; import org.opensha.sha.earthquake.faultSysSolution.reports.ReportPageGen; @@ -53,6 +56,11 @@ public List plot(FaultSystemSolution sol, ReportMetadata meta, File reso PaleoseismicConstraintData data = sol.getRupSet().requireModule(PaleoseismicConstraintData.class); FaultSystemSolution compSol = meta.hasComparisonSol() ? 
meta.comparison.sol : null; + if (compSol != null) { + // make sure they use the same sections + if (!sol.getRupSet().areSectionsEquivalentTo(compSol.getRupSet())) + compSol = null; + } boolean hasRateData = data.hasPaleoRateConstraints(); boolean hasSlipData = data.hasPaleoSlipConstraints(); @@ -73,7 +81,7 @@ public List plot(FaultSystemSolution sol, ReportMetadata meta, File reso String title = "Paleo-Rate Data Fits"; - lines.addAll(compTable(resourcesDir, "paleo_rate", relPathToResources, title, meta, paleoRates, compPaleoRates).build()); + lines.addAll(paleoLines(resourcesDir, "paleo_rate", relPathToResources, title, meta, paleoRates, compPaleoRates)); lines.add(""); } @@ -90,15 +98,16 @@ public List plot(FaultSystemSolution sol, ReportMetadata meta, File reso lines.add(topLink); lines.add(""); } - lines.addAll(compTable(resourcesDir, "paleo_slip", relPathToResources, title, meta, paleoSlips, compPaleoSlips).build()); + lines.addAll(paleoLines(resourcesDir, "paleo_slip", relPathToResources, title, meta, paleoSlips, compPaleoSlips)); lines.add(""); } return lines; } - private static TableBuilder compTable(File resourcesDir, String prefix, String relPathToResources, String title, ReportMetadata meta, - Map data, Map compData) throws IOException { + private static List paleoLines(File resourcesDir, String prefix, String relPathToResources, String title, + ReportMetadata meta, Map data, + Map compData) throws IOException { TableBuilder table = MarkdownUtils.tableBuilder(); if (compData == null) { @@ -118,14 +127,94 @@ data, title, getTruncatedTitle(meta.primary.name)+" Recurrence Rate (/yr)").getName()+")"); compData, title, COMP_COLOR).getName()+")"); } - return table; + List lines = new ArrayList<>(); + lines.addAll(table.build()); + lines.add(""); + + table = MarkdownUtils.tableBuilder(); + table.initNewLine(); + table.addColumns("Site Name", "Mapped Subsection ID", "Mapped Parent Section", "Constraint Mean Rate", + "68% Bounds", "Constraint 95% Bounds", + "Std. 
Dev", "Solution Rate", "z-score"); + if (compData != null) + table.addColumns("Comparison Solution Rate", "Comparison z-score"); + table.finalizeLine(); + + for (SectMappedUncertainDataConstraint val : data.keySet()) { + table.initNewLine(); + table.addColumn(val.name); + if (val.sectionIndex >= 0) + table.addColumn(val.sectionIndex).addColumn(meta.primary.rupSet.getFaultSectionData(val.sectionIndex).getParentSectionName()); + else + table.addColumn("_N/A_").addColumn("_N/A_"); + table.addColumn((float)val.bestEstimate); + BoundedUncertainty bounds68 = val.estimateUncertaintyBounds(UncertaintyBoundType.CONF_68); + BoundedUncertainty bounds95 = val.estimateUncertaintyBounds(UncertaintyBoundType.CONF_95); + table.addColumn("["+(float)bounds68.lowerBound+", "+(float)bounds68.upperBound+"]"); + table.addColumn("["+(float)bounds95.lowerBound+", "+(float)bounds95.upperBound+"]"); + table.addColumn((float)val.getPreferredStdDev()); + Double solVal = data.get(val); + if (solVal == null) { + table.addColumn("_N/A_").addColumn("_N/A_"); + } else { + if (bounds68.contains(solVal)) + table.addColumn("**"+solVal.floatValue()+"**"); + else if (bounds95.contains(solVal)) + table.addColumn("_"+solVal.floatValue()+"_"); + else + table.addColumn(solVal.floatValue()); + double z = (solVal - val.bestEstimate)/val.getPreferredStdDev(); + table.addColumn((float)z); + } + if (compData != null) { + Double compVal = compData.get(val); + if (compVal == null) { + // slip ones may be different, see if we have a match + for (SectMappedUncertainDataConstraint oData : compData.keySet()) { + if (oData.name.equals(val.name) && oData.sectionIndex == val.sectionIndex) { + compVal = compData.get(oData); + break; + } + } + } + if (compVal == null) { + table.addColumn("_N/A_").addColumn("_N/A_"); + } else { + if (bounds68.contains(compVal)) + table.addColumn("**"+compVal.floatValue()+"**"); + else if (bounds95.contains(compVal)) + table.addColumn("_"+compVal.floatValue()+"_"); + else + 
table.addColumn(compVal.floatValue()); + double z = (compVal - val.bestEstimate)/val.getPreferredStdDev(); + table.addColumn((float)z); + } + } + table.finalizeLine(); + } + + File csvFile = new File(resourcesDir, prefix+".csv"); + CSVFile csv = table.toCSV(true); + csv.writeToFile(csvFile); + + String line = "Paleo data comparison table. For the solution"; + if (compData == null) + line += " column"; + else + line += " and comparison columns"; + line += ", text is **bold if within 68% bounds**, _italicized if within 95% bounds_, and otherwise plain text."; + line += " [Download CSV here]("+relPathToResources+"/"+csvFile.getName()+")."; + lines.add(line); + lines.add(""); + lines.addAll(table.build()); + return lines; } private static Map calcSolPaleoRates( List paleoRateData, PaleoProbabilityModel paleoProbModel, FaultSystemSolution sol) { - Map ret = new HashMap<>(); + Map ret = new LinkedHashMap<>(); // use linked implementation to maintain iteration order FaultSystemRupSet rupSet = sol.getRupSet(); @@ -145,7 +234,7 @@ private static Map calcSolPaleoSlipRa List paleoSlipData, PaleoSlipProbabilityModel paleoSlipProbModel, FaultSystemSolution sol) { - Map ret = new HashMap<>(); + Map ret = new LinkedHashMap<>(); // use linked implementation to maintain iteration order FaultSystemRupSet rupSet = sol.getRupSet(); List rateData = PaleoseismicConstraintData.inferRatesFromSlipConstraints( @@ -379,11 +468,14 @@ public static void main(String[] args) throws IOException { File outputDir = new File("/tmp/temp_report"); PaleoDataComparisonPlot plot = new PaleoDataComparisonPlot(); + + List plots = List.of(plot); +// List plots = List.of(plot, new ModulesPlot(), new NamedFaultPlot()); -// ReportPageGen report = new ReportPageGen(sol.getRupSet(), sol, "Solution", outputDir, List.of(plot)); - ReportMetadata meta = new ReportMetadata(new RupSetMetadata("Sol 1", sol1), - sol2 == null ? 
null : new RupSetMetadata("Sol 2", sol2)); - ReportPageGen report = new ReportPageGen(meta, outputDir, List.of(plot, new ModulesPlot(), new NamedFaultPlot())); + ReportPageGen report = new ReportPageGen(sol1.getRupSet(), sol1, "Solution", outputDir, List.of(plot)); +// ReportMetadata meta = new ReportMetadata(new RupSetMetadata("Sol 1", sol1), +// sol2 == null ? null : new RupSetMetadata("Sol 2", sol2)); +// ReportPageGen report = new ReportPageGen(meta, outputDir, plots); report.setReplot(true); report.generatePage();