[PATCH 3 of 3] Some work on SINFO FlowDepth
Wald Commits
scm-commit at wald.intevation.org
Thu Feb 8 18:48:35 CET 2018
# HG changeset patch
# User gernotbelger
# Date 1518112104 -3600
# Node ID 9f7a285b0ee3380ce9afae167f27d3e7b646351e
# Parent 23264d1a528f0ee1cd8c9982d26f0cbb9f969de6
Some work on SINFO FlowDepth
diff -r 23264d1a528f -r 9f7a285b0ee3 artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/BedHeightInfo.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/BedHeightInfo.java Thu Feb 08 18:48:24 2018 +0100
@@ -0,0 +1,75 @@
+/** Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ * Björnsen Beratende Ingenieure GmbH
+ * Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+package org.dive4elements.river.artifacts.sinfo.flowdepth;
+
+import org.dive4elements.river.model.BedHeight;
+
+/**
+ * Basic infos about a {@link BedHeight}.
+ *
+ * @author Gernot Belger
+ */
+final class BedHeightInfo {
+
+ private final Integer year;
+
+ private final String description;
+
+ private final String type;
+
+ private final String locationSystem;
+
+ private final String curElevationModelName;
+
+ private final String oldElevationModelName;
+
+ public static BedHeightInfo from(final BedHeight bedHeight) {
+ return new BedHeightInfo(bedHeight);
+ }
+
+ private BedHeightInfo(final BedHeight bedHeight) {
+
+ this.year = bedHeight.getYear();
+
+ // private String evaluationBy;
+ this.description = bedHeight.getDescription();
+
+ this.type = bedHeight.getType().getName();
+
+ this.locationSystem = bedHeight.getLocationSystem().getName();
+
+ this.curElevationModelName = bedHeight.getCurElevationModel().getName();
+        this.oldElevationModelName = bedHeight.getOldElevationModel().getName();
+ }
+
+ public Integer getYear() {
+ return this.year;
+ }
+
+ public String getDescription() {
+ return this.description;
+ }
+
+ public String getType() {
+ return this.type;
+ }
+
+ public String getLocationSystem() {
+ return this.locationSystem;
+ }
+
+ public String getCurElevationModelName() {
+ return this.curElevationModelName;
+ }
+
+ public String getOldElevationModelName() {
+ return this.oldElevationModelName;
+ }
+}
diff -r 23264d1a528f -r 9f7a285b0ee3 artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthCalculation.java
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthCalculation.java Thu Feb 08 18:47:36 2018 +0100
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthCalculation.java Thu Feb 08 18:48:24 2018 +0100
@@ -1,6 +1,6 @@
/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
- * Software engineering by
- * Björnsen Beratende Ingenieure GmbH
+ * Software engineering by
+ * Björnsen Beratende Ingenieure GmbH
* Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
*
* This file is Free Software under the GNU AGPL (>=v3)
@@ -17,6 +17,7 @@
import org.dive4elements.river.artifacts.model.Calculation;
import org.dive4elements.river.artifacts.model.CalculationResult;
import org.dive4elements.river.artifacts.model.LocationProvider;
+import org.dive4elements.river.artifacts.model.QKms;
import org.dive4elements.river.artifacts.model.WKms;
import org.dive4elements.river.artifacts.resources.Resources;
import org.dive4elements.river.artifacts.sinfo.SINFOArtifact;
@@ -32,127 +33,159 @@
private static final String CSV_NOT_IN_GAUGE_RANGE = "export.waterlevel.csv.not.in.gauge.range";
- private CallContext context;
+ private final CallContext context;
- public FlowDepthCalculation( final CallContext context ) {
- this.context = context;
- }
-
+ public FlowDepthCalculation( final CallContext context ) {
+ this.context = context;
+ }
+
public CalculationResult calculate(final SINFOArtifact sinfo) {
-
+
// FIXME: find user of this artifact; probably only possible by selecting the collection that contains this artifact...
final String user = "unbekannt";
-
- /* access input data */
- final FlowDepthAccess access = new FlowDepthAccess(sinfo);
- final River river = access.getRiver();
-
- final Collection<DifferencesPair> diffPairs = access.getDifferencePairs();
-
- final double from = access.getFrom();
- final double to = access.getTo();
- final boolean useTkh = access.isUseTransportBodies();
+ /* access input data */
+ final FlowDepthAccess access = new FlowDepthAccess(sinfo);
+ final River river = access.getRiver();
- /* calculate results for each diff pair */
- final Calculation problems = new Calculation();
+ final Collection<DifferencesPair> diffPairs = access.getDifferencePairs();
- final List<Gauge> gauges = river.determineGauges(from, to);
- final GaugeIndex gaugeIndex = new GaugeIndex(gauges);
+ final double from = access.getFrom();
+ final double to = access.getTo();
- final String calcModeLabel = Resources.getMsg(context.getMeta(),sinfo.getCalculationMode().name() );
-
-
+ final boolean useTkh = access.isUseTransportBodies();
+
+ /* calculate results for each diff pair */
+ final Calculation problems = new Calculation();
+
+ final List<Gauge> gauges = river.determineGauges(from, to);
+ final GaugeIndex gaugeIndex = new GaugeIndex(gauges);
+
+ final String calcModeLabel = Resources.getMsg(this.context.getMeta(),sinfo.getCalculationMode().name() );
+
final FlowDepthCalculationResults results = new FlowDepthCalculationResults(calcModeLabel, user, river, from, to, useTkh);
- for (final DifferencesPair diffPair : diffPairs) {
- final FlowDepthCalculationResult result = calculateResult( river, from, to, diffPair, problems, gaugeIndex );
- if( result != null )
- results.addResult(result);
- }
-
- return new CalculationResult(results,problems);
+ for (final DifferencesPair diffPair : diffPairs) {
+ final FlowDepthCalculationResult result = calculateResult( river, from, to, diffPair, problems, gaugeIndex );
+ if( result != null )
+ results.addResult(result);
+ }
+
+ return new CalculationResult(results,problems);
}
- private FlowDepthCalculationResult calculateResult(final River river, final double from, final double to, final DifferencesPair diffPair, final Calculation problems, final GaugeIndex gaugeIndex) {
+ private FlowDepthCalculationResult calculateResult(final River river, final double from, final double to, final DifferencesPair diffPair, final Calculation problems, final GaugeIndex gaugeIndex) {
- /* access real input data from database */
- final String soundingId = diffPair.getSoundingId();
- final String wstId = diffPair.getWstId();
+ /* access real input data from database */
+ final String soundingId = diffPair.getSoundingId();
+ final String wstId = diffPair.getWstId();
- final BedHeight bedHeight = loadBedHeight( soundingId, from, to );
- final WKms wstKms = new WDifferencesState().getWKms(wstId, context, from, to);
- if( bedHeight == null || wstKms == null )
- return null;
+ final BedHeight bedHeight = loadBedHeight( soundingId, from, to );
+ if( bedHeight == null )
+ {
+ final String message = Resources.format(this.context.getMeta(), "Failed to access sounding with id '{0}'", soundingId);
+ problems.addProblem(message);
+ return null;
+ }
- final FlowDepthCalculationResult resultData = new FlowDepthCalculationResult(wstKms.getName(), bedHeight.getDescription());
+ final WKms wstKms = new WDifferencesState().getWKms(wstId, this.context, from, to);
+ if( wstKms == null )
+ {
+ final String message = Resources.format(this.context.getMeta(), "Failed to access waterlevel with id '{0}'", wstId);
+ problems.addProblem(message);
+ return null;
+ }
- final String notinrange = Resources.getMsg(context.getMeta(), CSV_NOT_IN_GAUGE_RANGE, CSV_NOT_IN_GAUGE_RANGE);
+ // FIXME: woher bekommen?
+ final int wspYear = 0;
- // TODO: unklarheiten
- // 'idealerweise alle 100m' was heisst das? kann doch nur durch datenverfügbarkeit bestimmt werden
- // wie mit unterschiedlichen Ranges umgehen? Schnitt bilden? Fehlermeldung? ...?
- // wie interpolieren? wst interpolieren? peilung interpolieren?
+ final String wspLabel = wstKms.getName();
+ final String soundingLabel = bedHeight.getDescription();
+ final String label = String.format("%s - %s", wspLabel, soundingLabel);
- // FIXME: für die Berechnung der TKH sind weitere 'in FLYS vorliegende' Daten notwendig.
- // aktuell unklar ob das durch andere Barten berechnete Werte oder Basisdaten sind
- // TODO: check Vergleiche BArt 'Transportkörperhöhen'
-
- // TODO: Berechnung der Transportkörperhöhen
- // - woher kommen die zusätzlichen eingangsdaten? sind das fixe daten pro gewässer? --> falls ja, warum nicht einmal berechnen und in db ablegen?
+ final BedHeightInfo sounding = BedHeightInfo.from(bedHeight);
+ final WstInfo wstInfo = new WstInfo(wspLabel, wspYear);
- final String bedHeightLabel = bedHeight.getDescription();
- final String wstLabel = wstKms.getName();
+ final FlowDepthCalculationResult resultData = new FlowDepthCalculationResult(label, wstInfo, sounding);
- for (int i = 0; i < wstKms.size(); i++) {
+ final String notinrange = Resources.getMsg(this.context.getMeta(), CSV_NOT_IN_GAUGE_RANGE, CSV_NOT_IN_GAUGE_RANGE);
- final double km = wstKms.getKm(i);
- final double wst = wstKms.getW(i);
- // FIXME: interpolate from bedheights?
- final double meanBedHeight = 79.32;
+ // TODO: prüfe diskretisierung wsp --> > 1000m --> Fehlermeldung
- final double flowDepth = wst - meanBedHeight;
-
- final double tkh = 0;
- final double flowDepthTkh = flowDepth - tkh;
-
- // FIXME: discharge not available for all wst? or any?
- final double discharge = 0.0;
+ // TODO: Berechnung der Transportkörperhöhen
+ // - woher kommen die zusätzlichen eingangsdaten? sind das fixe daten pro gewässer? --> falls ja, warum nicht einmal berechnen und in db ablegen?
- // REMARK: access the location once only during calculation
- final String location = LocationProvider.getLocation(river.getName(), km);
-
- // REMARK: access the gauge once only during calculation
- final Gauge gauge = gaugeIndex.findGauge(km);
- final String gaugeLabel = gauge == null ? notinrange : gauge.getName();
-
- resultData.addRow( km, flowDepth, flowDepthTkh, tkh, wst, discharge, wstLabel, gaugeLabel, meanBedHeight, bedHeightLabel, location );
- }
-
- return resultData;
- }
+ // Benötigte Daten
+ // - Abfluss / Station
+ // - kein Abfluss --> Fehler
+ if( !(wstKms instanceof QKms))
+ {
+ final String message = Resources.format(this.context.getMeta(), "{0}: keine Abflussdaten vorhanden, Transportkörperhöhenberechnung nicht möglich", label);
+ problems.addProblem(message);
+ // TODO: keine Berechnung TKH
+ }
- private BedHeight loadBedHeight(final String soundingId, final double from, final double to) {
-
- // FIXME: absolutely unbelievable....
- // The way how bed-heights (and other data too) is accessed is different for nearly ever calculation-type throughout flys.
- // The knowledge on how to parse the datacage-ids is spread thorugh the complete code-base...
+ // - Sohlbeschaffenheit (D50 Korndurchmesser aus Seddb)
+ // - Abhängig von Peiljahr
+ // - kein D50 vorhanden --> Fehler
+ // - Art der Gewässersohle (starr/mobil)
- // We use here the way on how bed-heights are accessed by the BedDifferenceAccess/BedDifferenceCalculation, but this is plain random
- final String[] parts = soundingId.split(";");
- final BedHeightsArtifact artifact = (BedHeightsArtifact) RiverUtils.getArtifact(parts[0], context);
+ final String bedHeightLabel = bedHeight.getDescription();
+ final String wstLabel = wstKms.getName();
- final Integer bedheightId = artifact.getDataAsInteger("height_id");
- // FIXME: this only works with type 'single'; unclear on how to distinguish from epoch data (or whatever the other type means)
- // Luckily, the requirement is to only access 'single' data here.
- // final String bedheightType = artifact.getDataAsString("type");
-
- // FIXME: BedDifferences uses this, but we also need the metadata of the BedHeight
- // FIXME: second absolutely awful thing: BedHeight is a hibernate binding class, accessing the database via hibernate stuff
- // BedHeightFactory uses its own (direct) way of accessing the data, with its own implemented data classes.
- //return BedHeightFactory.getHeight(bedheightType, bedheightId, from, to);
-
- return BedHeight.getBedHeightById(bedheightId);
- }
+ // FIXME: basis der diskretisierung ist bedHeight, die wspl werden interpoliert
+ for (int i = 0; i < wstKms.size(); i++) {
+
+ final double km = wstKms.getKm(i);
+ final double wst = wstKms.getW(i);
+ // FIXME: interpolate from bedheights?
+ final double meanBedHeight = 79.32;
+
+ final double flowDepth = wst - meanBedHeight;
+
+ final double discharge = wstKms instanceof QKms ? ((QKms) wstKms).getQ(i) : Double.NaN;
+
+ // FIXME: calculate tkh
+ final double tkh = 0;
+ final double flowDepthTkh = flowDepth - tkh;
+
+
+ // REMARK: access the location once only during calculation
+ final String location = LocationProvider.getLocation(river.getName(), km);
+
+ // REMARK: access the gauge once only during calculation
+ // FIXME: copy specific handling from original wst
+ final Gauge gauge = gaugeIndex.findGauge(km);
+ final String gaugeLabel = gauge == null ? notinrange : gauge.getName();
+
+ resultData.addRow( km, flowDepth, flowDepthTkh, tkh, wst, discharge, wstLabel, gaugeLabel, meanBedHeight, bedHeightLabel, location );
+ }
+
+ return resultData;
+ }
+
+ private BedHeight loadBedHeight(final String soundingId, final double from, final double to) {
+
+ // FIXME: absolutely unbelievable....
+        // The way how bed-heights (and other data too) is accessed is different for nearly every calculation-type throughout flys.
+ // The knowledge on how to parse the datacage-ids is spread through the complete code-base...
+
+ // We use here the way on how bed-heights are accessed by the BedDifferenceAccess/BedDifferenceCalculation, but this is plain random
+ final String[] parts = soundingId.split(";");
+
+ final BedHeightsArtifact artifact = (BedHeightsArtifact) RiverUtils.getArtifact(parts[0], this.context);
+
+ final Integer bedheightId = artifact.getDataAsInteger("height_id");
+ // FIXME: this only works with type 'single'; unclear on how to distinguish from epoch data (or whatever the other type means)
+ // Luckily, the requirement is to only access 'single' data here.
+ // final String bedheightType = artifact.getDataAsString("type");
+
+ // FIXME: BedDifferences uses this, but we also need the metadata of the BedHeight
+ // FIXME: second absolutely awful thing: BedHeight is a hibernate binding class, accessing the database via hibernate stuff
+ // BedHeightFactory uses its own (direct) way of accessing the data, with its own implemented data classes.
+ //return BedHeightFactory.getHeight(bedheightType, bedheightId, from, to);
+
+ return BedHeight.getBedHeightById(bedheightId);
+ }
}
\ No newline at end of file
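Note on the open FIXME above ("interpolate from bedheights?"): the loop in calculateResult() still uses the hard-coded placeholder meanBedHeight = 79.32. Below is a minimal sketch of how the sounding's bed heights could be linearly interpolated onto a waterlevel station; it is illustrative only and not part of this patch, and the sorted station/height arrays it works on are an assumption, not the actual BedHeight API.

import java.util.Arrays;

/** Illustrative sketch only; the array-based accessors are assumptions, not the real BedHeight API. */
final class BedHeightInterpolation {

    private BedHeightInterpolation() {
    }

    /**
     * Linearly interpolates a bed height at station km from strictly increasing
     * station values and their corresponding bed heights.
     */
    static double interpolate(final double[] stations, final double[] heights, final double km) {
        final int pos = Arrays.binarySearch(stations, km);
        if (pos >= 0)
            return heights[pos]; // exact station match

        final int insert = -pos - 1;
        if (insert == 0 || insert == stations.length)
            return Double.NaN; // km lies outside the sounding range

        final double x0 = stations[insert - 1];
        final double x1 = stations[insert];
        final double y0 = heights[insert - 1];
        final double y1 = heights[insert];
        return y0 + (y1 - y0) * (km - x0) / (x1 - x0);
    }
}

A NaN result could then be reported via problems.addProblem(...), analogous to the handling of the missing discharge data above.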
diff -r 23264d1a528f -r 9f7a285b0ee3 artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthCalculationResult.java
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthCalculationResult.java Thu Feb 08 18:47:36 2018 +0100
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthCalculationResult.java Thu Feb 08 18:48:24 2018 +0100
@@ -1,6 +1,6 @@
/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
- * Software engineering by
- * Björnsen Beratende Ingenieure GmbH
+ * Software engineering by
+ * Björnsen Beratende Ingenieure GmbH
* Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
*
* This file is Free Software under the GNU AGPL (>=v3)
@@ -23,46 +23,53 @@
*/
class FlowDepthCalculationResult
implements Serializable {
-
- private static final long serialVersionUID = 1L;
- private final Collection<FlowDepthRow> rows = new ArrayList<>();
+ private static final long serialVersionUID = 1L;
- private final String wstLabel;
-
- private final String soundingLabel;
+ private final Collection<FlowDepthRow> rows = new ArrayList<>();
- public FlowDepthCalculationResult(final String wstLabel, final String soundingLabel) {
- this.wstLabel = wstLabel;
- this.soundingLabel = soundingLabel;
- }
+ private final String label;
- public void addRow(double station, double flowDepth, double flowDepthWithTkh, double tkh, double waterlevel, double discharge, String waterlevelLabel, String gauge, double meanBedHeight, String sondageLabel, String location) {
- rows.add(new FlowDepthRow(station, flowDepth, flowDepthWithTkh, tkh, waterlevel, discharge, waterlevelLabel, gauge, meanBedHeight, sondageLabel, location));
- }
-
- public String getWstLabel() {
- return this.wstLabel;
- }
+ private final BedHeightInfo sounding;
- public String getSoundingLabel() {
- return this.soundingLabel;
- }
+ private final WstInfo wst;
- public Collection<FlowDepthRow> getRows() {
- return Collections.unmodifiableCollection( rows );
- }
-
+ public FlowDepthCalculationResult(final String label, final WstInfo wst, final BedHeightInfo sounding) {
+ this.label = label;
+ this.wst = wst;
+ this.sounding = sounding;
+ }
+
+ public void addRow(final double station, final double flowDepth, final double flowDepthWithTkh, final double tkh, final double waterlevel, final double discharge, final String waterlevelLabel, final String gauge, final double meanBedHeight, final String sondageLabel, final String location) {
+ this.rows.add(new FlowDepthRow(station, flowDepth, flowDepthWithTkh, tkh, waterlevel, discharge, waterlevelLabel, gauge, meanBedHeight, sondageLabel, location));
+ }
+
+ public String getLabel() {
+ return this.label;
+ }
+
+ public WstInfo getWst() {
+ return this.wst;
+ }
+
+ public BedHeightInfo getSounding() {
+ return this.sounding;
+ }
+
+ public Collection<FlowDepthRow> getRows() {
+ return Collections.unmodifiableCollection( this.rows );
+ }
+
public double[][] getFlowDepthPoints() {
- TDoubleArrayList xPoints = new TDoubleArrayList(rows.size());
- TDoubleArrayList yPoints = new TDoubleArrayList(rows.size());
-
- for (FlowDepthRow row : rows) {
- xPoints.add(row.getStation());
- yPoints.add(row.getFlowDepth());
- }
-
+ final TDoubleArrayList xPoints = new TDoubleArrayList(this.rows.size());
+ final TDoubleArrayList yPoints = new TDoubleArrayList(this.rows.size());
+
+ for (final FlowDepthRow row : this.rows) {
+ xPoints.add(row.getStation());
+ yPoints.add(row.getFlowDepth());
+ }
+
return new double[][] { xPoints.toNativeArray(), yPoints.toNativeArray() };
}
}
\ No newline at end of file
diff -r 23264d1a528f -r 9f7a285b0ee3 artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java Thu Feb 08 18:47:36 2018 +0100
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java Thu Feb 08 18:48:24 2018 +0100
@@ -17,6 +17,7 @@
import java.util.Locale;
import java.util.Map;
+import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dive4elements.artifacts.CallMeta;
import org.dive4elements.artifacts.common.utils.Config;
@@ -44,7 +45,7 @@
// REMARK: must be public because its registered in generators.xml
public class FlowDepthExporter extends AbstractExporter {
- /** The log used in this exporter.*/
+ /** The log used in this exporter. */
private static Logger log = Logger.getLogger(FlowDepthExporter.class);
private static final String CSV_KM_HEADER = "sinfo.export.flow_depth.csv.header.km";
@@ -59,52 +60,58 @@
private static final String CSV_MEAN_BED_HEIGHT_HEADER_SHORT = "sinfo.export.flow_depth.csv.header.mean_bed_height.short";
private static final String CSV_SOUNDING_HEADER = "sinfo.export.flow_depth.csv.header.sounding";
private static final String CSV_LOCATION_HEADER = "sinfo.export.flow_depth.csv.header.location";
-
- private static final String CSV_META_HEADER_RESULT =
- "sinfo.export.flow_depth.csv.meta.header.result";
- private static final String CSV_META_HEADER_RESULT_LABEL =
- "sinfo.export.flow_depth.csv.meta.header.result.label";
-
- private static final String CSV_META_VERSION =
- "sinfo.export.flow_depth.csv.meta.version";
+ private static final String CSV_META_HEADER_RESULT = "sinfo.export.flow_depth.csv.meta.header.result";
- private static final String CSV_META_VERSION_LABEL =
- "sinfo.export.flow_depth.csv.meta.version.label";
-
- private static final String CSV_META_USER =
- "sinfo.export.flow_depth.csv.meta.user";
-
- private static final String CSV_META_USER_LABEL =
- "sinfo.export.flow_depth.csv.meta.user.label";
-
- private static final String CSV_META_CREATION =
- "sinfo.export.flow_depth.csv.meta.creation";
+ private static final String CSV_META_HEADER_RESULT_LABEL = "sinfo.export.flow_depth.csv.meta.header.result.label";
- private static final String CSV_META_CREATION_LABEL =
- "sinfo.export.flow_depth.csv.meta.creation.label";
-
- private static final String CSV_META_RIVER =
- "sinfo.export.flow_depth.csv.meta.river";
+ private static final String CSV_META_VERSION = "sinfo.export.flow_depth.csv.meta.version";
- private static final String CSV_META_RIVER_LABEL =
- "sinfo.export.flow_depth.csv.meta.river.label";
-
- private static final String CSV_META_HEADER_SOUNDING =
- "sinfo.export.flow_depth.csv.meta.header.sounding";
+ private static final String CSV_META_VERSION_LABEL = "sinfo.export.flow_depth.csv.meta.version.label";
- private static final String CSV_META_HEADER_WATERLEVEL =
- "sinfo.export.flow_depth.csv.meta.header.waterlevel";
+ private static final String CSV_META_USER = "sinfo.export.flow_depth.csv.meta.user";
- private static final String CSV_META_RANGE =
- "sinfo.export.flow_depth.csv.meta.range";
+ private static final String CSV_META_USER_LABEL = "sinfo.export.flow_depth.csv.meta.user.label";
- private static final String CSV_META_RANGE_LABEL =
- "sinfo.export.flow_depth.csv.meta.range.label";
-
+ private static final String CSV_META_CREATION = "sinfo.export.flow_depth.csv.meta.creation";
+
+ private static final String CSV_META_CREATION_LABEL = "sinfo.export.flow_depth.csv.meta.creation.label";
+
+ private static final String CSV_META_RIVER = "sinfo.export.flow_depth.csv.meta.river";
+
+ private static final String CSV_META_RIVER_LABEL = "sinfo.export.flow_depth.csv.meta.river.label";
+
+ private static final String CSV_META_HEADER_SOUNDING = "sinfo.export.flow_depth.csv.meta.header.sounding";
+
+ private static final String CSV_META_HEADER_SOUNDING_YEAR = "sinfo.export.flow_depth.csv.meta.header.sounding.year";
+
+ private static final String CSV_META_HEADER_SOUNDING_TYPE = "sinfo.export.flow_depth.csv.meta.header.sounding.type";
+
+ private static final String CSV_META_HEADER_SOUNDING_PRJ = "sinfo.export.flow_depth.csv.meta.header.sounding.prj";
+
+ private static final String CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL = "sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel";
+
+ private static final String CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL_ORIGINAL = "sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel.original";
+
+ private static final String CSV_META_HEADER_WATERLEVEL = "sinfo.export.flow_depth.csv.meta.header.waterlevel";
+
+ private static final String CSV_META_HEADER_WATERLEVEL_NAME = "sinfo.export.flow_depth.csv.meta.header.waterlevel.name";
+
+ private static final String CSV_META_HEADER_WATERLEVEL_ELEVATION_MODEL = "sinfo.export.flow_depth.csv.meta.header.waterlevel.elevationmodel";
+
+ private static final String CSV_META_HEADER_WATERLEVEL_EVALUATOR = "sinfo.export.flow_depth.csv.meta.header.waterlevel.evaluator";
+
+ private static final String CSV_META_HEADER_WATERLEVEL_GAUGE = "sinfo.export.flow_depth.csv.meta.header.waterlevel.gauge";
+
+ private static final String CSV_META_HEADER_WATERLEVEL_YEAR = "sinfo.export.flow_depth.csv.meta.header.waterlevel.year";
+
+ private static final String CSV_META_RANGE = "sinfo.export.flow_depth.csv.meta.range";
+
+ private static final String CSV_META_RANGE_LABEL = "sinfo.export.flow_depth.csv.meta.range.label";
+
private static final String CSV_META_HEIGHT_UNIT_RIVER = "sinfo.export.flow_depth.csv.meta.height_unit.river";
-
- private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper";
+
+ private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper";
private static final String UNIT_M = "m";
@@ -112,37 +119,37 @@
private static final String UNIT_CUBIC_M = "m³/s";
- /** The storage that contains the current calculation result.*/
+ /** The storage that contains the current calculation result. */
private FlowDepthCalculationResults data = null;
- /**
- * Formats header with unit
- */
- private String msgUnit(final String key, String unit ) {
-
- final String msg = msg(key);
- return String.format("%s [%s]", msg, unit);
+ /**
+ * Formats header with unit
+ */
+ private String msgUnit(final String key, final String unit) {
+
+ final String msg = msg(key);
+ return String.format("%s [%s]", msg, unit);
}
-
+
@Override
- protected void addData(Object d) {
- /* reset */
- data = null;
+ protected void addData(final Object d) {
+ /* reset */
+ this.data = null;
if (d instanceof CalculationResult) {
- final Object dat = ((CalculationResult)d).getData();
- if( dat != null )
- data = (FlowDepthCalculationResults)dat;
+ final Object dat = ((CalculationResult) d).getData();
+ if (dat != null)
+ this.data = (FlowDepthCalculationResults) dat;
}
}
-
+
@Override
- protected void writeCSVData(CSVWriter writer) {
+ protected void writeCSVData(final CSVWriter writer) {
log.info("FlowDepthExporter.writeCSVData");
/* fetch calculation results */
- final FlowDepthCalculationResults results = data;
+ final FlowDepthCalculationResults results = this.data;
final boolean useTkh = results.isUseTkh();
final River river = results.getRiver();
@@ -152,116 +159,115 @@
writeCSVHeader(writer, river, useTkh);
for (final FlowDepthCalculationResult result : results.getResults()) {
- writeCSVFlowDepthResult(writer, result, useTkh);
- }
+ writeCSVFlowDepthResult(writer, result, useTkh);
+ }
}
- private void writeCSVFlowDepthResult(final CSVWriter writer, final FlowDepthCalculationResult result, final boolean useTkh) {
+ private void writeCSVFlowDepthResult(final CSVWriter writer, final FlowDepthCalculationResult result,
+ final boolean useTkh) {
+
+ /* first some specific metadata */
+ final BedHeightInfo sounding = result.getSounding();
+ final WstInfo wst = result.getWst();
+
+ // "##METADATEN PEILUNG"
+ writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING);
+
+ // "# Jahr der Peilung: "
+ writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING_YEAR, Integer.toString(sounding.getYear()));
+ // "# Aufnahmeart: "
+ writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING_TYPE, sounding.getType());
+ // "# Lagesystem: "
+ writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING_PRJ, sounding.getLocationSystem());
+ // "# Höhensystem: "
+ // TODO: klären einheit oder name des höhensystems?
+ writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL, sounding.getCurElevationModelName());
+ // "# ursprüngliches Höhensystem: "
+ writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL_ORIGINAL,
+ sounding.getOldElevationModelName());
+
+ // "##METADATEN WASSERSPIEGELLAGE"
+ writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL);
+ // "# Bezeichnung der Wasserspiegellage: "
+ writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL_NAME, wst.getLabel());
+ // "# Höhensystem der Wasserspiegellage: "
+ // FIXME: discussion!
+ writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL_ELEVATION_MODEL, "FIXME");
+ // "# Auswerter: ": discussion!
+ // FIXME: discussion!
+ writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL_EVALUATOR, "FIXME");
+ // "# Bezugspegel: " discussion
+ // FIXME: Umsetzung IDENTISCH zu allen möglichen Arten wie ein WSPL berechnet wird....
+ writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL_GAUGE, "FIXME");
+ // "# Jahr/Zeitraum der Wasserspiegellage: "
+ // FIXME: discussion!
+ writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL_YEAR, Integer.toString(wst.getYear()));
+
+        /* now the value rows */
final Collection<FlowDepthRow> rows = result.getRows();
for (final FlowDepthRow flowDepthRow : rows) {
- writeCSVFlowDepthRow(writer, flowDepthRow, useTkh);
- }
- }
+ writeCSVFlowDepthRow(writer, flowDepthRow, useTkh);
+ }
+ }
- private void writeCSVMeta(final CSVWriter writer, final FlowDepthCalculationResults results) {
+ private void writeCSVMeta(final CSVWriter writer, final FlowDepthCalculationResults results) {
log.info("FlowDepthExporter.writeCSVMeta");
final String calcModeLabel = results.getCalcModeLabel();
final River river = results.getRiver();
- writeCSVMetaEntry(writer, CSV_META_HEADER_RESULT, msg( CSV_META_HEADER_RESULT_LABEL ), river.getName(), calcModeLabel );
+ writeCSVMetaEntry(writer, CSV_META_HEADER_RESULT, msg(CSV_META_HEADER_RESULT_LABEL), river.getName(),
+ calcModeLabel);
- // "# FLYS-Version: "
- writeCSVMetaEntry(writer, CSV_META_VERSION, msg( CSV_META_VERSION_LABEL ), FLYS.VERSION );
+ // "# FLYS-Version: "
+ writeCSVMetaEntry(writer, CSV_META_VERSION, msg(CSV_META_VERSION_LABEL), FLYS.VERSION);
- // "# Bearbeiter: "
- writeCSVMetaEntry(writer, CSV_META_USER, msg( CSV_META_USER_LABEL ), results.getUser() );
+ // "# Bearbeiter: "
+ writeCSVMetaEntry(writer, CSV_META_USER, msg(CSV_META_USER_LABEL), results.getUser());
// "# Datum der Erstellung: "
- final Locale locale = Resources.getLocale(context.getMeta());
+ final Locale locale = Resources.getLocale(this.context.getMeta());
final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
- writeCSVMetaEntry(writer, CSV_META_CREATION, msg( CSV_META_CREATION_LABEL ), df.format(new Date()) );
+ writeCSVMetaEntry(writer, CSV_META_CREATION, msg(CSV_META_CREATION_LABEL), df.format(new Date()));
// "# Gewässer: "
- writeCSVMetaEntry(writer, CSV_META_RIVER, msg( CSV_META_RIVER_LABEL ), river.getName() );
+ writeCSVMetaEntry(writer, CSV_META_RIVER, msg(CSV_META_RIVER_LABEL), river.getName());
// "# Höhensystem des Flusses: "
+ // FIXME: klären, was ist gemeint?
final Unit wstUnit = river.getWstUnit();
writeCSVMetaEntry(writer, CSV_META_HEIGHT_UNIT_RIVER, wstUnit.getName());
// "# Ort/Bereich (km): "
- writeCSVMetaEntry(writer, CSV_META_RANGE, msg( CSV_META_RANGE_LABEL ), getKmFormatter().format(results.getFrom() ), getKmFormatter().format( results.getTo()));
-
- // "##METADATEN PEILUNG"
- writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING );
-
- // FIXME: check: macht nicht viel sinn da es mehrere geben kann.. oder immer wieder wiederholen?
-// "# Jahr der Peilung: "
- // FIXME
-// "# Aufnahmeart: "
- // FIXME
-// "# Lagesystem: "
- // FIXME
-// "# Höhensystem: "
- // FIXME
-// "# ursprüngliches Höhensystem: "
- // FIXME
-// "##METADATEN WASSERSPIEGELLAGE"
- writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL );
-// "# Bezeichnung der Wasserspiegellage: "
- // FIXME
-// "# Höhensystem der Wasserspiegellage: "
- // FIXME
-// "# Auswerter: "
- // FIXME
-// "# Bezugspegel: "
- // FIXME
-// "# Jahr/Zeitraum der Wasserspiegellage: "
- // FIXME
-
- // "# W/Pegel [cm]: " (nur bei Eingabe des Wasserstands am Pegel)
- // TODO: unklar, es wird kein W eingegeben
-
- // "# Q (m³/s): " (nur bei Eingabe des Durchflusses)
- // TODO: unklar, es wird kein Q eingegeben
+ writeCSVMetaEntry(writer, CSV_META_RANGE, msg(CSV_META_RANGE_LABEL), getKmFormatter().format(results.getFrom()),
+ getKmFormatter().format(results.getTo()));
writer.writeNext(new String[] { "" });
}
+ private void writeCSVMetaEntry(final CSVWriter writer, final String message, final Object... messageArgs) {
- private void writeCSVMetaEntry(CSVWriter writer, String message, Object... messageArgs) {
+ final CallMeta meta = this.context.getMeta();
- CallMeta meta = context.getMeta();
+ writer.writeNext(new String[] { Resources.getMsg(meta, message, message, messageArgs) });
+ }
- writer.writeNext(new String[] {
- Resources.getMsg(
- meta,
- message,
- message,
- messageArgs)
- });
- }
-
- /**
+ /**
* Write the header, with different headings depending on whether at a
* gauge or at a location.
- * @param river
- * @param useTkh
+ *
+ * @param river
+ * @param useTkh
*/
- private void writeCSVHeader(
- final CSVWriter writer,
- final River river,
- final boolean useTkh
- ) {
+ private void writeCSVHeader(final CSVWriter writer, final River river, final boolean useTkh) {
log.info("FlowDepthExporter.writeCSVHeader");
final Collection<String> header = new ArrayList<>(11);
-
+
header.add(msg(CSV_KM_HEADER));
header.add(msgUnit(CSV_FLOWDEPTH_HEADER, UNIT_M));
- if( useTkh )
- {
- header.add(msgUnit(CSV_FLOWDEPTHTKH_HEADER, UNIT_M));
- header.add(msgUnit(CSV_TKH_HEADER, UNIT_CM));
+ if (useTkh) {
+ header.add(msgUnit(CSV_FLOWDEPTHTKH_HEADER, UNIT_M));
+ header.add(msgUnit(CSV_TKH_HEADER, UNIT_CM));
}
final String wstUnitName = river.getWstUnit().getName();
@@ -273,138 +279,136 @@
header.add(msg(CSV_SOUNDING_HEADER));
header.add(msg(CSV_LOCATION_HEADER));
- writer.writeNext(header.toArray(new String[header.size()]));
+ writer.writeNext(header.toArray(new String[header.size()]));
}
/**
* Format a row of a flow depth result into an array of string, both used by csv and pdf
- * @param useTkh
+ *
+ * @param useTkh
*/
- private String[] formatFlowDepthRow(
- final FlowDepthRow row,
- boolean useTkh ) {
+ private String[] formatFlowDepthRow(final FlowDepthRow row, final boolean useTkh) {
- final Collection<String> lines = new ArrayList<>(11);
-
- // Fluss-km
- lines.add( getKmFormatter().format( row.getStation() ) );
-
- // Fließtiefe [m]
- lines.add( getFlowDepthFormatter().format( row.getFlowDepth() ) );
-
- if( useTkh )
- {
- // Fließtiefe mit TKH [m]
- lines.add( getFlowDepthFormatter().format( row.getFlowDepthWithTkh() ) );
-
- // TKH [cm]
- lines.add( getTkhFormatter().format( row.getTkh() ) );
- }
-
- // Wasserstand [NN + m]
- lines.add( getW2Formatter().format( row.getWaterlevel() ) );
-
- // Q [m³/s]
- lines.add( getQFormatter().format( row.getDischarge() ) );
-
- // Bezeichnung
- lines.add( row.getWaterlevelLabel() );
-
- // Bezugspegel
- lines.add( row.getGauge() );
-
- // Mittlere Sohlhöhe [NN + m]
- lines.add( getMeanBedHeighFormatter().format( row.getMeanBedHeight( ) ) );
-
- // Peilung/Epoche
- lines.add( row.getSoundageLabel() );
+ final Collection<String> lines = new ArrayList<>(11);
- // Lage
- lines.add( row.getLocation() );
-
- return lines.toArray(new String[lines.size()]);
+ // Fluss-km
+ lines.add(getKmFormatter().format(row.getStation()));
+
+ // Fließtiefe [m]
+ lines.add(getFlowDepthFormatter().format(row.getFlowDepth()));
+
+ if (useTkh) {
+ // Fließtiefe mit TKH [m]
+ lines.add(getFlowDepthFormatter().format(row.getFlowDepthWithTkh()));
+
+ // TKH [cm]
+ lines.add(getTkhFormatter().format(row.getTkh()));
+ }
+
+ // Wasserstand [NN + m]
+ lines.add(getW2Formatter().format(row.getWaterlevel()));
+
+ // Q [m³/s]
+ final double discharge = row.getDischarge();
+ if( Double.isNaN(discharge))
+ lines.add(StringUtils.EMPTY);
+ else
+ lines.add(getQFormatter().format(discharge));
+
+ // Bezeichnung
+ lines.add(row.getWaterlevelLabel());
+
+ // Bezugspegel
+ lines.add(row.getGauge());
+
+ // Mittlere Sohlhöhe [NN + m]
+ lines.add(getMeanBedHeighFormatter().format(row.getMeanBedHeight()));
+
+ // Peilung/Epoche
+ lines.add(row.getSoundageLabel());
+
+ // Lage
+ lines.add(row.getLocation());
+
+ return lines.toArray(new String[lines.size()]);
}
+
/**
* Write "rows" of csv data from wqkms with writer.
- * @param useTkh
+ *
+ * @param useTkh
*/
- private void writeCSVFlowDepthRow(
- final CSVWriter writer,
- final FlowDepthRow row,
- final boolean useTkh
- ) {
+ private void writeCSVFlowDepthRow(final CSVWriter writer, final FlowDepthRow row, final boolean useTkh) {
log.debug("FlowDepthExporter.writeCSVFlowDepthRow");
final String[] formattedRow = formatFlowDepthRow(row, useTkh);
- writer.writeNext( formattedRow );
+ writer.writeNext(formattedRow);
}
- @Override
- protected void writePDF(OutputStream outStream) {
+ @Override
+ protected void writePDF(final OutputStream outStream) {
log.debug("write PDF");
-
+
final JRDataSource source = createJRData();
final String confPath = Config.getConfigDirectory().toString();
- // FIXME: distinguish between with and without tkh: we need two jasper reports!
+ // FIXME: distinguish between with and without tkh: we need two jasper reports!
- final Map<String,Object> parameters = new HashMap<>();
+ final Map<String, Object> parameters = new HashMap<>();
parameters.put("ReportTitle", "Exported Data");
try {
- final JasperPrint print = JasperFillManager.fillReport(
- confPath + JASPER_FILE,
- parameters,
- source);
+ final JasperPrint print = JasperFillManager.fillReport(confPath + JASPER_FILE, parameters, source);
JasperExportManager.exportReportToPdfStream(print, outStream);
}
- catch(JRException je) {
+ catch (final JRException je) {
log.warn("Error generating PDF Report!", je);
}
}
private JRDataSource createJRData() {
-
+
/* fetch calculation results */
- final FlowDepthCalculationResults results = data;
-
- final MetaAndTableJRDataSource source = new MetaAndTableJRDataSource();
-
+ final FlowDepthCalculationResults results = this.data;
+
+ final MetaAndTableJRDataSource source = new MetaAndTableJRDataSource();
+
addJRMetaData(source, results);
final boolean useTkh = results.isUseTkh();
-
+
for (final FlowDepthCalculationResult result : results.getResults()) {
- addJRTableData(source, result, useTkh);
- }
+ addJRTableData(source, result, useTkh);
+ }
return source;
}
- private void addJRMetaData(final MetaAndTableJRDataSource source, FlowDepthCalculationResults results) {
+ private void addJRMetaData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResults results) {
- final River river = results.getRiver();
- final String wstUnitName = river.getWstUnit().getName();
+ final River river = results.getRiver();
+ final String wstUnitName = river.getWstUnit().getName();
- /* general metadata */
- source.addMetaData("header", msg(CSV_META_HEADER_RESULT_LABEL));
- source.addMetaData("calcMode", results.getCalcModeLabel());
+ /* general metadata */
+ source.addMetaData("header", msg(CSV_META_HEADER_RESULT_LABEL));
+ source.addMetaData("calcMode", results.getCalcModeLabel());
- source.addMetaData("version_label", msg(CSV_META_VERSION_LABEL));
+ source.addMetaData("version_label", msg(CSV_META_VERSION_LABEL));
source.addMetaData("version", FLYS.VERSION);
source.addMetaData("user_label", msg(CSV_META_USER_LABEL));
source.addMetaData("user", results.getUser());
-
- final Locale locale = Resources.getLocale(context.getMeta());
+
+ final Locale locale = Resources.getLocale(this.context.getMeta());
final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
source.addMetaData("date_label", msg(CSV_META_CREATION_LABEL));
source.addMetaData("date", df.format(new Date()));
- source.addMetaData("river_label", msg(CSV_META_RIVER_LABEL) );
+ source.addMetaData("river_label", msg(CSV_META_RIVER_LABEL));
source.addMetaData("river", river.getName());
- final String rangeValue = String.format( "%s - %s", getKmFormatter().format(results.getFrom() ), getKmFormatter().format( results.getTo()));
+ final String rangeValue = String.format("%s - %s", getKmFormatter().format(results.getFrom()),
+ getKmFormatter().format(results.getTo()));
source.addMetaData("range_label", msg(CSV_META_RANGE_LABEL));
source.addMetaData("range", rangeValue);
@@ -423,14 +427,15 @@
source.addMetaData("location_header", msg(CSV_LOCATION_HEADER));
}
- private void addJRTableData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResult result, final boolean useTkh) {
-
- final Collection<FlowDepthRow> rows = result.getRows();
+ private void addJRTableData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResult result,
+ final boolean useTkh) {
- for (final FlowDepthRow row : rows) {
-
- final String[] formattedRow = formatFlowDepthRow(row, useTkh);
- source.addData(formattedRow);
- }
- }
-}
\ No newline at end of file
+ final Collection<FlowDepthRow> rows = result.getRows();
+
+ for (final FlowDepthRow row : rows) {
+
+ final String[] formattedRow = formatFlowDepthRow(row, useTkh);
+ source.addData(formattedRow);
+ }
+ }
+}
diff -r 23264d1a528f -r 9f7a285b0ee3 artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthState.java
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthState.java Thu Feb 08 18:47:36 2018 +0100
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthState.java Thu Feb 08 18:48:24 2018 +0100
@@ -31,6 +31,8 @@
/// ** The log that is used in this state. */
// private static Logger log = Logger.getLogger(FlowDepthState.class);
+ private static final long serialVersionUID = 1L;
+
private static final String I18N_FACET_FLOW_DEPTH_FILTERED_DESCRIPTION = "sinfo.facet.flow_depth.filtered.description";
/**
@@ -100,13 +102,9 @@
for (int index = 0; index < resultList.size(); index++) {
final FlowDepthCalculationResult result = resultList.get(index);
- /* compute theme label */
- final String wspLabel = result.getWstLabel();
- final String soundingLabel = result.getSoundingLabel();
- final String inputLabel = String.format("%s - %s", wspLabel, soundingLabel);
/* filtered (zoom dependent mean) flow depth */
- final String facetFlowDepthFilteredDescription = Resources.getMsg( context.getMeta(), I18N_FACET_FLOW_DEPTH_FILTERED_DESCRIPTION, I18N_FACET_FLOW_DEPTH_FILTERED_DESCRIPTION, inputLabel );
+ final String facetFlowDepthFilteredDescription = Resources.getMsg( context.getMeta(), I18N_FACET_FLOW_DEPTH_FILTERED_DESCRIPTION, I18N_FACET_FLOW_DEPTH_FILTERED_DESCRIPTION, result.getLabel() );
facets.add(new FlowDepthFilterFacet(
index,
FlowDepthProcessor.FACET_FLOW_DEPTH_FILTERED,
diff -r 23264d1a528f -r 9f7a285b0ee3 artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/WstInfo.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/WstInfo.java Thu Feb 08 18:48:24 2018 +0100
@@ -0,0 +1,32 @@
+/** Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ * Björnsen Beratende Ingenieure GmbH
+ * Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+package org.dive4elements.river.artifacts.sinfo.flowdepth;
+
+/**
+ * @author Gernot Belger
+ */
+final class WstInfo {
+
+ private final String label;
+ private final int year;
+
+ public WstInfo(final String label, final int year) {
+ this.label = label;
+ this.year = year;
+ }
+
+ public String getLabel() {
+ return this.label;
+ }
+
+ public int getYear() {
+ return this.year;
+ }
+}
\ No newline at end of file
diff -r 23264d1a528f -r 9f7a285b0ee3 artifacts/src/main/resources/messages.properties
--- a/artifacts/src/main/resources/messages.properties Thu Feb 08 18:47:36 2018 +0100
+++ b/artifacts/src/main/resources/messages.properties Thu Feb 08 18:48:24 2018 +0100
@@ -804,7 +804,17 @@
sinfo.export.flow_depth.csv.meta.river = # {0}: {1}
sinfo.export.flow_depth.csv.meta.river.label = Gew\u00e4sser
sinfo.export.flow_depth.csv.meta.header.sounding = ##METADATEN PEILUNG
+sinfo.export.flow_depth.csv.meta.header.sounding.year = # Jahr der Peilung: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.type = # Aufnahmeart: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.prj = # Lagesystem: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel = # H\u00f6hensystem: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel.original = # urspr\u00fcngliches H\u00f6hensystem: {0}
sinfo.export.flow_depth.csv.meta.header.waterlevel = ##METADATEN WASSERSPIEGELLAGE
+sinfo.export.flow_depth.csv.meta.header.waterlevel.name = # Bezeichnung der Wasserspiegellage: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.elevationmodel = # H\u00f6hensystem der Wasserspiegellage: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.evaluator = # Auswerter: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.gauge = # Bezugspegel: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.year = # Jahr/Zeitraum der Wasserspiegellage: {0}
sinfo.export.flow_depth.csv.meta.range = # {0}: {1} - {2}
sinfo.export.flow_depth.csv.meta.range.label = Range (km)
sinfo.export.flow_depth.csv.meta.height_unit.river = # H\u00f6henbezugssystem des Flusses: {0}
@@ -833,3 +843,4 @@
sinfo.chart.flow_depth.section.yaxis.label=Flie\u00dftiefe h [m]
sinfo.facet.flow_depth.filtered.description = Flie\u00dftiefe ({0})
+
diff -r 23264d1a528f -r 9f7a285b0ee3 artifacts/src/main/resources/messages_de.properties
--- a/artifacts/src/main/resources/messages_de.properties Thu Feb 08 18:47:36 2018 +0100
+++ b/artifacts/src/main/resources/messages_de.properties Thu Feb 08 18:48:24 2018 +0100
@@ -810,7 +810,17 @@
sinfo.export.flow_depth.csv.meta.river = # {0}: {1}
sinfo.export.flow_depth.csv.meta.river.label = Gew\u00e4sser
sinfo.export.flow_depth.csv.meta.header.sounding = ##METADATEN PEILUNG
+sinfo.export.flow_depth.csv.meta.header.sounding.year = # Jahr der Peilung: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.type = # Aufnahmeart: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.prj = # Lagesystem: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel = # H\u00f6hensystem: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel.original = # urspr\u00fcngliches H\u00f6hensystem: {0}
sinfo.export.flow_depth.csv.meta.header.waterlevel = ##METADATEN WASSERSPIEGELLAGE
+sinfo.export.flow_depth.csv.meta.header.waterlevel.name = # Bezeichnung der Wasserspiegellage: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.elevationmodel = # H\u00f6hensystem der Wasserspiegellage: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.evaluator = # Auswerter: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.gauge = # Bezugspegel: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.year = # Jahr/Zeitraum der Wasserspiegellage: {0}
sinfo.export.flow_depth.csv.meta.range = # {0}: {1} - {2}
sinfo.export.flow_depth.csv.meta.range.label = Bereich (km)
sinfo.export.flow_depth.csv.meta.height_unit.river = # H\u00f6henbezugssystem des Flusses: {0}
diff -r 23264d1a528f -r 9f7a285b0ee3 artifacts/src/main/resources/messages_de_DE.properties
--- a/artifacts/src/main/resources/messages_de_DE.properties Thu Feb 08 18:47:36 2018 +0100
+++ b/artifacts/src/main/resources/messages_de_DE.properties Thu Feb 08 18:48:24 2018 +0100
@@ -806,7 +806,17 @@
sinfo.export.flow_depth.csv.meta.river = # {0}: {1}
sinfo.export.flow_depth.csv.meta.river.label = Gew\u00e4sser
sinfo.export.flow_depth.csv.meta.header.sounding = ##METADATEN PEILUNG
+sinfo.export.flow_depth.csv.meta.header.sounding.year = # Jahr der Peilung: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.type = # Aufnahmeart: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.prj = # Lagesystem: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel = # H\u00f6hensystem: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel.original = # urspr\u00fcngliches H\u00f6hensystem: {0}
sinfo.export.flow_depth.csv.meta.header.waterlevel = ##METADATEN WASSERSPIEGELLAGE
+sinfo.export.flow_depth.csv.meta.header.waterlevel.name = # Bezeichnung der Wasserspiegellage: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.elevationmodel = # H\u00f6hensystem der Wasserspiegellage: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.evaluator = # Auswerter: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.gauge = # Bezugspegel: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.year = # Jahr/Zeitraum der Wasserspiegellage: {0}
sinfo.export.flow_depth.csv.meta.range = # {0}: {1} - {2}
sinfo.export.flow_depth.csv.meta.range.label = Bereich (km)
sinfo.export.flow_depth.csv.meta.height_unit.river = # H\u00f6henbezugssystem des Flusses: {0}
diff -r 23264d1a528f -r 9f7a285b0ee3 artifacts/src/main/resources/messages_en.properties
--- a/artifacts/src/main/resources/messages_en.properties Thu Feb 08 18:47:36 2018 +0100
+++ b/artifacts/src/main/resources/messages_en.properties Thu Feb 08 18:48:24 2018 +0100
@@ -805,7 +805,17 @@
sinfo.export.flow_depth.csv.meta.river = # {0}: {1}
sinfo.export.flow_depth.csv.meta.river.label = Gew\u00e4sser
sinfo.export.flow_depth.csv.meta.header.sounding = ##METADATEN PEILUNG
+sinfo.export.flow_depth.csv.meta.header.sounding.year = # Jahr der Peilung: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.type = # Aufnahmeart: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.prj = # Lagesystem: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel = # H\u00f6hensystem: {0}
+sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel.original = # urspr\u00fcngliches H\u00f6hensystem: {0}
sinfo.export.flow_depth.csv.meta.header.waterlevel = ##METADATEN WASSERSPIEGELLAGE
+sinfo.export.flow_depth.csv.meta.header.waterlevel.name = # Bezeichnung der Wasserspiegellage: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.elevationmodel = # H\u00f6hensystem der Wasserspiegellage: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.evaluator = # Auswerter: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.gauge = # Bezugspegel: {0}
+sinfo.export.flow_depth.csv.meta.header.waterlevel.year = # Jahr/Zeitraum der Wasserspiegellage: {0}
sinfo.export.flow_depth.csv.meta.range = # {0}: {1} - {2}
sinfo.export.flow_depth.csv.meta.range.label = Range (km)
sinfo.export.flow_depth.csv.meta.height_unit.river = # H\u00f6henbezugssystem des Flusses: {0}
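The keys added above are plain java.text.MessageFormat patterns with a single {0} placeholder, which the exporter fills via writeCSVMetaEntry()/Resources.getMsg(). A standalone illustration of how one of the sounding metadata lines is rendered (using MessageFormat directly instead of the Resources class; the year 2016 is an arbitrary example value):

import java.text.MessageFormat;

public class MetaLineDemo {
    public static void main(final String[] args) {
        // Pattern of sinfo.export.flow_depth.csv.meta.header.sounding.year
        final String pattern = "# Jahr der Peilung: {0}";
        // The exporter passes Integer.toString(sounding.getYear()) as the only argument.
        System.out.println(MessageFormat.format(pattern, "2016"));
        // prints: # Jahr der Peilung: 2016
    }
}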