[PATCH] Basically implemented SINFO-Tkh Exports

Wald Commits scm-commit at wald.intevation.org
Thu Mar 1 18:49:39 CET 2018


# HG changeset patch
# User gernotbelger
# Date 1519926574 -3600
# Node ID 791714b92b5c961a1905c71f8d12cbbc24706a06
# Parent  04ad2cfce5599d9497597e5e55636613c484a42d
Basically implemented SINFO-Tkh Exports

diff -r 04ad2cfce559 -r 791714b92b5c artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/SInfoI18NStrings.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/SInfoI18NStrings.java	Thu Mar 01 18:49:34 2018 +0100
@@ -0,0 +1,72 @@
+/** Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+package org.dive4elements.river.artifacts.sinfo;
+
+/**
+ * Some commonly used i18n-strings.
+ *
+ * @author Gernot Belger
+ */
+public interface SInfoI18NStrings {
+
+    public static final String CSV_META_HEADER_RESULT = "sinfo.export.flow_depth.csv.meta.header.result";
+
+    public static final String CSV_META_HEADER_RESULT_LABEL = "sinfo.export.flow_depth.csv.meta.header.result.label";
+
+    public static final String CSV_META_VERSION = "sinfo.export.flow_depth.csv.meta.version";
+
+    public static final String CSV_META_VERSION_LABEL = "sinfo.export.flow_depth.csv.meta.version.label";
+
+    public static final String CSV_META_USER = "sinfo.export.flow_depth.csv.meta.user";
+
+    public static final String CSV_META_USER_LABEL = "sinfo.export.flow_depth.csv.meta.user.label";
+
+    public static final String CSV_META_CREATION = "sinfo.export.flow_depth.csv.meta.creation";
+
+    public static final String CSV_META_CREATION_LABEL = "sinfo.export.flow_depth.csv.meta.creation.label";
+
+    public static final String CSV_META_RIVER = "sinfo.export.flow_depth.csv.meta.river";
+
+    public static final String CSV_META_RIVER_LABEL = "sinfo.export.flow_depth.csv.meta.river.label";
+
+    public static final String CSV_KM_HEADER = "sinfo.export.flow_depth.csv.header.km";
+
+    public static final String CSV_MEAN_BED_HEIGHT_HEADER = "sinfo.export.flow_depth.csv.header.mean_bed_height";
+
+    public static final String CSV_WATERLEVEL_HEADER = "sinfo.export.flow_depth.csv.header.waterlevel";
+
+    public static final String CSV_DISCHARGE_HEADER = "sinfo.export.flow_depth.csv.header.discharge";
+
+    public static final String CSV_LABEL_HEADER = "sinfo.export.flow_depth.csv.header.label";
+
+    public static final String CSV_GAUGE_HEADER = "sinfo.export.flow_depth.csv.header.gauge";
+
+    public static final String CSV_LOCATION_HEADER = "sinfo.export.flow_depth.csv.header.location";
+
+    public static final String CSV_META_HEADER_WATERLEVEL = "sinfo.export.flow_depth.csv.meta.header.waterlevel";
+
+    public static final String CSV_META_HEADER_WATERLEVEL_NAME = "sinfo.export.flow_depth.csv.meta.header.waterlevel.name";
+
+    public static final String CSV_META_HEADER_WATERLEVEL_GAUGE = "sinfo.export.flow_depth.csv.meta.header.waterlevel.gauge";
+
+    public static final String CSV_META_HEADER_WATERLEVEL_YEAR = "sinfo.export.flow_depth.csv.meta.header.waterlevel.year";
+
+    public static final String CSV_META_RANGE = "sinfo.export.flow_depth.csv.meta.range";
+
+    public static final String CSV_META_RANGE_LABEL = "sinfo.export.flow_depth.csv.meta.range.label";
+
+    public static final String CSV_META_HEIGHT_UNIT_RIVER = "sinfo.export.flow_depth.csv.meta.height_unit.river";
+
+    public static final String UNIT_M = "m";
+
+    public static final String UNIT_CM = "cm";
+
+    public static final String UNIT_CUBIC_M = "m³/s";
+}
\ No newline at end of file
diff -r 04ad2cfce559 -r 791714b92b5c artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/common/AbstractSInfoExporter.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/common/AbstractSInfoExporter.java	Thu Mar 01 18:49:34 2018 +0100
@@ -0,0 +1,167 @@
+/** Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+package org.dive4elements.river.artifacts.sinfo.common;
+
+import java.io.OutputStream;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.artifacts.CallMeta;
+import org.dive4elements.artifacts.common.utils.Config;
+import org.dive4elements.river.artifacts.model.CalculationResult;
+import org.dive4elements.river.artifacts.resources.Resources;
+import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource;
+import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;
+import org.dive4elements.river.exports.AbstractExporter;
+
+import au.com.bytecode.opencsv.CSVWriter;
+import net.sf.jasperreports.engine.JRDataSource;
+import net.sf.jasperreports.engine.JRException;
+import net.sf.jasperreports.engine.JasperExportManager;
+import net.sf.jasperreports.engine.JasperFillManager;
+import net.sf.jasperreports.engine.JasperPrint;
+
+/**
+ * @author Gernot Belger
+ */
+public abstract class AbstractSInfoExporter<ROW extends AbstractSInfoResultRow, RESULT extends AbstractSInfoCalculationResult<ROW>, RESULTS extends AbstractSInfoCalculationResults<ROW, RESULT>> extends AbstractExporter {
+
+    /** The storage that contains the current calculation result. */
+    private RESULTS data = null;
+
+    protected abstract Logger getLog();
+
+    public RESULTS getData() {
+        return this.data;
+    }
+
+    @Override
+    protected final void addData(final Object d) {
+        /* reset */
+        this.data = null;
+
+        if (d instanceof CalculationResult) {
+
+            final Object dat = ((CalculationResult) d).getData();
+            if (dat != null)
+                this.data = (RESULTS) dat;
+        }
+    }
+
+    @Override
+    protected final void writeCSVData(final CSVWriter writer) {
+        getLog().info("writeCSVData");
+
+        /* fetch calculation results */
+        final RESULTS results = this.data;
+
+        final RiverInfo river = results.getRiver();
+
+        /* write as csv */
+        writeCSVMeta(writer, results);
+        writeCSVHeader(writer, river);
+
+        for (final RESULT result : results.getResults()) {
+            writeCSVResult(writer, result);
+        }
+    }
+
+    protected abstract void writeCSVHeader(final CSVWriter writer, final RiverInfo river);
+
+    protected abstract void writeCSVMeta(final CSVWriter writer, final RESULTS results);
+
+    protected final void writeCSVMetaEntry(final CSVWriter writer, final String message, final Object... messageArgs) {
+
+        final CallMeta meta = this.context.getMeta();
+
+        writer.writeNext(new String[] { Resources.getMsg(meta, message, message, messageArgs) });
+    }
+
+    protected final void writeCSVResult(final CSVWriter writer, final RESULT result) {
+
+        writeCSVResultHeader(writer, result);
+
+        /* now the value rows */
+        final Collection<ROW> rows = result.getRows();
+        for (final ROW row : rows) {
+            writeCSVRow(writer, row);
+        }
+    }
+
+    protected abstract void writeCSVResultHeader(CSVWriter writer, RESULT result);
+
+    protected final void writeCSVRow(final CSVWriter writer, final ROW row) {
+        getLog().debug("writeCSVFlowDepthRow");
+
+        final String[] formattedRow = formatCSVRow(row);
+        writer.writeNext(formattedRow);
+    }
+
+    protected abstract String[] formatCSVRow(final ROW row);
+
+    @Override
+    protected final void writePDF(final OutputStream outStream) {
+        getLog().debug("write PDF");
+
+        final JRDataSource source = createJRData();
+
+        final String confPath = Config.getConfigDirectory().toString();
+
+        // FIXME: distinguish between with and without tkh: we need two jasper reports!
+
+        final Map<String, Object> parameters = new HashMap<>();
+        parameters.put("ReportTitle", "Exported Data");
+
+        try {
+            final String jasperPath = confPath + getJasperFile();
+
+            final JasperPrint print = JasperFillManager.fillReport(jasperPath, parameters, source);
+            JasperExportManager.exportReportToPdfStream(print, outStream);
+        }
+        catch (final JRException je) {
+            getLog().warn("Error generating PDF Report!", je);
+        }
+    }
+
+    protected abstract String getJasperFile();
+
+    private JRDataSource createJRData() {
+
+        /* fetch calculation results */
+        final RESULTS results = this.data;
+
+        final MetaAndTableJRDataSource source = new MetaAndTableJRDataSource();
+
+        addJRMetaData(source, results);
+
+        for (final RESULT result : results.getResults()) {
+            addJRTableData(source, result);
+        }
+
+        return source;
+    }
+
+    protected abstract void addJRMetaData(final MetaAndTableJRDataSource source, final RESULTS results);
+
+    protected final void addJRTableData(final MetaAndTableJRDataSource source, final RESULT result) {
+
+        final Collection<ROW> rows = result.getRows();
+
+        for (final ROW row : rows) {
+
+            final String[] formattedRow = formatPDFRow(row);
+            source.addData(formattedRow);
+        }
+    }
+
+    protected abstract String[] formatPDFRow(final ROW row);
+}
\ No newline at end of file
diff -r 04ad2cfce559 -r 791714b92b5c artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java	Thu Mar 01 17:45:49 2018 +0100
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/flowdepth/FlowDepthExporter.java	Thu Mar 01 18:49:34 2018 +0100
@@ -8,37 +8,27 @@
 
 package org.dive4elements.river.artifacts.sinfo.flowdepth;
 
-import java.io.OutputStream;
 import java.text.DateFormat;
 import java.text.NumberFormat;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Date;
-import java.util.HashMap;
 import java.util.Locale;
-import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.math.DoubleRange;
 import org.apache.log4j.Logger;
-import org.dive4elements.artifacts.CallMeta;
-import org.dive4elements.artifacts.common.utils.Config;
 import org.dive4elements.river.FLYS;
-import org.dive4elements.river.artifacts.model.CalculationResult;
 import org.dive4elements.river.artifacts.resources.Resources;
+import org.dive4elements.river.artifacts.sinfo.SInfoI18NStrings;
+import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter;
 import org.dive4elements.river.artifacts.sinfo.util.BedHeightInfo;
 import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource;
 import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;
 import org.dive4elements.river.artifacts.sinfo.util.WstInfo;
-import org.dive4elements.river.exports.AbstractExporter;
 import org.dive4elements.river.utils.RiverUtils;
 
 import au.com.bytecode.opencsv.CSVWriter;
-import net.sf.jasperreports.engine.JRDataSource;
-import net.sf.jasperreports.engine.JRException;
-import net.sf.jasperreports.engine.JasperExportManager;
-import net.sf.jasperreports.engine.JasperFillManager;
-import net.sf.jasperreports.engine.JasperPrint;
 
 /**
  * Generates different output formats (csv, pdf) of data that resulted from a flow depths computation.
@@ -47,43 +37,17 @@
  * @author Gernot Belger
  */
 // REMARK: must be public because its registered in generators.xml
-public class FlowDepthExporter extends AbstractExporter {
+public class FlowDepthExporter extends AbstractSInfoExporter<FlowDepthRow, FlowDepthCalculationResult, FlowDepthCalculationResults> {
 
     /** The log used in this exporter. */
     private static Logger log = Logger.getLogger(FlowDepthExporter.class);
 
-    private static final String CSV_KM_HEADER = "sinfo.export.flow_depth.csv.header.km";
     private static final String CSV_FLOWDEPTH_HEADER = "sinfo.export.flow_depth.csv.header.flowdepth";
     private static final String CSV_FLOWDEPTHTKH_HEADER = "sinfo.export.flow_depth.csv.header.flowdepthTkh";
     private static final String CSV_TKH_HEADER = "sinfo.export.flow_depth.csv.header.tkh";
-    private static final String CSV_WATERLEVEL_HEADER = "sinfo.export.flow_depth.csv.header.waterlevel";
-    private static final String CSV_DISCHARGE_HEADER = "sinfo.export.flow_depth.csv.header.discharge";
-    private static final String CSV_LABEL_HEADER = "sinfo.export.flow_depth.csv.header.label";
-    private static final String CSV_GAUGE_HEADER = "sinfo.export.flow_depth.csv.header.gauge";
-    private static final String CSV_MEAN_BED_HEIGHT_HEADER = "sinfo.export.flow_depth.csv.header.mean_bed_height";
+
     private static final String CSV_MEAN_BED_HEIGHT_HEADER_SHORT = "sinfo.export.flow_depth.csv.header.mean_bed_height.short";
     private static final String CSV_SOUNDING_HEADER = "sinfo.export.flow_depth.csv.header.sounding";
-    private static final String CSV_LOCATION_HEADER = "sinfo.export.flow_depth.csv.header.location";
-
-    private static final String CSV_META_HEADER_RESULT = "sinfo.export.flow_depth.csv.meta.header.result";
-
-    private static final String CSV_META_HEADER_RESULT_LABEL = "sinfo.export.flow_depth.csv.meta.header.result.label";
-
-    private static final String CSV_META_VERSION = "sinfo.export.flow_depth.csv.meta.version";
-
-    private static final String CSV_META_VERSION_LABEL = "sinfo.export.flow_depth.csv.meta.version.label";
-
-    private static final String CSV_META_USER = "sinfo.export.flow_depth.csv.meta.user";
-
-    private static final String CSV_META_USER_LABEL = "sinfo.export.flow_depth.csv.meta.user.label";
-
-    private static final String CSV_META_CREATION = "sinfo.export.flow_depth.csv.meta.creation";
-
-    private static final String CSV_META_CREATION_LABEL = "sinfo.export.flow_depth.csv.meta.creation.label";
-
-    private static final String CSV_META_RIVER = "sinfo.export.flow_depth.csv.meta.river";
-
-    private static final String CSV_META_RIVER_LABEL = "sinfo.export.flow_depth.csv.meta.river.label";
 
     private static final String CSV_META_HEADER_SOUNDING = "sinfo.export.flow_depth.csv.meta.header.sounding";
 
@@ -99,73 +63,15 @@
 
     private static final String CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL_ORIGINAL = "sinfo.export.flow_depth.csv.meta.header.sounding.elevationmodel.original";
 
-    private static final String CSV_META_HEADER_WATERLEVEL = "sinfo.export.flow_depth.csv.meta.header.waterlevel";
-
-    private static final String CSV_META_HEADER_WATERLEVEL_NAME = "sinfo.export.flow_depth.csv.meta.header.waterlevel.name";
-
-    private static final String CSV_META_HEADER_WATERLEVEL_GAUGE = "sinfo.export.flow_depth.csv.meta.header.waterlevel.gauge";
-
-    private static final String CSV_META_HEADER_WATERLEVEL_YEAR = "sinfo.export.flow_depth.csv.meta.header.waterlevel.year";
-
-    private static final String CSV_META_RANGE = "sinfo.export.flow_depth.csv.meta.range";
-
-    private static final String CSV_META_RANGE_LABEL = "sinfo.export.flow_depth.csv.meta.range.label";
-
-    private static final String CSV_META_HEIGHT_UNIT_RIVER = "sinfo.export.flow_depth.csv.meta.height_unit.river";
-
     private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper";
 
-    private static final String UNIT_M = "m";
-
-    private static final String UNIT_CM = "cm";
-
-    private static final String UNIT_CUBIC_M = "m³/s";
-
-    /** The storage that contains the current calculation result. */
-    private FlowDepthCalculationResults data = null;
-
-    /**
-     * Formats header with unit
-     */
-    private String msgUnit(final String key, final String unit) {
-
-        final String msg = msg(key);
-        return String.format("%s [%s]", msg, unit);
+    @Override
+    protected Logger getLog() {
+        return log;
     }
 
     @Override
-    protected void addData(final Object d) {
-        /* reset */
-        this.data = null;
-
-        if (d instanceof CalculationResult) {
-
-            final Object dat = ((CalculationResult) d).getData();
-            if (dat != null)
-                this.data = (FlowDepthCalculationResults) dat;
-        }
-    }
-
-    @Override
-    protected void writeCSVData(final CSVWriter writer) {
-        log.info("FlowDepthExporter.writeCSVData");
-
-        /* fetch calculation results */
-        final FlowDepthCalculationResults results = this.data;
-
-        final boolean useTkh = results.isUseTkh();
-        final RiverInfo river = results.getRiver();
-
-        /* write as csv */
-        writeCSVMeta(writer, results);
-        writeCSVHeader(writer, river, useTkh);
-
-        for (final FlowDepthCalculationResult result : results.getResults()) {
-            writeCSVFlowDepthResult(writer, result, useTkh);
-        }
-    }
-
-    private void writeCSVFlowDepthResult(final CSVWriter writer, final FlowDepthCalculationResult result, final boolean useTkh) {
+    protected void writeCSVResultHeader(final CSVWriter writer, final FlowDepthCalculationResult result) {
 
         /* first some specific metadata */
         final BedHeightInfo sounding = result.getSounding();
@@ -188,60 +94,49 @@
         writeCSVMetaEntry(writer, CSV_META_HEADER_SOUNDING_ELEVATIOIN_MODEL_ORIGINAL, sounding.getOldElevationModelUnit());
 
         // "##METADATEN WASSERSPIEGELLAGE"
-        writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL);
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL);
         // "# Bezeichnung der Wasserspiegellage: "
-        writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL_NAME, wst.getLabel());
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_NAME, wst.getLabel());
         // "# Bezugspegel: "
-        writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL_GAUGE, wst.getGauge());
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_GAUGE, wst.getGauge());
         // "# Jahr/Zeitraum der Wasserspiegellage: "
-        writeCSVMetaEntry(writer, CSV_META_HEADER_WATERLEVEL_YEAR, Integer.toString(wst.getYear()));
-
-        /* nwo the value rows */
-        final Collection<FlowDepthRow> rows = result.getRows();
-        for (final FlowDepthRow flowDepthRow : rows) {
-            writeCSVFlowDepthRow(writer, flowDepthRow, useTkh);
-        }
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_YEAR, Integer.toString(wst.getYear()));
     }
 
-    private void writeCSVMeta(final CSVWriter writer, final FlowDepthCalculationResults results) {
+    @Override
+    protected final void writeCSVMeta(final CSVWriter writer, final FlowDepthCalculationResults results) {
         log.info("FlowDepthExporter.writeCSVMeta");
 
         final String calcModeLabel = results.getCalcModeLabel();
         final RiverInfo river = results.getRiver();
-        writeCSVMetaEntry(writer, CSV_META_HEADER_RESULT, msg(CSV_META_HEADER_RESULT_LABEL), river.getName(), calcModeLabel);
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_RESULT, msg(SInfoI18NStrings.CSV_META_HEADER_RESULT_LABEL), river.getName(), calcModeLabel);
 
         // "# FLYS-Version: "
-        writeCSVMetaEntry(writer, CSV_META_VERSION, msg(CSV_META_VERSION_LABEL), FLYS.VERSION);
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_VERSION, msg(SInfoI18NStrings.CSV_META_VERSION_LABEL), FLYS.VERSION);
 
         // "# Bearbeiter: "
-        writeCSVMetaEntry(writer, CSV_META_USER, msg(CSV_META_USER_LABEL), results.getUser());
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_USER, msg(SInfoI18NStrings.CSV_META_USER_LABEL), results.getUser());
 
         // "# Datum der Erstellung: "
         final Locale locale = Resources.getLocale(this.context.getMeta());
         final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
-        writeCSVMetaEntry(writer, CSV_META_CREATION, msg(CSV_META_CREATION_LABEL), df.format(new Date()));
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_CREATION, msg(SInfoI18NStrings.CSV_META_CREATION_LABEL), df.format(new Date()));
 
         // "# Gewässer: "
-        writeCSVMetaEntry(writer, CSV_META_RIVER, msg(CSV_META_RIVER_LABEL), river.getName());
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_RIVER, msg(SInfoI18NStrings.CSV_META_RIVER_LABEL), river.getName());
 
         // "# Höhensystem des Flusses: "
-        writeCSVMetaEntry(writer, CSV_META_HEIGHT_UNIT_RIVER, river.getWstUnit());
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEIGHT_UNIT_RIVER, river.getWstUnit());
 
         // "# Ort/Bereich (km): "
         final DoubleRange calcRange = results.getCalcRange();
-        writeCSVMetaEntry(writer, CSV_META_RANGE, msg(CSV_META_RANGE_LABEL), getKmFormatter().format(calcRange.getMinimumDouble()),
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_RANGE, msg(SInfoI18NStrings.CSV_META_RANGE_LABEL),
+                getKmFormatter().format(calcRange.getMinimumDouble()),
                 getKmFormatter().format(calcRange.getMaximumDouble()));
 
         writer.writeNext(new String[] { "" });
     }
 
-    private void writeCSVMetaEntry(final CSVWriter writer, final String message, final Object... messageArgs) {
-
-        final CallMeta meta = this.context.getMeta();
-
-        writer.writeNext(new String[] { Resources.getMsg(meta, message, message, messageArgs) });
-    }
-
     /**
      * Write the header, with different headings depending on whether at a
      * gauge or at a location.
@@ -249,35 +144,41 @@
      * @param river
      * @param useTkh
      */
-    private void writeCSVHeader(final CSVWriter writer, final RiverInfo river, final boolean useTkh) {
+    @Override
+    protected final void writeCSVHeader(final CSVWriter writer, final RiverInfo river) {
         log.info("FlowDepthExporter.writeCSVHeader");
 
         final Collection<String> header = new ArrayList<>(11);
 
-        header.add(msg(CSV_KM_HEADER));
-        header.add(msgUnit(CSV_FLOWDEPTH_HEADER, UNIT_M));
-        if (useTkh) {
-            header.add(msgUnit(CSV_FLOWDEPTHTKH_HEADER, UNIT_M));
-            header.add(msgUnit(CSV_TKH_HEADER, UNIT_CM));
+        header.add(msg(SInfoI18NStrings.CSV_KM_HEADER));
+        header.add(msgUnit(CSV_FLOWDEPTH_HEADER, SInfoI18NStrings.UNIT_M));
+        if (getData().isUseTkh()) {
+            header.add(msgUnit(CSV_FLOWDEPTHTKH_HEADER, SInfoI18NStrings.UNIT_M));
+            header.add(msgUnit(CSV_TKH_HEADER, SInfoI18NStrings.UNIT_CM));
         }
 
-        header.add(msgUnit(CSV_WATERLEVEL_HEADER, river.getWstUnit()));
-        header.add(msgUnit(CSV_DISCHARGE_HEADER, UNIT_CUBIC_M));
-        header.add(msg(CSV_LABEL_HEADER));
-        header.add(msg(CSV_GAUGE_HEADER));
-        header.add(msgUnit(CSV_MEAN_BED_HEIGHT_HEADER, river.getWstUnit()));
+        header.add(msgUnit(SInfoI18NStrings.CSV_WATERLEVEL_HEADER, river.getWstUnit()));
+        header.add(msgUnit(SInfoI18NStrings.CSV_DISCHARGE_HEADER, SInfoI18NStrings.UNIT_CUBIC_M));
+        header.add(msg(SInfoI18NStrings.CSV_LABEL_HEADER));
+        header.add(msg(SInfoI18NStrings.CSV_GAUGE_HEADER));
+        header.add(msgUnit(SInfoI18NStrings.CSV_MEAN_BED_HEIGHT_HEADER, river.getWstUnit()));
         header.add(msg(CSV_SOUNDING_HEADER));
-        header.add(msg(CSV_LOCATION_HEADER));
+        header.add(msg(SInfoI18NStrings.CSV_LOCATION_HEADER));
 
         writer.writeNext(header.toArray(new String[header.size()]));
     }
 
+    @Override
+    protected final String[] formatCSVRow(final FlowDepthRow row) {
+        return formatFlowDepthRow(row);
+    }
+
     /**
      * Format a row of a flow depth result into an array of string, both used by csv and pdf
      *
      * @param useTkh
      */
-    private String[] formatFlowDepthRow(final FlowDepthRow row, final boolean useTkh) {
+    private String[] formatFlowDepthRow(final FlowDepthRow row) {
 
         final Collection<String> lines = new ArrayList<>(11);
 
@@ -287,7 +188,7 @@
         // Fließtiefe [m]
         lines.add(getFlowDepthFormatter().format(row.getFlowDepth()));
 
-        if (useTkh) {
+        if (getData().isUseTkh()) {
             // Fließtiefe mit TKH [m]
             lines.add(getFlowDepthFormatter().format(row.getFlowDepthWithTkh()));
 
@@ -325,109 +226,58 @@
         return lines.toArray(new String[lines.size()]);
     }
 
-    /**
-     * Write "rows" of csv data from wqkms with writer.
-     *
-     * @param useTkh
-     */
-    private void writeCSVFlowDepthRow(final CSVWriter writer, final FlowDepthRow row, final boolean useTkh) {
-        log.debug("FlowDepthExporter.writeCSVFlowDepthRow");
-
-        final String[] formattedRow = formatFlowDepthRow(row, useTkh);
-        writer.writeNext(formattedRow);
+    @Override
+    protected final String getJasperFile() {
+        return JASPER_FILE;
     }
 
     @Override
-    protected void writePDF(final OutputStream outStream) {
-        log.debug("write PDF");
-
-        final JRDataSource source = createJRData();
-
-        final String confPath = Config.getConfigDirectory().toString();
-
-        // FIXME: distinguish between with and without tkh: we need two jasper reports!
-
-        final Map<String, Object> parameters = new HashMap<>();
-        parameters.put("ReportTitle", "Exported Data");
-        try {
-            final JasperPrint print = JasperFillManager.fillReport(confPath + JASPER_FILE, parameters, source);
-            JasperExportManager.exportReportToPdfStream(print, outStream);
-        }
-        catch (final JRException je) {
-            log.warn("Error generating PDF Report!", je);
-        }
-    }
-
-    private JRDataSource createJRData() {
-
-        /* fetch calculation results */
-        final FlowDepthCalculationResults results = this.data;
-
-        final MetaAndTableJRDataSource source = new MetaAndTableJRDataSource();
-
-        addJRMetaData(source, results);
-
-        final boolean useTkh = results.isUseTkh();
-
-        for (final FlowDepthCalculationResult result : results.getResults()) {
-            addJRTableData(source, result, useTkh);
-        }
-
-        return source;
-    }
-
-    private void addJRMetaData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResults results) {
+    protected final void addJRMetaData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResults results) {
 
         final RiverInfo river = results.getRiver();
         final String wstUnitName = river.getWstUnit();
 
         /* general metadata */
-        source.addMetaData("header", msg(CSV_META_HEADER_RESULT_LABEL));
+        source.addMetaData("header", msg(SInfoI18NStrings.CSV_META_HEADER_RESULT_LABEL));
         source.addMetaData("calcMode", results.getCalcModeLabel());
 
-        source.addMetaData("version_label", msg(CSV_META_VERSION_LABEL));
+        source.addMetaData("version_label", msg(SInfoI18NStrings.CSV_META_VERSION_LABEL));
         source.addMetaData("version", FLYS.VERSION);
 
-        source.addMetaData("user_label", msg(CSV_META_USER_LABEL));
+        source.addMetaData("user_label", msg(SInfoI18NStrings.CSV_META_USER_LABEL));
         source.addMetaData("user", results.getUser());
 
         final Locale locale = Resources.getLocale(this.context.getMeta());
         final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
-        source.addMetaData("date_label", msg(CSV_META_CREATION_LABEL));
+        source.addMetaData("date_label", msg(SInfoI18NStrings.CSV_META_CREATION_LABEL));
         source.addMetaData("date", df.format(new Date()));
 
-        source.addMetaData("river_label", msg(CSV_META_RIVER_LABEL));
+        source.addMetaData("river_label", msg(SInfoI18NStrings.CSV_META_RIVER_LABEL));
         source.addMetaData("river", river.getName());
 
         final DoubleRange calcRange = results.getCalcRange();
         final NumberFormat kmFormatter = getKmFormatter();
         final String rangeValue = String.format("%s - %s", kmFormatter.format(calcRange.getMinimumDouble()), kmFormatter.format(calcRange.getMaximumDouble()));
-        source.addMetaData("range_label", msg(CSV_META_RANGE_LABEL));
+        source.addMetaData("range_label", msg(SInfoI18NStrings.CSV_META_RANGE_LABEL));
         source.addMetaData("range", rangeValue);
 
         /* column headings */
-        source.addMetaData("station_header", msg(CSV_KM_HEADER));
+        source.addMetaData("station_header", msg(SInfoI18NStrings.CSV_KM_HEADER));
         source.addMetaData("flowdepth_header", msg(CSV_FLOWDEPTH_HEADER));
         source.addMetaData("flowdepth_tkh_header", msg(CSV_FLOWDEPTHTKH_HEADER));
         source.addMetaData("tkh_header", msg(CSV_TKH_HEADER));
-        source.addMetaData("waterlevel_header", msg(CSV_WATERLEVEL_HEADER));
+        source.addMetaData("waterlevel_header", msg(SInfoI18NStrings.CSV_WATERLEVEL_HEADER));
         source.addMetaData("river_unit", wstUnitName);
-        source.addMetaData("discharge_header", msg(CSV_DISCHARGE_HEADER));
-        source.addMetaData("waterlevel_name_header", msg(CSV_LABEL_HEADER));
-        source.addMetaData("gauge_header", msg(CSV_GAUGE_HEADER));
+        source.addMetaData("discharge_header", msg(SInfoI18NStrings.CSV_DISCHARGE_HEADER));
+        source.addMetaData("waterlevel_name_header", msg(SInfoI18NStrings.CSV_LABEL_HEADER));
+        source.addMetaData("gauge_header", msg(SInfoI18NStrings.CSV_GAUGE_HEADER));
         source.addMetaData("bedheight_header", msg(CSV_MEAN_BED_HEIGHT_HEADER_SHORT));
         source.addMetaData("sounding_name_header", msg(CSV_SOUNDING_HEADER));
-        source.addMetaData("location_header", msg(CSV_LOCATION_HEADER));
+        source.addMetaData("location_header", msg(SInfoI18NStrings.CSV_LOCATION_HEADER));
     }
 
-    private void addJRTableData(final MetaAndTableJRDataSource source, final FlowDepthCalculationResult result, final boolean useTkh) {
-
-        final Collection<FlowDepthRow> rows = result.getRows();
-
-        for (final FlowDepthRow row : rows) {
-
-            final String[] formattedRow = formatFlowDepthRow(row, useTkh);
-            source.addData(formattedRow);
-        }
+    @Override
+    protected final String[] formatPDFRow(final FlowDepthRow row) {
+        return formatFlowDepthRow(row);
     }
-}
+}
\ No newline at end of file
diff -r 04ad2cfce559 -r 791714b92b5c artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhstate/TkhExporter.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/sinfo/tkhstate/TkhExporter.java	Thu Mar 01 18:49:34 2018 +0100
@@ -0,0 +1,231 @@
+/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.artifacts.sinfo.tkhstate;
+
+import java.text.DateFormat;
+import java.text.NumberFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.Locale;
+
+import org.apache.commons.lang.math.DoubleRange;
+import org.apache.log4j.Logger;
+import org.dive4elements.river.FLYS;
+import org.dive4elements.river.artifacts.resources.Resources;
+import org.dive4elements.river.artifacts.sinfo.SInfoI18NStrings;
+import org.dive4elements.river.artifacts.sinfo.common.AbstractSInfoExporter;
+import org.dive4elements.river.artifacts.sinfo.util.MetaAndTableJRDataSource;
+import org.dive4elements.river.artifacts.sinfo.util.RiverInfo;
+import org.dive4elements.river.artifacts.sinfo.util.WstInfo;
+import org.dive4elements.river.utils.RiverUtils;
+
+import au.com.bytecode.opencsv.CSVWriter;
+
+/**
+ * Generates different output formats (csv, pdf) of data that resulted from a tkh computation.
+ *
+ * @author Gernot Belger
+ */
+// REMARK: must be public because its registered in generators.xml
+public class TkhExporter extends AbstractSInfoExporter<TkhResultRow, TkhCalculationResult, TkhCalculationResults> {
+
+    /** The log used in this exporter. */
+    private static Logger log = Logger.getLogger(TkhExporter.class);
+
+    private static final String CSV_META_CALCULATION_FORMULA = "sinfo.export.tkh.calculation.formula";
+
+    private static final String CSV_TKH_HEADER = "sinfo.export.tkh.csv.header.tkh";
+
+    private static final String CSV_TKHKIND_HEADER = "sinfo.export.tkh.csv.header.tkhkind";
+
+    private static final String PREFIX_TKH_KIND = "sinfo.export.tkh.soilkind.";
+
+    private static final String CSV_MEAN_BED_HEIGHT_HEADER_SHORT = "sinfo.export.flow_depth.csv.header.mean_bed_height.short";
+
+    private static final String JASPER_FILE = "/jasper/sinfo.flowdepth.jasper";
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    @Override
+    protected final void writeCSVMeta(final CSVWriter writer, final TkhCalculationResults results) {
+        log.info("TkhExporter.writeCSVMeta");
+
+        final String calcModeLabel = results.getCalcModeLabel();
+        final RiverInfo river = results.getRiver();
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_RESULT, msg(SInfoI18NStrings.CSV_META_HEADER_RESULT_LABEL), river.getName(), calcModeLabel);
+
+        // "# FLYS-Version: "
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_VERSION, msg(SInfoI18NStrings.CSV_META_VERSION_LABEL), FLYS.VERSION);
+
+        // "# Bearbeiter: "
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_USER, msg(SInfoI18NStrings.CSV_META_USER_LABEL), results.getUser());
+
+        // "# Datum der Erstellung: "
+        final Locale locale = Resources.getLocale(this.context.getMeta());
+        final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_CREATION, msg(SInfoI18NStrings.CSV_META_CREATION_LABEL), df.format(new Date()));
+
+        // "# Gewässer: "
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_RIVER, msg(SInfoI18NStrings.CSV_META_RIVER_LABEL), river.getName());
+
+        // "# Höhensystem des Flusses: "
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEIGHT_UNIT_RIVER, river.getWstUnit());
+
+        // "# Ort/Bereich (km): "
+        final DoubleRange calcRange = results.getCalcRange();
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_RANGE, msg(SInfoI18NStrings.CSV_META_RANGE_LABEL),
+                getKmFormatter().format(calcRange.getMinimumDouble()), getKmFormatter().format(calcRange.getMaximumDouble()));
+
+        // "# Berechnungsgrundlage: Gleichung nach GILL (1971)"
+        writeCSVMetaEntry(writer, CSV_META_CALCULATION_FORMULA);
+
+        writer.writeNext(new String[] { "" });
+    }
+
+    /**
+     * Write the header, with different headings depending on whether at a gauge or at a location.
+     */
+    @Override
+    protected final void writeCSVHeader(final CSVWriter writer, final RiverInfo river) {
+        log.info("TkhExporter.writeCSVHeader");
+
+        final Collection<String> header = new ArrayList<>(11);
+
+        header.add(msg(SInfoI18NStrings.CSV_KM_HEADER));
+        header.add(msgUnit(CSV_TKH_HEADER, SInfoI18NStrings.UNIT_CM));
+        header.add(msg(CSV_TKHKIND_HEADER));
+        header.add(msgUnit(SInfoI18NStrings.CSV_MEAN_BED_HEIGHT_HEADER, river.getWstUnit()));
+
+        header.add(msgUnit(SInfoI18NStrings.CSV_WATERLEVEL_HEADER, river.getWstUnit()));
+        header.add(msgUnit(SInfoI18NStrings.CSV_DISCHARGE_HEADER, SInfoI18NStrings.UNIT_CUBIC_M));
+        header.add(msg(SInfoI18NStrings.CSV_LABEL_HEADER));
+        header.add(msg(SInfoI18NStrings.CSV_GAUGE_HEADER));
+        header.add(msg(SInfoI18NStrings.CSV_LOCATION_HEADER));
+
+        writer.writeNext(header.toArray(new String[header.size()]));
+    }
+
+    @Override
+    protected void writeCSVResultHeader(final CSVWriter writer, final TkhCalculationResult result) {
+
+        /* first some specific metadata */
+        final WstInfo wst = result.getWst();
+
+        // "##METADATEN WASSERSPIEGELLAGE"
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL);
+        // "# Bezeichnung der Wasserspiegellage: "
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_NAME, wst.getLabel());
+        // "# Bezugspegel: "
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_GAUGE, wst.getGauge());
+        // "# Jahr/Zeitraum der Wasserspiegellage: "
+        writeCSVMetaEntry(writer, SInfoI18NStrings.CSV_META_HEADER_WATERLEVEL_YEAR, Integer.toString(wst.getYear()));
+    }
+
+    @Override
+    protected final String[] formatCSVRow(final TkhResultRow row) {
+        return formatRow(row);
+    }
+
+    /**
+     * Format a row of a tkh result into an array of strings, both used by csv and pdf exports.
+     *
+     * @param row the tkh result row to format
+     */
+    private String[] formatRow(final TkhResultRow row) {
+
+        final Collection<String> lines = new ArrayList<>(11);
+
+        // Fluss-km
+        lines.add(getKmFormatter().format(row.getStation()));
+
+        // TKH [cm]
+        lines.add(getTkhFormatter().format(row.getTkh()));
+
+        // Einteilung der Gewässersohle (starr/mobil)
+        lines.add(msg(PREFIX_TKH_KIND + row.getTkhKind().name()));
+
+        // Mittlere Sohlhöhe [NN + m]
+        lines.add(getMeanBedHeighFormatter().format(row.getMeanBedHeight()));
+
+        // Wasserstand [NN + m]
+        lines.add(getW2Formatter().format(row.getWaterlevel()));
+
+        // Q [m³/s]
+        final double discharge = row.getDischarge();
+        final double roundedDischarge = RiverUtils.roundQ(discharge);
+        lines.add(getQFormatter().format(roundedDischarge));
+
+        // Bezeichnung
+        lines.add(row.getWaterlevelLabel());
+
+        // Bezugspegel
+        lines.add(row.getGauge());
+
+        // Lage
+        lines.add(row.getLocation());
+
+        return lines.toArray(new String[lines.size()]);
+    }
+
+    @Override
+    protected final String getJasperFile() {
+        return JASPER_FILE;
+    }
+
+    @Override
+    protected final void addJRMetaData(final MetaAndTableJRDataSource source, final TkhCalculationResults results) {
+
+        final RiverInfo river = results.getRiver();
+        final String wstUnitName = river.getWstUnit();
+
+        /* general metadata */
+        source.addMetaData("header", msg(SInfoI18NStrings.CSV_META_HEADER_RESULT_LABEL));
+        source.addMetaData("calcMode", results.getCalcModeLabel());
+
+        source.addMetaData("version_label", msg(SInfoI18NStrings.CSV_META_VERSION_LABEL));
+        source.addMetaData("version", FLYS.VERSION);
+
+        source.addMetaData("user_label", msg(SInfoI18NStrings.CSV_META_USER_LABEL));
+        source.addMetaData("user", results.getUser());
+
+        final Locale locale = Resources.getLocale(this.context.getMeta());
+        final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale);
+        source.addMetaData("date_label", msg(SInfoI18NStrings.CSV_META_CREATION_LABEL));
+        source.addMetaData("date", df.format(new Date()));
+
+        source.addMetaData("river_label", msg(SInfoI18NStrings.CSV_META_RIVER_LABEL));
+        source.addMetaData("river", river.getName());
+        source.addMetaData("river_unit", wstUnitName);
+
+        final DoubleRange calcRange = results.getCalcRange();
+        final NumberFormat kmFormatter = getKmFormatter();
+        final String rangeValue = String.format("%s - %s", kmFormatter.format(calcRange.getMinimumDouble()), kmFormatter.format(calcRange.getMaximumDouble()));
+        source.addMetaData("range_label", msg(SInfoI18NStrings.CSV_META_RANGE_LABEL));
+        source.addMetaData("range", rangeValue);
+
+        /* column headings */
+        source.addMetaData("station_header", msg(SInfoI18NStrings.CSV_KM_HEADER));
+        source.addMetaData("tkh_header", msg(CSV_TKH_HEADER));
+        source.addMetaData("bedheight_header", msg(CSV_MEAN_BED_HEIGHT_HEADER_SHORT));
+        source.addMetaData("waterlevel_header", msg(SInfoI18NStrings.CSV_WATERLEVEL_HEADER));
+        source.addMetaData("discharge_header", msg(SInfoI18NStrings.CSV_DISCHARGE_HEADER));
+        source.addMetaData("waterlevel_name_header", msg(SInfoI18NStrings.CSV_LABEL_HEADER));
+        source.addMetaData("gauge_header", msg(SInfoI18NStrings.CSV_GAUGE_HEADER));
+        source.addMetaData("location_header", msg(SInfoI18NStrings.CSV_LOCATION_HEADER));
+    }
+
+    @Override
+    protected final String[] formatPDFRow(final TkhResultRow row) {
+        return formatRow(row);
+    }
+}
\ No newline at end of file
diff -r 04ad2cfce559 -r 791714b92b5c artifacts/src/main/java/org/dive4elements/river/exports/AbstractExporter.java
--- a/artifacts/src/main/java/org/dive4elements/river/exports/AbstractExporter.java	Thu Mar 01 17:45:49 2018 +0100
+++ b/artifacts/src/main/java/org/dive4elements/river/exports/AbstractExporter.java	Thu Mar 01 18:49:34 2018 +0100
@@ -307,7 +307,14 @@
     protected String msg(final String key, final String def, final Object args) {
         return Resources.getMsg(context.getMeta(), key, def, args);
     }
-
+    
+    /**
+     * Formats header with unit: msg [unit]
+     */
+    protected final String msgUnit(final String key, final String unit) {
+        final String msg = msg(key);
+        return String.format("%s [%s]", msg, unit);
+    }
 
     /**
      * This method starts CSV creation. It makes use of writeCSVData() which has
diff -r 04ad2cfce559 -r 791714b92b5c artifacts/src/main/resources/messages.properties
--- a/artifacts/src/main/resources/messages.properties	Thu Mar 01 17:45:49 2018 +0100
+++ b/artifacts/src/main/resources/messages.properties	Thu Mar 01 18:49:34 2018 +0100
@@ -861,4 +861,10 @@
 sinfo.chart.tkh.section.title=Transportk\u00f6rperh\u00f6hen
 sinfo.chart.tkh.section.yaxis.label = Transportk\u00f6rperh\u00f6hen [cm]
 sinfo.chart.tkh.yaxis.label = Transportk\u00f6rperh\u00f6hen [cm]
-sinfo.facet.tkh.description = \u0394d ({0})
\ No newline at end of file
+sinfo.facet.tkh.description = \u0394d ({0})
+
+sinfo.export.tkh.calculation.formula = # Berechnungsgrundlage: Gleichung nach GILL (1971)
+sinfo.export.tkh.csv.header.tkh = Transport-k\u00f6rperh\u00f6he
+sinfo.export.tkh.csv.header.tkhkind = Einteilung der Gew\u00e4ssersohle
+sinfo.export.tkh.soilkind.mobil = Mobil
+sinfo.export.tkh.soilkind.starr = Starr
\ No newline at end of file
diff -r 04ad2cfce559 -r 791714b92b5c artifacts/src/main/resources/messages_de.properties
--- a/artifacts/src/main/resources/messages_de.properties	Thu Mar 01 17:45:49 2018 +0100
+++ b/artifacts/src/main/resources/messages_de.properties	Thu Mar 01 18:49:34 2018 +0100
@@ -861,4 +861,10 @@
 sinfo.chart.tkh.section.title=Transportk\u00f6rperh\u00f6hen
 sinfo.chart.tkh.section.yaxis.label = Transportk\u00f6rperh\u00f6hen [cm]
 sinfo.chart.tkh.yaxis.label = Transportk\u00f6rperh\u00f6hen [cm]
-sinfo.facet.tkh.description = \u0394d ({0})
\ No newline at end of file
+sinfo.facet.tkh.description = \u0394d ({0})
+
+sinfo.export.tkh.calculation.formula = # Berechnungsgrundlage: Gleichung nach GILL (1971)
+sinfo.export.tkh.csv.header.tkh = Transport-k\u00f6rperh\u00f6he
+sinfo.export.tkh.csv.header.tkhkind = Einteilung der Gew\u00e4ssersohle
+sinfo.export.tkh.soilkind.mobil = Mobil
+sinfo.export.tkh.soilkind.starr = Starr
\ No newline at end of file


More information about the Dive4Elements-commits mailing list