[PATCH] Importer: Import sediment load at measurement stations
Wald Commits
scm-commit@wald.intevation.org
Fri Jul 18 15:38:14 CEST 2014
# HG changeset patch
# User Tom Gottfried <tom@intevation.de>
# Date 1405690646 -7200
# Node ID d86cc6a17b7aa2e81d4cde6c337950a4f137b18c
# Parent cd35b76f1ef8d8774dcce8768eea4327f9589361
Importer: Import sediment load at measurement stations.
diff -r cd35b76f1ef8 -r d86cc6a17b7a backend/contrib/import_river.sh
--- a/backend/contrib/import_river.sh Fri Jul 18 13:03:28 2014 +0200
+++ b/backend/contrib/import_river.sh Fri Jul 18 15:37:26 2014 +0200
@@ -235,6 +235,7 @@
-Dflys.backend.importer.skip.waterlevels=true \
-Dflys.backend.importer.skip.sq.relation=true \
-Dflys.backend.importer.skip.sediment.density=true \
+ -Dflys.backend.importer.skip.sediment.load=true \
-Dflys.backend.importer.skip.sediment.load.ls=true \
-Dflys.backend.importer.skip.morphological.width=true \
-Dflys.backend.importer.skip.flow.velocity=true \
@@ -282,6 +283,7 @@
-Dflys.backend.importer.skip.waterlevels=false \
-Dflys.backend.importer.skip.sq.relation=false \
-Dflys.backend.importer.skip.sediment.density=false \
+ -Dflys.backend.importer.skip.sediment.load=false \
-Dflys.backend.importer.skip.sediment.load.ls=false \
-Dflys.backend.importer.skip.morphological.width=false \
-Dflys.backend.importer.skip.flow.velocity=false \
@@ -329,6 +331,7 @@
-Dflys.backend.importer.skip.waterlevels=true \
-Dflys.backend.importer.skip.sq.relation=true \
-Dflys.backend.importer.skip.sediment.density=true \
+ -Dflys.backend.importer.skip.sediment.load=true \
-Dflys.backend.importer.skip.sediment.load.ls=true \
-Dflys.backend.importer.skip.morphological.width=true \
-Dflys.backend.importer.skip.flow.velocity=true \
diff -r cd35b76f1ef8 -r d86cc6a17b7a backend/contrib/run_hydr_morph.sh
--- a/backend/contrib/run_hydr_morph.sh Fri Jul 18 13:03:28 2014 +0200
+++ b/backend/contrib/run_hydr_morph.sh Fri Jul 18 15:37:26 2014 +0200
@@ -47,6 +47,7 @@
IMPORTER_SKIP_MORPHOLOGICAL_WIDTH=false
IMPORTER_SKIP_POROSITY=false
IMPORTER_SKIP_SEDIMENT_DENSITY=false
+IMPORTER_SKIP_SEDIMENT_LOAD=false
IMPORTER_SKIP_SEDIMENT_LOAD_LS=false
IMPORTER_SKIP_SQ_RELATION=false
IMPORTER_SKIP_WATERLEVELS=false
@@ -95,6 +96,7 @@
-Dflys.backend.importer.skip.official.lines=$IMPORTER_SKIP_OFFICIAL_LINES \
-Dflys.backend.importer.skip.prfs=$IMPORTER_SKIP_PRFS \
-Dflys.backend.importer.skip.sediment.density=$IMPORTER_SKIP_SEDIMENT_DENSITY \
+ -Dflys.backend.importer.skip.sediment.load=$IMPORTER_SKIP_SEDIMENT_LOAD \
-Dflys.backend.importer.skip.sediment.load.ls=$IMPORTER_SKIP_SEDIMENT_LOAD_LS \
-Dflys.backend.importer.skip.sq.relation=$IMPORTER_SKIP_SQ_RELATION \
-Dflys.backend.importer.skip.w80s=$IMPORTER_SKIP_W80S \
diff -r cd35b76f1ef8 -r d86cc6a17b7a backend/doc/documentation/de/importer-hydr-morph.tex
--- a/backend/doc/documentation/de/importer-hydr-morph.tex Fri Jul 18 13:03:28 2014 +0200
+++ b/backend/doc/documentation/de/importer-hydr-morph.tex Fri Jul 18 15:37:26 2014 +0200
@@ -211,6 +211,18 @@
\textit{Morphologie/Geschwindigkeit\_Schubspannung/Modellrechnungen} und\\
\textit{Morphologie/Geschwindigkeit\_Schubspannung/v-Messungen} geladen.
+\subsubsection{Sedimentfracht an Messstellen}
+Mit
+\textbf{-Dflys.backend.importer.skip.sediment.load=true}
+kann der Import der Sedimentfracht an Messstellen unterdrückt werden.
+Es werden die CSV-Dateien aus dem Verzeichnis
+\textit{Morphologie/Fracht/Messstellen} geladen.
+Dabei werden die Dateien aus den
+Unterverzeichnissen \textit{Einzeljahre}, \textit{Epochen}
+und \textit{amtliche Epochen} entsprechend als
+\textit{Einzeljahre}, \textit{Epochen} und
+\textit{amtliche Epochen} gespeichert.
+
\subsubsection{Sedimentfracht (Längsschnitt-Daten)}
Mit
\textbf{-Dflys.backend.importer.skip.sediment.load.ls=true}
@@ -218,7 +230,7 @@
der Sedimentfracht unterdrückt werden.
Es werden die CSV-Dateien aus dem Verzeichnis
\textit{Morphologie/Fracht/Laengsschnitte} geladen.
-Dabei werden die Dateien aus dem
+Dabei werden die Dateien aus den
Unterverzeichnissen \textit{Einzeljahre}, \textit{Epochen}
und \textit{amtliche Epochen} entsprechend als
\textit{Einzeljahre}, \textit{Epochen} und
diff -r cd35b76f1ef8 -r d86cc6a17b7a backend/src/main/java/org/dive4elements/river/importer/Config.java
--- a/backend/src/main/java/org/dive4elements/river/importer/Config.java Fri Jul 18 13:03:28 2014 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/Config.java Fri Jul 18 15:37:26 2014 +0200
@@ -88,6 +88,9 @@
public static final String SKIP_SEDIMENT_LOAD_LS =
"flys.backend.importer.skip.sediment.load.ls";
+ public static final String SKIP_SEDIMENT_LOAD =
+ "flys.backend.importer.skip.sediment.load";
+
public static final String SKIP_WATERLEVELS =
"flys.backend.importer.skip.waterlevels";
@@ -231,6 +234,10 @@
return getFlag(SKIP_SEDIMENT_LOAD_LS);
}
+ public boolean skipSedimentLoad() {
+ return getFlag(SKIP_SEDIMENT_LOAD);
+ }
+
public boolean skipWaterlevels() {
return getFlag(SKIP_WATERLEVELS);
}
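As a side note on how the new constant is consumed: Config.getFlag() itself is not part of this patch, so the following minimal sketch assumes the flag ends up as an ordinary Boolean JVM system property; only the property name is taken from the hunk above.

    // Sketch, not from the patch: checks the new importer property,
    // assuming Config.getFlag() amounts to a Boolean system property lookup.
    public class SkipSedimentLoadSketch {
        public static void main(String[] args) {
            boolean skip = Boolean.getBoolean(
                "flys.backend.importer.skip.sediment.load");
            System.out.println(skip
                ? "sediment load at measurement stations: skipped"
                : "sediment load at measurement stations: imported");
        }
    }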
diff -r cd35b76f1ef8 -r d86cc6a17b7a backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java Fri Jul 18 13:03:28 2014 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java Fri Jul 18 15:37:26 2014 +0200
@@ -29,7 +29,9 @@
import org.dive4elements.river.importer.parsers.PorosityParser;
import org.dive4elements.river.importer.parsers.SQRelationParser;
import org.dive4elements.river.importer.parsers.SedimentDensityParser;
+import org.dive4elements.river.importer.parsers.AbstractSedimentLoadParser;
import org.dive4elements.river.importer.parsers.SedimentLoadLSParser;
+import org.dive4elements.river.importer.parsers.SedimentLoadParser;
import org.dive4elements.river.importer.parsers.W80Parser;
import org.dive4elements.river.importer.parsers.W80CSVParser;
import org.dive4elements.river.importer.parsers.WaterlevelDifferencesParser;
@@ -108,6 +110,8 @@
public static final String SEDIMENT_LOAD_LS_DIR = "Laengsschnitte";
+ public static final String SEDIMENT_LOAD_MS_DIR = "Messstellen";
+
public static final String SEDIMENT_LOAD_SINGLE_DIR = "Einzeljahre";
public static final String SEDIMENT_LOAD_EPOCH_DIR = "Epochen";
@@ -176,6 +180,8 @@
protected List<ImportSedimentLoadLS> sedimentLoadLSs;
+ protected List<ImportSedimentLoad> sedimentLoads;
+
protected List<ImportMeasurementStation> measurementStations;
protected List<ImportSQRelation> sqRelations;
@@ -291,6 +297,7 @@
flowVelocityModels = new ArrayList<ImportFlowVelocityModel>();
flowVelocityMeasurements = new ArrayList<ImportFlowVelocityMeasurement>();
sedimentLoadLSs = new ArrayList<ImportSedimentLoadLS>();
+ sedimentLoads = new ArrayList<ImportSedimentLoad>();
measurementStations = new ArrayList<ImportMeasurementStation>();
sqRelations = new ArrayList<ImportSQRelation>();
}
@@ -378,15 +385,16 @@
parseOfficialLines();
parseFloodWater();
parseFloodProtection();
+ parseMeasurementStations();
parseBedHeight();
parseSedimentDensity();
parsePorosity();
parseMorphologicalWidth();
parseFlowVelocity();
parseSedimentLoadLS();
+ parseSedimentLoad();
parseWaterlevels();
parseWaterlevelDifferences();
- parseMeasurementStations();
parseSQRelation();
}
@@ -595,9 +603,9 @@
}
- private void parseSedimentLoadLSDir(
+ private void parseSedimentLoadFiles(
File[] files,
- SedimentLoadLSParser parser
+ AbstractSedimentLoadParser parser
) throws IOException {
for (File file: files) {
if (file.isDirectory()) {
@@ -611,6 +619,34 @@
}
}
+
+ private void parseSedimentLoadDir(
+ File sedimentLoadDir,
+ AbstractSedimentLoadParser parser
+ ) throws IOException {
+
+ File[] sedimentLoadSubDirs = {
+ new File(sedimentLoadDir,
+ SEDIMENT_LOAD_SINGLE_DIR),
+ new File(sedimentLoadDir,
+ SEDIMENT_LOAD_EPOCH_DIR),
+ new File(sedimentLoadDir,
+ SEDIMENT_LOAD_OFF_EPOCH_DIR),
+ };
+
+ for (File subDir : sedimentLoadSubDirs) {
+ File[] files = subDir.listFiles();
+
+ if (files == null || files.length == 0) {
+ log.warn("Cannot read directory '" + subDir + "'");
+ }
+ else {
+ parseSedimentLoadFiles(files, parser);
+ }
+ }
+ }
+
+
protected void parseSedimentLoadLS() throws IOException {
if (Config.INSTANCE.skipSedimentLoadLS()) {
log.info("skip parsing sediment load longitudinal section data");
@@ -619,46 +655,37 @@
log.debug("Parse sediment load longitudinal section data");
+ SedimentLoadLSParser parser = new SedimentLoadLSParser();
+
File minfoDir = getMinfoDir();
File sedimentLoadDir = new File(minfoDir, SEDIMENT_LOAD_DIR);
File sedimentLoadLSDir = new File(sedimentLoadDir,
SEDIMENT_LOAD_LS_DIR);
- File singleDir = new File(sedimentLoadLSDir,
- SEDIMENT_LOAD_SINGLE_DIR);
- File epochDir = new File(sedimentLoadLSDir,
- SEDIMENT_LOAD_EPOCH_DIR);
- File offEpochDir = new File(sedimentLoadLSDir,
- SEDIMENT_LOAD_OFF_EPOCH_DIR);
+ parseSedimentLoadDir(sedimentLoadLSDir, parser);
- File[] singles = singleDir.listFiles();
- File[] epochs = epochDir.listFiles();
- File[] offEpochs = offEpochDir.listFiles();
+ sedimentLoadLSs = parser.getSedimentLoadLSs();
+ }
- SedimentLoadLSParser parser = new SedimentLoadLSParser();
- if (singles == null || singles.length == 0) {
- log.warn("Cannot read directory '" + singleDir + "'");
- }
- else {
- parseSedimentLoadLSDir(singles, parser);
+ protected void parseSedimentLoad() throws IOException {
+ if (Config.INSTANCE.skipSedimentLoad()) {
+ log.info("skip parsing sediment load data at measurement stations");
+ return;
}
- if (epochs == null || epochs.length == 0) {
- log.warn("Cannot read directory '" + epochDir + "'");
- }
- else {
- parseSedimentLoadLSDir(epochs, parser);
- }
+ log.debug("Parse sediment load data at measurement stations");
- if (offEpochs == null || offEpochs.length == 0) {
- log.warn("Cannot read directory '" + offEpochDir + "'");
- }
- else {
- parseSedimentLoadLSDir(offEpochs, parser);
- }
+ SedimentLoadParser parser = new SedimentLoadParser(name);
- sedimentLoadLSs = parser.getSedimentLoadLSs();
+ File minfoDir = getMinfoDir();
+ File sedimentLoadDir = new File(minfoDir, SEDIMENT_LOAD_DIR);
+ File sedimentLoadMSDir = new File(sedimentLoadDir,
+ SEDIMENT_LOAD_MS_DIR);
+
+ parseSedimentLoadDir(sedimentLoadMSDir, parser);
+
+ sedimentLoads = parser.getSedimentLoads();
}
@@ -1257,15 +1284,16 @@
storeOfficialLines();
storeFloodWater();
storeFloodProtection();
+ storeMeasurementStations();
storeBedHeight();
storeSedimentDensity();
storePorosity();
storeMorphologicalWidth();
storeFlowVelocity();
storeSedimentLoadLS();
+ storeSedimentLoad();
storeWaterlevels();
storeWaterlevelDifferences();
- storeMeasurementStations();
storeSQRelations();
storeOfficialNumber();
}
@@ -1515,6 +1543,17 @@
}
+ public void storeSedimentLoad() {
+ if (!Config.INSTANCE.skipSedimentLoad()) {
+ log.info("store sediment load data at measurement stations");
+
+ for (ImportSedimentLoad sedimentLoad: sedimentLoads) {
+ sedimentLoad.storeDependencies();
+ }
+ }
+ }
+
+
public void storeMeasurementStations() {
if (!Config.INSTANCE.skipMeasurementStations()) {
log.info("store measurement stations");
diff -r cd35b76f1ef8 -r d86cc6a17b7a backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoad.java
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoad.java Fri Jul 18 13:03:28 2014 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoad.java Fri Jul 18 15:37:26 2014 +0200
@@ -1,10 +1,12 @@
package org.dive4elements.river.importer;
import java.util.List;
+import java.util.ArrayList;
import org.dive4elements.river.model.GrainFraction;
import org.dive4elements.river.model.SedimentLoad;
import org.dive4elements.river.model.TimeInterval;
+import org.dive4elements.river.model.MeasurementStation;
import org.hibernate.Query;
import org.hibernate.Session;
@@ -18,28 +20,61 @@
private String description;
private Integer kind;
+ private List<ImportSedimentLoadValue> values;
+
public ImportSedimentLoad() {
+ this.values = new ArrayList<ImportSedimentLoadValue>();
+ }
+
+ public ImportSedimentLoad (
+ ImportGrainFraction grainFraction,
+ ImportTimeInterval timeInterval,
+ String description,
+ Integer kind
+ ) {
+ this.grainFraction = grainFraction;
+ this.timeInterval = timeInterval;
+ this.description = description;
+ this.kind = kind;
+
+ this.values = new ArrayList<ImportSedimentLoadValue>();
+ }
+
+ public void addValue(ImportSedimentLoadValue value) {
+ values.add(value);
}
public void storeDependencies() {
grainFraction.getPeer();
timeInterval.getPeer();
- sqTimeInterval.getPeer();
+
+ if (sqTimeInterval != null) {
+ sqTimeInterval.getPeer();
+ }
getPeer();
+
+ for (ImportSedimentLoadValue value : values) {
+ value.storeDependencies(peer);
+ }
+
}
public SedimentLoad getPeer() {
if (peer == null) {
Session session = ImporterSession.getInstance().getDatabaseSession();
+
+ String sqtquery = sqTimeInterval == null ?
+ "sq_time_interval_id is null" :
+ "sqTimeInterval = :sqTimeInterval";
Query query = session.createQuery(
"from SedimentLoad where " +
" grainFraction = :grainFraction and " +
" timeInterval = :timeInterval and " +
- " sqTimeInterval = :sqTimeInterval and " +
" description = :description and " +
- " kind = :kind");
+ " kind = :kind and " +
+ sqtquery);
GrainFraction gf = grainFraction.getPeer();
TimeInterval ti = timeInterval.getPeer();
@@ -50,7 +85,10 @@
query.setParameter("grainFraction", gf);
query.setParameter("timeInterval", ti);
- query.setParameter("sqTimeInterval", sqti);
+
+ if (sqti != null) {
+ query.setParameter("sqTimeInterval", sqti);
+ }
query.setParameter("description", description);
query.setParameter("kind", kind);
diff -r cd35b76f1ef8 -r d86cc6a17b7a backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoadLS.java
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoadLS.java Fri Jul 18 13:03:28 2014 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoadLS.java Fri Jul 18 15:37:26 2014 +0200
@@ -69,10 +69,6 @@
public void storeDependencies(River river) {
log.debug("store dependencies");
- if (grainFraction != null) {
- grainFraction.storeDependencies();
- }
-
SedimentLoadLS peer = getPeer(river);
if (peer != null) {
diff -r cd35b76f1ef8 -r d86cc6a17b7a backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoadValue.java
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoadValue.java Fri Jul 18 13:03:28 2014 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportSedimentLoadValue.java Fri Jul 18 15:37:26 2014 +0200
@@ -3,7 +3,6 @@
import java.util.List;
import org.dive4elements.river.model.MeasurementStation;
-import org.dive4elements.river.model.River;
import org.dive4elements.river.model.SedimentLoad;
import org.dive4elements.river.model.SedimentLoadValue;
import org.hibernate.Query;
@@ -13,43 +12,37 @@
private SedimentLoadValue peer;
- private ImportMeasurementStation station;
- private ImportSedimentLoad sedimentLoad;
- private Double value;
+ private MeasurementStation station;
+ private Double value;
public ImportSedimentLoadValue() {
}
public ImportSedimentLoadValue(
- ImportMeasurementStation station,
- ImportSedimentLoad sedimentLoad,
- Double value
+ MeasurementStation station,
+ Double value
) {
this.station = station;
- this.sedimentLoad = sedimentLoad;
this.value = value;
}
- protected SedimentLoadValue getPeer(River river) {
+ protected SedimentLoadValue getPeer(SedimentLoad sedimentLoad) {
if (peer == null) {
Session session = ImporterSession.getInstance().getDatabaseSession();
Query query = session.createQuery(
"from SedimentLoadValue where " +
- " station = :station and " +
+ " measurementStation = :station and " +
" sedimentLoad = :sedimentLoad and " +
" value = :value");
- MeasurementStation ms = station.getPeer(river);
- SedimentLoad sl = sedimentLoad.getPeer();
-
- query.setParameter("station", ms);
- query.setParameter("sedimentLoad", sl);
+ query.setParameter("station", station);
+ query.setParameter("sedimentLoad", sedimentLoad);
query.setParameter("value", value);
List<SedimentLoadValue> values = query.list();
if (values.isEmpty()) {
- peer = new SedimentLoadValue(sl, ms, value);
+ peer = new SedimentLoadValue(sedimentLoad, station, value);
session.save(peer);
}
else {
@@ -60,10 +53,8 @@
return peer;
}
- public void storeDependencies(River river) {
- station.storeDependencies(river);
- sedimentLoad.storeDependencies();
- getPeer(river);
+ public void storeDependencies(SedimentLoad sedimentLoad) {
+ getPeer(sedimentLoad);
}
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r cd35b76f1ef8 -r d86cc6a17b7a backend/src/main/java/org/dive4elements/river/importer/parsers/AbstractSedimentLoadParser.java
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/AbstractSedimentLoadParser.java Fri Jul 18 13:03:28 2014 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/AbstractSedimentLoadParser.java Fri Jul 18 15:37:26 2014 +0200
@@ -24,7 +24,6 @@
import org.dive4elements.river.importer.ImporterSession;
import org.dive4elements.river.importer.ImportGrainFraction;
import org.dive4elements.river.importer.ImportTimeInterval;
-import org.dive4elements.river.importer.ImportUnit;
import org.dive4elements.river.model.GrainFraction;
@@ -53,9 +52,6 @@
public static final Pattern META_FRACTION_NAME =
Pattern.compile("^Fraktionsname: (.*)");
- public static final Pattern META_UNIT =
- Pattern.compile("^Einheit: \\[(.*)\\].*");
-
public static final Pattern META_COLUMN_NAMES =
Pattern.compile("^Fluss-km.*");
@@ -69,11 +65,12 @@
* with respect to file location (offical epoch or not?) */
protected abstract void initializeSedimentLoads();
+ protected abstract void handleMetaLine(String line)
+ throws LineParserException;
+
protected ImportGrainFraction grainFraction;
- protected ImportUnit unit;
-
protected String description;
protected String[] columnNames;
@@ -102,35 +99,6 @@
}
- protected void handleMetaLine(String line) throws LineParserException {
- if (handleMetaUnit(line)) {
- return;
- }
- if (handleMetaFraction(line)) {
- return;
- }
- if (handleMetaFractionName(line)) {
- return;
- }
- if (handleColumnNames(line)) {
- return;
- }
- log.warn("ASLP: Unknown meta line: '" + line + "'");
- }
-
-
- protected boolean handleMetaUnit(String line) {
- Matcher m = META_UNIT.matcher(line);
-
- if (m.matches()) {
- unit = new ImportUnit(m.group(1));
- return true;
- }
-
- return false;
- }
-
-
public boolean handleMetaFraction(String line) {
Matcher m = META_FRACTION.matcher(line);
diff -r cd35b76f1ef8 -r d86cc6a17b7a backend/src/main/java/org/dive4elements/river/importer/parsers/SedimentLoadLSParser.java
--- a/backend/src/main/java/org/dive4elements/river/importer/parsers/SedimentLoadLSParser.java Fri Jul 18 13:03:28 2014 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/SedimentLoadLSParser.java Fri Jul 18 15:37:26 2014 +0200
@@ -16,6 +16,8 @@
import java.util.ArrayList;
import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
import org.apache.log4j.Logger;
@@ -34,10 +36,16 @@
Logger.getLogger(SedimentLoadLSParser.class);
+ public static final Pattern META_UNIT =
+ Pattern.compile("^Einheit: \\[(.*)\\].*");
+
+
protected List<ImportSedimentLoadLS> sedimentLoadLSs;
protected ImportSedimentLoadLS[] current;
+ protected ImportUnit unit;
+
public SedimentLoadLSParser() {
sedimentLoadLSs = new ArrayList<ImportSedimentLoadLS>();
@@ -65,6 +73,36 @@
@Override
+ protected void handleMetaLine(String line) throws LineParserException {
+ if (handleMetaUnit(line)) {
+ return;
+ }
+ if (handleMetaFraction(line)) {
+ return;
+ }
+ if (handleMetaFractionName(line)) {
+ return;
+ }
+ if (handleColumnNames(line)) {
+ return;
+ }
+ log.warn("ASLP: Unknown meta line: '" + line + "'");
+ }
+
+
+ protected boolean handleMetaUnit(String line) {
+ Matcher m = META_UNIT.matcher(line);
+
+ if (m.matches()) {
+ unit = new ImportUnit(m.group(1));
+ return true;
+ }
+
+ return false;
+ }
+
+
+ @Override
protected void handleDataLine(String line) {
String[] vals = line.split(SEPERATOR_CHAR);
diff -r cd35b76f1ef8 -r d86cc6a17b7a backend/src/main/java/org/dive4elements/river/importer/parsers/SedimentLoadParser.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/parsers/SedimentLoadParser.java Fri Jul 18 15:37:26 2014 +0200
@@ -0,0 +1,197 @@
+/* Copyright (C) 2011, 2012, 2013 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.parsers;
+
+import java.io.File;
+import java.io.IOException;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+
+import org.dive4elements.river.importer.ImporterSession;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.ImportGrainFraction;
+import org.dive4elements.river.importer.ImportSedimentLoad;
+import org.dive4elements.river.importer.ImportSedimentLoadValue;
+import org.dive4elements.river.importer.ImportTimeInterval;
+import org.dive4elements.river.importer.ImportMeasurementStation;
+
+import org.dive4elements.river.model.GrainFraction;
+import org.dive4elements.river.model.MeasurementStation;
+
+/** Parses sediment load at measurement stations. */
+public class SedimentLoadParser extends AbstractSedimentLoadParser {
+ private static final Logger log =
+ Logger.getLogger(SedimentLoadParser.class);
+
+
+ public static final String MEASUREMENT_TYPE_BEDLOAD = "Geschiebe";
+
+ public static final String MEASUREMENT_TYPE_SUSP = "Schwebstoff";
+
+ public static final String GRAINFRACTION_NAME_SUSP = "suspended_sediment";
+
+ public static final String GRAINFRACTION_NAME_TOTAL = "total";
+
+ protected List<ImportSedimentLoad> sedimentLoads;
+
+ protected ImportSedimentLoad[] current;
+
+ protected String rivername;
+
+ public SedimentLoadParser() {
+ sedimentLoads = new ArrayList<ImportSedimentLoad>();
+ }
+
+ public SedimentLoadParser(String rivername) {
+ sedimentLoads = new ArrayList<ImportSedimentLoad>();
+ this.rivername = rivername;
+ }
+
+
+
+ @Override
+ protected void reset() {
+ current = null;
+ grainFraction = null;
+ }
+
+
+ @Override
+ protected void finish() {
+ if (current != null) {
+ for (ImportSedimentLoad isy: current) {
+ sedimentLoads.add(isy);
+ }
+ }
+
+ description = null;
+ }
+
+
+ @Override
+ protected void handleMetaLine(String line) throws LineParserException {
+ if (handleMetaFraction(line)) {
+ return;
+ }
+ if (handleMetaFractionName(line)) {
+ return;
+ }
+ if (handleColumnNames(line)) {
+ return;
+ }
+ log.warn("ASLP: Unknown meta line: '" + line + "'");
+ }
+
+
+ private void initializeSedimentLoadValues(String[] vals,
+ MeasurementStation m) throws ParseException {
+ for (int i = 1, n = columnNames.length-1; i < n; i++) {
+ String curVal = vals[i];
+
+ if (curVal != null && curVal.length() > 0) {
+ current[i-1].addValue(new ImportSedimentLoadValue(
+ m, nf.parse(curVal).doubleValue()
+ ));
+ }
+ }
+ }
+
+ @Override
+ protected void handleDataLine(String line) {
+ String[] vals = line.split(SEPERATOR_CHAR);
+
+ if (vals == null || vals.length < columnNames.length-1) {
+ log.warn("SLP: skip invalid data line: '" + line + "'");
+ return;
+ }
+
+ try {
+ Double km = nf.parse(vals[0]).doubleValue();
+
+ List<MeasurementStation> ms =
+ ImporterSession.getInstance().getMeasurementStations(
+ rivername, km);
+
+ String gfn = grainFraction.getPeer().getName();
+
+ if (ms != null && !ms.isEmpty()) {
+
+ // Check for measurement station at km fitting grain fraction
+ for (MeasurementStation m : ms) {
+ if (gfn.equals(GRAINFRACTION_NAME_TOTAL)) {
+ // total load can be at any station type
+ initializeSedimentLoadValues(vals, m);
+ return;
+ }
+ if (gfn.equals(GRAINFRACTION_NAME_SUSP) &&
+ m.getMeasurementType().equals(MEASUREMENT_TYPE_SUSP)) {
+ // susp. sediment can only be at respective stations
+ initializeSedimentLoadValues(vals, m);
+ return;
+ }
+ if (!gfn.equals(GRAINFRACTION_NAME_SUSP) &&
+ m.getMeasurementType().equals(MEASUREMENT_TYPE_BEDLOAD)) {
+ /** anything but total load and susp. sediment
+ can only be at bed load measurement stations */
+ initializeSedimentLoadValues(vals, m);
+ return;
+ }
+ }
+ log.error("SLP: No measurement station at km " + km +
+ " fitting grain fraction " + gfn +
+ " on river " + rivername);
+ return;
+ }
+ else {
+ log.error("SLP: No measurement station at km " + km +
+ " on river " + rivername);
+ return;
+ }
+ }
+ catch (ParseException pe) {
+ log.warn("SLP: unparseable number in data row '" + line + "':", pe);
+ }
+ }
+
+
+ @Override
+ protected void initializeSedimentLoads() {
+ // skip first column (Fluss-km) and last column (Hinweise)
+ current = new ImportSedimentLoad[columnNames.length-2];
+
+ Integer kind;
+
+ if (inputFile.getAbsolutePath().contains("amtliche Epochen")) {
+ kind = new Integer(1);
+ }
+ else {
+ kind = new Integer(0);
+ }
+
+ for (int i = 0, n = columnNames.length; i < n-2; i++) {
+ current[i] = new ImportSedimentLoad(
+ grainFraction,
+ getTimeInterval(columnNames[i+1]),
+ description,
+ kind);
+ }
+ }
+
+
+ public List<ImportSedimentLoad> getSedimentLoads() {
+ return sedimentLoads;
+ }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
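For orientation, a rough sketch of the file shape this parser accepts, pieced together from the patterns involved: a "Fraktionsname:" meta line (handled by handleMetaFractionName), a column header starting with "Fluss-km" whose inner columns name the time intervals and whose last column is "Hinweise", and one data row per station km. The field separator (assumed ";" here) and the decimal comma are assumptions, since SEPERATOR_CHAR and the number format nf are defined outside this patch; likewise, fraction names such as "total" merely mirror the constants checked in handleDataLine.

    Fraktionsname: total
    Fluss-km;1991;1992 - 2010;Hinweise
    12,3;1050,5;1100,0;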
diff -r cd35b76f1ef8 -r d86cc6a17b7a backend/src/main/java/org/dive4elements/river/model/SedimentLoadValue.java
--- a/backend/src/main/java/org/dive4elements/river/model/SedimentLoadValue.java Fri Jul 18 13:03:28 2014 +0200
+++ b/backend/src/main/java/org/dive4elements/river/model/SedimentLoadValue.java Fri Jul 18 15:37:26 2014 +0200
@@ -20,7 +20,7 @@
import javax.persistence.Table;
@Entity
- at Table(name = "sediment_load_value")
+ at Table(name = "sediment_load_values")
public class SedimentLoadValue
implements Serializable
{