[PATCH 3 of 8] Importer for the Schifffahrt (navigation, S-INFO) and Oekologie (ecology, U-INFO) files

Wald Commits scm-commit at wald.intevation.org
Tue Apr 3 10:44:55 CEST 2018


# HG changeset patch
# User mschaefer
# Date 1522743510 -7200
# Node ID 50416a0df38590909a5eadd7fb0d1c2b10cf13f0
# Parent  da5dc74466522c52b2d0b45c8cf0fb86877e9f34
Importer for the Schifffahrt (navigation, S-INFO) and Oekologie (ecology, U-INFO) files
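
This part of the series touches three backend files: FLYSCredentials.java adds the new sinfo/uinfo model classes to the backend's list of persistent classes (CLASSES), Config.java adds per-topic skip flags for the new data, and ImportRiver.java hands the river directory to the new importers. Condensed from the ImportRiver.java hunk below, the wiring at the end of parseDependencies() looks like this (SInfoImporter and UInfoImporter themselves are introduced by other patches of this series, so only the calls visible in this diff are shown):

    // Runs after the existing gauge/annotation/MINFO parsing; getRiverDir()
    // is the river directory three levels above river/Hydrologie/Basisdaten/river.wst.
    this.sinfoImporter.setup(getRiverDir(), this);
    this.sinfoImporter.parse();
    this.uinfoImporter.setup(getRiverDir(), this);
    this.uinfoImporter.parse();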

diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/backend/FLYSCredentials.java
--- a/backend/src/main/java/org/dive4elements/river/backend/FLYSCredentials.java	Tue Apr 03 10:02:01 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/backend/FLYSCredentials.java	Tue Apr 03 10:18:30 2018 +0200
@@ -9,16 +9,15 @@
 package org.dive4elements.river.backend;
 
 import org.dive4elements.artifacts.common.utils.Config;
-
 import org.dive4elements.river.model.Annotation;
 import org.dive4elements.river.model.AnnotationType;
 import org.dive4elements.river.model.Attribute;
 import org.dive4elements.river.model.AxisKind;
 import org.dive4elements.river.model.BedHeight;
+import org.dive4elements.river.model.BedHeightType;
 import org.dive4elements.river.model.BedHeightValue;
-import org.dive4elements.river.model.BedHeightType;
+import org.dive4elements.river.model.BoundaryKind;
 import org.dive4elements.river.model.Building;
-import org.dive4elements.river.model.BoundaryKind;
 import org.dive4elements.river.model.CrossSection;
 import org.dive4elements.river.model.CrossSectionLine;
 import org.dive4elements.river.model.CrossSectionPoint;
@@ -61,6 +60,7 @@
 import org.dive4elements.river.model.MorphologicalWidth;
 import org.dive4elements.river.model.MorphologicalWidthValue;
 import org.dive4elements.river.model.NamedMainValue;
+import org.dive4elements.river.model.OfficialLine;
 import org.dive4elements.river.model.Porosity;
 import org.dive4elements.river.model.PorosityValue;
 import org.dive4elements.river.model.Position;
@@ -71,14 +71,14 @@
 import org.dive4elements.river.model.SQRelation;
 import org.dive4elements.river.model.SQRelationValue;
 import org.dive4elements.river.model.SectieKind;
-import org.dive4elements.river.model.SobekKind;
 import org.dive4elements.river.model.SeddbName;
 import org.dive4elements.river.model.SedimentDensity;
 import org.dive4elements.river.model.SedimentDensityValue;
 import org.dive4elements.river.model.SedimentLoad;
-import org.dive4elements.river.model.SedimentLoadValue;
 import org.dive4elements.river.model.SedimentLoadLS;
 import org.dive4elements.river.model.SedimentLoadLSValue;
+import org.dive4elements.river.model.SedimentLoadValue;
+import org.dive4elements.river.model.SobekKind;
 import org.dive4elements.river.model.TimeInterval;
 import org.dive4elements.river.model.Unit;
 import org.dive4elements.river.model.Wst;
@@ -86,208 +86,251 @@
 import org.dive4elements.river.model.WstColumnQRange;
 import org.dive4elements.river.model.WstColumnValue;
 import org.dive4elements.river.model.WstQRange;
-import org.dive4elements.river.model.OfficialLine;
+import org.dive4elements.river.model.sinfo.BedMobility;
+import org.dive4elements.river.model.sinfo.BedMobilityValue;
+import org.dive4elements.river.model.sinfo.Channel;
+import org.dive4elements.river.model.sinfo.ChannelValue;
+import org.dive4elements.river.model.sinfo.Collision;
+import org.dive4elements.river.model.sinfo.CollisionType;
+import org.dive4elements.river.model.sinfo.CollisionValue;
+import org.dive4elements.river.model.sinfo.DailyDischarge;
+import org.dive4elements.river.model.sinfo.DailyDischargeValue;
+import org.dive4elements.river.model.sinfo.DepthEvolution;
+import org.dive4elements.river.model.sinfo.DepthEvolutionValue;
+import org.dive4elements.river.model.sinfo.FlowDepth;
+import org.dive4elements.river.model.sinfo.FlowDepthColumn;
+import org.dive4elements.river.model.sinfo.FlowDepthValue;
+import org.dive4elements.river.model.sinfo.Infrastructure;
+import org.dive4elements.river.model.sinfo.InfrastructureValue;
+import org.dive4elements.river.model.sinfo.Tkh;
+import org.dive4elements.river.model.sinfo.TkhColumn;
+import org.dive4elements.river.model.sinfo.TkhValue;
+import org.dive4elements.river.model.uinfo.Salix;
+import org.dive4elements.river.model.uinfo.SalixRank;
+import org.dive4elements.river.model.uinfo.SalixValue;
 
 public class FLYSCredentials
 extends      Credentials
 {
     public static final String XPATH_USER =
-        "/artifact-database/backend-database/user/text()";
+            "/artifact-database/backend-database/user/text()";
 
     public static final String XPATH_PASSWORD =
-        "/artifact-database/backend-database/password/text()";
+            "/artifact-database/backend-database/password/text()";
 
     public static final String XPATH_DIALECT =
-        "/artifact-database/backend-database/dialect/text()";
+            "/artifact-database/backend-database/dialect/text()";
 
     public static final String XPATH_DRIVER =
-        "/artifact-database/backend-database/driver/text()";
+            "/artifact-database/backend-database/driver/text()";
 
     public static final String XPATH_URL =
-        "/artifact-database/backend-database/url/text()";
+            "/artifact-database/backend-database/url/text()";
 
     public static final String XPATH_CONNECTION_INIT_SQLS =
-        "/artifact-database/backend-database/connection-init-sqls/text()";
+            "/artifact-database/backend-database/connection-init-sqls/text()";
 
     public static final String XPATH_VALIDATION_QUERY =
-        "/artifact-database/backend-database/validation-query/text()";
+            "/artifact-database/backend-database/validation-query/text()";
 
     public static final String XPATH_MAX_WAIT =
-        "/artifact-database/backend-database/max-wait/text()";
+            "/artifact-database/backend-database/max-wait/text()";
 
     public static final String DEFAULT_USER =
-        System.getProperty("flys.backend.user", "flys");
+            System.getProperty("flys.backend.user", "flys");
 
     public static final String DEFAULT_PASSWORD =
-        System.getProperty("flys.backend.password", "flys");
+            System.getProperty("flys.backend.password", "flys");
 
     public static final String DEFAULT_DIALECT =
-        System.getProperty(
-            "flys.backend.dialect",
-            "org.hibernate.dialect.PostgreSQLDialect");
+            System.getProperty(
+                    "flys.backend.dialect",
+                    "org.hibernate.dialect.PostgreSQLDialect");
 
     public static final String DEFAULT_DRIVER =
-        System.getProperty(
-            "flys.backend.driver",
-            "org.postgresql.Driver");
+            System.getProperty(
+                    "flys.backend.driver",
+                    "org.postgresql.Driver");
 
     public static final String DEFAULT_URL =
-        System.getProperty(
-            "flys.backend.url",
-            "jdbc:postgresql://localhost:5432/flys");
+            System.getProperty(
+                    "flys.backend.url",
+                    "jdbc:postgresql://localhost:5432/flys");
 
     public static final String DEFAULT_CONNECTION_INIT_SQLS =
-        System.getProperty(
-            "flys.backend.connection.init.sqls");
+            System.getProperty(
+                    "flys.backend.connection.init.sqls");
 
     public static final String DEFAULT_VALIDATION_QUERY =
-        System.getProperty(
-            "flys.backend.connection.validation.query");
+            System.getProperty(
+                    "flys.backend.connection.validation.query");
 
     public static final String DEFAULT_MAX_WAIT =
-        System.getProperty("flys.backend.connection.max.wait");
+            System.getProperty("flys.backend.connection.max.wait");
 
     public static final Class [] CLASSES = {
-        Annotation.class,
-        AnnotationType.class,
-        Attribute.class,
-        AxisKind.class,
-        BedHeight.class,
-        BedHeightValue.class,
-        BedHeightType.class,
-        Building.class,
-        BoundaryKind.class,
-        CrossSection.class,
-        CrossSectionLine.class,
-        CrossSectionPoint.class,
-        CrossSectionTrack.class,
-        CrossSectionTrackKind.class,
-        Depth.class,
-        DGM.class,
-        DischargeTable.class,
-        DischargeTableValue.class,
-        DischargeZone.class,
-        Edge.class,
-        ElevationModel.class,
-        FedState.class,
-        Fixpoint.class,
-        Floodmark.class,
-        Floodplain.class,
-        FloodplainKind.class,
-        Floodmaps.class,
-        FlowVelocityMeasurement.class,
-        FlowVelocityMeasurementValue.class,
-        FlowVelocityModel.class,
-        FlowVelocityModelValue.class,
-        Gauge.class,
-        GrainFraction.class,
-        HWSKind.class,
-        HWSLine.class,
-        HWSPoint.class,
-        HydrBoundary.class,
-        HydrBoundaryPoly.class,
-        HYK.class,
-        HYKEntry.class,
-        HYKFormation.class,
-        HYKFlowZoneType.class,
-        HYKFlowZone.class,
-        Jetty.class,
-        LocationSystem.class,
-        MainValueType.class,
-        MeasurementStation.class,
-        MorphologicalWidth.class,
-        MorphologicalWidthValue.class,
-        NamedMainValue.class,
-        MainValue.class,
-        Position.class,
-        Range.class,
-        River.class,
-        RiverAxis.class,
-        RiverAxisKm.class,
-        Porosity.class,
-        PorosityValue.class,
-        SectieKind.class,
-        SobekKind.class,
-        SeddbName.class,
-        SedimentDensity.class,
-        SedimentDensityValue.class,
-        SedimentLoad.class,
-        SedimentLoadValue.class,
-        SedimentLoadLS.class,
-        SedimentLoadLSValue.class,
-        SQRelation.class,
-        SQRelationValue.class,
-        TimeInterval.class,
-        Unit.class,
-        WstColumn.class,
-        WstColumnQRange.class,
-        WstColumnValue.class,
-        Wst.class,
-        WstQRange.class,
-        OfficialLine.class
+            Annotation.class,
+            AnnotationType.class,
+            Attribute.class,
+            AxisKind.class,
+            BedHeight.class,
+            BedHeightValue.class,
+            BedHeightType.class,
+            Building.class,
+            BoundaryKind.class,
+            CrossSection.class,
+            CrossSectionLine.class,
+            CrossSectionPoint.class,
+            CrossSectionTrack.class,
+            CrossSectionTrackKind.class,
+            Depth.class,
+            DGM.class,
+            DischargeTable.class,
+            DischargeTableValue.class,
+            DischargeZone.class,
+            Edge.class,
+            ElevationModel.class,
+            FedState.class,
+            Fixpoint.class,
+            Floodmark.class,
+            Floodplain.class,
+            FloodplainKind.class,
+            Floodmaps.class,
+            FlowVelocityMeasurement.class,
+            FlowVelocityMeasurementValue.class,
+            FlowVelocityModel.class,
+            FlowVelocityModelValue.class,
+            Gauge.class,
+            GrainFraction.class,
+            HWSKind.class,
+            HWSLine.class,
+            HWSPoint.class,
+            HydrBoundary.class,
+            HydrBoundaryPoly.class,
+            HYK.class,
+            HYKEntry.class,
+            HYKFormation.class,
+            HYKFlowZoneType.class,
+            HYKFlowZone.class,
+            Jetty.class,
+            LocationSystem.class,
+            MainValueType.class,
+            MeasurementStation.class,
+            MorphologicalWidth.class,
+            MorphologicalWidthValue.class,
+            NamedMainValue.class,
+            MainValue.class,
+            Position.class,
+            Range.class,
+            River.class,
+            RiverAxis.class,
+            RiverAxisKm.class,
+            Porosity.class,
+            PorosityValue.class,
+            SectieKind.class,
+            SobekKind.class,
+            SeddbName.class,
+            SedimentDensity.class,
+            SedimentDensityValue.class,
+            SedimentLoad.class,
+            SedimentLoadValue.class,
+            SedimentLoadLS.class,
+            SedimentLoadLSValue.class,
+            SQRelation.class,
+            SQRelationValue.class,
+            TimeInterval.class,
+            Unit.class,
+            WstColumn.class,
+            WstColumnQRange.class,
+            WstColumnValue.class,
+            Wst.class,
+            WstQRange.class,
+            OfficialLine.class,
+            BedMobility.class,
+            BedMobilityValue.class,
+            Infrastructure.class,
+            InfrastructureValue.class,
+            Channel.class,
+            ChannelValue.class,
+            CollisionType.class,
+            Collision.class,
+            CollisionValue.class,
+            DailyDischarge.class,
+            DailyDischargeValue.class,
+            SalixRank.class,
+            Salix.class,
+            SalixValue.class,
+            Tkh.class,
+            TkhColumn.class,
+            TkhValue.class,
+            FlowDepth.class,
+            FlowDepthColumn.class,
+            FlowDepthValue.class,
+            DepthEvolution.class,
+            DepthEvolutionValue.class
     };
 
     public FLYSCredentials() {
     }
 
     public FLYSCredentials(
-        String user,
-        String password,
-        String dialect,
-        String driver,
-        String url,
-        String connectionInitSqls,
-        String validationQuery,
-        String maxWait
-    ) {
+            final String user,
+            final String password,
+            final String dialect,
+            final String driver,
+            final String url,
+            final String connectionInitSqls,
+            final String validationQuery,
+            final String maxWait
+            ) {
         super(
-            user, password, dialect, driver, url,
-            connectionInitSqls, validationQuery, maxWait, CLASSES);
+                user, password, dialect, driver, url,
+                connectionInitSqls, validationQuery, maxWait, CLASSES);
     }
 
     private static Credentials instance;
 
     public static synchronized Credentials getInstance() {
         if (instance == null) {
-            String user =
-                Config.getStringXPath(XPATH_USER, DEFAULT_USER);
-            String password =
-                Config.getStringXPath(XPATH_PASSWORD, DEFAULT_PASSWORD);
-            String dialect =
-                Config.getStringXPath(XPATH_DIALECT, DEFAULT_DIALECT);
-            String driver =
-                Config.getStringXPath(XPATH_DRIVER, DEFAULT_DRIVER);
-            String url =
-                Config.getStringXPath(XPATH_URL, DEFAULT_URL);
-            String connectionInitSqls =
-                Config.getStringXPath(
-                    XPATH_CONNECTION_INIT_SQLS,
-                    DEFAULT_CONNECTION_INIT_SQLS);
-            String validationQuery =
-                Config.getStringXPath(
-                    XPATH_VALIDATION_QUERY,
-                    DEFAULT_VALIDATION_QUERY);
-            String maxWait =
-                Config.getStringXPath(XPATH_MAX_WAIT, DEFAULT_MAX_WAIT);
+            final String user =
+                    Config.getStringXPath(XPATH_USER, DEFAULT_USER);
+            final String password =
+                    Config.getStringXPath(XPATH_PASSWORD, DEFAULT_PASSWORD);
+            final String dialect =
+                    Config.getStringXPath(XPATH_DIALECT, DEFAULT_DIALECT);
+            final String driver =
+                    Config.getStringXPath(XPATH_DRIVER, DEFAULT_DRIVER);
+            final String url =
+                    Config.getStringXPath(XPATH_URL, DEFAULT_URL);
+            final String connectionInitSqls =
+                    Config.getStringXPath(
+                            XPATH_CONNECTION_INIT_SQLS,
+                            DEFAULT_CONNECTION_INIT_SQLS);
+            final String validationQuery =
+                    Config.getStringXPath(
+                            XPATH_VALIDATION_QUERY,
+                            DEFAULT_VALIDATION_QUERY);
+            final String maxWait =
+                    Config.getStringXPath(XPATH_MAX_WAIT, DEFAULT_MAX_WAIT);
 
             instance = new FLYSCredentials(
-                user, password, dialect, driver, url, connectionInitSqls,
-                validationQuery, maxWait);
+                    user, password, dialect, driver, url, connectionInitSqls,
+                    validationQuery, maxWait);
         }
         return instance;
     }
 
     public static Credentials getDefault() {
         return new FLYSCredentials(
-            DEFAULT_USER,
-            DEFAULT_PASSWORD,
-            DEFAULT_DIALECT,
-            DEFAULT_DRIVER,
-            DEFAULT_URL,
-            DEFAULT_CONNECTION_INIT_SQLS,
-            DEFAULT_VALIDATION_QUERY,
-            DEFAULT_MAX_WAIT
-        );
+                DEFAULT_USER,
+                DEFAULT_PASSWORD,
+                DEFAULT_DIALECT,
+                DEFAULT_DRIVER,
+                DEFAULT_URL,
+                DEFAULT_CONNECTION_INIT_SQLS,
+                DEFAULT_VALIDATION_QUERY,
+                DEFAULT_MAX_WAIT
+                );
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
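
The enlarged CLASSES array now also lists the S-INFO entities (BedMobility, Channel, Collision, DailyDischarge, Infrastructure, Tkh, FlowDepth, DepthEvolution and their value/column classes) and the U-INFO Salix entities, so they are mapped together with the existing model classes. As a rough illustration of why every new entity has to appear in this list, a session factory built from these credentials could register each class like this (standalone sketch using plain Hibernate API; the backend's actual factory code is not part of this patch):

    // Sketch only - not the backend's real wiring. CLASSES is the public
    // array shown above; addAnnotatedClass() is the stock Hibernate call
    // for registering an annotated entity.
    org.hibernate.cfg.Configuration cfg = new org.hibernate.cfg.Configuration();
    for (Class<?> mapped : FLYSCredentials.CLASSES) {
        cfg.addAnnotatedClass(mapped);
    }
    org.hibernate.SessionFactory sessionFactory = cfg.buildSessionFactory();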
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/Config.java
--- a/backend/src/main/java/org/dive4elements/river/importer/Config.java	Tue Apr 03 10:02:01 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/Config.java	Tue Apr 03 10:18:30 2018 +0200
@@ -11,121 +11,163 @@
 public class Config
 {
     public static final String SKIP_DEFAULT =
-        "flys.backend.importer.skip.default";
+            "flys.backend.importer.skip.default";
 
     public static final String DRY_RUN =
-        "flys.backend.importer.dry.run";
+            "flys.backend.importer.dry.run";
 
     public static final String INFO_GEW_FILE =
-        "flys.backend.importer.infogew.file";
+            "flys.backend.importer.infogew.file";
 
     public static final String ANNOTATION_TYPES =
-        "flys.backend.importer.annotation.types";
+            "flys.backend.importer.annotation.types";
 
     public static final String SKIP_GAUGES =
-        "flys.backend.importer.skip.gauges";
+            "flys.backend.importer.skip.gauges";
 
     public static final String SKIP_BWASTR =
-        "flys.backend.importer.skip.bwastr";
+            "flys.backend.importer.skip.bwastr";
 
     public static final String SKIP_HISTORICAL_DISCHARGE_TABLES =
-        "flys.backend.importer.skip.historical.discharge.tables";
+            "flys.backend.importer.skip.historical.discharge.tables";
 
     public static final String SKIP_ANNOTATIONS =
-        "flys.backend.importer.skip.annotations";
+            "flys.backend.importer.skip.annotations";
 
     public static final String SKIP_PRFS =
-        "flys.backend.importer.skip.prfs";
+            "flys.backend.importer.skip.prfs";
 
     public static final String SKIP_DA50S =
-        "flys.backend.importer.skip.da50s";
+            "flys.backend.importer.skip.da50s";
 
     public static final String SKIP_W80S =
-        "flys.backend.importer.skip.w80s";
+            "flys.backend.importer.skip.w80s";
 
     public static final String SKIP_W80_CSVS =
-        "flys.backend.importer.skip.w80.csvs";
+            "flys.backend.importer.skip.w80.csvs";
 
     public static final String SKIP_HYKS =
-        "flys.backend.importer.skip.hyks";
+            "flys.backend.importer.skip.hyks";
 
     public static final String SKIP_WST =
-        "flys.backend.importer.skip.wst";
+            "flys.backend.importer.skip.wst";
 
     public static final String SKIP_EXTRA_WSTS =
-        "flys.backend.importer.skip.extra.wsts";
+            "flys.backend.importer.skip.extra.wsts";
 
     public static final String SKIP_FIXATIONS =
-        "flys.backend.importer.skip.fixations";
+            "flys.backend.importer.skip.fixations";
 
     public static final String SKIP_OFFICIAL_LINES =
-        "flys.backend.importer.skip.official.lines";
+            "flys.backend.importer.skip.official.lines";
 
     public static final String SKIP_FLOOD_WATER =
-        "flys.backend.importer.skip.flood.water";
+            "flys.backend.importer.skip.flood.water";
 
     public static final String SKIP_FLOOD_PROTECTION =
-        "flys.backend.importer.skip.flood.protection";
+            "flys.backend.importer.skip.flood.protection";
 
     public static final String SKIP_BED_HEIGHT =
-        "flys.backend.importer.skip.bed.height";
+            "flys.backend.importer.skip.bed.height";
 
     public static final String SKIP_DA66S =
-        "flys.backend.importer.skip.da66s";
+            "flys.backend.importer.skip.da66s";
 
     public static final String SKIP_SEDIMENT_DENSITY =
-        "flys.backend.importer.skip.sediment.density";
+            "flys.backend.importer.skip.sediment.density";
 
     public static final String SKIP_POROSITY =
-        "flys.backend.importer.skip.porosity";
+            "flys.backend.importer.skip.porosity";
 
     public static final String SKIP_MORPHOLOGICAL_WIDTH =
-        "flys.backend.importer.skip.morphological.width";
+            "flys.backend.importer.skip.morphological.width";
 
     public static final String SKIP_FLOW_VELOCITY =
-        "flys.backend.importer.skip.flow.velocity";
+            "flys.backend.importer.skip.flow.velocity";
 
     public static final String SKIP_SEDIMENT_LOAD_LS =
-        "flys.backend.importer.skip.sediment.load.ls";
+            "flys.backend.importer.skip.sediment.load.ls";
 
     public static final String SKIP_SEDIMENT_LOAD =
-        "flys.backend.importer.skip.sediment.load";
+            "flys.backend.importer.skip.sediment.load";
 
     public static final String SKIP_WATERLEVELS =
-        "flys.backend.importer.skip.waterlevels";
+            "flys.backend.importer.skip.waterlevels";
 
     public static final String SKIP_WATERLEVEL_DIFFERENCES =
-        "flys.backend.importer.skip.waterlevel.differences";
+            "flys.backend.importer.skip.waterlevel.differences";
 
     public static final String SKIP_MEASUREMENT_STATIONS =
-        "flys.backend.importer.skip.measurement.stations";
+            "flys.backend.importer.skip.measurement.stations";
 
     public static final String SKIP_SQ_RELATION =
-        "flys.backend.importer.skip.sq.relation";
+            "flys.backend.importer.skip.sq.relation";
 
     public static final Double CROSS_SECTION_SIMPLIFICATION_EPSILON =
-        getDouble("flys.backend.importer.cross.section.simplification.epsilon");
+            getDouble("flys.backend.importer.cross.section.simplification.epsilon");
 
 
+    private enum SInfoSkip {
+        BED_MOBILITY("bed_mobility"), //
+        SELECTED_ADDITIONAL("selected_additional"), //
+        INFRASTRUCTURE("infrastructure"), //
+        CHANNEL("channel"), //
+        COLLISION("collision"), //
+        DAILY_DISCHARGE("daily_discharge"), //
+        TKH("tkh"), //
+        FLOW_DEPTH("flow_depth"), //
+        DEPTH_EVOLUTION("depth_evolution");
+
+        private final String name;
+
+        private SInfoSkip(final String suffix) {
+            this.name = "flys.backend.importer.skip.sinfo." + suffix;
+        }
+
+        public String getName() {
+            return this.name;
+        }
+
+        public boolean getFlag() {
+            return Config.getFlag(getName());
+        }
+    }
+
+    private enum UInfoSkip {
+        SALIX("salix");
+
+        private final String name;
+
+        private UInfoSkip(final String suffix) {
+            this.name = "flys.backend.importer.skip.uinfo." + suffix;
+        }
+
+        public String getName() {
+            return this.name;
+        }
+
+        public boolean getFlag() {
+            return Config.getFlag(getName());
+        }
+    }
+
     public static final Config INSTANCE = new Config();
 
     private Config() {
     }
 
-    public static final boolean getFlag(String key) {
-        String flag = System.getProperty(key);
-        return flag != null
-            ? Boolean.valueOf(flag)
-            : Boolean.getBoolean(SKIP_DEFAULT);
+    public static final boolean getFlag(final String key) {
+        final String flag = System.getProperty(key);
+        return (flag != null) ? Boolean.valueOf(flag) : Boolean.getBoolean(SKIP_DEFAULT);
     }
 
-    public static final Double getDouble(String key) {
+    public static final Double getDouble(final String key) {
         try {
-            String value = System.getProperty(key);
+            final String value = System.getProperty(key);
             return value != null
-                ? Double.valueOf(value)
-                : null;
-        } catch (NumberFormatException nfe) {
+                    ? Double.valueOf(value)
+                    : null;
+        } catch (final NumberFormatException nfe) {
             return null;
         }
     }
@@ -253,5 +295,45 @@
     public boolean skipSQRelation() {
         return getFlag(SKIP_SQ_RELATION);
     }
+
+    public boolean skipSInfoBedMobility() {
+        return SInfoSkip.BED_MOBILITY.getFlag();
+    }
+
+    public boolean skipSInfoSelectedAdditional() {
+        return SInfoSkip.SELECTED_ADDITIONAL.getFlag();
+    }
+
+    public boolean skipSInfoInfrastructure() {
+        return SInfoSkip.INFRASTRUCTURE.getFlag();
+    }
+
+    public boolean skipSInfoChannel() {
+        return SInfoSkip.CHANNEL.getFlag();
+    }
+
+    public boolean skipSInfoCollision() {
+        return SInfoSkip.COLLISION.getFlag();
+    }
+
+    public boolean skipSInfoDailyDischarge() {
+        return SInfoSkip.DAILY_DISCHARGE.getFlag();
+    }
+
+    public boolean skipSInfoTkh() {
+        return SInfoSkip.TKH.getFlag();
+    }
+
+    public boolean skipSInfoFlowDepth() {
+        return SInfoSkip.FLOW_DEPTH.getFlag();
+    }
+
+    public boolean skipSInfoDepthEvolution() {
+        return SInfoSkip.DEPTH_EVOLUTION.getFlag();
+    }
+
+    public boolean skipUInfoSalix() {
+        return UInfoSkip.SALIX.getFlag();
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
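
Each constant of the new SInfoSkip and UInfoSkip enums maps to a system property of the form flys.backend.importer.skip.sinfo.<suffix> or flys.backend.importer.skip.uinfo.<suffix>, and getFlag() falls back to flys.backend.importer.skip.default when the specific property is unset. A short standalone example of how the flags resolve (not part of the patch):

    // Skip everything by default, but explicitly enable the S-INFO flow depth import.
    System.setProperty("flys.backend.importer.skip.default", "true");
    System.setProperty("flys.backend.importer.skip.sinfo.flow_depth", "false");

    Config.INSTANCE.skipSInfoFlowDepth(); // false: property set explicitly
    Config.INSTANCE.skipSInfoCollision(); // true:  falls back to skip.default
    Config.INSTANCE.skipUInfoSalix();     // true:  falls back to skip.default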
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java
--- a/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java	Tue Apr 03 10:02:01 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/importer/ImportRiver.java	Tue Apr 03 10:18:30 2018 +0200
@@ -8,10 +8,23 @@
 
 package org.dive4elements.river.importer;
 
+import java.io.File;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.artifacts.common.utils.FileTools;
 import org.dive4elements.artifacts.common.utils.FileTools.HashedFile;
-
-import org.dive4elements.artifacts.common.utils.FileTools;
-
+import org.dive4elements.river.backend.utils.DouglasPeuker;
+import org.dive4elements.river.importer.parsers.AbstractSedimentLoadParser;
 import org.dive4elements.river.importer.parsers.AnnotationClassifier;
 import org.dive4elements.river.importer.parsers.AnnotationsParser;
 import org.dive4elements.river.importer.parsers.BedHeightParser;
@@ -29,34 +42,17 @@
 import org.dive4elements.river.importer.parsers.PorosityParser;
 import org.dive4elements.river.importer.parsers.SQRelationParser;
 import org.dive4elements.river.importer.parsers.SedimentDensityParser;
-import org.dive4elements.river.importer.parsers.AbstractSedimentLoadParser;
 import org.dive4elements.river.importer.parsers.SedimentLoadLSParser;
 import org.dive4elements.river.importer.parsers.SedimentLoadParser;
+import org.dive4elements.river.importer.parsers.W80CSVParser;
 import org.dive4elements.river.importer.parsers.W80Parser;
-import org.dive4elements.river.importer.parsers.W80CSVParser;
 import org.dive4elements.river.importer.parsers.WaterlevelDifferencesParser;
 import org.dive4elements.river.importer.parsers.WaterlevelParser;
 import org.dive4elements.river.importer.parsers.WstParser;
-
+import org.dive4elements.river.importer.sinfo.SInfoImporter;
+import org.dive4elements.river.importer.uinfo.UInfoImporter;
 import org.dive4elements.river.model.River;
 import org.dive4elements.river.model.Unit;
-
-import org.dive4elements.river.backend.utils.DouglasPeuker;
-
-import java.io.File;
-import java.io.IOException;
-
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.Iterator;
-
-import org.apache.log4j.Logger;
-
 import org.hibernate.Query;
 import org.hibernate.Session;
 
@@ -71,22 +67,22 @@
     public static final String FIXATIONS = "Fixierungen";
 
     public static final String EXTRA_LONGITUDINALS =
-        "Zus.Laengsschnitte";
+            "Zus.Laengsschnitte";
 
     public static final String [] OFFICIAL_LINES_FOLDERS = {
-        "Basisdaten",
-        "Fixierungen" };
+            "Basisdaten",
+            "Fixierungen" };
 
     public static final String OFFICIAL_LINES =
-        "Amtl_Linien.wst";
+            "Amtl_Linien.wst";
 
     public static final String OFFICIAL_LINES_CONFIG =
-        "Amtl_Linien.config";
+            "Amtl_Linien.config";
 
     public static final String FLOOD_WATER = "HW-Marken";
 
     public static final String FLOOD_PROTECTION =
-        "HW-Schutzanlagen";
+            "HW-Schutzanlagen";
 
     public static final String MINFO_DIR = "Morphologie";
 
@@ -99,10 +95,10 @@
     public static final String POROSITY_DIR = "Porositaet";
 
     public static final String MORPHOLOGICAL_WIDTH_DIR =
-        "morphologische_Breite";
+            "morphologische_Breite";
 
     public static final String FLOW_VELOCITY_DIR =
-        "Geschwindigkeit_Schubspannung";
+            "Geschwindigkeit_Schubspannung";
 
     public static final String FLOW_VELOCITY_MODEL = "Modellrechnungen";
 
@@ -119,22 +115,22 @@
     public static final String SEDIMENT_LOAD_EPOCH_DIR = "Epochen";
 
     public static final String SEDIMENT_LOAD_OFF_EPOCH_DIR =
-        "amtliche Epochen";
+            "amtliche Epochen";
 
     public static final String MINFO_FIXATIONS_DIR = "Fixierungsanalyse";
 
     public static final String MINFO_WATERLEVELS_DIR = "Wasserspiegellagen";
 
     public static final String MINFO_WATERLEVEL_DIFF_DIR =
-        "Wasserspiegeldifferenzen";
+            "Wasserspiegeldifferenzen";
 
     public static final String MINFO_BASE_DIR = "Basisdaten";
 
     public static final String MINFO_CORE_DATA_FILE =
-        "Stammdaten_Messstellen.csv";
+            "Stammdaten_Messstellen.csv";
 
     public static final String MINFO_SQ_DIR =
-        "Feststofftransport-Abfluss-Beziehung";
+            "Feststofftransport-Abfluss-Beziehung";
 
     protected String name;
 
@@ -142,6 +138,9 @@
 
     protected Long officialNumber;
 
+    /**
+     * Path of the river/Hydrologie/Basisdaten/river.wst file from which all other file paths are derived
+     */
     protected File wstFile;
 
     protected File bbInfoFile;
@@ -199,31 +198,40 @@
     /** Database-mapped River instance. */
     protected River peer;
 
+    /**
+     * Importer for a river's S-INFO files.
+     */
+    private final SInfoImporter sinfoImporter;
+
+    /**
+     * Importer for a river's U-INFO files.
+     */
+    private final UInfoImporter uinfoImporter;
 
     /** Callback-implementation for CrossSectionParsers. */
     private class ImportRiverCrossSectionParserCallback
     implements    CrossSectionParser.Callback {
 
-        private Set<HashedFile> files = new HashSet<HashedFile>();
-        private String          type;
+        private final Set<HashedFile> files = new HashSet<>();
+        private final String          type;
 
         /**
          * Create new Callback, given type which is used for logging
          * purposes only.
          */
-        public ImportRiverCrossSectionParserCallback(String type) {
+        public ImportRiverCrossSectionParserCallback(final String type) {
             this.type = type;
         }
 
 
         /** Accept file if not duplicate. */
         @Override
-        public boolean accept(File file) {
-            HashedFile hf = new HashedFile(file);
-            boolean success = files.add(hf);
+        public boolean accept(final File file) {
+            final HashedFile hf = new HashedFile(file);
+            final boolean success = this.files.add(hf);
             if (!success) {
-                log.warn(type + " file '" + file
-                    + "' seems to be a duplicate.");
+                log.warn(this.type + " file '" + file
+                        + "' seems to be a duplicate.");
             }
             return success;
         }
@@ -231,92 +239,94 @@
 
         /** Add crosssection. */
         @Override
-        public void parsed(CrossSectionParser parser) {
-           log.debug("callback from " + type + " parser");
+        public void parsed(final CrossSectionParser parser) {
+            log.debug("callback from " + this.type + " parser");
 
-            String  description = parser.getDescription();
-            Integer year        = parser.getYear();
-            ImportTimeInterval ti = year != null
-                ? new ImportTimeInterval(yearToDate(year))
-                : null;
+            final String  description = parser.getDescription();
+            final Integer year        = parser.getYear();
+            final ImportTimeInterval ti = year != null
+                    ? new ImportTimeInterval(yearToDate(year))
+                    : null;
 
-            Map<Double, List<XY>> data = parser.getData();
+            final Map<Double, List<XY>> data = parser.getData();
 
-            List<ImportCrossSectionLine> lines =
-                new ArrayList<ImportCrossSectionLine>(data.size());
+            final List<ImportCrossSectionLine> lines =
+                    new ArrayList<>(data.size());
 
-            Double simplificationEpsilon =
-                Config.INSTANCE.getCrossSectionSimplificationEpsilon();
+            final Double simplificationEpsilon =
+                    Config.INSTANCE.getCrossSectionSimplificationEpsilon();
 
-            long numReadPoints      = 0L;
-            long numRemainingPoints = 0L;
+            long numReadPoints      = 0L;
+            long numRemainingPoints = 0L;
 
-            for (Map.Entry<Double, List<XY>> entry: data.entrySet()) {
-                Double   km     = entry.getKey();
-                List<XY> points = entry.getValue();
-                numReadPoints += points.size();
-                if (simplificationEpsilon != null) {
-                    points = DouglasPeuker.simplify(
-                        points, simplificationEpsilon);
-                }
-                numRemainingPoints += points.size();
-                lines.add(new ImportCrossSectionLine(km, points));
-            }
+            for (final Map.Entry<Double, List<XY>> entry: data.entrySet()) {
+                final Double   km     = entry.getKey();
+                List<XY> points = entry.getValue();
+                numReadPoints += points.size();
+                if (simplificationEpsilon != null) {
+                    points = DouglasPeuker.simplify(
+                            points, simplificationEpsilon);
+                }
+                numRemainingPoints += points.size();
+                lines.add(new ImportCrossSectionLine(km, points));
+            }
 
-            ImportRiver.this.addCrossSections(description, ti, lines);
+            ImportRiver.this.addCrossSections(description, ti, lines);
 
-            if (simplificationEpsilon != null) {
-                double percent = numReadPoints > 0L
-                    ? ((double)numRemainingPoints/numReadPoints)*100d
-                    : 0d;
+            if (simplificationEpsilon != null) {
+                final double percent = numReadPoints > 0L
+                        ? ((double)numRemainingPoints/numReadPoints)*100d
+                        : 0d;
 
-                log.info(String.format(
-                    "Number of points in cross section: %d / %d (%.2f%%)",
-                    numReadPoints, numRemainingPoints, percent));
-            }
+                log.info(String.format(
+                        "Number of points in cross section: %d / %d (%.2f%%)",
+                        numReadPoints, numRemainingPoints, percent));
+            }
         }
     } // ImportRiverCrossSectionParserCallback
 
 
     private void addCrossSections(
-        String                       description,
-        ImportTimeInterval           ti,
-        List<ImportCrossSectionLine> lines
-    ) {
-        crossSections.add(
-            new ImportCrossSection(this, description, ti, lines));
+            final String                       description,
+            final ImportTimeInterval           ti,
+            final List<ImportCrossSectionLine> lines
+            ) {
+        this.crossSections.add(
+                new ImportCrossSection(this, description, ti, lines));
     }
 
 
     public ImportRiver() {
-        hyks                     = new ArrayList<ImportHYK>();
-        crossSections            = new ArrayList<ImportCrossSection>();
-        extraWsts                = new ArrayList<ImportWst>();
-        fixations                = new ArrayList<ImportWst>();
-        officialLines            = new ArrayList<ImportWst>();
-        floodWater               = new ArrayList<ImportWst>();
-        waterlevels              = new ArrayList<ImportWst>();
-        waterlevelDifferences    = new ArrayList<ImportWst>();
-        floodProtection          = new ArrayList<ImportWst>();
-        sedimentDensities        = new ArrayList<ImportSedimentDensity>();
-        porosities               = new ArrayList<ImportPorosity>();
-        morphologicalWidths      = new ArrayList<ImportMorphWidth>();
-        flowVelocityModels       = new ArrayList<ImportFlowVelocityModel>();
-        flowVelocityMeasurements =
-            new ArrayList<ImportFlowVelocityMeasurement>();
-        sedimentLoadLSs          = new ArrayList<ImportSedimentLoadLS>();
-        sedimentLoads            = new ArrayList<ImportSedimentLoad>();
-        measurementStations      = new ArrayList<ImportMeasurementStation>();
-        sqRelations              = new ArrayList<ImportSQRelation>();
+        this.hyks                     = new ArrayList<>();
+        this.crossSections            = new ArrayList<>();
+        this.extraWsts                = new ArrayList<>();
+        this.fixations                = new ArrayList<>();
+        this.officialLines            = new ArrayList<>();
+        this.floodWater               = new ArrayList<>();
+        this.waterlevels              = new ArrayList<>();
+        this.waterlevelDifferences    = new ArrayList<>();
+        this.floodProtection          = new ArrayList<>();
+        this.sedimentDensities        = new ArrayList<>();
+        this.porosities               = new ArrayList<>();
+        this.morphologicalWidths      = new ArrayList<>();
+        this.flowVelocityModels       = new ArrayList<>();
+        this.flowVelocityMeasurements =
+                new ArrayList<>();
+        this.sedimentLoadLSs          = new ArrayList<>();
+        this.sedimentLoads            = new ArrayList<>();
+        this.measurementStations      = new ArrayList<>();
+        this.sqRelations              = new ArrayList<>();
+        this.sinfoImporter = new SInfoImporter();
+        this.uinfoImporter = new UInfoImporter();
     }
 
     public ImportRiver(
-        String               name,
-        String               modelUuid,
-        File                 wstFile,
-        File                 bbInfoFile,
-        AnnotationClassifier annotationClassifier
-    ) {
+            final String               name,
+            final String               modelUuid,
+            final File                 wstFile,
+            final File                 bbInfoFile,
+            final AnnotationClassifier annotationClassifier
+            ) {
         this();
         this.name                 = name;
         this.modelUuid            = modelUuid;
@@ -326,18 +336,18 @@
     }
 
     public String getName() {
-        return name;
+        return this.name;
     }
 
-    public void setName(String name) {
+    public void setName(final String name) {
         this.name = name;
     }
 
     public String getModelUuid() {
-        return modelUuid;
+        return this.modelUuid;
     }
 
-    public void setModelUuid(String modelUuid) {
+    public void setModelUuid(final String modelUuid) {
         this.modelUuid = modelUuid;
     }
 
@@ -345,41 +355,44 @@
         return this.officialNumber;
     }
 
-    public void setOfficialNumber(Long officialNumber) {
+    public void setOfficialNumber(final Long officialNumber) {
         this.officialNumber = officialNumber;
     }
 
     public File getWstFile() {
-        return wstFile;
+        return this.wstFile;
     }
 
-    public void setWstFile(File wstFile) {
+    public void setWstFile(final File wstFile) {
         this.wstFile = wstFile;
     }
 
     public File getBBInfo() {
-        return bbInfoFile;
+        return this.bbInfoFile;
     }
 
-    public void setBBInfo(File bbInfoFile) {
+    public void setBBInfo(final File bbInfoFile) {
         this.bbInfoFile = bbInfoFile;
     }
 
     public ImportWst getWst() {
-        return wst;
+        return this.wst;
     }
 
-    public void setWst(ImportWst wst) {
+    public void setWst(final ImportWst wst) {
         this.wst = wst;
     }
 
-    public File getMinfoDir() {
-        File riverDir  = wstFile
-            .getParentFile().getParentFile().getParentFile();
-        return new File(riverDir, MINFO_DIR);
+    private File getMinfoDir() {
+        return new File(getRiverDir(), MINFO_DIR);
+    }
+
+    private File getRiverDir() {
+        return this.wstFile.getParentFile().getParentFile().getParentFile();
     }
 
     public void parseDependencies() throws IOException {
+        log.info("Root dir is '" + getRiverDir() + "'");
         parseGauges();
         parseAnnotations();
         parsePRFs();
@@ -405,6 +418,10 @@
         parseWaterlevels();
         parseWaterlevelDifferences();
         parseSQRelation();
+        this.sinfoImporter.setup(getRiverDir(), this);
+        this.sinfoImporter.parse();
+        this.uinfoImporter.setup(getRiverDir(), this);
+        this.uinfoImporter.parse();
     }
 
     public void parseFloodProtection() throws IOException {
@@ -415,41 +432,41 @@
 
         log.info("Parse flood protection wst file");
 
-        File riverDir = wstFile.getParentFile().getParentFile();
+        final File riverDir = this.wstFile.getParentFile().getParentFile();
 
-        File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION));
+        final File dir = FileTools.repair(new File(riverDir, FLOOD_PROTECTION));
 
         if (!dir.isDirectory() || !dir.canRead()) {
             log.info("no directory '" + dir + "' found");
             return;
         }
 
-        File [] files = dir.listFiles();
+        final File [] files = dir.listFiles();
 
         if (files == null) {
             log.warn("cannot read '" + dir + "'");
             return;
         }
 
-        for (File file: files) {
+        for (final File file: files) {
             if (!file.isFile() || !file.canRead()) {
                 continue;
             }
-            String name = file.getName().toLowerCase();
+            final String name = file.getName().toLowerCase();
             if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
                 continue;
             }
             log.info("found file '" + file.getName() + "'");
             try {
-                WstParser wstParser = new WstParser();
+                final WstParser wstParser = new WstParser();
                 wstParser.parse(file);
-                ImportWst iw = wstParser.getWst();
+                final ImportWst iw = wstParser.getWst();
                 iw.setKind(5);
                 iw.setDescription(
-                    FLOOD_PROTECTION + "/" + iw.getDescription());
-                floodProtection.add(iw);
+                        FLOOD_PROTECTION + "/" + iw.getDescription());
+                this.floodProtection.add(iw);
             }
-            catch (WstParser.ParseException e) {
+            catch (final WstParser.ParseException e) {
                 log.error(e.getMessage());
             }
         }
@@ -460,13 +477,13 @@
             log.info("skip storing official number.");
             return;
         }
-        getPeer().setOfficialNumber(officialNumber);
+        getPeer().setOfficialNumber(this.officialNumber);
     }
 
     public void parseBedHeight() throws IOException {
-        File minfoDir     = getMinfoDir();
-        File bedHeightDir = new File(minfoDir, BED_HEIGHT_DIR);
-        File singlesDir   = new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR);
+        final File minfoDir     = getMinfoDir();
+        final File bedHeightDir = new File(minfoDir, BED_HEIGHT_DIR);
+        final File singlesDir   = new File(bedHeightDir, BED_HEIGHT_SINGLE_DIR);
 
         if (Config.INSTANCE.skipBedHeight()) {
             log.info("skip parsing bed heights.");
@@ -486,26 +503,26 @@
 
         log.debug("Parse sediment density");
 
-        File minfoDir = getMinfoDir();
-        File sediment = new File(minfoDir, SEDIMENT_DENSITY_DIR);
+        final File minfoDir = getMinfoDir();
+        final File sediment = new File(minfoDir, SEDIMENT_DENSITY_DIR);
 
-        File[] files = sediment.listFiles();
+        final File[] files = sediment.listFiles();
 
         if (files == null) {
             log.warn("Cannot read directory '" + sediment + "'");
             return;
         }
 
-        SedimentDensityParser parser = new SedimentDensityParser();
+        final SedimentDensityParser parser = new SedimentDensityParser();
 
-        for (File file: files) {
+        for (final File file: files) {
             parser.parse(file);
         }
 
-        sedimentDensities = parser.getSedimentDensities();
+        this.sedimentDensities = parser.getSedimentDensities();
 
-        log.info("Parsed " + sedimentDensities.size()
-            + " sediment densities.");
+        log.info("Parsed " + this.sedimentDensities.size()
+        + " sediment densities.");
     }
 
     protected void parsePorosity() throws IOException {
@@ -516,25 +533,25 @@
 
         log.debug("Parse porosity");
 
-        File minfoDir = getMinfoDir();
-        File porosity = new File(minfoDir, POROSITY_DIR);
+        final File minfoDir = getMinfoDir();
+        final File porosity = new File(minfoDir, POROSITY_DIR);
 
-        File[] files = porosity.listFiles();
+        final File[] files = porosity.listFiles();
 
         if (files == null) {
             log.warn("Cannot read directory '" + porosity + "'");
             return;
         }
 
-        PorosityParser parser = new PorosityParser();
+        final PorosityParser parser = new PorosityParser();
 
-        for (File file: files) {
+        for (final File file: files) {
             parser.parse(file);
         }
 
-        porosities = parser.getPorosities();
+        this.porosities = parser.getPorosities();
 
-        log.info("Parsed " + porosities.size() + " porosities.");
+        log.info("Parsed " + this.porosities.size() + " porosities.");
     }
 
     protected void parseMorphologicalWidth() throws IOException {
@@ -545,26 +562,26 @@
 
         log.debug("Parse morphological width");
 
-        File minfoDir = getMinfoDir();
-        File morphDir = new File(minfoDir, MORPHOLOGICAL_WIDTH_DIR);
+        final File minfoDir = getMinfoDir();
+        final File morphDir = new File(minfoDir, MORPHOLOGICAL_WIDTH_DIR);
 
-        File[] files = morphDir.listFiles();
+        final File[] files = morphDir.listFiles();
 
         if (files == null) {
             log.warn("Cannot read directory '" + morphDir + "'");
             return;
         }
 
-        MorphologicalWidthParser parser = new MorphologicalWidthParser();
+        final MorphologicalWidthParser parser = new MorphologicalWidthParser();
 
-        for (File file: files) {
+        for (final File file: files) {
             parser.parse(file);
         }
 
-        morphologicalWidths = parser.getMorphologicalWidths();
+        this.morphologicalWidths = parser.getMorphologicalWidths();
 
-        log.info("Parsed " + morphologicalWidths.size()
-            + " morph. widths files.");
+        log.info("Parsed " + this.morphologicalWidths.size()
+        + " morph. widths files.");
     }
 
 
@@ -576,78 +593,78 @@
 
         log.debug("Parse flow velocity");
 
-        File minfoDir   = getMinfoDir();
-        File flowDir    = new File(minfoDir, FLOW_VELOCITY_DIR);
-        File modelDir   = new File(flowDir, FLOW_VELOCITY_MODEL);
-        File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS);
+        final File minfoDir   = getMinfoDir();
+        final File flowDir    = new File(minfoDir, FLOW_VELOCITY_DIR);
+        final File modelDir   = new File(flowDir, FLOW_VELOCITY_MODEL);
+        final File measureDir = new File(flowDir, FLOW_VELOCITY_MEASUREMENTS);
 
-        File[] modelFiles   = modelDir.listFiles();
-        File[] measureFiles = measureDir.listFiles();
+        final File[] modelFiles   = modelDir.listFiles();
+        final File[] measureFiles = measureDir.listFiles();
 
         if (modelFiles == null) {
             log.warn("Cannot read directory '" + modelDir + "'");
         }
         else {
-            FlowVelocityModelParser parser = new FlowVelocityModelParser();
+            final FlowVelocityModelParser parser = new FlowVelocityModelParser();
 
-            for (File model: modelFiles) {
+            for (final File model: modelFiles) {
                 log.debug("Parse file '" + model + "'");
                 parser.parse(model);
             }
 
-            flowVelocityModels = parser.getModels();
+            this.flowVelocityModels = parser.getModels();
         }
 
         if (measureFiles == null) {
             log.warn("Cannot read directory '" + measureDir + "'");
         }
         else {
-            FlowVelocityMeasurementParser parser =
-                new FlowVelocityMeasurementParser();
+            final FlowVelocityMeasurementParser parser =
+                    new FlowVelocityMeasurementParser();
 
-            for (File measurement: measureFiles) {
+            for (final File measurement: measureFiles) {
                 log.debug("Parse file '" + measurement + "'");
                 parser.parse(measurement);
             }
 
-            flowVelocityMeasurements = parser.getMeasurements();
+            this.flowVelocityMeasurements = parser.getMeasurements();
         }
     }
 
 
     private void parseSedimentLoadFiles(
-        File[] files,
-        AbstractSedimentLoadParser parser
-    ) throws IOException {
-       for (File file: files) {
-           if (file.isDirectory()) {
-               for (File child: file.listFiles()) {
-                   parser.parse(child);
-               }
-           }
-           else {
-               parser.parse(file);
-           }
-       }
+            final File[] files,
+            final AbstractSedimentLoadParser parser
+            ) throws IOException {
+        for (final File file: files) {
+            if (file.isDirectory()) {
+                for (final File child: file.listFiles()) {
+                    parser.parse(child);
+                }
+            }
+            else {
+                parser.parse(file);
+            }
+        }
     }
 
 
     private void parseSedimentLoadDir(
-        File sedimentLoadDir,
-        AbstractSedimentLoadParser parser
-    ) throws IOException {
+            final File sedimentLoadDir,
+            final AbstractSedimentLoadParser parser
+            ) throws IOException {
 
-        File[] sedimentLoadSubDirs = {
-            new File(sedimentLoadDir,
-                     SEDIMENT_LOAD_SINGLE_DIR),
-            new File(sedimentLoadDir,
-                     SEDIMENT_LOAD_EPOCH_DIR),
-            new File(sedimentLoadDir,
-                     SEDIMENT_LOAD_OFF_EPOCH_DIR),
+        final File[] sedimentLoadSubDirs = {
+                new File(sedimentLoadDir,
+                        SEDIMENT_LOAD_SINGLE_DIR),
+                new File(sedimentLoadDir,
+                        SEDIMENT_LOAD_EPOCH_DIR),
+                new File(sedimentLoadDir,
+                        SEDIMENT_LOAD_OFF_EPOCH_DIR),
         };
 
-        for (File subDir : sedimentLoadSubDirs) {
-            File[] files = subDir.listFiles();
+        for (final File subDir : sedimentLoadSubDirs) {
+            final File[] files = subDir.listFiles();
 
             if (files == null || files.length == 0) {
                 log.warn("Cannot read directory '" + subDir + "'");
@@ -667,38 +684,38 @@
 
         log.debug("Parse sediment load longitudinal section data");
 
-        SedimentLoadLSParser parser = new SedimentLoadLSParser();
+        final SedimentLoadLSParser parser = new SedimentLoadLSParser();
 
-        File minfoDir          = getMinfoDir();
-        File sedimentLoadDir   = new File(minfoDir, SEDIMENT_LOAD_DIR);
-        File sedimentLoadLSDir = new File(sedimentLoadDir,
-                                          SEDIMENT_LOAD_LS_DIR);
+        final File minfoDir          = getMinfoDir();
+        final File sedimentLoadDir   = new File(minfoDir, SEDIMENT_LOAD_DIR);
+        final File sedimentLoadLSDir = new File(sedimentLoadDir,
+                SEDIMENT_LOAD_LS_DIR);
 
         parseSedimentLoadDir(sedimentLoadLSDir, parser);
 
-        sedimentLoadLSs = parser.getSedimentLoadLSs();
+        this.sedimentLoadLSs = parser.getSedimentLoadLSs();
     }
 
 
     protected void parseSedimentLoad() throws IOException {
         if (Config.INSTANCE.skipSedimentLoad()) {
             log.info(
-                "skip parsing sediment load data at measurement stations");
+                    "skip parsing sediment load data at measurement stations");
             return;
         }
 
         log.debug("Parse sediment load data at measurement stations");
 
-        SedimentLoadParser parser = new SedimentLoadParser(getPeer());
+        final SedimentLoadParser parser = new SedimentLoadParser(getPeer());
 
-        File minfoDir          = getMinfoDir();
-        File sedimentLoadDir   = new File(minfoDir, SEDIMENT_LOAD_DIR);
-        File sedimentLoadMSDir = new File(sedimentLoadDir,
-                                          SEDIMENT_LOAD_MS_DIR);
+        final File minfoDir          = getMinfoDir();
+        final File sedimentLoadDir   = new File(minfoDir, SEDIMENT_LOAD_DIR);
+        final File sedimentLoadMSDir = new File(sedimentLoadDir,
+                SEDIMENT_LOAD_MS_DIR);
 
         parseSedimentLoadDir(sedimentLoadMSDir, parser);
 
-        sedimentLoads = parser.getSedimentLoads();
+        this.sedimentLoads = parser.getSedimentLoads();
     }
 
 
@@ -710,29 +727,29 @@
 
         log.info("Parse waterlevels");
 
-        File minfo  = getMinfoDir();
-        File fixDir = new File(minfo, MINFO_FIXATIONS_DIR);
-        File wspDir = new File(fixDir, MINFO_WATERLEVELS_DIR);
+        final File minfo  = getMinfoDir();
+        final File fixDir = new File(minfo, MINFO_FIXATIONS_DIR);
+        final File wspDir = new File(fixDir, MINFO_WATERLEVELS_DIR);
 
-        File[] files = wspDir.listFiles();
+        final File[] files = wspDir.listFiles();
 
         if (files == null) {
             log.warn("Cannot read directory for wl '" + wspDir + "'");
             return;
         }
 
-        WaterlevelParser parser = new WaterlevelParser();
+        final WaterlevelParser parser = new WaterlevelParser();
 
-        for (File file: files) {
+        for (final File file: files) {
             parser.parse(file);
         }
 
         // The parsed ImportWaterlevels are converted to
         // 'fixation'-wsts now.
-        for(ImportWst iw: parser.getWaterlevels()) {
+        for(final ImportWst iw: parser.getWaterlevels()) {
             iw.setDescription("CSV/" + iw.getDescription());
             iw.setKind(7);
-            waterlevels.add(iw);
+            this.waterlevels.add(iw);
         }
     }
 
@@ -744,27 +761,27 @@
 
         log.info("Parse measurement stations");
 
-        File minfo = getMinfoDir();
-        File minfoBaseDir = new File(minfo, MINFO_BASE_DIR);
-        File coredataFile = new File(minfoBaseDir, MINFO_CORE_DATA_FILE);
+        final File minfo = getMinfoDir();
+        final File minfoBaseDir = new File(minfo, MINFO_BASE_DIR);
+        final File coredataFile = new File(minfoBaseDir, MINFO_CORE_DATA_FILE);
 
         if (coredataFile == null || !coredataFile.exists()) {
             log.warn("No core data file '"
-                + coredataFile.getAbsolutePath() + "' found");
+                    + coredataFile.getAbsolutePath() + "' found");
             return;
         }
 
-        MeasurementStationsParser parser = new MeasurementStationsParser();
+        final MeasurementStationsParser parser = new MeasurementStationsParser();
         try {
             parser.parse(coredataFile);
-            measurementStations = parser.getMeasurementStations();
+            this.measurementStations = parser.getMeasurementStations();
 
-            log.info("Successfully parsed " + measurementStations.size()
-                + " measurement stations.");
+            log.info("Successfully parsed " + this.measurementStations.size()
+            + " measurement stations.");
         }
-        catch (IOException ioe) {
+        catch (final IOException ioe) {
             log.error("unable to parse file '" + coredataFile.getName() +
-                ": " + ioe.getMessage());
+                    ": " + ioe.getMessage());
         }
     }
 
@@ -777,28 +794,28 @@
 
         log.info("Parse waterlevel differences");
 
-        File minfo  = getMinfoDir();
-        File fixDir = new File(minfo, MINFO_FIXATIONS_DIR);
-        File diffDir = new File(fixDir, MINFO_WATERLEVEL_DIFF_DIR);
+        final File minfo  = getMinfoDir();
+        final File fixDir = new File(minfo, MINFO_FIXATIONS_DIR);
+        final File diffDir = new File(fixDir, MINFO_WATERLEVEL_DIFF_DIR);
 
-        File[] files = diffDir.listFiles();
+        final File[] files = diffDir.listFiles();
 
         if (files == null) {
             log.warn("Cannot read directory '" + diffDir + "'");
             return;
         }
 
-        WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser();
+        final WaterlevelDifferencesParser parser = new WaterlevelDifferencesParser();
 
-        for (File file: files) {
+        for (final File file: files) {
             parser.parse(file);
         }
 
         // WaterlevelDifferences become Wsts now.
-        for(ImportWst iw: parser.getDifferences()) {
+        for(final ImportWst iw: parser.getDifferences()) {
             iw.setDescription("CSV/" + iw.getDescription());
             iw.setKind(6);
-            waterlevelDifferences.add(iw);
+            this.waterlevelDifferences.add(iw);
         }
     }
 
@@ -811,45 +828,50 @@
 
         log.info("Parse sq relations");
 
-        File minfo = getMinfoDir();
-        File sqDir = new File(minfo, MINFO_SQ_DIR);
+        final File minfo = getMinfoDir();
+        final File sqDir = new File(minfo, MINFO_SQ_DIR);
 
-        File[] files = sqDir.listFiles();
+        final File[] files = sqDir.listFiles();
 
         if (files == null) {
             log.warn("Cannot read directory '" + sqDir + "'");
             return;
         }
 
-        SQRelationParser parser = new SQRelationParser(getPeer());
+        final SQRelationParser parser = new SQRelationParser(getPeer());
 
-        for (File file: files) {
+        for (final File file: files) {
             parser.parse(file);
         }
 
-        sqRelations = parser.getSQRelations();
+        this.sqRelations = parser.getSQRelations();
 
-        log.debug("Parsed " + sqRelations.size() + " SQ relations.");
+        log.debug("Parsed " + this.sqRelations.size() + " SQ relations.");
     }
 
 
-    protected void parseBedHeights(File dir) throws IOException {
+    protected void parseBedHeights(final File dir) throws IOException {
         log.debug("Parse bed height singles");
 
-        File[] files = dir.listFiles();
+        final File[] files = dir.listFiles(new FilenameFilter() {
+            @Override
+            public boolean accept(final File dir, final String name) {
+                return name.toLowerCase().endsWith(".csv");
+            }
+        });
 
         if (files == null) {
             log.warn("Cannot read directory '" + dir + "'");
             return;
         }
 
-        BedHeightParser parser = new BedHeightParser();
+        final BedHeightParser parser = new BedHeightParser();
 
-        for (File file: files) {
+        for (final File file: files) {
             parser.parse(file);
         }
 
-        bedHeights = parser.getBedHeights();
+        this.bedHeights = parser.getBedHeights();
     }
 
     public void parseFloodWater() throws IOException {
@@ -860,40 +882,40 @@
 
         log.info("Parse flood water wst file");
 
-        File riverDir = wstFile.getParentFile().getParentFile();
+        final File riverDir = this.wstFile.getParentFile().getParentFile();
 
-        File dir = FileTools.repair(new File(riverDir, FLOOD_WATER));
+        final File dir = FileTools.repair(new File(riverDir, FLOOD_WATER));
 
         if (!dir.isDirectory() || !dir.canRead()) {
             log.info("no directory '" + dir + "' found");
             return;
         }
 
-        File [] files = dir.listFiles();
+        final File [] files = dir.listFiles();
 
         if (files == null) {
             log.warn("cannot read '" + dir + "'");
             return;
         }
 
-        for (File file: files) {
+        for (final File file: files) {
             if (!file.isFile() || !file.canRead()) {
                 continue;
             }
-            String name = file.getName().toLowerCase();
+            final String name = file.getName().toLowerCase();
             if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
                 continue;
             }
             log.info("found file '" + file.getName() + "'");
             try {
-                WstParser wstParser = new WstParser();
+                final WstParser wstParser = new WstParser();
                 wstParser.parse(file);
-                ImportWst iw = wstParser.getWst();
+                final ImportWst iw = wstParser.getWst();
                 iw.setKind(4);
                 iw.setDescription(FLOOD_WATER + "/" + iw.getDescription());
-                floodWater.add(iw);
+                this.floodWater.add(iw);
             }
-            catch (WstParser.ParseException e) {
+            catch (final WstParser.ParseException e) {
                 log.error(e.getMessage());
             }
         }
@@ -907,31 +929,31 @@
 
         log.info("Parse official wst files");
 
-        File riverDir = wstFile.getParentFile().getParentFile();
+        final File riverDir = this.wstFile.getParentFile().getParentFile();
 
-        for (String folder: OFFICIAL_LINES_FOLDERS) {
-            File dir = FileTools.repair(new File(riverDir, folder));
+        for (final String folder: OFFICIAL_LINES_FOLDERS) {
+            final File dir = FileTools.repair(new File(riverDir, folder));
 
             if (!dir.isDirectory() || !dir.canRead()) {
                 log.info("no directory '" + folder + "' found");
                 continue;
             }
 
-            File file = FileTools.repair(new File(dir, OFFICIAL_LINES));
+            final File file = FileTools.repair(new File(dir, OFFICIAL_LINES));
             if (!file.isFile() || !file.canRead()) {
                 log.warn("no official lines wst file found");
                 continue;
             }
             log.debug("Found WST file: " + file);
 
-            ImportWst iw = new ImportWst(
-                ImportOfficialWstColumn.COLUMN_FACTORY);
+            final ImportWst iw = new ImportWst(
+                    ImportOfficialWstColumn.COLUMN_FACTORY);
 
-            WstParser wstParser = new WstParser(iw);
+            final WstParser wstParser = new WstParser(iw);
             try {
                 wstParser.parse(file);
             }
-            catch (WstParser.ParseException e) {
+            catch (final WstParser.ParseException e) {
                 log.error(e.getMessage());
                 continue;
             }
@@ -939,41 +961,41 @@
             iw.setKind(3);
             iw.setDescription(folder + "/" + iw.getDescription());
 
-            File configFile = FileTools.repair(
-                new File(dir, OFFICIAL_LINES_CONFIG));
+            final File configFile = FileTools.repair(
+                    new File(dir, OFFICIAL_LINES_CONFIG));
             if (!configFile.isFile() || !configFile.canRead()) {
                 log.warn("no config file for official lines found");
             }
             else {
-                OfficialLinesConfigParser olcp =
-                    new OfficialLinesConfigParser();
+                final OfficialLinesConfigParser olcp =
+                        new OfficialLinesConfigParser();
                 try {
                     olcp.parse(configFile);
                 }
-                catch (IOException ioe) {
+                catch (final IOException ioe) {
                     log.warn("Error reading offical lines config", ioe);
                 }
-                List<String> mainValueNames = olcp.getMainValueNames();
+                final List<String> mainValueNames = olcp.getMainValueNames();
                 if (mainValueNames.isEmpty()) {
                     log.warn(
-                        "config file for offical lines contains no entries");
+                            "config file for offical lines contains no entries");
                 }
                 else {
                     // Join as much as possible.
-                    Iterator<ImportWstColumn> wi = iw.getColumns().iterator();
-                    Iterator<String> si = olcp.getMainValueNames().iterator();
+                    final Iterator<ImportWstColumn> wi = iw.getColumns().iterator();
+                    final Iterator<String> si = olcp.getMainValueNames().iterator();
                     while (wi.hasNext() && si.hasNext()) {
-                        ImportOfficialWstColumn wc =
-                            (ImportOfficialWstColumn)wi.next();
-                        String name = si.next();
-                        ImportOfficialLine iol =
-                            new ImportOfficialLine(name, wc);
+                        final ImportOfficialWstColumn wc =
+                                (ImportOfficialWstColumn)wi.next();
+                        final String name = si.next();
+                        final ImportOfficialLine iol =
+                                new ImportOfficialLine(name, wc);
                         wc.setOfficialLine(iol);
                     }
                 }
             }
 
-            officialLines.add(iw);
+            this.officialLines.add(iw);
         } // for all folders
 
     }
@@ -986,42 +1008,42 @@
 
         log.info("Parse fixation wst files");
 
-        File riverDir = wstFile.getParentFile().getParentFile();
+        final File riverDir = this.wstFile.getParentFile().getParentFile();
 
-        File fixDir = FileTools.repair(
-            new File(riverDir, FIXATIONS));
+        final File fixDir = FileTools.repair(
+                new File(riverDir, FIXATIONS));
 
         if (!fixDir.isDirectory() || !fixDir.canRead()) {
             log.info("no fixation wst file directory found");
             return;
         }
 
-        File [] files = fixDir.listFiles();
+        final File [] files = fixDir.listFiles();
 
         if (files == null) {
             log.warn("cannot read fixations wst file directory");
             return;
         }
 
-        for (File file: files) {
+        for (final File file: files) {
             if (!file.isFile() || !file.canRead()) {
                 continue;
             }
-            String name = file.getName().toLowerCase();
+            final String name = file.getName().toLowerCase();
             if (!name.endsWith(".wst")) {
                 continue;
             }
             log.debug("Found WST file: " + file);
 
             try {
-                WstParser wstParser = new WstParser();
+                final WstParser wstParser = new WstParser();
                 wstParser.parse(file);
-                ImportWst iw = wstParser.getWst();
+                final ImportWst iw = wstParser.getWst();
                 iw.setKind(2);
                 iw.setDescription(FIXATIONS+ "/" + iw.getDescription());
-                fixations.add(iw);
+                this.fixations.add(iw);
             }
-            catch (WstParser.ParseException e) {
+            catch (final WstParser.ParseException e) {
                 log.error(e.getMessage());
             }
         }
@@ -1035,43 +1057,43 @@
 
         log.info("Parse extra longitudinal wst files");
 
-        File riverDir = wstFile.getParentFile().getParentFile();
+        final File riverDir = this.wstFile.getParentFile().getParentFile();
 
-        File extraDir = FileTools.repair(
-            new File(riverDir, EXTRA_LONGITUDINALS));
+        final File extraDir = FileTools.repair(
+                new File(riverDir, EXTRA_LONGITUDINALS));
 
         if (!extraDir.isDirectory() || !extraDir.canRead()) {
             log.info("no extra longitudinal wst file directory found");
             return;
         }
 
-        File [] files = extraDir.listFiles();
+        final File [] files = extraDir.listFiles();
 
         if (files == null) {
             log.warn("cannot read extra longitudinal wst file directory");
             return;
         }
 
-        for (File file: files) {
+        for (final File file: files) {
             if (!file.isFile() || !file.canRead()) {
                 continue;
             }
-            String name = file.getName().toLowerCase();
+            final String name = file.getName().toLowerCase();
             if (!(name.endsWith(".zus") || name.endsWith(".wst"))) {
                 continue;
             }
             log.debug("Found WST file: " + file);
 
             try {
-                WstParser wstParser = new WstParser();
+                final WstParser wstParser = new WstParser();
                 wstParser.parse(file);
-                ImportWst iw = wstParser.getWst();
+                final ImportWst iw = wstParser.getWst();
                 iw.setKind(1);
                 iw.setDescription(
-                    EXTRA_LONGITUDINALS + "/" + iw.getDescription());
-                extraWsts.add(iw);
+                        EXTRA_LONGITUDINALS + "/" + iw.getDescription());
+                this.extraWsts.add(iw);
             }
-            catch (WstParser.ParseException e) {
+            catch (final WstParser.ParseException e) {
                 log.error(e.getMessage());
             }
         }
@@ -1084,13 +1106,13 @@
             return;
         }
 
-        WstParser wstParser = new WstParser();
+        final WstParser wstParser = new WstParser();
         try {
-            wstParser.parse(wstFile);
-            wst = wstParser.getWst();
-            wst.setKmUp(wst.guessWaterLevelIncreasing());
+            wstParser.parse(this.wstFile);
+            this.wst = wstParser.getWst();
+            this.wst.setKmUp(this.wst.guessWaterLevelIncreasing());
         }
-        catch (WstParser.ParseException e) {
+        catch (final WstParser.ParseException e) {
             log.error(e.getMessage());
         }
     }
@@ -1101,7 +1123,7 @@
             return;
         }
 
-        File gltFile = new File(wstFile.getParentFile(), PEGEL_GLT);
+        File gltFile = new File(this.wstFile.getParentFile(), PEGEL_GLT);
         gltFile = FileTools.repair(gltFile);
 
         if (!gltFile.isFile() || !gltFile.canRead()) {
@@ -1109,12 +1131,12 @@
             return;
         }
 
-        PegelGltParser pgltp = new PegelGltParser();
+        final PegelGltParser pgltp = new PegelGltParser();
         pgltp.parse(gltFile);
 
-        gauges = pgltp.getGauges();
+        this.gauges = pgltp.getGauges();
 
-        for (ImportGauge gauge: gauges) {
+        for (final ImportGauge gauge: this.gauges) {
             gauge.parseDependencies();
         }
     }
@@ -1125,12 +1147,12 @@
             return;
         }
 
-        File riverDir = wstFile.getParentFile().getParentFile();
-        AnnotationsParser aparser =
-            new AnnotationsParser(annotationClassifier);
+        final File riverDir = this.wstFile.getParentFile().getParentFile();
+        final AnnotationsParser aparser =
+                new AnnotationsParser(this.annotationClassifier);
         aparser.parse(riverDir);
 
-        annotations = aparser.getAnnotations();
+        this.annotations = aparser.getAnnotations();
     }
 
     public void parseHYKs() {
@@ -1140,33 +1162,33 @@
         }
 
         log.info("looking for HYK files");
-        HYKParser parser = new HYKParser();
-        File riverDir = wstFile
-            .getParentFile()  // Basisdaten
-            .getParentFile()  // Hydrologie
-            .getParentFile(); // <river>
+        final HYKParser parser = new HYKParser();
+        final File riverDir = this.wstFile
+                .getParentFile()  // Basisdaten
+                .getParentFile()  // Hydrologie
+                .getParentFile(); // <river>
 
         parser.parseHYKs(riverDir, new HYKParser.Callback() {
 
-            Set<HashedFile> hfs = new HashSet<HashedFile>();
+            Set<HashedFile> hfs = new HashSet<>();
 
             @Override
-            public boolean hykAccept(File file) {
-                HashedFile hf = new HashedFile(file);
-                boolean success = hfs.add(hf);
+            public boolean hykAccept(final File file) {
+                final HashedFile hf = new HashedFile(file);
+                final boolean success = this.hfs.add(hf);
                 if (!success) {
                     log.warn("HYK file '" + file
-                        + "' seems to be a duplicate.");
+                            + "' seems to be a duplicate.");
                 }
                 return success;
             }
 
             @Override
-            public void hykParsed(HYKParser parser) {
+            public void hykParsed(final HYKParser parser) {
                 log.debug("callback from HYK parser");
-                ImportHYK hyk = parser.getHYK();
+                final ImportHYK hyk = parser.getHYK();
                 hyk.setRiver(ImportRiver.this);
-                hyks.add(hyk);
+                ImportRiver.this.hyks.add(hyk);
             }
         });
     }
@@ -1178,14 +1200,14 @@
             log.info("skip parsing W80s");
             return;
         }
-        W80Parser parser = new W80Parser();
-        File riverDir = wstFile
-            .getParentFile()  // Basisdaten
-            .getParentFile()  // Hydrologie
-            .getParentFile(); // <river>
+        final W80Parser parser = new W80Parser();
+        final File riverDir = this.wstFile
+                .getParentFile()  // Basisdaten
+                .getParentFile()  // Hydrologie
+                .getParentFile(); // <river>
 
-        ImportRiverCrossSectionParserCallback w80Callback =
-            new ImportRiverCrossSectionParserCallback("w80");
+        final ImportRiverCrossSectionParserCallback w80Callback =
+                new ImportRiverCrossSectionParserCallback("w80");
         parser.parseW80s(riverDir, w80Callback);
     }
 
@@ -1195,20 +1217,20 @@
             log.info("skip parsing W80 csvs");
             return;
         }
-        W80CSVParser parser = new W80CSVParser();
-        File riverDir = wstFile
-            .getParentFile()  // Basisdaten
-            .getParentFile()  // Hydrologie
-            .getParentFile(); // <river>
+        final W80CSVParser parser = new W80CSVParser();
+        final File riverDir = this.wstFile
+                .getParentFile()  // Basisdaten
+                .getParentFile()  // Hydrologie
+                .getParentFile(); // <river>
 
         // Construct the Cross-Section-Data path.
-        File csDir = new File(riverDir.getPath()
-            + File.separator + "Geodaesie"
-            + File.separator + "Querprofile"
-            + File.separator + "QP-Daten");
+        final File csDir = new File(riverDir.getPath()
+                + File.separator + "Geodaesie"
+                + File.separator + "Querprofile"
+                + File.separator + "QP-Daten");
 
-        ImportRiverCrossSectionParserCallback w80CSVCallback =
-            new ImportRiverCrossSectionParserCallback("w80-csv");
+        final ImportRiverCrossSectionParserCallback w80CSVCallback =
+                new ImportRiverCrossSectionParserCallback("w80-csv");
         parser.parseW80CSVs(csDir, w80CSVCallback);
     }
 
@@ -1222,14 +1244,14 @@
             log.info("skip parsing DA50s");
             return;
         }
-        DA50Parser parser = new DA50Parser();
-        File riverDir = wstFile
-            .getParentFile()  // Basisdaten
-            .getParentFile()  // Hydrologie
-            .getParentFile(); // <river>
+        final DA50Parser parser = new DA50Parser();
+        final File riverDir = this.wstFile
+                .getParentFile()  // Basisdaten
+                .getParentFile()  // Hydrologie
+                .getParentFile(); // <river>
 
-        ImportRiverCrossSectionParserCallback da50Callback =
-            new ImportRiverCrossSectionParserCallback("da50");
+        final ImportRiverCrossSectionParserCallback da50Callback =
+                new ImportRiverCrossSectionParserCallback("da50");
 
         parser.parseDA50s(riverDir, da50Callback);
     }
@@ -1245,14 +1267,14 @@
         }
 
         log.info("looking for DA66 files");
-        DA66Parser parser = new DA66Parser();
-        File riverDir = wstFile
-            .getParentFile()  // Basisdaten
-            .getParentFile()  // Hydrologie
-            .getParentFile(); // <river>
+        final DA66Parser parser = new DA66Parser();
+        final File riverDir = this.wstFile
+                .getParentFile()  // Basisdaten
+                .getParentFile()  // Hydrologie
+                .getParentFile(); // <river>
 
-        ImportRiverCrossSectionParserCallback da66Callback =
-            new ImportRiverCrossSectionParserCallback("da66");
+        final ImportRiverCrossSectionParserCallback da66Callback =
+                new ImportRiverCrossSectionParserCallback("da66");
 
         parser.parseDA66s(riverDir, da66Callback);
     }
@@ -1265,21 +1287,21 @@
         }
 
         log.info("looking for PRF files");
-        PRFParser parser = new PRFParser();
-        File riverDir = wstFile
-            .getParentFile()  // Basisdaten
-            .getParentFile()  // Hydrologie
-            .getParentFile(); // <river>
+        final PRFParser parser = new PRFParser();
+        final File riverDir = this.wstFile
+                .getParentFile()  // Basisdaten
+                .getParentFile()  // Hydrologie
+                .getParentFile(); // <river>
 
-        ImportRiverCrossSectionParserCallback prfCallback =
-            new ImportRiverCrossSectionParserCallback("prf");
+        final ImportRiverCrossSectionParserCallback prfCallback =
+                new ImportRiverCrossSectionParserCallback("prf");
         parser.parsePRFs(riverDir, prfCallback);
     }
 
-    public static Date yearToDate(int year) {
-        Calendar cal = Calendar.getInstance();
+    public static Date yearToDate(final int year) {
+        final Calendar cal = Calendar.getInstance();
         cal.set(year, 5, 15, 12, 0, 0);
-        long ms = cal.getTimeInMillis();
+        final long ms = cal.getTimeInMillis();
         cal.setTimeInMillis(ms - ms%1000);
         return cal.getTime();
     }
@@ -1288,13 +1310,13 @@
         /* test whether river is already in database.
          * Otherwise it makes no sense to skip waterlevel model WST-file
          * because the altitude reference is taken from there. */
-        Session session = ImporterSession.getInstance().getDatabaseSession();
-        Query query = session.createQuery("from River where name=:name");
-        query.setString("name", name);
-        List<River> rivers = query.list();
+        final Session session = ImporterSession.getInstance().getDatabaseSession();
+        final Query query = session.createQuery("from River where name=:name");
+        query.setString("name", this.name);
+        final List<River> rivers = query.list();
         if (rivers.isEmpty() && Config.INSTANCE.skipWst()){
             log.error("River not yet in database. "
-                + "You cannot skip importing waterlevel model.");
+                    + "You cannot skip importing waterlevel model.");
             return;
         }
 
@@ -1321,15 +1343,17 @@
         storeWaterlevelDifferences();
         storeSQRelations();
         storeOfficialNumber();
+        this.sinfoImporter.store();
+        this.uinfoImporter.store();
     }
 
     public void storeWstUnit() {
-        if (wst == null) {
+        if (this.wst == null) {
             log.warn("No unit given. "
-                + "Waterlevel-model WST-file has to be imported already.");
+                    + "Waterlevel-model WST-file has to be imported already.");
         }
         else {
-            wstUnit = wst.getUnit();
+            this.wstUnit = this.wst.getUnit();
         }
     }
 
@@ -1337,7 +1361,7 @@
         if (!Config.INSTANCE.skipHYKs()) {
             log.info("store HYKs");
             getPeer();
-            for (ImportHYK hyk: hyks) {
+            for (final ImportHYK hyk: this.hyks) {
                 hyk.storeDependencies();
             }
         }
@@ -1345,28 +1369,28 @@
 
     public void storeCrossSections() {
         if (!Config.INSTANCE.skipPRFs()
-            || !Config.INSTANCE.skipDA66s()
-            || !Config.INSTANCE.skipDA50s()
-            || !Config.INSTANCE.skipW80s()
-            || !Config.INSTANCE.skipW80CSVs()) {
+                || !Config.INSTANCE.skipDA66s()
+                || !Config.INSTANCE.skipDA50s()
+                || !Config.INSTANCE.skipW80s()
+                || !Config.INSTANCE.skipW80CSVs()) {
             log.info("store cross sections");
             getPeer();
-            for (ImportCrossSection crossSection: crossSections) {
+            for (final ImportCrossSection crossSection: this.crossSections) {
                 crossSection.storeDependencies();
             }
         }
     }
 
     public void storeWst() {
-        if (wst != null && !Config.INSTANCE.skipWst()) {
-            River river = getPeer();
-            wst.storeDependencies(river);
+        if (this.wst != null && !Config.INSTANCE.skipWst()) {
+            final River river = getPeer();
+            this.wst.storeDependencies(river);
 
             // The flow direction of the main wst and the corresponding
             // waterlevels determine if the river is 'km_up'.
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            river.setKmUp(wst.getKmUp());
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
+            river.setKmUp(this.wst.getKmUp());
             session.save(river);
         }
     }
@@ -1374,8 +1398,8 @@
     public void storeFixations() {
         if (!Config.INSTANCE.skipFixations()) {
             log.info("store fixation wsts");
-            River river = getPeer();
-            for (ImportWst fWst: fixations) {
+            final River river = getPeer();
+            for (final ImportWst fWst: this.fixations) {
                 log.debug("Fixation name: " + fWst.getDescription());
                 fWst.storeDependencies(river);
             }
@@ -1387,9 +1411,9 @@
     public void storeWaterlevels() {
         if (!Config.INSTANCE.skipWaterlevels())
 
-        log.info("store waterlevel wsts from csv");
-        River river = getPeer();
-        for (ImportWst wWst: waterlevels) {
+            log.info("store waterlevel wsts from csv");
+        final River river = getPeer();
+        for (final ImportWst wWst: this.waterlevels) {
             log.debug("Waterlevel name: " + wWst.getDescription());
             wWst.storeDependencies(river);
         }
@@ -1400,9 +1424,9 @@
     public void storeWaterlevelDifferences() {
         if (!Config.INSTANCE.skipWaterlevelDifferences())
 
-        log.info("store waterleveldifferences wsts from csv");
-        River river = getPeer();
-        for (ImportWst dWst: waterlevelDifferences) {
+            log.info("store waterleveldifferences wsts from csv");
+        final River river = getPeer();
+        for (final ImportWst dWst: this.waterlevelDifferences) {
             log.debug("water.diff.: name " + dWst.getDescription());
             dWst.storeDependencies(river);
         }
@@ -1412,8 +1436,8 @@
     public void storeExtraWsts() {
         if (!Config.INSTANCE.skipExtraWsts()) {
             log.info("store extra wsts");
-            River river = getPeer();
-            for (ImportWst wst: extraWsts) {
+            final River river = getPeer();
+            for (final ImportWst wst: this.extraWsts) {
                 log.debug("name: " + wst.getDescription());
                 wst.storeDependencies(river);
             }
@@ -1421,24 +1445,24 @@
     }
 
     public void storeOfficialLines() {
-        if (Config.INSTANCE.skipOfficialLines() || officialLines.isEmpty()) {
+        if (Config.INSTANCE.skipOfficialLines() || this.officialLines.isEmpty()) {
             return;
         }
 
         log.info("store official lines wsts");
-        River river = getPeer();
-        for (ImportWst wst: officialLines) {
+        final River river = getPeer();
+        for (final ImportWst wst: this.officialLines) {
             log.debug("name: " + wst.getDescription());
             wst.storeDependencies(river);
 
             // Store the official lines after the columns are store.
-            for (ImportWstColumn wc: wst.getColumns()) {
-                ImportOfficialWstColumn owc = (ImportOfficialWstColumn)wc;
-                ImportOfficialLine ioc = owc.getOfficialLine();
+            for (final ImportWstColumn wc: wst.getColumns()) {
+                final ImportOfficialWstColumn owc = (ImportOfficialWstColumn)wc;
+                final ImportOfficialLine ioc = owc.getOfficialLine();
                 if (ioc != null) {
                     if (ioc.getPeer(river) == null) {
                         log.warn("Cannot store official line: "
-                            + ioc.getName());
+                                + ioc.getName());
                     }
                 }
             }
@@ -1448,8 +1472,8 @@
     public void storeFloodWater() {
         if (!Config.INSTANCE.skipFloodWater()) {
             log.info("store flood water wsts");
-            River river = getPeer();
-            for (ImportWst wst: floodWater) {
+            final River river = getPeer();
+            for (final ImportWst wst: this.floodWater) {
                 log.debug("name: " + wst.getDescription());
                 wst.storeDependencies(river);
             }
@@ -1460,8 +1484,8 @@
     public void storeFloodProtection() {
         if (!Config.INSTANCE.skipFloodProtection()) {
             log.info("store flood protection wsts");
-            River river = getPeer();
-            for (ImportWst wst: floodProtection) {
+            final River river = getPeer();
+            for (final ImportWst wst: this.floodProtection) {
                 log.debug("name: " + wst.getDescription());
                 wst.storeDependencies(river);
             }
@@ -1472,13 +1496,13 @@
     public void storeBedHeight() {
         if (!Config.INSTANCE.skipBedHeight()) {
             log.info("store bed heights");
-            River river = getPeer();
+            final River river = getPeer();
 
-            if (bedHeights != null) {
-                for (ImportBedHeight tmp: bedHeights) {
-                    ImportBedHeight single = (ImportBedHeight) tmp;
+            if (this.bedHeights != null) {
+                for (final ImportBedHeight tmp: this.bedHeights) {
+                    final ImportBedHeight single = tmp;
 
-                    String desc = single.getDescription();
+                    final String desc = single.getDescription();
 
                     log.debug("name: " + desc);
 
@@ -1496,10 +1520,10 @@
         if (!Config.INSTANCE.skipSedimentDensity()) {
             log.info("store sediment density");
 
-            River river = getPeer();
+            final River river = getPeer();
 
-            for (ImportSedimentDensity density: sedimentDensities) {
-                String desc = density.getDescription();
+            for (final ImportSedimentDensity density: this.sedimentDensities) {
+                final String desc = density.getDescription();
 
                 log.debug("name: " + desc);
 
@@ -1512,10 +1536,10 @@
         if (!Config.INSTANCE.skipPorosity()) {
             log.info("store porosity");
 
-            River river = getPeer();
+            final River river = getPeer();
 
-            for (ImportPorosity porosity: porosities) {
-                String desc = porosity.getDescription();
+            for (final ImportPorosity porosity: this.porosities) {
+                final String desc = porosity.getDescription();
 
                 log.debug("name: " + desc);
 
@@ -1528,9 +1552,9 @@
         if (!Config.INSTANCE.skipMorphologicalWidth()) {
             log.info("store morphological width");
 
-            River river = getPeer();
+            final River river = getPeer();
 
-            for (ImportMorphWidth width: morphologicalWidths) {
+            for (final ImportMorphWidth width: this.morphologicalWidths) {
                 width.storeDependencies(river);
             }
         }
@@ -1540,14 +1564,14 @@
         if (!Config.INSTANCE.skipFlowVelocity()) {
             log.info("store flow velocity");
 
-            River river = getPeer();
+            final River river = getPeer();
 
-            for (ImportFlowVelocityModel flowVelocityModel: flowVelocityModels
-            ) {
+            for (final ImportFlowVelocityModel flowVelocityModel: this.flowVelocityModels
+                    ) {
                 flowVelocityModel.storeDependencies(river);
             }
 
-            for (ImportFlowVelocityMeasurement m: flowVelocityMeasurements) {
+            for (final ImportFlowVelocityMeasurement m: this.flowVelocityMeasurements) {
                 m.storeDependencies(river);
             }
         }
@@ -1558,9 +1582,9 @@
         if (!Config.INSTANCE.skipSedimentLoadLS()) {
             log.info("store sediment load longitudinal section data");
 
-            River river = getPeer();
+            final River river = getPeer();
 
-            for (ImportSedimentLoadLS sedimentLoadLS: sedimentLoadLSs) {
+            for (final ImportSedimentLoadLS sedimentLoadLS: this.sedimentLoadLSs) {
                 sedimentLoadLS.storeDependencies(river);
             }
         }
@@ -1571,7 +1595,7 @@
         if (!Config.INSTANCE.skipSedimentLoad()) {
             log.info("store sediment load data at measurement stations");
 
-            for (ImportSedimentLoad sedimentLoad: sedimentLoads) {
+            for (final ImportSedimentLoad sedimentLoad: this.sedimentLoads) {
                 sedimentLoad.storeDependencies();
             }
         }
@@ -1582,12 +1606,12 @@
         if (!Config.INSTANCE.skipMeasurementStations()) {
             log.info("store measurement stations");
 
-            River river = getPeer();
+            final River river = getPeer();
 
             int count = 0;
 
-            for (ImportMeasurementStation station: measurementStations) {
-                boolean success = station.storeDependencies(river);
+            for (final ImportMeasurementStation station: this.measurementStations) {
+                final boolean success = station.storeDependencies(river);
                 if (success) {
                     count++;
                 }
@@ -1604,7 +1628,7 @@
 
             int count = 0;
 
-            for (ImportSQRelation sqRelation: sqRelations) {
+            for (final ImportSQRelation sqRelation: this.sqRelations) {
                 sqRelation.storeDependencies();
                 count++;
             }
@@ -1616,8 +1640,8 @@
 
     public void storeAnnotations() {
         if (!Config.INSTANCE.skipAnnotations()) {
-            River river = getPeer();
-            for (ImportAnnotation annotation: annotations) {
+            final River river = getPeer();
+            for (final ImportAnnotation annotation: this.annotations) {
                 annotation.getPeer(river);
             }
         }
@@ -1626,10 +1650,10 @@
     public void storeGauges() {
         if (!Config.INSTANCE.skipGauges()) {
             log.info("store gauges:");
-            River river = getPeer();
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            for (ImportGauge gauge: gauges) {
+            final River river = getPeer();
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
+            for (final ImportGauge gauge: this.gauges) {
                 log.info("\tgauge: " + gauge.getName());
                 gauge.storeDependencies(river);
                 ImporterSession.getInstance().getDatabaseSession();
@@ -1639,31 +1663,31 @@
     }
 
     public River getPeer() {
-        if (peer == null) {
-            Session session = ImporterSession.getInstance()
-                .getDatabaseSession();
-            Query query = session.createQuery("from River where name=:name");
+        if (this.peer == null) {
+            final Session session = ImporterSession.getInstance()
+                    .getDatabaseSession();
+            final Query query = session.createQuery("from River where name=:name");
 
             Unit u = null;
-            if (wstUnit != null) {
-                u = wstUnit.getPeer();
+            if (this.wstUnit != null) {
+                u = this.wstUnit.getPeer();
             }
 
-            query.setString("name", name);
-            List<River> rivers = query.list();
+            query.setString("name", this.name);
+            final List<River> rivers = query.list();
             if (rivers.isEmpty()) {
-                log.info("Store new river '" + name + "'");
-                peer = new River(name, u, modelUuid);
+                log.info("Store new river '" + this.name + "'");
+                this.peer = new River(this.name, u, this.modelUuid);
                 if (!Config.INSTANCE.skipBWASTR()) {
-                    peer.setOfficialNumber(officialNumber);
+                    this.peer.setOfficialNumber(this.officialNumber);
                 }
-                session.save(peer);
+                session.save(this.peer);
             }
             else {
-                peer = rivers.get(0);
+                this.peer = rivers.get(0);
             }
         }
-        return peer;
+        return this.peer;
     }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/common/AbstractKmLineImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/common/AbstractKmLineImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,86 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.common;
+
+import org.dive4elements.river.importer.ImporterSession;
+import org.hibernate.Session;
+
+/**
+ * Abstract base class for a river station (km) line with associated data, imported from a file
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public abstract class AbstractKmLineImport<SERIES, KMTUPLE> {
+
+    /***** FIELDS *****/
+
+    protected double station;
+
+    private KMTUPLE peer;
+
+    protected StoreMode storeMode;
+
+    /***** CONSTRUCTOR *****/
+
+    public AbstractKmLineImport(final double km) {
+        this.station = km;
+    }
+
+
+    /***** METHODS *****/
+
+    /**
+     * Stores the station value record in the database
+     */
+    public StoreMode store(final SERIES parent, final StoreMode parentStoreMode) {
+        getPeer(parent, parentStoreMode);
+        return this.storeMode;
+    }
+
+
+    /**
+     * Gets the station value record from the database if it exists, or creates a database record from this object
+     * and saves it to the session
+     */
+    protected KMTUPLE getPeer(final SERIES parent, final StoreMode parentStoreMode) {
+        if (this.peer != null) {
+            this.storeMode = StoreMode.NONE;
+            return this.peer;
+        }
+        final Session session = ImporterSession.getInstance().getDatabaseSession();
+        KMTUPLE value = null;
+        if (parentStoreMode != StoreMode.INSERT) {
+            value = queryValueItem(session, parent);
+        }
+        if (value == null) {
+            this.peer = createValueItem(parent);
+            session.save(this.peer);
+            this.storeMode = StoreMode.INSERT;
+        } else {
+            this.peer = value;
+            this.storeMode = StoreMode.UPDATE;
+        }
+        return this.peer;
+    }
+
+    /**
+     * Queries the (first matching) value item from the database
+     *
+     * @return first matching database value item, or null
+     */
+    protected abstract KMTUPLE queryValueItem(final Session session, final SERIES parent);
+
+    /**
+     * Creates a new value item
+     */
+    protected abstract KMTUPLE createValueItem(final SERIES parent);
+}
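
The two abstract hooks above (queryValueItem and createValueItem) form a small
template-method pattern: getPeer() decides between INSERT, UPDATE and NONE,
while a concrete subclass only supplies the lookup query and the constructor
call. The following is a minimal sketch of such a subclass; it is not part of
this patch, and Foo/FooValue are hypothetical names standing in for one of the
new S-INFO/U-INFO entity pairs, so it will not compile as-is.

    // Illustrative sketch only; Foo and FooValue are hypothetical entity names.
    import java.util.List;

    import org.dive4elements.river.importer.common.AbstractKmLineImport;
    import org.hibernate.Query;
    import org.hibernate.Session;

    public class FooValueImport extends AbstractKmLineImport<Foo, FooValue> {

        private final double width; // example payload column parsed from the data line

        public FooValueImport(final double km, final double width) {
            super(km);
            this.width = width;
        }

        @Override
        protected FooValue queryValueItem(final Session session, final Foo parent) {
            // Fetch an existing tuple for (parent, station); null means "not found".
            final Query query = session.createQuery(
                    "FROM FooValue WHERE foo=:parent AND station=:station");
            query.setParameter("parent", parent);
            query.setParameter("station", Double.valueOf(this.station));
            final List<?> rows = query.list();
            return rows.isEmpty() ? null : (FooValue) rows.get(0);
        }

        @Override
        protected FooValue createValueItem(final Foo parent) {
            return new FooValue(parent, this.station, this.width);
        }
    }
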
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/common/AbstractParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/common/AbstractParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,408 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.common;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+import java.util.TreeSet;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.backend.utils.EpsilonComparator;
+import org.dive4elements.river.importer.ImportRiver;
+
+/**
+ * Abstract base class for a parser of one FLYS csv data file.<br />
+ * The {@link parse} method creates a SERIES object for the meta data
+ * and a list of KMLINE objects for the km value lines read from the file.<br />
+ * The {@link store} method gets or creates the corresponding database objects
+ * via the Hibernate binding classes DB_SERIES and DB_KMTUPLE,
+ * and updates or inserts them in the database.
+ * DB_SERIES has a one-to-many relationship with DB_KMTUPLE.<br />
+ * <br />
+ * The structure of the file is as follows:<br />
+ * <ul>
+ * <li>one or more comment lines (#) with the meta info of the data series</li>
+ * <li>the comment line with the column titles of the values table, starting with the km column</li>
+ * <li>the rows of the values table, each one on its own line</li>
+ * </ul>
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public abstract class AbstractParser<DB_SERIES, DB_KMTUPLE, KMLINE extends AbstractKmLineImport<DB_SERIES, DB_KMTUPLE>, HEADER extends AbstractSeriesImport<DB_SERIES, DB_KMTUPLE, KMLINE>> implements ImportParser {
+
+    /***** FIELDS *****/
+
+    public static final String ENCODING = "ISO-8859-1";
+
+    protected static final Locale DEFAULT_LOCALE = Locale.GERMAN;
+
+    public static final String START_META_CHAR = "#";
+
+    protected static final String SEPARATOR_CHAR = ";";
+
+    protected static final Pattern META_RIVERNAME = Pattern.compile("^#\\s*((Gew.sser)|(Gewaesser)):\\s*(\\S[^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    protected static final Pattern META_KMRANGE_INFO = Pattern.compile("^#\\s*Strecke:\\s*(\\S[^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    protected static final Pattern META_COMMENTS = Pattern.compile("^#\\s*weitere Bemerkungen:\\s*(\\S[^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_COLUMNTITLES = Pattern.compile("^#*\\s*Fluss.km\\s*;.+", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_SUBGROUP = Pattern.compile("^##.*", Pattern.CASE_INSENSITIVE);
+
+    private static NumberFormat numberFormat = NumberFormat.getInstance(Locale.ROOT);
+
+    /**
+     * Path of the file or directory to import from
+     */
+    protected final File importPath;
+
+    /**
+     * Part of {@link importPath} without the river root dir
+     */
+    protected final File rootRelativePath;
+
+    /**
+     * River for which the import runs
+     */
+    protected final ImportRiver river;
+
+    /**
+     * Reader during parse
+     */
+    protected LineNumberReader in;
+
+    /**
+     * Last line read from in
+     */
+    protected String currentLine;
+
+    /**
+     * State of the header lines parse loop
+     */
+    protected ParsingState headerParsingState;
+
+    /**
+     * Series header of the stations table, with the imported meta info.
+     */
+    protected HEADER seriesHeader;
+
+    /**
+     * List of the meta info Patterns matched during {@link handleMetaLine}
+     */
+    protected final List<Pattern> metaPatternsMatched;
+
+    /**
+     * Column titles of the stations table, starting with the km column.
+     * All strings have been trimmed.
+     */
+    protected final List<String> columnTitles;
+
+    /**
+     * List of the km value tuples imported, no duplicate km
+     */
+    protected final List<KMLINE> values;
+
+    /**
+     * Ordered list with the imported km to check for duplicates.
+     */
+    protected final TreeSet<Double> kmExists;
+
+
+    /***** CONSTRUCTORS *****/
+
+    /**
+     * Constructs a parser for an import file
+     */
+    public AbstractParser(final File importPath, final File rootRelativePath, final ImportRiver river) {
+        this.importPath = importPath;
+        this.rootRelativePath = rootRelativePath;
+        this.river = river;
+        this.metaPatternsMatched = new ArrayList<>();
+        this.kmExists = new TreeSet<>(EpsilonComparator.CMP);
+        this.columnTitles = new ArrayList<>();
+        this.values = new ArrayList<>();
+    }
+
+
+    /***** METHODS *****/
+
+    /**
+     * Lists all files in a directory that have the given filename extension (starting with a dot)
+     */
+    protected static List<File> listFiles(final File importDir, final String extension) {
+        final File[] files = importDir.listFiles(new FilenameFilter() {
+            @Override
+            public boolean accept(final File dir, final String name) {
+                return name.toLowerCase().endsWith(extension);
+            }
+        });
+        final List<File> fl = new ArrayList<>();
+        if (files != null)
+            for (final File file : files)
+                fl.add(file);
+        return fl;
+    }
+
+    /**
+     * Parses a file and adds series and values to the parser's collection
+     */
+    @Override
+    public void parse() throws IOException {
+        logStartInfo();
+        this.seriesHeader = createSeriesImport(this.importPath.getName().replaceAll("\\.csv", ""));
+        this.metaPatternsMatched.clear();
+        this.kmExists.clear();
+        this.headerParsingState = ParsingState.CONTINUE;
+        try {
+            try {
+                this.in = new LineNumberReader(new InputStreamReader(new FileInputStream(this.importPath), ENCODING));
+            }
+            catch (final Exception e) {
+                logError("Could not open (" + e.getMessage() + ")");
+                this.headerParsingState = ParsingState.STOP;
+            }
+            this.currentLine = null;
+            while (this.headerParsingState != ParsingState.STOP) {
+                this.currentLine = this.in.readLine();
+                if (this.currentLine == null)
+                    break;
+                this.currentLine = this.currentLine.trim();
+                if (this.currentLine.isEmpty())
+                    continue;
+                if (this.headerParsingState == ParsingState.CONTINUE)
+                    handleMetaLine();
+                else
+                    handleDataLine();
+            }
+            if (this.headerParsingState != ParsingState.STOP)
+                getLog().info("Number of values found: " + this.seriesHeader.getValueCount());
+        }
+        finally {
+            if (this.in != null) {
+                this.in.close();
+                this.in = null;
+            }
+        }
+        if (this.headerParsingState == ParsingState.STOP)
+            logError("Parsing of the file stopped due to a severe error");
+    }
+
+    /**
+     * Writes the parse start info to the log
+     */
+    protected void logStartInfo() {
+        getLog().info("Start parsing:;'" + this.rootRelativePath + "'");
+    }
+
+    /**
+     * Stores the parsed series and values in the database
+     */
+    @Override
+    public void store() {
+        if (this.headerParsingState != ParsingState.STOP) {
+            this.seriesHeader.store(this.river.getPeer());
+            final String counts = String.format("parse=%d, insert=%d, update/ignore=%d", this.seriesHeader.getValueCount(),
+                    this.seriesHeader.getValueStoreCount(StoreMode.INSERT), this.seriesHeader.getValueStoreCount(StoreMode.UPDATE));
+            if (this.seriesHeader.getValueCount() > this.seriesHeader.getValueStoreCount(StoreMode.INSERT))
+                logWarning("Number of value inserts less than number parsed: " + counts);
+            else
+                getLog().info("Number of values records: " + counts);
+        }
+        else
+            logWarning("Severe parsing errors, not storing series '" + this.seriesHeader.getFilename() + "'");
+    }
+
+    /**
+     * Strips separator chars from a meta info text, and trims leading and trailing whitespace
+     */
+    public static String parseMetaInfo(final String text) {
+        return text.replace(SEPARATOR_CHAR, "").trim();
+    }
+
+    /**
+     * Parses a number string with dot or comma as decimal char, returning null in case of an error
+     */
+    public static Number parseDoubleWithNull(final String text) {
+        try {
+            return parseDouble(text);
+        }
+        catch (final Exception e) {
+            return null;
+        }
+    }
+
+    /**
+     * Parses a number string with dot or comma as decimal char
+     *
+     * @throws ParseException
+     */
+    public static Number parseDouble(final String text) throws ParseException {
+        return numberFormat.parse(text.replace(',', '.'));
+    }
+
+    /**
+     * Gets the class's logger
+     */
+    protected abstract Logger getLog();
+
+    /**
+     * Logs an error message, appending the relative file path
+     */
+    protected void logError(final String message) {
+        getLog().error(message + ";" + this.rootRelativePath);
+    }
+
+    /**
+     * Logs a warning message, appending the relative file path
+     */
+    protected void logWarning(final String message) {
+        getLog().warn(message + ";" + this.rootRelativePath);
+    }
+
+    /**
+     * Creates a new series import object
+     */
+    protected abstract HEADER createSeriesImport(final String filename);
+
+    protected void handleMetaLine() {
+        if (META_SUBGROUP.matcher(this.currentLine).matches())
+            return;
+        else if (handleMetaRivername())
+            return;
+        else if (handleMetaKmrange_info())
+            return;
+        else if (handleMetaComment())
+            return;
+        else if (handleMetaOther())
+            return;
+        else if (handleMetaColumnTitles()) {
+            if (this.headerParsingState != ParsingState.STOP)
+                this.headerParsingState = ParsingState.DONE;
+            return;
+        }
+        else {
+            if (this.currentLine.startsWith(START_META_CHAR)) {
+                if (this.headerParsingState != ParsingState.IGNORE)
+                    logWarning("Not matching any known meta type in line " + this.in.getLineNumber() + ", ignored");
+                else
+                    this.headerParsingState = ParsingState.CONTINUE;
+            }
+        }
+    }
+
+    private boolean handleMetaRivername() {
+        if (META_RIVERNAME.matcher(this.currentLine).matches()) {
+            this.metaPatternsMatched.add(META_RIVERNAME);
+            return true;
+        }
+        else
+            return false;
+    }
+
+    private boolean handleMetaKmrange_info() {
+        final Matcher m = META_KMRANGE_INFO.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_KMRANGE_INFO);
+            this.seriesHeader.setKmrange_info(parseMetaInfo(m.group(1)));
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaComment() {
+        final Matcher m = META_COMMENTS.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_COMMENTS);
+            this.seriesHeader.setComment(parseMetaInfo(m.group(1)));
+            return true;
+        }
+        return false;
+    }
+
+    /**
+     * Parses currentLine for non-default meta info
+     *
+     * @return Whether the line has been handled
+     */
+    protected boolean handleMetaOther() {
+        return false;
+    }
+
+    /**
+     * Parses a header line for the km table column header line
+     *
+     * @return Whether the line has been handled and we are ready to read the km value lines
+     */
+    protected boolean handleMetaColumnTitles() {
+        if (META_COLUMNTITLES.matcher(this.currentLine).matches()) {
+            this.metaPatternsMatched.add(META_COLUMNTITLES);
+            this.columnTitles.clear();
+            final String[] titles = this.currentLine.split(SEPARATOR_CHAR, 0);
+            for (int i = 0; i <= titles.length - 1; i++)
+                this.columnTitles.add(titles[i].trim());
+            return true;
+        }
+        return false;
+    }
+
+    private void handleDataLine() {
+        final String[] values = this.currentLine.split(SEPARATOR_CHAR, 0);
+        // Skip lines without data or with only a km value
+        if (values.length < 2)
+            return;
+        Double km;
+        try {
+            km = Double.valueOf(parseDouble(values[0]).doubleValue());
+            if (kmMustBeUnique()) {
+                if (this.kmExists.contains(km)) {
+                    logWarning("Ignoring duplicate station '" + values[0] + "' in line " + this.in.getLineNumber());
+                    return;
+                }
+                this.kmExists.add(km);
+            }
+        }
+        catch (final Exception e) {
+            logError("Not parseable km in line " + this.in.getLineNumber() + ": " + e.getMessage());
+            return;
+        }
+        final KMLINE value = createKmLineImport(km, values);
+        if (value != null)
+            this.seriesHeader.addValue(value);
+    }
+
+    /**
+     * Whether {@link handleDataLine} shall check for and reject km duplicates
+     */
+    protected boolean kmMustBeUnique() {
+        return true;
+    }
+
+    /**
+     * Creates a value import item from the km and the other fields of the current line;
+     * the km has already been validated
+     *
+     * @return the value item, or null in case of a parse error
+     */
+    protected abstract KMLINE createKmLineImport(final Double km, final String[] values);
+}
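To illustrate the contract above, a concrete parser's createKmLineImport override could look roughly like the following sketch. The column indices and the use of ChannelKmLineImport are assumptions for illustration only; the actual S-INFO parsers may split and validate their lines differently.

    @Override
    protected ChannelKmLineImport createKmLineImport(final Double km, final String[] values) {
        try {
            // assumed column layout: values[1] = channel width, values[2] = channel depth
            final double width = parseDouble(values[1]).doubleValue();
            final double depth = parseDouble(values[2]).doubleValue();
            return new ChannelKmLineImport(km, width, depth);
        }
        catch (final Exception e) {
            logError("Not parseable value in line " + this.in.getLineNumber() + ": " + e.getMessage());
            return null;
        }
    }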
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/common/AbstractSeriesImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/common/AbstractSeriesImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,170 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.common;
+
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.ImporterSession;
+import org.dive4elements.river.model.River;
+import org.hibernate.Session;
+
+/**
+ * Abstract base class for a km-based data series of a river, imported from a file
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public abstract class AbstractSeriesImport<SERIES, KMTUPLE, KMLINE extends AbstractKmLineImport<SERIES, KMTUPLE>>
+{
+    /***** FIELDS *****/
+
+    /**
+     * Name of the imported file without type extension
+     */
+    protected String filename;
+
+    protected String kmrange_info;
+
+    protected String comment;
+
+    protected final List<KMLINE> values;
+
+    protected SERIES peer;
+
+    protected Map<StoreMode, Integer> valueStoreCount;
+
+    protected StoreMode seriesStoreMode;
+
+
+    /***** CONSTRUCTORS *****/
+
+    AbstractSeriesImport() {
+        this.values = new ArrayList<>();
+        this.valueStoreCount = new EnumMap<>(StoreMode.class);
+        for (final StoreMode mode : StoreMode.values())
+            this.valueStoreCount.put(mode, Integer.valueOf(0));
+    }
+
+    public AbstractSeriesImport(final String filename) {
+        this();
+        setFilename(filename);
+    }
+
+
+    /***** METHODS *****/
+
+    /**
+     * Gets the class's logger
+     */
+    public abstract Logger getLog();
+
+    void setFilename(final String filename) {
+        this.filename = filename;
+    }
+
+    public String getFilename() {
+        return this.filename;
+    }
+
+    public String getKmrange_info() {
+        return this.kmrange_info;
+    }
+
+    public void setKmrange_info(final String kmrange_info) {
+        this.kmrange_info = kmrange_info;
+    }
+
+    public String getComment() {
+        return this.comment;
+    }
+
+    public void setComment(final String comment) {
+        this.comment = comment;
+    }
+
+    public int getValueCount() {
+        return this.values.size();
+    }
+
+    public void addValue(final KMLINE value) {
+        this.values.add(value);
+    }
+
+    public int getValueStoreCount(final StoreMode mode) {
+        return this.valueStoreCount.get(mode).intValue();
+    }
+
+    /**
+     * Stores the data series and their values in the database
+     *
+     * @param river
+     */
+    public StoreMode store(final River river) {
+        logStoreInfo();
+        for (final StoreMode mode : StoreMode.values())
+            this.valueStoreCount.put(mode, Integer.valueOf(0));
+        final SERIES peer = getPeer(river);
+        if (peer != null) {
+            for (final KMLINE value : this.values) {
+                incrementValueStoreCount(value.store(peer, this.seriesStoreMode));
+            }
+        }
+        ImporterSession.getInstance().getDatabaseSession().flush();
+        return this.seriesStoreMode;
+    }
+
+    /**
+     * Writes the store start info to the log
+     */
+    protected void logStoreInfo() {
+        getLog().info("Store series '" + getFilename() + "'");
+    }
+
+    private void incrementValueStoreCount(final StoreMode mode) {
+        this.valueStoreCount.put(mode, Integer.valueOf(this.valueStoreCount.get(mode).intValue() + 1));
+    }
+
+    /**
+     * Gets the model object of the data series, inserting it into the database if not already existing
+     */
+    public SERIES getPeer(final River river) {
+        if (this.peer != null) {
+            this.seriesStoreMode = StoreMode.NONE;
+            return this.peer;
+        }
+        final Session session = ImporterSession.getInstance().getDatabaseSession();
+        final List<SERIES> rows = querySeriesItem(session, river);
+        if (rows.isEmpty()) {
+            getLog().info("Create new database instance");
+            this.peer = createSeriesItem(river);
+            session.save(this.peer);
+            this.seriesStoreMode = StoreMode.INSERT;
+        } else {
+            this.peer = rows.get(0);
+            this.seriesStoreMode = StoreMode.UPDATE;
+        }
+        return this.peer;
+    }
+
+    /**
+     * Queries the series item(s) from the database
+     */
+    public abstract List<SERIES> querySeriesItem(final Session session, final River river);
+
+    /**
+     * Creates a new series item
+     */
+    public abstract SERIES createSeriesItem(final River river);
+}
\ No newline at end of file
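As a usage sketch (variable names are illustrative; the real call site is the store step of the concrete parsers), the lifecycle offered by this class can be exercised and reported like this:

    final BedMobilitySeriesImport series = new BedMobilitySeriesImport("example_file");  // hypothetical filename
    // series.addValue(...) is called once per successfully parsed km line
    final StoreMode seriesMode = series.store(river);  // river: the org.dive4elements.river.model.River being imported
    series.getLog().info("Series " + seriesMode + ": "
            + series.getValueStoreCount(StoreMode.INSERT) + " values inserted, "
            + series.getValueStoreCount(StoreMode.UPDATE) + " updated, "
            + series.getValueStoreCount(StoreMode.NONE) + " unchanged");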
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/common/ImportParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/common/ImportParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,33 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.common;
+
+import java.io.IOException;
+
+/**
+ * Parser interface
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public interface ImportParser {
+
+    /**
+     * Parses a file and adds the parsed objects to the parsers state
+     */
+    void parse() throws IOException;
+
+    /**
+     * Stores the parsed objects in the database with reference to a river
+     */
+    void store();
+
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/common/ParsingState.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/common/ParsingState.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,23 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.common;
+
+/**
+ * Return value of a parser's line handling method
+ *
+ * @author Matthias Schäfer
+ */
+public enum ParsingState {
+    CONTINUE, // no match, continue with other handlers
+    IGNORE, // match, content ignored
+    DONE, // match, content handled
+    STOP // severe error, stop the parsing loop
+}
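A rough sketch of how these states are meant to drive a header-reading loop (reader and handleHeaderLine are hypothetical names; the actual loop lives in the abstract parser of this patch):

    ParsingState state = ParsingState.CONTINUE;
    while (state != ParsingState.DONE && state != ParsingState.STOP) {
        final String line = reader.readLine();  // reader: a LineNumberReader on the import file (assumed)
        if (line == null)
            break;                              // file ended before the column title line was found
        state = handleHeaderLine(line);         // hypothetical dispatcher returning one of the four states
    }
    // DONE: proceed with the km value lines; STOP: abort the file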
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/common/StoreMode.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/common/StoreMode.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,20 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.common;
+
+/**
+ * The way a database record has been stored
+ * 
+ * @author Matthias Schäfer
+ */
+public enum StoreMode {
+    NONE, INSERT, UPDATE
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/SInfoImporter.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/SInfoImporter.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,205 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.ImportParser;
+import org.dive4elements.river.importer.sinfo.parsers.BedMobilityParser;
+import org.dive4elements.river.importer.sinfo.parsers.ChannelParser;
+import org.dive4elements.river.importer.sinfo.parsers.CollisionParser;
+import org.dive4elements.river.importer.sinfo.parsers.DailyDischargeParser;
+import org.dive4elements.river.importer.sinfo.parsers.DepthEvolutionParser;
+import org.dive4elements.river.importer.sinfo.parsers.FlowDepthParser;
+import org.dive4elements.river.importer.sinfo.parsers.InfrastructureParser;
+import org.dive4elements.river.importer.sinfo.parsers.SelectedAdditionalParser;
+import org.dive4elements.river.importer.sinfo.parsers.TkhParser;
+
+/**
+ * Imports all S-INFO files of a river from its import directory and subdirectories<br />
+ * <br />
+ * Requires the river and its gauges to already exist in the database
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class SInfoImporter
+{
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(SInfoImporter.class);
+
+    private static final String SINFO_DIR = "Schifffahrt";
+
+    private enum SInfoDirName {
+        BASICS("Basisdaten"), //
+        SELECTED_ADDITIONAL("Selektierte_Zusaetzliche_Laengsschnitte"), //
+        INFRASTRUCTURE("Infrastrukturen_BWaStr"), //
+        CHANNEL("Fahrrinnenverhaeltnisse"), //
+        COLLISION_EINZEL("Grundberuehrungen" + File.separator + "Einzeljahre"), //
+        COLLISION_EPOCHE("Grundberuehrungen" + File.separator + "Epoche"), //
+        TKH_EINZEL("Transportkoerperhoehen" + File.separator + "Einzeljahre"), //
+        TKH_EPOCHE("Transportkoerperhoehen" + File.separator + "Epochen"), //
+        TKH_MODELL("Transportkoerperhoehen" + File.separator + "Modellergebnisse"), //
+        FLOW_DEPTH("Modellierte_Datensaetze" + File.separator + "Fliesstiefen"), //
+        DEPTH_EVOLUTION_AKTUELL("Fliesstiefenentwicklung" + File.separator + "Bezug_aktueller_GlW"), //
+        DEPTH_EVOLUTION_ETAPPEN("Fliesstiefenentwicklung" + File.separator + "GlW-Etappen");
+
+        private final String dirname;
+
+        SInfoDirName(final String dirname) {
+            this.dirname = dirname;
+        }
+
+        public String getDir() {
+            return this.dirname;
+        }
+        public File getFile() {
+            return new File(getDir());
+        }
+
+        public File buildPath(final File rootDir) {
+            return new File(rootDir, getDir());
+        }
+    }
+
+    /**
+     * Parsers for the river's S-INFO files.
+     */
+    private final List<ImportParser> parsers;
+
+    /**
+     * Path of the S-INFO data directory of the importing river.
+     */
+    private File rootDir;
+
+
+    /***** CONSTRUCTOR *****/
+
+    public SInfoImporter() {
+        this.parsers = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    /**
+     * Initialises the parser list, honouring the parsers' skip flags.
+     */
+    public void setup(final File riverDir, final ImportRiver river) {
+        this.rootDir = new File(riverDir, SINFO_DIR);
+        log.info("Parse S-INFO files from " + this.rootDir);
+        this.parsers.clear();
+        if (!BedMobilityParser.shallSkip()) {
+            if (!this.parsers.addAll(BedMobilityParser.createParsers(SInfoDirName.BASICS.buildPath(this.rootDir), SInfoDirName.BASICS.getFile(), river)))
+                log.info("Bed mobility: no files found");
+        }
+        else {
+            log.info("Bed mobility: skipped");
+        }
+        if (!SelectedAdditionalParser.shallSkip()) {
+            if (!this.parsers.addAll(SelectedAdditionalParser.createParsers(SInfoDirName.SELECTED_ADDITIONAL.buildPath(this.rootDir),
+                    SInfoDirName.SELECTED_ADDITIONAL.getFile(), river)))
+                log.info("Selected additionals: no files found");
+        }
+        else {
+            log.info("Selected additionals: skipped");
+        }
+        if (!InfrastructureParser.shallSkip()) {
+            if (!this.parsers.addAll(InfrastructureParser.createParsers(SInfoDirName.INFRASTRUCTURE.buildPath(this.rootDir),
+                    SInfoDirName.INFRASTRUCTURE.getFile(), river)))
+                log.info("Infrastructure: no files found");
+        }
+        else {
+            log.info("Infrastructure: skipped");
+        }
+        if (!ChannelParser.shallSkip()) {
+            if (!this.parsers.addAll(ChannelParser.createParsers(SInfoDirName.CHANNEL.buildPath(this.rootDir), SInfoDirName.CHANNEL.getFile(), river)))
+                log.info("Channel: no files found");
+        }
+        else {
+            log.info("Channel: skipped");
+        }
+        if (!CollisionParser.shallSkip()) {
+            boolean added = false;
+            added = this.parsers.addAll(CollisionParser.createParsers(SInfoDirName.COLLISION_EINZEL.buildPath(this.rootDir),
+                    SInfoDirName.COLLISION_EINZEL.getFile(), river));
+            added |= this.parsers.addAll(CollisionParser.createParsers(SInfoDirName.COLLISION_EPOCHE.buildPath(this.rootDir),
+                    SInfoDirName.COLLISION_EPOCHE.getFile(), river));
+            if (!added)
+                log.info("Collision: no files found");
+        }
+        else {
+            log.info("Collision: skipped");
+        }
+        if (!DailyDischargeParser.shallSkip()) {
+            if (!this.parsers.addAll(DailyDischargeParser.createParsers(SInfoDirName.BASICS.buildPath(this.rootDir), SInfoDirName.BASICS.getFile(), river)))
+                log.info("Daily discharge: no files found");
+        }
+        else {
+            log.info("Daily discharge: skipped");
+        }
+        if (!TkhParser.shallSkip()) {
+            boolean added = false;
+            added = this.parsers.addAll(TkhParser.createParsers(SInfoDirName.TKH_EINZEL.buildPath(this.rootDir),
+                    SInfoDirName.TKH_EINZEL.getFile(), river));
+            added |= this.parsers.addAll(TkhParser.createParsers(SInfoDirName.TKH_EPOCHE.buildPath(this.rootDir),
+                    SInfoDirName.TKH_EPOCHE.getFile(), river));
+            added |= this.parsers.addAll(TkhParser.createParsers(SInfoDirName.TKH_MODELL.buildPath(this.rootDir),
+                    SInfoDirName.TKH_MODELL.getFile(), river));
+            if (!added)
+                log.info("Tkh: no files found");
+        }
+        else {
+            log.info("Tkh: skipped");
+        }
+        if (!FlowDepthParser.shallSkip()) {
+            if (!this.parsers.addAll(FlowDepthParser.createParsers(SInfoDirName.FLOW_DEPTH.buildPath(this.rootDir), SInfoDirName.FLOW_DEPTH.getFile(), river)))
+                log.info("FlowDepth: no files found");
+        }
+        else {
+            log.info("FlowDepth: skipped");
+        }
+        if (!DepthEvolutionParser.shallSkip()) {
+            boolean added = false;
+            added = this.parsers.addAll(DepthEvolutionParser.createParsers(SInfoDirName.DEPTH_EVOLUTION_AKTUELL.buildPath(this.rootDir),
+                    SInfoDirName.DEPTH_EVOLUTION_AKTUELL.getFile(), river));
+            added |= this.parsers.addAll(DepthEvolutionParser.createParsers(SInfoDirName.DEPTH_EVOLUTION_ETAPPEN.buildPath(this.rootDir),
+                    SInfoDirName.DEPTH_EVOLUTION_ETAPPEN.getFile(), river));
+            if (!added)
+                log.info("Depth evolution: no files found");
+        }
+        else {
+            log.info("Depth evolution: skipped");
+        }
+    }
+
+    /**
+     * Imports the files according to the active parser list
+     */
+    public void parse() throws IOException {
+        for (final ImportParser parser : this.parsers)
+            parser.parse();
+    }
+
+    /**
+     * Stores all pending import objects
+     */
+    public void store() {
+        for (final ImportParser parser : this.parsers)
+            parser.store();
+    }
+
+}
\ No newline at end of file
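The intended call sequence of this class is sketched below; riverDir and importRiver come from the surrounding river import, which is not part of this hunk:

    final SInfoImporter sinfoImporter = new SInfoImporter();
    sinfoImporter.setup(riverDir, importRiver);  // builds the parser list from the Schifffahrt directory tree
    sinfoImporter.parse();                       // reads all found S-INFO files (throws IOException)
    sinfoImporter.store();                       // writes the parsed series to the database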
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/BedMobilityKmLineImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/BedMobilityKmLineImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,64 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.dive4elements.river.importer.common.AbstractKmLineImport;
+import org.dive4elements.river.model.sinfo.BedMobility;
+import org.dive4elements.river.model.sinfo.BedMobilityValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported bed mobility value of a river station.
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class BedMobilityKmLineImport extends AbstractKmLineImport<BedMobility, BedMobilityValue> {
+
+    /***** FIELDS *****/
+
+    private boolean bedMoving;
+
+
+    /***** CONSTRUCTOR *****/
+
+    public BedMobilityKmLineImport(final Double km, final boolean bedMoving) {
+        super(km.doubleValue());
+        this.bedMoving = bedMoving;
+    }
+
+    /***** METHODS *****/
+
+    public void setBedMoving(final boolean bedMoving) {
+        this.bedMoving = bedMoving;
+    }
+
+    @Override
+    protected BedMobilityValue queryValueItem(final Session session, final BedMobility parent) {
+        final Query query = session.createQuery("FROM BedMobilityValue WHERE (bedMobility=:parent) AND (station=:station)");
+        query.setParameter("parent", parent);
+        query.setParameter("station", this.station);
+        final List rows = query.list();
+        if (!rows.isEmpty())
+            return (BedMobilityValue) rows.get(0);
+        else
+            return null;
+    }
+
+
+    @Override
+    protected BedMobilityValue createValueItem(final BedMobility parent) {
+        return new BedMobilityValue(parent, this.station, this.bedMoving);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/BedMobilitySeriesImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/BedMobilitySeriesImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,62 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.AbstractSeriesImport;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.sinfo.BedMobility;
+import org.dive4elements.river.model.sinfo.BedMobilityValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported bed mobility data series of a river
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class BedMobilitySeriesImport extends AbstractSeriesImport<BedMobility, BedMobilityValue, BedMobilityKmLineImport> {
+
+    /***** FIELDS *****/
+
+    private static Logger log = Logger.getLogger(BedMobilitySeriesImport.class);
+
+
+    /***** CONSTRUCTORS *****/
+
+    public BedMobilitySeriesImport(final String filename) {
+        super(filename);
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    public Logger getLog() {
+        return log;
+    }
+
+    @Override
+    public List<BedMobility> querySeriesItem(final Session session, final River river) {
+        final Query query = session.createQuery("FROM BedMobility WHERE river=:river");
+        query.setParameter("river", river);
+        return query.list();
+    }
+
+
+    @Override
+    public BedMobility createSeriesItem(final River river) {
+        return new BedMobility(river, this.filename, this.kmrange_info, this.comment);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/ChannelKmLineImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/ChannelKmLineImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,71 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.dive4elements.river.importer.common.AbstractKmLineImport;
+import org.dive4elements.river.model.sinfo.Channel;
+import org.dive4elements.river.model.sinfo.ChannelValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported channel size of a river station.
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class ChannelKmLineImport extends AbstractKmLineImport<Channel, ChannelValue> {
+
+    /***** FIELDS *****/
+
+    private double width;
+
+    private double depth;
+
+
+    /***** CONSTRUCTOR *****/
+
+    public ChannelKmLineImport(final Double km, final double width, final double depth) {
+        super(km.doubleValue());
+        this.width = width;
+        this.depth = depth;
+    }
+
+    /***** METHODS *****/
+
+    public void setWidth(final double width) {
+        this.width = width;
+    }
+
+    public void setDepth(final double depth) {
+        this.depth = depth;
+    }
+
+    @Override
+    protected ChannelValue queryValueItem(final Session session, final Channel parent) {
+        final Query query = session.createQuery("FROM ChannelValue WHERE (channel=:parent) AND (station=:station)");
+        query.setParameter("parent", parent);
+        query.setParameter("station", this.station);
+        final List rows = query.list();
+        if (!rows.isEmpty())
+            return (ChannelValue) rows.get(0);
+        else
+            return null;
+    }
+
+
+    @Override
+    protected ChannelValue createValueItem(final Channel parent) {
+        return new ChannelValue(parent, this.station, this.width, this.depth);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/ChannelSeriesImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/ChannelSeriesImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,74 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.AbstractSeriesImport;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.sinfo.Channel;
+import org.dive4elements.river.model.sinfo.ChannelValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported channel data series of a river
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class ChannelSeriesImport extends AbstractSeriesImport<Channel, ChannelValue, ChannelKmLineImport> {
+
+    /***** FIELDS *****/
+
+    private static Logger log = Logger.getLogger(ChannelSeriesImport.class);
+
+    private Integer year_from;
+
+    private Integer year_to;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public ChannelSeriesImport(final String filename) {
+        super(filename);
+    }
+
+
+    /***** METHODS *****/
+
+    public void setYear_from(final Integer year_from) {
+        this.year_from = year_from;
+    }
+
+    public void setYear_to(final Integer year_to) {
+        this.year_to = year_to;
+    }
+
+    @Override
+    public Logger getLog() {
+        return log;
+    }
+
+    @Override
+    public List<Channel> querySeriesItem(final Session session, final River river) {
+        final Query query = session.createQuery("FROM Channel WHERE river=:river");
+        query.setParameter("river", river);
+        return query.list();
+    }
+
+
+    @Override
+    public Channel createSeriesItem(final River river) {
+        return new Channel(river, this.filename, this.kmrange_info, this.comment, this.year_from, this.year_to);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/CollisionKmLineImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/CollisionKmLineImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,73 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.Date;
+import java.util.List;
+
+import org.dive4elements.river.importer.common.AbstractKmLineImport;
+import org.dive4elements.river.model.sinfo.Collision;
+import org.dive4elements.river.model.sinfo.CollisionValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported collision event of a river station.
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class CollisionKmLineImport extends AbstractKmLineImport<Collision, CollisionValue> {
+
+    /***** FIELDS *****/
+
+    private final CollisionTypeImport collisionType;
+
+    private final Date eventDate;
+
+    private final String gaugeName;
+
+    private final double gaugeW;
+
+
+    /***** CONSTRUCTOR *****/
+
+    public CollisionKmLineImport(final Double km, final CollisionTypeImport collisionType, final Date eventDate, final String gaugeName, final double gaugeW) {
+        super(km.doubleValue());
+        this.collisionType = collisionType;
+        this.eventDate = eventDate;
+        this.gaugeName = gaugeName;
+        this.gaugeW = gaugeW;
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected CollisionValue queryValueItem(final Session session, final Collision parent) {
+        final Query query = session.createQuery("FROM CollisionValue WHERE (collision=:parent) AND (eventDate=:eventdate)"
+                + " AND (station BETWEEN (:station-0.0001) AND (:station+0.0001))");
+        query.setParameter("parent", parent);
+        query.setParameter("station", this.station);
+        query.setParameter("eventdate", this.eventDate);
+        final List rows = query.list();
+        if (!rows.isEmpty())
+            return (CollisionValue) rows.get(0);
+        else
+            return null;
+    }
+
+
+    @Override
+    protected CollisionValue createValueItem(final Collision parent) {
+        return new CollisionValue(parent, this.station, this.collisionType.getPeer(), this.eventDate, this.gaugeName, this.gaugeW);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/CollisionSeriesImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/CollisionSeriesImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,69 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.AbstractSeriesImport;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.sinfo.Collision;
+import org.dive4elements.river.model.sinfo.CollisionValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported collision data series of a river
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class CollisionSeriesImport extends AbstractSeriesImport<Collision, CollisionValue, CollisionKmLineImport> {
+
+    /***** FIELDS *****/
+
+    private static Logger log = Logger.getLogger(CollisionSeriesImport.class);
+
+    private int year;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public CollisionSeriesImport(final String filename) {
+        super(filename);
+    }
+
+
+    /***** METHODS *****/
+
+    public void setYear(final int year) {
+        this.year = year;
+    }
+
+    @Override
+    public Logger getLog() {
+        return log;
+    }
+
+    @Override
+    public List<Collision> querySeriesItem(final Session session, final River river) {
+        final Query query = session.createQuery("FROM Collision WHERE river=:river AND lower(filename)=:filename");
+        query.setParameter("river", river);
+        query.setParameter("filename", this.filename.toLowerCase());
+        return query.list();
+    }
+
+
+    @Override
+    public Collision createSeriesItem(final River river) {
+        return new Collision(river, this.filename, this.kmrange_info, this.comment, this.year);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/CollisionTypeImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/CollisionTypeImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,83 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.ImporterSession;
+import org.dive4elements.river.model.sinfo.CollisionType;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported collision type
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class CollisionTypeImport implements Comparable<CollisionTypeImport> {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(CollisionTypeImport.class);
+
+    protected String name;
+
+    protected CollisionType peer;
+
+    /***** CONSTRUCTOR *****/
+
+    public CollisionTypeImport() {
+    }
+
+    public CollisionTypeImport(final String name) {
+        this.name = name;
+    }
+
+    /***** METHODS *****/
+
+    @Override
+    public int compareTo(final CollisionTypeImport other) {
+        return this.name.compareTo(other.name);
+    }
+
+    @Override
+    public int hashCode() {
+        return this.name.hashCode();
+    }
+
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(final String name) {
+        this.name = name;
+    }
+
+    public CollisionType getPeer() {
+        if (this.peer != null)
+            return this.peer;
+        final Session session = ImporterSession.getInstance().getDatabaseSession();
+        final Query query = session.createQuery("FROM CollisionType WHERE lower(name)=:name");
+        query.setParameter("name", this.name.trim().toLowerCase());
+        final List<CollisionType> types = query.list();
+        if (types.isEmpty()) {
+            this.peer = new CollisionType(this.name);
+            session.save(this.peer);
+            log.info(String.format("Create new database instance: %d, '%s'", this.peer.getId(), this.name));
+        }
+        else {
+            this.peer = types.get(0);
+        }
+        return this.peer;
+    }
+}
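To show how the collision import items fit together (all literal values below are hypothetical), a parser would typically build and register a line like this:

    final CollisionTypeImport type = new CollisionTypeImport("Grundberührung");  // type name taken from the data line
    final CollisionKmLineImport line = new CollisionKmLineImport(
            Double.valueOf(123.400), type, eventDate, "Beispielpegel", 321.05);  // eventDate: java.util.Date from the line
    collisionSeries.addValue(line);  // collisionSeries: the CollisionSeriesImport of the current file
    // the CollisionType record itself is looked up or created lazily via type.getPeer() during store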
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/DailyDischargeDayLineImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/DailyDischargeDayLineImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,64 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.Date;
+import java.util.List;
+
+import org.dive4elements.river.importer.common.AbstractKmLineImport;
+import org.dive4elements.river.model.sinfo.DailyDischarge;
+import org.dive4elements.river.model.sinfo.DailyDischargeValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported daily discharge of a gauge.
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class DailyDischargeDayLineImport extends AbstractKmLineImport<DailyDischarge, DailyDischargeValue> {
+
+    /***** FIELDS *****/
+
+    private final Date day;
+
+    private final Double discharge;
+
+
+    /***** CONSTRUCTOR *****/
+
+    public DailyDischargeDayLineImport(final Date day, final Double discharge) {
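+        // values are keyed by day rather than by river station; the inherited station/km is therefore unused (NaN)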
+        super(Double.NaN);
+        this.day = day;
+        this.discharge = discharge;
+    }
+
+    /***** METHODS *****/
+
+    @Override
+    protected DailyDischargeValue queryValueItem(final Session session, final DailyDischarge parent) {
+        final Query query = session.createQuery("FROM DailyDischargeValue WHERE (DailyDischarge=:parent) AND (day=:day)");
+        query.setParameter("parent", parent);
+        query.setParameter("day", this.day);
+        final List rows = query.list();
+        if (!rows.isEmpty())
+            return (DailyDischargeValue) rows.get(0);
+        else
+            return null;
+    }
+
+
+    @Override
+    protected DailyDischargeValue createValueItem(final DailyDischarge parent) {
+        return new DailyDischargeValue(parent, this.day, this.discharge);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/DailyDischargeSeriesImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/DailyDischargeSeriesImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,93 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.AbstractSeriesImport;
+import org.dive4elements.river.model.Gauge;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.sinfo.DailyDischarge;
+import org.dive4elements.river.model.sinfo.DailyDischargeValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported daily discharge value series of a gauge
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class DailyDischargeSeriesImport extends AbstractSeriesImport<DailyDischarge, DailyDischargeValue, DailyDischargeDayLineImport> {
+
+    /***** FIELDS *****/
+
+    private static Logger log = Logger.getLogger(DailyDischargeSeriesImport.class);
+
+    private String gaugeName;
+
+    private long gaugeNumber;
+
+    private Gauge gauge;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public DailyDischargeSeriesImport(final String filename) {
+        super(filename);
+    }
+
+
+    /***** METHODS *****/
+
+    public String getGaugeName() {
+        return this.gaugeName;
+    }
+
+    public void setGaugeName(final String gaugeName) {
+        this.gaugeName = gaugeName;
+    }
+
+    public long getGaugeNumber() {
+        return this.gaugeNumber;
+    }
+
+    public void setGaugeNumber(final long gaugeNumber) {
+        this.gaugeNumber = gaugeNumber;
+    }
+
+    public Gauge getGauge() {
+        return this.gauge;
+    }
+
+    public void setGauge(final Gauge gauge) {
+        this.gauge = gauge;
+    }
+
+    @Override
+    public Logger getLog() {
+        return log;
+    }
+
+    @Override
+    public List<DailyDischarge> querySeriesItem(final Session session, final River river) {
+        final Query query = session.createQuery("FROM DailyDischarge WHERE gauge=:gauge");
+        query.setParameter("gauge", this.gauge);
+        return query.list();
+    }
+
+
+    @Override
+    public DailyDischarge createSeriesItem(final River river) {
+        return new DailyDischarge(this.gauge, this.filename, this.comment);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/DepthEvolutionKmLineImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/DepthEvolutionKmLineImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,71 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.dive4elements.river.importer.common.AbstractKmLineImport;
+import org.dive4elements.river.model.sinfo.DepthEvolution;
+import org.dive4elements.river.model.sinfo.DepthEvolutionValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported depth evolution values of a river station.
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class DepthEvolutionKmLineImport extends AbstractKmLineImport<DepthEvolution, DepthEvolutionValue> {
+
+    /***** FIELDS *****/
+
+    private double total_change;
+
+    private double change_per_year;
+
+
+    /***** CONSTRUCTOR *****/
+
+    public DepthEvolutionKmLineImport(final Double km, final double total_change, final double change_per_year) {
+        super(km.doubleValue());
+        this.total_change = total_change;
+        this.change_per_year = change_per_year;
+    }
+
+    /***** METHODS *****/
+
+    public void setTotal_change(final double total_change) {
+        this.total_change = total_change;
+    }
+
+    public void setChange_per_year(final double change_per_year) {
+        this.change_per_year = change_per_year;
+    }
+
+    @Override
+    protected DepthEvolutionValue queryValueItem(final Session session, final DepthEvolution parent) {
+        final Query query = session.createQuery("FROM DepthEvolutionValue WHERE (DepthEvolution=:parent) AND (station=:station)");
+        query.setParameter("parent", parent);
+        query.setParameter("station", this.station);
+        final List rows = query.list();
+        if (!rows.isEmpty())
+            return (DepthEvolutionValue) rows.get(0);
+        else
+            return null;
+    }
+
+
+    @Override
+    protected DepthEvolutionValue createValueItem(final DepthEvolution parent) {
+        return new DepthEvolutionValue(parent, this.station, this.total_change, this.change_per_year);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/DepthEvolutionSeriesImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/DepthEvolutionSeriesImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,105 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.AbstractSeriesImport;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.sinfo.DepthEvolution;
+import org.dive4elements.river.model.sinfo.DepthEvolutionValue;
+import org.hibernate.SQLQuery;
+import org.hibernate.Session;
+
+/**
+ * Imported depth evolution data series of a river
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class DepthEvolutionSeriesImport extends AbstractSeriesImport<DepthEvolution, DepthEvolutionValue, DepthEvolutionKmLineImport> {
+
+    /***** FIELDS *****/
+
+    private static Logger log = Logger.getLogger(DepthEvolutionSeriesImport.class);
+
+    private Integer start_year;
+
+    private Integer reference_year;
+
+    private String curr_sounding;
+
+    private String old_sounding;
+
+    private String curr_glw;
+
+    private String old_glw;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public DepthEvolutionSeriesImport(final String filename) {
+        super(filename);
+    }
+
+
+    /***** METHODS *****/
+
+    public void setStart_year(final Integer start_year) {
+        this.start_year = start_year;
+    }
+
+    public void setReference_year(final Integer reference_year) {
+        this.reference_year = reference_year;
+    }
+
+    public void setCurr_sounding(final String curr_sounding) {
+        this.curr_sounding = curr_sounding;
+    }
+
+    public void setOld_sounding(final String old_sounding) {
+        this.old_sounding = old_sounding;
+    }
+
+    public void setCurr_glw(final String curr_glw) {
+        this.curr_glw = curr_glw;
+    }
+
+    public void setOld_glw(final String old_glw) {
+        this.old_glw = old_glw;
+    }
+
+    @Override
+    public Logger getLog() {
+        return log;
+    }
+
+    @Override
+    public List<DepthEvolution> querySeriesItem(final Session session, final River river) {
+        /*
+         * final Query query = session.createQuery("FROM DepthEvolution WHERE river=:river AND lower(filename)=:filename");
+         * query.setParameter("river", river);
+         * query.setParameter("filename", this.filename.toLowerCase());
+         */
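+        // note: the commented-out HQL is replaced by a native SQL query; see the analogous workaround in FlowDepthColumnSeriesImport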
+        final SQLQuery query = session.createSQLQuery("SELECT * FROM depth_evolution WHERE (river_id=:river) AND (lower(filename)=:filename)");
+        query.setParameter("river", river.getId());
+        query.setParameter("filename", getFilename().toLowerCase());
+        return query.list();
+    }
+
+
+    @Override
+    public DepthEvolution createSeriesItem(final River river) {
+        return new DepthEvolution(river, this.filename, this.kmrange_info, this.comment, this.start_year, this.reference_year, this.curr_sounding,
+                this.old_sounding, this.curr_glw, this.old_glw);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/FlowDepthColumnSeriesImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/FlowDepthColumnSeriesImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,118 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.io.File;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.AbstractSeriesImport;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.sinfo.FlowDepthColumn;
+import org.dive4elements.river.model.sinfo.FlowDepthValue;
+import org.hibernate.SQLQuery;
+import org.hibernate.Session;
+
+/**
+ * Imported flow depth data series of a river
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class FlowDepthColumnSeriesImport extends AbstractSeriesImport<FlowDepthColumn, FlowDepthValue, FlowDepthKmLineImport> {
+
+    /***** FIELDS *****/
+
+    private static Logger log = Logger.getLogger(FlowDepthColumnSeriesImport.class);
+
+    private int year;
+
+    private String sounding_info;
+
+    private String evaluation_by;
+
+    private final FlowDepthSeriesImport parent;
+
+    private final String colName;
+
+    private final File relativeFilePath;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public FlowDepthColumnSeriesImport(final String filename) {
+        this(filename, null, null, null);
+    }
+
+    public FlowDepthColumnSeriesImport(final String filename, final FlowDepthSeriesImport parent, final String colName, final File relativeFilePath) {
+        super(filename);
+        this.parent = parent;
+        this.colName = colName;
+        this.relativeFilePath = relativeFilePath;
+    }
+
+
+    /***** METHODS *****/
+
+    public int getYear() {
+        return this.year;
+    }
+
+    public void setYear(final int year) {
+        this.year = year;
+    }
+
+    public String getSounding_info() {
+        return this.sounding_info;
+    }
+
+    public void setSounding_info(final String sounding_info) {
+        this.sounding_info = sounding_info;
+    }
+
+    public String getEvaluation_by() {
+        return this.evaluation_by;
+    }
+
+    public void setEvaluation_by(final String evaluation_by) {
+        this.evaluation_by = evaluation_by;
+    }
+
+    @Override
+    public Logger getLog() {
+        return log;
+    }
+
+    @Override
+    public List<FlowDepthColumn> querySeriesItem(final Session session, final River river) {
+        /*
+         * final Query query = session.createQuery("FROM FlowDepthColumn WHERE (FlowDepth=:parent) AND lower(name)=:colname");
+         * query.setParameter("parent", this.parent.getPeer(river));
+         * query.setParameter("colname", this.colName.toLowerCase());
+         */
+        // FIXME the normal query raises a null pointer exception
+        final SQLQuery query = session.createSQLQuery("SELECT * FROM flow_depth_column WHERE (flow_depth_id=:parent) AND (lower(name)=:colname)");
+        query.setParameter("parent", this.parent.getPeer(river).getId());
+        query.setParameter("colname", this.colName.toLowerCase());
+        return query.list();
+    }
+
+
+    @Override
+    public FlowDepthColumn createSeriesItem(final River river) {
+        return new FlowDepthColumn(this.parent.getPeer(river), this.colName);
+    }
+
+    @Override
+    protected void logStoreInfo() {
+        getLog().info(String.format("Store series column '%s':;'%s'", this.colName, this.relativeFilePath));
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/FlowDepthKmLineImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/FlowDepthKmLineImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,65 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.dive4elements.river.importer.common.AbstractKmLineImport;
+import org.dive4elements.river.model.sinfo.FlowDepthColumn;
+import org.dive4elements.river.model.sinfo.FlowDepthValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported flow depth value of a river station.
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class FlowDepthKmLineImport extends AbstractKmLineImport<FlowDepthColumn, FlowDepthValue> {
+
+    /***** FIELDS *****/
+
+    /**
+     * Flow depth in m
+     */
+    private final double depth;
+
+
+    /***** CONSTRUCTOR *****/
+
+    public FlowDepthKmLineImport(final Double km, final double depth) {
+        super(km.doubleValue());
+        this.depth = depth;
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected FlowDepthValue queryValueItem(final Session session, final FlowDepthColumn parent) {
+        final Query query = session.createQuery("FROM FlowDepthValue WHERE (FlowDepthColumn=:parent)"
+                + " AND (station BETWEEN (:station-0.0001) AND (:station+0.0001))");
+        query.setParameter("parent", parent);
+        query.setParameter("station", this.station);
+        final List rows = query.list();
+        if (!rows.isEmpty())
+            return (FlowDepthValue) rows.get(0);
+        else
+            return null;
+    }
+
+
+    @Override
+    protected FlowDepthValue createValueItem(final FlowDepthColumn parent) {
+        return new FlowDepthValue(parent, this.station, this.depth);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/FlowDepthSeriesImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/FlowDepthSeriesImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,145 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.ImporterSession;
+import org.dive4elements.river.importer.common.StoreMode;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.sinfo.FlowDepth;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported flow depth data series group of a river
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class FlowDepthSeriesImport {
+
+    /***** FIELDS *****/
+
+    private static Logger log = Logger.getLogger(FlowDepthSeriesImport.class);
+
+    /**
+     * Name of the imported file without type extension
+     */
+    private final String filename;
+
+    private String kmrange_info;
+
+    private String comment;
+
+    private int year;
+
+    private String sounding_info;
+
+    private String evaluation_by;
+
+    private FlowDepth peer;
+
+    private StoreMode storeMode;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public FlowDepthSeriesImport(final String filename) {
+        this.filename = filename;
+    }
+
+
+    /***** METHODS *****/
+
+    public String getFilename() {
+        return this.filename;
+    }
+
+    public String getKmrange_info() {
+        return this.kmrange_info;
+    }
+
+    public void setKmrange_info(final String kmrange_info) {
+        this.kmrange_info = kmrange_info;
+    }
+
+    public String getComment() {
+        return this.comment;
+    }
+
+    public void setComment(final String comment) {
+        this.comment = comment;
+    }
+
+    public int getYear() {
+        return this.year;
+    }
+
+    public void setYear(final int year) {
+        this.year = year;
+    }
+
+    public String getSounding_info() {
+        return this.sounding_info;
+    }
+
+    public void setSounding_info(final String sounding_info) {
+        this.sounding_info = sounding_info;
+    }
+
+    public String getEvaluation_by() {
+        return this.evaluation_by;
+    }
+
+    public void setEvaluation_by(final String evaluation_by) {
+        this.evaluation_by = evaluation_by;
+    }
+
+    public StoreMode getStoreMode() {
+        return this.storeMode;
+    }
+
+    /**
+     * Gets the model object of the data series group, inserting it into the database if not already existing
+     */
+    public FlowDepth getPeer(final River river) {
+        if (this.peer != null) {
+            this.storeMode = StoreMode.NONE;
+            return this.peer;
+        }
+        final Session session = ImporterSession.getInstance().getDatabaseSession();
+        final List<FlowDepth> rows = querySeriesItem(session, river);
+        if (rows.isEmpty()) {
+            log.info("Create new database instance");
+            this.peer = createSeriesItem(river);
+            session.save(this.peer);
+            this.storeMode = StoreMode.INSERT;
+        }
+        else {
+            this.peer = rows.get(0);
+            this.storeMode = StoreMode.UPDATE;
+        }
+        return this.peer;
+    }
+
+    private List<FlowDepth> querySeriesItem(final Session session, final River river) {
+        final Query query = session.createQuery("FROM FlowDepth WHERE river=:river AND lower(filename)=:filename");
+        query.setParameter("river", river);
+        query.setParameter("filename", this.filename.toLowerCase());
+        return query.list();
+    }
+
+    private FlowDepth createSeriesItem(final River river) {
+        return new FlowDepth(river, this.filename, this.kmrange_info, this.comment, this.year, this.sounding_info, this.evaluation_by);
+    }
+}
\ No newline at end of file
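To make the relation between the two flow depth import classes explicit (filename, column name and relative path below are hypothetical):

    final FlowDepthSeriesImport group = new FlowDepthSeriesImport("beispiel_fliesstiefen");  // one group per file
    group.setYear(2016);
    final FlowDepthColumnSeriesImport column = new FlowDepthColumnSeriesImport(
            "beispiel_fliesstiefen", group, "Fliesstiefe GlW", new File("Fliesstiefen/beispiel_fliesstiefen.csv"));
    // column.addValue(...) per km line; storing the column creates the FlowDepth group lazily via group.getPeer(river)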
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/InfrastructureKmLineImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/InfrastructureKmLineImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,72 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.dive4elements.river.importer.ImportAttribute;
+import org.dive4elements.river.importer.common.AbstractKmLineImport;
+import org.dive4elements.river.model.sinfo.Infrastructure;
+import org.dive4elements.river.model.sinfo.InfrastructureValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported infrastructure of a river station.
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class InfrastructureKmLineImport extends AbstractKmLineImport<Infrastructure, InfrastructureValue> {
+
+    /***** FIELDS *****/
+
+    private double height;
+
+    private final ImportAttribute bankAttribute;
+
+
+    /***** CONSTRUCTOR *****/
+
+    public InfrastructureKmLineImport(final Double km, final double height, final ImportAttribute bankAttribute) {
+        super(km.doubleValue());
+        this.height = height;
+        this.bankAttribute = bankAttribute;
+    }
+
+
+    /***** METHODS *****/
+
+    public void setHeight(final double height) {
+        this.height = height;
+    }
+
+    @Override
+    protected InfrastructureValue queryValueItem(final Session session, final Infrastructure parent) {
+        final Query query = session.createQuery("FROM InfrastructureValue WHERE (infrastructure=:parent) AND (attribute=:bank)"
+                + " AND (station BETWEEN (:station-0.0001) AND (:station+0.0001))");
+        query.setParameter("parent", parent);
+        query.setParameter("station", this.station);
+        query.setParameter("bank", this.bankAttribute.getPeer());
+        final List rows = query.list();
+        if (!rows.isEmpty())
+            return (InfrastructureValue) rows.get(0);
+        else
+            return null;
+    }
+
+
+    @Override
+    protected InfrastructureValue createValueItem(final Infrastructure parent) {
+        return new InfrastructureValue(parent, this.station, this.bankAttribute.getPeer(), this.height);
+    }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/InfrastructureSeriesImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/InfrastructureSeriesImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,88 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.ImportAnnotationType;
+import org.dive4elements.river.importer.common.AbstractSeriesImport;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.sinfo.Infrastructure;
+import org.dive4elements.river.model.sinfo.InfrastructureValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported infrastructure data series of a river and a type
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class InfrastructureSeriesImport extends AbstractSeriesImport<Infrastructure, InfrastructureValue, InfrastructureKmLineImport> {
+
+    /***** FIELDS *****/
+
+    private static Logger log = Logger.getLogger(InfrastructureSeriesImport.class);
+
+    private ImportAnnotationType type;
+
+    private String provider;
+
+    private String evaluation_by;
+
+    private int year;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public InfrastructureSeriesImport(final String filename) {
+        super(filename);
+    }
+
+
+    /***** METHODS *****/
+
+    public void setType(final ImportAnnotationType type) {
+        this.type = type;
+    }
+
+    public void setProvider(final String provider) {
+        this.provider = provider;
+    }
+
+    public void setEvaluation_by(final String evaluation_by) {
+        this.evaluation_by = evaluation_by;
+    }
+
+    public void setYear(final int year) {
+        this.year = year;
+    }
+
+    @Override
+    public Logger getLog() {
+        return log;
+    }
+
+    @Override
+    public List<Infrastructure> querySeriesItem(final Session session, final River river) {
+        final Query query = session.createQuery("FROM Infrastructure WHERE river=:river AND lower(filename)=:filename");
+        query.setParameter("river", river);
+        query.setParameter("filename", this.filename.toLowerCase());
+        return query.list();
+    }
+
+
+    @Override
+    public Infrastructure createSeriesItem(final River river) {
+        return new Infrastructure(river, this.filename, this.kmrange_info, this.comment, this.type.getPeer(), this.year, this.provider, this.evaluation_by);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/TkhColumnSeriesImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/TkhColumnSeriesImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,112 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.io.File;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.AbstractSeriesImport;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.sinfo.TkhColumn;
+import org.dive4elements.river.model.sinfo.TkhValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported TKH data series of a river
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class TkhColumnSeriesImport extends AbstractSeriesImport<TkhColumn, TkhValue, TkhKmLineImport> {
+
+    /***** FIELDS *****/
+
+    private static Logger log = Logger.getLogger(TkhColumnSeriesImport.class);
+
+    private int year;
+
+    private String sounding_info;
+
+    private String evaluation_by;
+
+    private final TkhSeriesImport parent;
+
+    private final String colName;
+
+    private final File relativeFilePath;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public TkhColumnSeriesImport(final String filename) {
+        this(filename, null, null, null);
+    }
+
+    public TkhColumnSeriesImport(final String filename, final TkhSeriesImport parent, final String colName, final File relativeFilePath) {
+        super(filename);
+        this.parent = parent;
+        this.colName = colName;
+        this.relativeFilePath = relativeFilePath;
+    }
+
+
+    /***** METHODS *****/
+
+    public int getYear() {
+        return this.year;
+    }
+
+    public void setYear(final int year) {
+        this.year = year;
+    }
+
+    public String getSounding_info() {
+        return this.sounding_info;
+    }
+
+    public void setSounding_info(final String sounding_info) {
+        this.sounding_info = sounding_info;
+    }
+
+    public String getEvaluation_by() {
+        return this.evaluation_by;
+    }
+
+    public void setEvaluation_by(final String evaluation_by) {
+        this.evaluation_by = evaluation_by;
+    }
+
+    @Override
+    public Logger getLog() {
+        return log;
+    }
+
+    @Override
+    public List<TkhColumn> querySeriesItem(final Session session, final River river) {
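+        // A TKH column is identified by its parent TKH series and its case-insensitive column name.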
+        final Query query = session.createQuery("FROM TkhColumn WHERE tkh=:parent AND lower(name)=:colname");
+        query.setParameter("parent", this.parent.getPeer(river));
+        query.setParameter("colname", this.colName.toLowerCase());
+        return query.list();
+    }
+
+
+    @Override
+    public TkhColumn createSeriesItem(final River river) {
+        return new TkhColumn(this.parent.getPeer(river), this.colName);
+    }
+
+    @Override
+    protected void logStoreInfo() {
+        getLog().info(String.format("Store series column '%s':;'%s'", this.colName, this.relativeFilePath));
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/TkhKmLineImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/TkhKmLineImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,65 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.dive4elements.river.importer.common.AbstractKmLineImport;
+import org.dive4elements.river.model.sinfo.TkhColumn;
+import org.dive4elements.river.model.sinfo.TkhValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported Transportkörperhöhe value of a river station.
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class TkhKmLineImport extends AbstractKmLineImport<TkhColumn, TkhValue> {
+
+    /***** FIELDS *****/
+
+    /**
+     * TKH in m
+     */
+    private final double tkheight;
+
+
+    /***** CONSTRUCTOR *****/
+
+    public TkhKmLineImport(final Double km, final double tkheight) {
+        super(km.doubleValue());
+        this.tkheight = tkheight;
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected TkhValue queryValueItem(final Session session, final TkhColumn parent) {
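+        // Stations are floating point values; match with a +/-0.0001 km tolerance instead of exact equality.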
+        final Query query = session.createQuery("FROM TkhValue WHERE (tkhcolumn=:parent)"
+                + " AND (station BETWEEN (:station-0.0001) AND (:station+0.0001))");
+        query.setParameter("parent", parent);
+        query.setParameter("station", this.station);
+        final List rows = query.list();
+        if (!rows.isEmpty())
+            return (TkhValue) rows.get(0);
+        else
+            return null;
+    }
+
+
+    @Override
+    protected TkhValue createValueItem(final TkhColumn parent) {
+        return new TkhValue(parent, this.station, this.tkheight);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/TkhSeriesImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/importitem/TkhSeriesImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,145 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.importitem;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.ImporterSession;
+import org.dive4elements.river.importer.common.StoreMode;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.sinfo.Tkh;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported Transportkörperhöhe data series group of a river
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class TkhSeriesImport {
+
+    /***** FIELDS *****/
+
+    private static Logger log = Logger.getLogger(TkhSeriesImport.class);
+
+    /**
+     * Name of the imported file without type extension
+     */
+    private final String filename;
+
+    private String kmrange_info;
+
+    private String comment;
+
+    private int year;
+
+    private String sounding_info;
+
+    private String evaluation_by;
+
+    private Tkh peer;
+
+    private StoreMode storeMode;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public TkhSeriesImport(final String filename) {
+        this.filename = filename;
+    }
+
+
+    /***** METHODS *****/
+
+    public String getFilename() {
+        return this.filename;
+    }
+
+    public String getKmrange_info() {
+        return this.kmrange_info;
+    }
+
+    public void setKmrange_info(final String kmrange_info) {
+        this.kmrange_info = kmrange_info;
+    }
+
+    public String getComment() {
+        return this.comment;
+    }
+
+    public void setComment(final String comment) {
+        this.comment = comment;
+    }
+
+    public int getYear() {
+        return this.year;
+    }
+
+    public void setYear(final int year) {
+        this.year = year;
+    }
+
+    public String getSounding_info() {
+        return this.sounding_info;
+    }
+
+    public void setSounding_info(final String sounding_info) {
+        this.sounding_info = sounding_info;
+    }
+
+    public String getEvaluation_by() {
+        return this.evaluation_by;
+    }
+
+    public void setEvaluation_by(final String evaluation_by) {
+        this.evaluation_by = evaluation_by;
+    }
+
+    public StoreMode getStoreMode() {
+        return this.storeMode;
+    }
+
+    /**
+     * Gets the model object of the data series group, inserting it into the database if it does not exist yet
+     */
+    public Tkh getPeer(final River river) {
+        if (this.peer != null) {
+            this.storeMode = StoreMode.NONE;
+            return this.peer;
+        }
+        final Session session = ImporterSession.getInstance().getDatabaseSession();
+        final List<Tkh> rows = querySeriesItem(session, river);
+        if (rows.isEmpty()) {
+            log.info("Create new database instance");
+            this.peer = createSeriesItem(river);
+            session.save(this.peer);
+            this.storeMode = StoreMode.INSERT;
+        }
+        else {
+            this.peer = rows.get(0);
+            this.storeMode = StoreMode.UPDATE;
+        }
+        return this.peer;
+    }
+
+    private List<Tkh> querySeriesItem(final Session session, final River river) {
+        final Query query = session.createQuery("FROM Tkh WHERE river=:river AND lower(filename)=:filename");
+        query.setParameter("river", river);
+        query.setParameter("filename", this.filename.toLowerCase());
+        return query.list();
+    }
+
+    private Tkh createSeriesItem(final River river) {
+        return new Tkh(river, this.filename, this.kmrange_info, this.comment, this.year, this.sounding_info, this.evaluation_by);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/BedMobilityParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/BedMobilityParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,87 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.parsers;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.Config;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.importer.sinfo.importitem.BedMobilityKmLineImport;
+import org.dive4elements.river.importer.sinfo.importitem.BedMobilitySeriesImport;
+import org.dive4elements.river.model.sinfo.BedMobility;
+import org.dive4elements.river.model.sinfo.BedMobilityValue;
+
+/**
+ * Reads and parses a bed mobility file
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class BedMobilityParser extends AbstractParser<BedMobility, BedMobilityValue, BedMobilityKmLineImport, BedMobilitySeriesImport> {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(BedMobilityParser.class);
+
+    private static final String MOBILE_KEYWORD = "mobil";
+
+    private static final String IMPORT_FILENAME = "Einteilung der Gewässersohle.csv";
+
+
+    /***** CONSTRUCTORS *****/
+
+    public BedMobilityParser(final File importPath, final File rootRelativePath, final ImportRiver river) {
+        super(importPath, rootRelativePath, river);
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    /**
+     * Whether this import type shall be skipped
+     */
+    public static boolean shallSkip() {
+        return Config.INSTANCE.skipSInfoBedMobility();
+    }
+
+    /**
+     * Creates a list of parsers for all bed mobility import files in a directory
+     */
+    public static List<BedMobilityParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
+        final List<BedMobilityParser> parsers = new ArrayList<>();
+        parsers.add(new BedMobilityParser(new File(importDir, IMPORT_FILENAME), new File(relativeDir, IMPORT_FILENAME), river));
+        return parsers;
+    }
+
+    @Override
+    protected BedMobilitySeriesImport createSeriesImport(final String filename) {
+        return new BedMobilitySeriesImport(filename);
+    }
+
+    @Override
+    protected BedMobilityKmLineImport createKmLineImport(final Double km, final String[] values) {
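+        // Column 1 carries the bed mobility class; the keyword "mobil" (case-insensitive) marks a mobile bed.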
+        return new BedMobilityKmLineImport(km, values[1].equalsIgnoreCase(MOBILE_KEYWORD));
+    }
+
+    @Override
+    protected boolean handleMetaOther() {
+        return super.handleMetaOther();
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/ChannelParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/ChannelParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,149 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.parsers;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.Config;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.importer.common.ParsingState;
+import org.dive4elements.river.importer.sinfo.importitem.ChannelKmLineImport;
+import org.dive4elements.river.importer.sinfo.importitem.ChannelSeriesImport;
+import org.dive4elements.river.model.sinfo.Channel;
+import org.dive4elements.river.model.sinfo.ChannelValue;
+
+/**
+ * Reads and parses a channel size file
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class ChannelParser extends AbstractParser<Channel, ChannelValue, ChannelKmLineImport, ChannelSeriesImport> {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(ChannelParser.class);
+
+    private static final String IMPORT_FILENAME = "Fahrrinne.csv";
+
+    protected static final Pattern META_YEARS = Pattern.compile("^#\\sZeitraum:\\s*([12]\\d\\d\\d)*\\s*-\\s*([12]\\d\\d\\d)*.*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern WIDTH_COLUMNTITLE = Pattern.compile("Sollbreite\\s*\\[(.*)\\].*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern DEPTH_COLUMNTITLE = Pattern.compile("Solltiefe\\s*\\[(.*)\\].*", Pattern.CASE_INSENSITIVE);
+
+    private int widthColIndex;
+
+    private int depthColIndex;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public ChannelParser(final File importPath, final File rootRelativePath, final ImportRiver river) {
+        super(importPath, rootRelativePath, river);
+        this.widthColIndex = -1;
+        this.depthColIndex = -1;
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    /**
+     * Whether this import type shall be skipped
+     */
+    public static boolean shallSkip() {
+        return Config.INSTANCE.skipSInfoChannel();
+    }
+
+    /**
+     * Creates a list of parsers for all channel import files in a directory
+     */
+    public static List<ChannelParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
+        final List<ChannelParser> parsers = new ArrayList<>();
+        parsers.add(new ChannelParser(new File(importDir, IMPORT_FILENAME), new File(relativeDir, IMPORT_FILENAME), river));
+        return parsers;
+    }
+
+    @Override
+    protected boolean handleMetaOther() {
+        return handleMetaYears();
+    }
+
+    private boolean handleMetaYears() {
+        final Matcher m = META_YEARS.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_YEARS);
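+            // Either bound of the period may be missing (open range); store null in that case.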
+            if (m.group(1) != null)
+                this.seriesHeader.setYear_from(Integer.valueOf(m.group(1)));
+            else
+                this.seriesHeader.setYear_from(null);
+            if (m.group(2) != null)
+                this.seriesHeader.setYear_to(Integer.valueOf(m.group(2)));
+            else
+                this.seriesHeader.setYear_to(null);
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    protected boolean handleMetaColumnTitles() {
+        if (super.handleMetaColumnTitles()) {
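+            // Scan the column titles (index 0 is the km column) for the required width and depth columns.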
+            for (int i = 1; i <= this.columnTitles.size() - 1; i++) {
+                if (DEPTH_COLUMNTITLE.matcher(this.columnTitles.get(i)).matches())
+                    this.depthColIndex = i;
+                else if (WIDTH_COLUMNTITLE.matcher(this.columnTitles.get(i)).matches())
+                    this.widthColIndex = i;
+            }
+            if ((this.widthColIndex < 0) || (this.depthColIndex < 0)) {
+                logError("Columns of width and/or depth values could not be identified, missing column title 'Sollbreite...'/'Sollhöhe...'");
+                this.headerParsingState = ParsingState.STOP;
+                return false;
+            }
+            return true;
+        }
+        else
+            return false;
+    }
+
+    @Override
+    protected ChannelSeriesImport createSeriesImport(final String filename) {
+        return new ChannelSeriesImport(filename);
+    }
+
+    @Override
+    protected ChannelKmLineImport createKmLineImport(final Double km, final String[] values) {
+        if (parseDoubleWithNull(values[this.widthColIndex]) == null) {
+            logError("Invalid width value in line " + this.in.getLineNumber());
+            return null;
+        }
+        if (parseDoubleWithNull(values[this.depthColIndex]) == null) {
+            logError("Invalid depth value in line " + this.in.getLineNumber());
+            return null;
+        }
+        return new ChannelKmLineImport(km, parseDoubleWithNull(values[this.widthColIndex]).doubleValue(),
+                parseDoubleWithNull(values[this.depthColIndex]).doubleValue());
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/CollisionParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/CollisionParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,191 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.parsers;
+
+import java.io.File;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.Config;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.importer.common.ParsingState;
+import org.dive4elements.river.importer.sinfo.importitem.CollisionKmLineImport;
+import org.dive4elements.river.importer.sinfo.importitem.CollisionSeriesImport;
+import org.dive4elements.river.importer.sinfo.importitem.CollisionTypeImport;
+import org.dive4elements.river.model.sinfo.Collision;
+import org.dive4elements.river.model.sinfo.CollisionType;
+import org.dive4elements.river.model.sinfo.CollisionValue;
+
+/**
+ * Reads and parses a collision file
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class CollisionParser extends AbstractParser<Collision, CollisionValue, CollisionKmLineImport, CollisionSeriesImport> {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(CollisionParser.class);
+
+    private static final Pattern META_YEAR = Pattern.compile("^#\\sJahr:\\s*([12]\\d\\d\\d).*", Pattern.CASE_INSENSITIVE);
+
+    private enum ColTitlePattern {
+        DATE("Datum.*"), //
+        GAUGE_W("Pegelstand\\s*\\[(.*)\\].*"), //
+        GAUGE_NAME("Bezugspegel.*"), //
+        TYPE("Unfallart.*");
+
+        private final Pattern pattern;
+
+        ColTitlePattern(final String regexp) {
+            this.pattern = Pattern.compile(regexp, Pattern.CASE_INSENSITIVE);
+        }
+
+        public Pattern getPattern() {
+            return this.pattern;
+        }
+    }
+
+    private static final DateFormat dateFormat = new SimpleDateFormat("dd.MM.yyyy");
+
+    private final EnumMap<ColTitlePattern, Integer> cols = new EnumMap<>(ColTitlePattern.class);
+
+    private final HashMap<String, CollisionTypeImport> types;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public CollisionParser(final File importPath, final File rootRelativePath, final ImportRiver river) {
+        super(importPath, rootRelativePath, river);
+        this.types = new HashMap<>();
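+        // Index the known collision types by their lower-cased name so repeated type names reuse the same import object.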
+        for (final CollisionType type : CollisionType.getTypes())
+            this.types.put(type.getName().trim().toLowerCase(), new CollisionTypeImport(type.getName()));
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    /**
+     * Whether this import type shall be skipped
+     */
+    public static boolean shallSkip() {
+        return Config.INSTANCE.skipSInfoCollision();
+    }
+
+    /**
+     * Creates a list of parsers for all collision import files in a directory
+     */
+    public static List<CollisionParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
+        final List<CollisionParser> parsers = new ArrayList<>();
+        for (final File file : listFiles(importDir, ".csv"))
+            parsers.add(new CollisionParser(file, new File(relativeDir, file.getName()), river));
+        return parsers;
+    }
+
+    @Override
+    protected CollisionSeriesImport createSeriesImport(final String filename) {
+        return new CollisionSeriesImport(filename);
+    }
+
+    @Override
+    protected boolean kmMustBeUnique() {
+        return false;
+    }
+
+    @Override
+    protected boolean handleMetaOther() {
+        return handleMetaYear();
+    }
+
+    private boolean handleMetaYear() {
+        final Matcher m = META_YEAR.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_YEAR);
+            this.seriesHeader.setYear(Integer.parseInt(m.group(1)));
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    protected boolean handleMetaColumnTitles() {
+        if (!super.handleMetaColumnTitles())
+            return false;
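+        // Record the index of each recognised column title; -1 marks a column that is not present.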
+        for (final ColTitlePattern col : ColTitlePattern.values())
+            this.cols.put(col, -1);
+        for (int i = 1; i <= this.columnTitles.size() - 1; i++) {
+            for (final ColTitlePattern col : ColTitlePattern.values()) {
+                if (col.getPattern().matcher(this.columnTitles.get(i)).matches()) {
+                    this.cols.put(col, i);
+                    break;
+                }
+            }
+        }
+        if (this.cols.get(ColTitlePattern.DATE) < 0)
+            logWarning("Column of the event dates could not be identified, missing column title 'Datum'");
+        if (this.cols.get(ColTitlePattern.TYPE) < 0) {
+            logError("Column of the collision types could not be identified, missing column title 'Unfallart'");
+            this.headerParsingState = ParsingState.STOP;
+            return false;
+        }
+        if (!this.metaPatternsMatched.contains(META_YEAR)) {
+            logError("Required meta info for the year is missing");
+            this.headerParsingState = ParsingState.STOP;
+        }
+        return true;
+    }
+
+    @Override
+    protected CollisionKmLineImport createKmLineImport(final Double km, final String[] values) {
+        Date eventDate = null;
+        try {
+            eventDate = dateFormat.parse(values[this.cols.get(ColTitlePattern.DATE)]);
+        }
+        catch (final Exception e) {
+            logError("Invalid date in line " + this.in.getLineNumber());
+            return null;
+        }
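+        // Look up the collision type case-insensitively; unknown types are created on their first occurrence.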
+        final String typeName = values[this.cols.get(ColTitlePattern.TYPE)].trim();
+        final String typeKey = typeName.toLowerCase();
+        CollisionTypeImport type = null;
+        if (this.types.containsKey(typeKey))
+            type = this.types.get(typeKey);
+        else {
+            type = new CollisionTypeImport(typeName);
+            this.types.put(typeKey, type);
+        }
+        String gaugeName = null;
+        if (this.cols.get(ColTitlePattern.GAUGE_NAME) >= 0)
+            gaugeName = values[this.cols.get(ColTitlePattern.GAUGE_NAME)].trim();
+        double gaugeW = Double.NaN;
+        if (this.cols.get(ColTitlePattern.GAUGE_W) >= 0) {
+            final Double gw = parseDoubleWithNull(values[this.cols.get(ColTitlePattern.GAUGE_W)]);
+            if (gw != null)
+                gaugeW = gw.doubleValue();
+        }
+        return new CollisionKmLineImport(km, type, eventDate, gaugeName, gaugeW);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/DailyDischargeParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/DailyDischargeParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,166 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.parsers;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.Config;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.importer.common.ParsingState;
+import org.dive4elements.river.importer.sinfo.importitem.DailyDischargeDayLineImport;
+import org.dive4elements.river.importer.sinfo.importitem.DailyDischargeSeriesImport;
+import org.dive4elements.river.model.sinfo.DailyDischarge;
+import org.dive4elements.river.model.sinfo.DailyDischargeValue;
+
+/**
+ * Reads and parses a daily discharge file
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class DailyDischargeParser extends AbstractParser<DailyDischarge, DailyDischargeValue, DailyDischargeDayLineImport, DailyDischargeSeriesImport> {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(DailyDischargeParser.class);
+
+    static final Pattern IMPORT_FILENAME = Pattern.compile("^(.+)_mittlerer_Tagesabfluss.csv", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_GAUGENAME = Pattern.compile("^#\\s*Stations-*Name:\\s*(\\S[^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_GAUGENUMBER = Pattern.compile("^#\\s*Stations-*Nummer:\\s*(\\S[^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_COLUMNTITLES = Pattern.compile("^#*\\s*Datum\\s*;\\s*Q.*", Pattern.CASE_INSENSITIVE);
+
+    private static final DateFormat dateFormat = new SimpleDateFormat("dd.MM.yyyy");
+
+
+    /***** CONSTRUCTORS *****/
+
+    public DailyDischargeParser(final File importPath, final File rootRelativePath, final ImportRiver river) {
+        super(importPath, rootRelativePath, river);
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    /**
+     * Whether this import type shall be skipped
+     */
+    public static boolean shallSkip() {
+        return Config.INSTANCE.skipSInfoDailyDischarge();
+    }
+
+    /**
+     * Creates a list of parsers for all daily discharge import files in a directory
+     */
+    public static List<DailyDischargeParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
+        final List<DailyDischargeParser> parsers = new ArrayList<>();
+        final File[] files = importDir.listFiles(new FilenameFilter() {
+            @Override
+            public boolean accept(final File dir, final String name) {
+                return IMPORT_FILENAME.matcher(name).matches();
+            }
+        });
+        for (final File file : files)
+            parsers.add(new DailyDischargeParser(file, new File(relativeDir, file.getName()), river));
+        return parsers;
+    }
+
+    @Override
+    protected boolean kmMustBeUnique() {
+        return false;
+    }
+
+    @Override
+    protected DailyDischargeSeriesImport createSeriesImport(final String filename) {
+        return new DailyDischargeSeriesImport(filename);
+    }
+
+    @Override
+    protected DailyDischargeDayLineImport createKmLineImport(final Double km, final String[] values) {
+        Date day = null;
+        try {
+            day = dateFormat.parse(values[0]);
+        }
+        catch (final Exception e) {
+            logError("Invalid date in line " + this.in.getLineNumber());
+            return null;
+        }
+        if (parseDoubleWithNull(values[1]) == null) {
+            logError("Invalid discharge value in line " + this.in.getLineNumber());
+            return null;
+        }
+        return new DailyDischargeDayLineImport(day, Double.valueOf(parseDoubleWithNull(values[1]).doubleValue()));
+    }
+
+    @Override
+    protected boolean handleMetaOther() {
+        if (handleMetaGaugeName())
+            return true;
+        else if (handleMetaGaugeNumber())
+            return true;
+        else
+            return false;
+    }
+
+    private boolean handleMetaGaugeName() {
+        final Matcher m = META_GAUGENAME.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_GAUGENAME);
+            this.seriesHeader.setGaugeName(m.group(1).trim());
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaGaugeNumber() {
+        final Matcher m = META_GAUGENUMBER.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_GAUGENUMBER);
+            this.seriesHeader.setGaugeNumber(Long.parseLong(m.group(1)));
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    protected boolean handleMetaColumnTitles() {
+        if (!META_COLUMNTITLES.matcher(this.currentLine).matches())
+            return false;
+        this.metaPatternsMatched.add(META_COLUMNTITLES);
+        this.columnTitles.clear();
+        final String[] titles = this.currentLine.split(SEPARATOR_CHAR, 0);
+        for (int i = 0; i <= titles.length - 1; i++)
+            this.columnTitles.add(titles[i].trim());
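+        // Resolve the gauge from the previously parsed number and/or name; without a matching gauge the file cannot be assigned and is skipped.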
+        this.seriesHeader.setGauge(this.river.getPeer().findGauge(this.seriesHeader.getGaugeNumber(), this.seriesHeader.getGaugeName()));
+        if (this.seriesHeader.getGauge() == null) {
+            logError("Gauge not found, file skipped");
+            this.headerParsingState = ParsingState.STOP;
+        }
+        return true;
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/DepthEvolutionParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/DepthEvolutionParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,197 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.parsers;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.Config;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.importer.common.ParsingState;
+import org.dive4elements.river.importer.sinfo.importitem.DepthEvolutionKmLineImport;
+import org.dive4elements.river.importer.sinfo.importitem.DepthEvolutionSeriesImport;
+import org.dive4elements.river.model.sinfo.DepthEvolution;
+import org.dive4elements.river.model.sinfo.DepthEvolutionValue;
+
+/**
+ * Reads and parses a depth evolution file
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class DepthEvolutionParser extends AbstractParser<DepthEvolution, DepthEvolutionValue, DepthEvolutionKmLineImport, DepthEvolutionSeriesImport> {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(DepthEvolutionParser.class);
+
+    protected static final Pattern META_REFERENCE_YEAR = Pattern.compile("^#\\sBezugsjahr:\\s*([12]\\d\\d\\d).*", Pattern.CASE_INSENSITIVE);
+
+    protected static final Pattern META_START_YEAR = Pattern.compile("^#\\sAusgangsjahr:\\s*([12]\\d\\d\\d).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_CURR_SOUNDING = Pattern.compile("^#\\sAktuelle Peilung\\s*/\\s*Epoche:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_OLD_SOUNDING = Pattern.compile("^#\\sHistorische Peilung\\s*/\\s*Epoche:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_CURR_WSP = Pattern.compile("^#\\sAktuelle Wasserspiegellage:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_OLD_WSP = Pattern.compile("^#\\sHistorische Wasserspiegellage:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+
+    /***** CONSTRUCTORS *****/
+
+    public DepthEvolutionParser(final File importPath, final File rootRelativePath, final ImportRiver river) {
+        super(importPath, rootRelativePath, river);
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    /**
+     * Whether this import type shall be skipped
+     */
+    public static boolean shallSkip() {
+        return Config.INSTANCE.skipSInfoDepthEvolution();
+    }
+
+    /**
+     * Creates a list of parsers for all depth evolution import files in a directory
+     */
+    public static List<DepthEvolutionParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
+        final List<DepthEvolutionParser> parsers = new ArrayList<>();
+        for (final File file : listFiles(importDir, ".csv"))
+            parsers.add(new DepthEvolutionParser(file, new File(relativeDir, file.getName()), river));
+        return parsers;
+    }
+
+    @Override
+    protected boolean handleMetaOther() {
+        if (handleMetaStartYear())
+            return true;
+        else if (handleMetaReferenceYear())
+            return true;
+        else if (handleMetaCurrSounding())
+            return true;
+        else if (handleMetaOldSounding())
+            return true;
+        else if (handleMetaCurrGlw())
+            return true;
+        else if (handleMetaOldGlw())
+            return true;
+        else
+            return false;
+    }
+
+    private boolean handleMetaStartYear() {
+        final Matcher m = META_START_YEAR.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_START_YEAR);
+            this.seriesHeader.setStart_year(Integer.parseInt(m.group(1)));
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaReferenceYear() {
+        final Matcher m = META_REFERENCE_YEAR.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_REFERENCE_YEAR);
+            this.seriesHeader.setReference_year(Integer.parseInt(m.group(1)));
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaCurrSounding() {
+        final Matcher m = META_CURR_SOUNDING.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_CURR_SOUNDING);
+            this.seriesHeader.setCurr_sounding(parseMetaInfo(m.group(1).trim()));
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaOldSounding() {
+        final Matcher m = META_OLD_SOUNDING.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_OLD_SOUNDING);
+            this.seriesHeader.setOld_sounding(parseMetaInfo(m.group(1).trim()));
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaCurrGlw() {
+        final Matcher m = META_CURR_WSP.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_CURR_WSP);
+            this.seriesHeader.setCurr_glw(parseMetaInfo(m.group(1).trim()));
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaOldGlw() {
+        final Matcher m = META_OLD_WSP.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_OLD_WSP);
+            this.seriesHeader.setOld_glw(parseMetaInfo(m.group(1).trim()));
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    protected boolean handleMetaColumnTitles() {
+        if (super.handleMetaColumnTitles()) {
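+            // All six meta infos (years, soundings and water levels) are required for a depth evolution series.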
+            if (!this.metaPatternsMatched.contains(META_START_YEAR) || !this.metaPatternsMatched.contains(META_REFERENCE_YEAR)
+                    || !this.metaPatternsMatched.contains(META_CURR_SOUNDING) || !this.metaPatternsMatched.contains(META_OLD_SOUNDING)
+                    || !this.metaPatternsMatched.contains(META_CURR_WSP) || !this.metaPatternsMatched.contains(META_OLD_WSP)) {
+                logError("One or more of the required meta infos are missing");
+                this.headerParsingState = ParsingState.STOP;
+            }
+            return true;
+        }
+        else
+            return false;
+    }
+
+    @Override
+    protected DepthEvolutionSeriesImport createSeriesImport(final String filename) {
+        return new DepthEvolutionSeriesImport(filename);
+    }
+
+    @Override
+    protected DepthEvolutionKmLineImport createKmLineImport(final Double km, final String[] values) {
+        if (parseDoubleWithNull(values[1]) == null) {
+            logError("Invalid total change in line " + this.in.getLineNumber());
+            return null;
+        }
+        if (parseDoubleWithNull(values[2]) == null) {
+            logError("Invalid change per year in line " + this.in.getLineNumber());
+            return null;
+        }
+        // cm to m
+        return new DepthEvolutionKmLineImport(km, parseDoubleWithNull(values[1]).doubleValue() / 100.0,
+                parseDoubleWithNull(values[2]).doubleValue() / 100.0);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/FlowDepthColumnParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/FlowDepthColumnParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,84 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.parsers;
+
+import java.io.File;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.importer.common.ParsingState;
+import org.dive4elements.river.importer.sinfo.importitem.FlowDepthColumnSeriesImport;
+import org.dive4elements.river.importer.sinfo.importitem.FlowDepthKmLineImport;
+import org.dive4elements.river.importer.sinfo.importitem.FlowDepthSeriesImport;
+import org.dive4elements.river.model.sinfo.FlowDepthColumn;
+import org.dive4elements.river.model.sinfo.FlowDepthValue;
+
+/**
+ * Reads and parses a column of a flow depth file
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class FlowDepthColumnParser extends AbstractParser<FlowDepthColumn, FlowDepthValue, FlowDepthKmLineImport, FlowDepthColumnSeriesImport> {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(FlowDepthColumnParser.class);
+
+    private final FlowDepthSeriesImport parent;
+
+    private final int colIndex;
+
+    private final String colName;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public FlowDepthColumnParser(final File importPath, final File rootRelativePath, final ImportRiver river, final FlowDepthSeriesImport parent,
+            final int colIndex,
+            final String colName) {
+        super(importPath, rootRelativePath, river);
+        this.parent = parent;
+        this.colIndex = colIndex;
+        this.colName = colName;
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    @Override
+    protected void logStartInfo() {
+        getLog().info(String.format("Start parsing column %d '%s':;'%s'", this.colIndex, this.colName, this.rootRelativePath));
+    }
+
+    @Override
+    protected boolean handleMetaOther() {
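+        // Header meta lines were already evaluated by FlowDepthParser; ignore them here.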
+        this.headerParsingState = ParsingState.IGNORE;
+        return false;
+    }
+
+    @Override
+    protected FlowDepthColumnSeriesImport createSeriesImport(final String filename) {
+        return new FlowDepthColumnSeriesImport(filename, this.parent, this.colName, this.rootRelativePath);
+    }
+
+    @Override
+    protected FlowDepthKmLineImport createKmLineImport(final Double km, final String[] values) {
+        final Double depth = parseDoubleWithNull(values[this.colIndex]);
+        if (depth == null)
+            return null;
+        return new FlowDepthKmLineImport(km, depth.doubleValue());
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/FlowDepthParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/FlowDepthParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,221 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.parsers;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.Config;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.importer.common.ParsingState;
+import org.dive4elements.river.importer.sinfo.importitem.FlowDepthColumnSeriesImport;
+import org.dive4elements.river.importer.sinfo.importitem.FlowDepthKmLineImport;
+import org.dive4elements.river.importer.sinfo.importitem.FlowDepthSeriesImport;
+import org.dive4elements.river.model.sinfo.FlowDepthColumn;
+import org.dive4elements.river.model.sinfo.FlowDepthValue;
+
+/**
+ * Reads and parses the header of a flow depth file and handles parsing and storing of its value columns
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class FlowDepthParser extends AbstractParser<FlowDepthColumn, FlowDepthValue, FlowDepthKmLineImport, FlowDepthColumnSeriesImport> {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(FlowDepthParser.class);
+
+    private static final Pattern META_YEAR = Pattern.compile("^#\\sBezugsjahr:\\s*([12]\\d\\d\\d).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_EVALUATOR = Pattern.compile("^#\\sAuswerter:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_SOUNDING = Pattern.compile("^#\\sPeilung:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_TYPE = Pattern.compile("^#\\sTyp:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern COLUMN_TITLE = Pattern.compile("Flie((.)|(ss))tiefe\\s*\\((.+?)\\)\\s*\\[m\\].*", Pattern.CASE_INSENSITIVE);
+
+    private final FlowDepthSeriesImport tkhGroup;
+
+    private final List<FlowDepthColumnParser> colParsers;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public FlowDepthParser(final File importPath, final File rootRelativePath, final ImportRiver river) {
+        super(importPath, rootRelativePath, river);
+        this.tkhGroup = new FlowDepthSeriesImport(importPath.getName().replaceAll("\\.csv", ""));
+        this.seriesHeader = new FlowDepthColumnSeriesImport(this.tkhGroup.getFilename(), this.tkhGroup, null, null);
+        this.colParsers = new ArrayList<>();
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    /**
+     * Whether this import type shall be skipped
+     */
+    public static boolean shallSkip() {
+        return Config.INSTANCE.skipSInfoFlowDepth();
+    }
+
+    /**
+     * Creates a list of parsers for all flow depth import files in a directory
+     */
+    public static List<FlowDepthParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
+        final List<FlowDepthParser> parsers = new ArrayList<>();
+        for (final File file : listFiles(importDir, ".csv"))
+            parsers.add(new FlowDepthParser(file, new File(relativeDir, file.getName()), river));
+        return parsers;
+    }
+
+    @Override
+    public void parse() throws IOException {
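+        // Only the header/meta lines are read here; the data lines are parsed afterwards by one FlowDepthColumnParser per value column.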
+        getLog().info("Start parsing:;'" + this.rootRelativePath + "'");
+        // this.seriesHeader = createSeriesImport(this.importPath.getName().replaceAll("\\.csv", ""));
+        this.metaPatternsMatched.clear();
+        this.kmExists.clear();
+        this.colParsers.clear();
+        this.headerParsingState = ParsingState.CONTINUE;
+        try {
+            try {
+                this.in = new LineNumberReader(new InputStreamReader(new FileInputStream(this.importPath), ENCODING));
+            }
+            catch (final Exception e) {
+                logError("Could not open (" + e.getMessage() + ")");
+                this.headerParsingState = ParsingState.STOP;
+            }
+            this.currentLine = null;
+            while (this.headerParsingState == ParsingState.CONTINUE) {
+                this.currentLine = this.in.readLine();
+                if (this.currentLine == null)
+                    break;
+                this.currentLine = this.currentLine.trim();
+                if (this.currentLine.isEmpty())
+                    continue;
+                handleMetaLine();
+            }
+        }
+        finally {
+            if (this.in != null) {
+                this.in.close();
+                this.in = null;
+            }
+        }
+        if (this.headerParsingState == ParsingState.STOP) {
+            logError("Parsing of the file stopped due to a severe error");
+            return;
+        }
+        for (final FlowDepthColumnParser colParser : this.colParsers)
+            colParser.parse();
+    }
+
+    @Override
+    protected boolean handleMetaOther() {
+        if (handleMetaYear())
+            return true;
+        else if (handleMetaType())
+            return true;
+        else if (handleMetaSounding())
+            return true;
+        else if (handleMetaEvaluator())
+            return true;
+        else
+            return false;
+    }
+
+    private boolean handleMetaYear() {
+        final Matcher m = META_YEAR.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_YEAR);
+            this.tkhGroup.setYear(Integer.parseInt(m.group(1)));
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaType() {
+        final Matcher m = META_TYPE.matcher(this.currentLine);
+        return m.matches();
+    }
+
+    private boolean handleMetaSounding() {
+        final Matcher m = META_SOUNDING.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_SOUNDING);
+            this.tkhGroup.setSounding_info(parseMetaInfo(m.group(1).trim()));
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaEvaluator() {
+        final Matcher m = META_EVALUATOR.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_EVALUATOR);
+            this.tkhGroup.setEvaluation_by(parseMetaInfo(m.group(1).trim()));
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    protected boolean handleMetaColumnTitles() {
+        if (!super.handleMetaColumnTitles())
+            return false;
+        this.tkhGroup.setKmrange_info(this.seriesHeader.getKmrange_info());
+        this.tkhGroup.setComment(this.seriesHeader.getComment());
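+        // Create one column parser per "Fließtiefe (...) [m]" column; the text in the parentheses becomes the column name.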
+        for (int i = 1; i <= this.columnTitles.size() - 1; i++) {
+            final Matcher m = COLUMN_TITLE.matcher(this.columnTitles.get(i));
+            if (m.matches())
+                this.colParsers.add(new FlowDepthColumnParser(this.importPath, this.rootRelativePath, this.river, this.tkhGroup, i, m.group(4).trim()));
+            else
+                logWarning("No title found in column " + i + ", skipped");
+        }
+        return true;
+    }
+
+    @Override
+    public void store() {
+        if (this.headerParsingState != ParsingState.STOP) {
+            this.tkhGroup.getPeer(this.river.getPeer());
+            for (final FlowDepthColumnParser colParser : this.colParsers)
+                colParser.store();
+        }
+        else
+            logWarning("Severe parsing errors, not storing series '" + this.tkhGroup.getFilename() + "'");
+    }
+
+    @Override
+    protected FlowDepthColumnSeriesImport createSeriesImport(final String filename) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    protected FlowDepthKmLineImport createKmLineImport(final Double km, final String[] values) {
+        throw new UnsupportedOperationException();
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/InfrastructureParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/InfrastructureParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,222 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.parsers;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.Config;
+import org.dive4elements.river.importer.ImportAnnotationType;
+import org.dive4elements.river.importer.ImportAttribute;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.importer.common.ParsingState;
+import org.dive4elements.river.importer.sinfo.importitem.InfrastructureKmLineImport;
+import org.dive4elements.river.importer.sinfo.importitem.InfrastructureSeriesImport;
+import org.dive4elements.river.model.AnnotationType;
+import org.dive4elements.river.model.sinfo.Infrastructure;
+import org.dive4elements.river.model.sinfo.InfrastructureValue;
+
+/**
+ * Reads and parses an infrastructure file
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class InfrastructureParser extends AbstractParser<Infrastructure, InfrastructureValue, InfrastructureKmLineImport, InfrastructureSeriesImport> {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(InfrastructureParser.class);
+
+    private static final Pattern META_TYPE = Pattern.compile("^#\\sInfrastruktur:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_PROVIDER = Pattern.compile("^#\\sDatenherkunft:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_EVALUATOR = Pattern.compile("^#\\sAuswerter:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_YEAR = Pattern.compile("^#\\sStand:\\s*([12]\\d\\d\\d).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern HEIGHT_COLUMNTITLE = Pattern.compile("((H.he)|(Hoehe))\\s*\\[(.*)\\].*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern BANK_COLUMNTITLE = Pattern.compile("Uferseite.*", Pattern.CASE_INSENSITIVE);
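+
+    // Illustrative meta and column-title lines the above patterns are meant to match
+    // (sample values, not taken from a real import file):
+    //   "# Infrastruktur: Buhnen"  -> META_TYPE, group(1) = "Buhnen"
+    //   "# Datenherkunft: WSA"     -> META_PROVIDER, group(1) = "WSA"
+    //   "# Stand: 2016"            -> META_YEAR, group(1) = "2016"
+    //   "Hoehe [m+NHN]"            -> HEIGHT_COLUMNTITLE, unit in brackets = "m+NHN"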
+
+    private static final String DB_BANK_LEFT = "links"; // TODO: improve database design to make this secure
+
+    private static final String DB_BANK_RIGHT = "rechts";
+
+    private static final String DB_BANK_NULL = "";
+
+    private int heightColIndex;
+
+    private int bankColIndex;
+
+    private final HashMap<String, ImportAttribute> bankAttributes;
+
+    private final HashMap<String, ImportAnnotationType> types;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public InfrastructureParser(final File importPath, final File rootRelativePath, final ImportRiver river) {
+        super(importPath, rootRelativePath, river);
+        this.heightColIndex = -1;
+        this.bankColIndex = -1;
+        this.bankAttributes = new HashMap<>();
+        this.bankAttributes.put("links", new ImportAttribute(DB_BANK_LEFT));
+        this.bankAttributes.put("rechts", new ImportAttribute(DB_BANK_RIGHT));
+        this.bankAttributes.put("", new ImportAttribute(DB_BANK_NULL));
+        this.types = new HashMap<>();
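+        // Cache the already known annotation types by lower-case name so that
+        // handleMetaType() can reuse them instead of creating duplicate types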
+        for (final AnnotationType type : AnnotationType.getTypes())
+            this.types.put(type.getName().trim().toLowerCase(), new ImportAnnotationType(type.getName()));
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    /**
+     * Whether this import type shall be skipped
+     */
+    public static boolean shallSkip() {
+        return Config.INSTANCE.skipSInfoInfrastructure();
+    }
+
+    /**
+     * Creates a list of parsers for all infrastructure import files in a directory
+     */
+    public static List<InfrastructureParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
+        final List<InfrastructureParser> parsers = new ArrayList<>();
+        for (final File file : listFiles(importDir, ".csv"))
+            parsers.add(new InfrastructureParser(file, new File(relativeDir, file.getName()), river));
+        return parsers;
+    }
+
+    @Override
+    protected InfrastructureSeriesImport createSeriesImport(final String filename) {
+        return new InfrastructureSeriesImport(filename);
+    }
+
+    @Override
+    protected boolean kmMustBeUnique() {
+        return false;
+    }
+
+    @Override
+    protected boolean handleMetaOther() {
+        if (handleMetaType())
+            return true;
+        else if (handleMetaProvider())
+            return true;
+        else if (handleMetaEvaluator())
+            return true;
+        else if (handleMetaYear())
+            return true;
+        else
+            return false;
+    }
+
+    private boolean handleMetaType() {
+        final Matcher m = META_TYPE.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_TYPE);
+            if (this.types.containsKey(m.group(1).trim().toLowerCase()))
+                this.seriesHeader.setType(this.types.get(m.group(1).trim().toLowerCase()));
+            else {
+                final ImportAnnotationType type = new ImportAnnotationType(m.group(1).trim());
+                this.types.put(m.group(1).trim().toLowerCase(), type);
+                this.seriesHeader.setType(type);
+            }
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaProvider() {
+        final Matcher m = META_PROVIDER.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_PROVIDER);
+            this.seriesHeader.setProvider(parseMetaInfo(m.group(1).trim()));
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaEvaluator() {
+        final Matcher m = META_EVALUATOR.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_EVALUATOR);
+            this.seriesHeader.setEvaluation_by(parseMetaInfo(m.group(1).trim()));
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaYear() {
+        final Matcher m = META_YEAR.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_YEAR);
+            this.seriesHeader.setYear(Integer.parseInt(m.group(1)));
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    protected boolean handleMetaColumnTitles() {
+        if (super.handleMetaColumnTitles()) {
+            for (int i = 1; i <= this.columnTitles.size() - 1; i++) {
+                if (HEIGHT_COLUMNTITLE.matcher(this.columnTitles.get(i)).matches())
+                    this.heightColIndex = i;
+                else if (BANK_COLUMNTITLE.matcher(this.columnTitles.get(i)).matches())
+                    this.bankColIndex = i;
+            }
+            if (this.bankColIndex < 0)
+                logWarning("Column of river side value could not be identified, missing column title 'Uferseite'");
+            if (this.heightColIndex < 0) {
+                logError("Column of height values could not be identified, missing column title 'Höhe...'");
+                this.headerParsingState = ParsingState.STOP;
+                return false;
+            }
+            if (!this.metaPatternsMatched.contains(META_TYPE) || !this.metaPatternsMatched.contains(META_YEAR)) {
+                logError("Required meta info for infrastructure type and/or year is missing");
+                this.headerParsingState = ParsingState.STOP;
+            }
+            return true;
+        }
+        else
+            return false;
+    }
+
+    @Override
+    protected InfrastructureKmLineImport createKmLineImport(final Double km, final String[] values) {
+        if (parseDoubleWithNull(values[this.heightColIndex]) == null) {
+            logError("Invalid height value in line " + this.in.getLineNumber());
+            return null;
+        }
+        if ((this.bankColIndex >= 0) && this.bankAttributes.containsKey(values[this.bankColIndex].trim().toLowerCase()))
+            return new InfrastructureKmLineImport(km, parseDoubleWithNull(values[this.heightColIndex]).doubleValue(),
+                    this.bankAttributes.get(values[this.bankColIndex].trim().toLowerCase()));
+        else {
+            logError("Invalid bank value in line " + this.in.getLineNumber());
+            return null;
+        }
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/SelectedAdditionalParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/SelectedAdditionalParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,168 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.parsers;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.Config;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.ImporterSession;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.importer.common.ImportParser;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.Wst;
+import org.hibernate.Query;
+import org.hibernate.SQLQuery;
+import org.hibernate.Session;
+
+/**
+ * Reads and parses a selected WST additionals link file
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class SelectedAdditionalParser implements ImportParser {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(SelectedAdditionalParser.class);
+
+    private static final String IMPORT_Q_FILENAME = "Mit_Abflussdaten.txt";
+
+    private static final String IMPORT_W_FILENAME = "Ohne_Abflussdaten.txt";
+
+    private enum SelectionType {
+        WITH_Q("Q", "with discharge"), //
+        WITHOUT_Q("W", "without discharge");
+
+        private final String key;
+        private final String logText;
+
+        SelectionType(final String key, final String logText) {
+            this.key = key;
+            this.logText = logText;
+        }
+
+        public String getKey() {
+            return this.key;
+        }
+
+        public String getLogText() {
+            return this.logText;
+        }
+    }
+
+    private final File importPath;
+
+    private final File rootRelativePath;
+
+    private final ImportRiver river;
+
+    private final SelectionType selectionType;
+
+    private final List<String> links;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public SelectedAdditionalParser(final File importPath, final File rootRelativePath, final ImportRiver river, final SelectionType selectionType) {
+        this.importPath = importPath;
+        this.rootRelativePath = rootRelativePath;
+        this.river = river;
+        this.selectionType = selectionType;
+        this.links = new ArrayList<>();
+    }
+
+
+    /***** METHODS *****/
+
+    /**
+     * Whether this import type shall be skipped
+     */
+    public static boolean shallSkip() {
+        return Config.INSTANCE.skipSInfoSelectedAdditional();
+    }
+
+    /**
+     * Creates a list of parsers for all selected additionals import files in a directory
+     */
+    public static List<SelectedAdditionalParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
+        final List<SelectedAdditionalParser> parsers = new ArrayList<>();
+        parsers.add(new SelectedAdditionalParser(new File(importDir, IMPORT_Q_FILENAME), new File(relativeDir, IMPORT_Q_FILENAME),
+                river, SelectionType.WITH_Q));
+        parsers.add(new SelectedAdditionalParser(new File(importDir, IMPORT_W_FILENAME), new File(relativeDir, IMPORT_W_FILENAME),
+                river, SelectionType.WITHOUT_Q));
+        return parsers;
+    }
+
+    @Override
+    public void parse() throws IOException {
+        this.links.clear();
+        log.info("Parse '... " + this.rootRelativePath + "'");
+        LineNumberReader in = null;
+        try {
+            in = new LineNumberReader(new InputStreamReader(new FileInputStream(this.importPath), AbstractParser.ENCODING));
+            String line;
+            while (true) {
+                line = in.readLine();
+                if (line == null)
+                    break;
+                if (!line.trim().isEmpty() && !line.trim().startsWith(AbstractParser.START_META_CHAR))
+                    this.links.add(line.trim());
+            }
+            log.info("Number of file links found: " + this.links.size());
+        }
+        finally {
+            if (in != null)
+                in.close();
+        }
+    }
+
+    @Override
+    public void store() {
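+        // First clear any previous selection of this type for the river's kind-1 wsts,
+        // then flag every wst whose description matches one of the listed file paths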
+        final Session session = ImporterSession.getInstance().getDatabaseSession();
+        final SQLQuery reset = session.createSQLQuery("UPDATE wsts SET sinfo_selection = NULL WHERE (river_id=:river_id) AND (kind=1)"
+                + " AND (sinfo_selection=:seltype)");
+        reset.setParameter("river_id", this.river.getPeer().getId());
+        reset.setParameter("seltype", this.selectionType.getKey());
+        reset.executeUpdate();
+        final Query query = session.createQuery("FROM Wst WHERE (river=:river) AND (kind=1) AND (lower(description) LIKE :path)");
+        query.setParameter("river", this.river);
+        int count = 0;
+        for (final String wstfile : this.links) {
+            count += updateWst(session, query, this.river.getPeer(), wstfile, this.selectionType);
+        }
+        log.info("Updated " + count + " wsts for selected additionals " + this.selectionType.getLogText());
+    }
+
+    private int updateWst(final Session session, final Query query, final River river, final String path, final SelectionType selectionType) {
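+        // The wst description is compared case-insensitively with path separators
+        // normalized to '_', e.g. (illustrative) "Zusaetzliche/file.wst" -> "zusaetzliche_file.wst"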
+        final String pathPattern = path.toLowerCase().replace('/', '_').replace('\\', '_');
+        query.setParameter("path", pathPattern);
+        final List<Wst> rows = query.list();
+        if (rows.isEmpty()) {
+            log.warn("Wst not found for description '" + path + "'" + ";" + this.rootRelativePath);
+            return 0;
+        } else {
+            final Wst wst = rows.get(0);
+            wst.setSInfoSelection(selectionType.getKey());
+            final Serializable id = session.save(wst);
+            return 1;
+        }
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/TkhColumnParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/TkhColumnParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,83 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.parsers;
+
+import java.io.File;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.importer.sinfo.importitem.TkhColumnSeriesImport;
+import org.dive4elements.river.importer.sinfo.importitem.TkhKmLineImport;
+import org.dive4elements.river.importer.sinfo.importitem.TkhSeriesImport;
+import org.dive4elements.river.model.sinfo.TkhColumn;
+import org.dive4elements.river.model.sinfo.TkhValue;
+
+/**
+ * Reads and parses a column of a TKH file
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class TkhColumnParser extends AbstractParser<TkhColumn, TkhValue, TkhKmLineImport, TkhColumnSeriesImport> {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(TkhColumnParser.class);
+
+    private final TkhSeriesImport parent;
+
+    private final int colIndex;
+
+    private final String colName;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public TkhColumnParser(final File importPath, final File rootRelativePath, final ImportRiver river, final TkhSeriesImport parent, final int colIndex,
+            final String colName) {
+        super(importPath, rootRelativePath, river);
+        this.parent = parent;
+        this.colIndex = colIndex;
+        this.colName = colName;
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    @Override
+    protected void logStartInfo() {
+        getLog().info(String.format("Start parsing column %d '%s':;'%s'", this.colIndex, this.colName, this.rootRelativePath));
+    }
+
+    @Override
+    protected boolean handleMetaOther() {
+        // Skip any header line
+        return true;
+    }
+
+    @Override
+    protected TkhColumnSeriesImport createSeriesImport(final String filename) {
+        return new TkhColumnSeriesImport(filename, this.parent, this.colName, this.rootRelativePath);
+    }
+
+    @Override
+    protected TkhKmLineImport createKmLineImport(final Double km, final String[] values) {
+        final Double value = parseDoubleWithNull(values[this.colIndex]);
+        if (value == null) {
+            logError("Invalid tkh value in line " + this.in.getLineNumber());
+            return null;
+        }
+        // convert from cm to m
+        final double tkheight = value.doubleValue() / 100.0;
+        return new TkhKmLineImport(km, tkheight);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/TkhParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/sinfo/parsers/TkhParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,221 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.sinfo.parsers;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.Config;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.importer.common.ParsingState;
+import org.dive4elements.river.importer.sinfo.importitem.TkhColumnSeriesImport;
+import org.dive4elements.river.importer.sinfo.importitem.TkhKmLineImport;
+import org.dive4elements.river.importer.sinfo.importitem.TkhSeriesImport;
+import org.dive4elements.river.model.sinfo.TkhColumn;
+import org.dive4elements.river.model.sinfo.TkhValue;
+
+/**
+ * Reads and parses the header of a TKH file and handles the parse and store of the columns
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class TkhParser extends AbstractParser<TkhColumn, TkhValue, TkhKmLineImport, TkhColumnSeriesImport> {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(TkhParser.class);
+
+    private static final Pattern META_YEAR = Pattern.compile("^#\\sBezugsjahr:\\s*([12]\\d\\d\\d).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_EVALUATOR = Pattern.compile("^#\\sAuswerter:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_SOUNDING = Pattern.compile("^#\\sPeilung:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern META_TYPE = Pattern.compile("^#\\sTyp:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private static final Pattern COLUMN_TITLE = Pattern.compile("Transportk((.)|(oe))rperh((.)|(oe))he\\s*\\((.+?)\\)\\s*\\[cm\\].*", Pattern.CASE_INSENSITIVE);
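+
+    // Illustrative header lines the above patterns are meant to match
+    // (sample values, not taken from a real TKH file):
+    //   "# Bezugsjahr: 2016"                -> META_YEAR, group(1) = "2016"
+    //   "Transportkoerperhoehe (GlW) [cm]"  -> COLUMN_TITLE, column name = "GlW"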
+
+    private final TkhSeriesImport tkhGroup;
+
+    private final List<TkhColumnParser> colParsers;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public TkhParser(final File importPath, final File rootRelativePath, final ImportRiver river) {
+        super(importPath, rootRelativePath, river);
+        this.tkhGroup = new TkhSeriesImport(importPath.getName().replaceAll("\\.csv", ""));
+        this.seriesHeader = new TkhColumnSeriesImport(this.tkhGroup.getFilename(), this.tkhGroup, null, null);
+        this.colParsers = new ArrayList<>();
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    /**
+     * Whether this import type shall be skipped
+     */
+    public static boolean shallSkip() {
+        return Config.INSTANCE.skipSInfoTkh();
+    }
+
+    /**
+     * Creates a list of parsers for all TKH import files in a directory
+     */
+    public static List<TkhParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
+        final List<TkhParser> parsers = new ArrayList<>();
+        for (final File file : listFiles(importDir, ".csv"))
+            parsers.add(new TkhParser(file, new File(relativeDir, file.getName()), river));
+        return parsers;
+    }
+
+    @Override
+    public void parse() throws IOException {
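+        // Only the file header (meta lines and column titles) is parsed here;
+        // the data lines are parsed afterwards by one TkhColumnParser per value column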
+        getLog().info("Start parsing:;'" + this.rootRelativePath + "'");
+        // this.seriesHeader = createSeriesImport(this.importPath.getName().replaceAll("\\.csv", ""));
+        this.metaPatternsMatched.clear();
+        this.kmExists.clear();
+        this.colParsers.clear();
+        this.headerParsingState = ParsingState.CONTINUE;
+        try {
+            try {
+                this.in = new LineNumberReader(new InputStreamReader(new FileInputStream(this.importPath), ENCODING));
+            }
+            catch (final Exception e) {
+                logError("Could not open (" + e.getMessage() + ")");
+                this.headerParsingState = ParsingState.STOP;
+            }
+            this.currentLine = null;
+            while (this.headerParsingState == ParsingState.CONTINUE) {
+                this.currentLine = this.in.readLine();
+                if (this.currentLine == null)
+                    break;
+                this.currentLine = this.currentLine.trim();
+                if (this.currentLine.isEmpty())
+                    continue;
+                handleMetaLine();
+            }
+        }
+        finally {
+            if (this.in != null) {
+                this.in.close();
+                this.in = null;
+            }
+        }
+        if (this.headerParsingState == ParsingState.STOP) {
+            logError("Parsing of the file stopped due to a severe error");
+            return;
+        }
+        for (final TkhColumnParser colParser : this.colParsers)
+            colParser.parse();
+    }
+
+    @Override
+    protected boolean handleMetaOther() {
+        if (handleMetaYear())
+            return true;
+        else if (handleMetaType())
+            return true;
+        else if (handleMetaSounding())
+            return true;
+        else if (handleMetaEvaluator())
+            return true;
+        else
+            return false;
+    }
+
+    private boolean handleMetaYear() {
+        final Matcher m = META_YEAR.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_YEAR);
+            this.tkhGroup.setYear(Integer.parseInt(m.group(1)));
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaType() {
+        final Matcher m = META_TYPE.matcher(this.currentLine);
+        return m.matches();
+    }
+
+    private boolean handleMetaSounding() {
+        final Matcher m = META_SOUNDING.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_SOUNDING);
+            this.tkhGroup.setSounding_info(parseMetaInfo(m.group(1).trim()));
+            return true;
+        }
+        return false;
+    }
+
+    private boolean handleMetaEvaluator() {
+        final Matcher m = META_EVALUATOR.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_EVALUATOR);
+            this.tkhGroup.setEvaluation_by(parseMetaInfo(m.group(1).trim()));
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    protected boolean handleMetaColumnTitles() {
+        if (!super.handleMetaColumnTitles())
+            return false;
+        this.tkhGroup.setKmrange_info(this.seriesHeader.getKmrange_info());
+        this.tkhGroup.setComment(this.seriesHeader.getComment());
+        for (int i = 1; i <= this.columnTitles.size() - 1; i++) {
+            final Matcher m = COLUMN_TITLE.matcher(this.columnTitles.get(i));
+            if (m.matches())
+                this.colParsers.add(new TkhColumnParser(this.importPath, this.rootRelativePath, this.river, this.tkhGroup, i, m.group(7).trim()));
+            else
+                logWarning("No title found in column " + i + ", skipped");
+        }
+        return true;
+    }
+
+    @Override
+    public void store() {
+        if (this.headerParsingState != ParsingState.STOP) {
+            this.tkhGroup.getPeer(this.river.getPeer());
+            for (final TkhColumnParser colParser : this.colParsers)
+                colParser.store();
+        }
+        else
+            logWarning("Severe parsing errors, not storing series '" + this.tkhGroup.getFilename() + "'");
+    }
+
+    @Override
+    protected TkhColumnSeriesImport createSeriesImport(final String filename) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    protected TkhKmLineImport createKmLineImport(final Double km, final String[] values) {
+        throw new UnsupportedOperationException();
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/uinfo/UInfoImporter.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/uinfo/UInfoImporter.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,112 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.uinfo;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.ImportParser;
+import org.dive4elements.river.importer.uinfo.parsers.SalixParser;
+
+/**
+ * Import all U-INFO files of a river from its import directory and subdirectories<br />
+ * <br />
+ * Requires river and its gauges to exist in the database already
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class UInfoImporter
+{
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(UInfoImporter.class);
+
+    private static final String UINFO_DIR = "Oekologie";
+
+    private enum UInfoDirName {
+        BASICS("Basisdaten"), //
+        SALIX("Salix-Linie_Fluss-Aue-Konnektivitaet" + File.separator + "Salix-Linie");
+
+        private final String dirname;
+
+        UInfoDirName(final String dirname) {
+            this.dirname = dirname;
+        }
+
+        public String getDir() {
+            return this.dirname;
+        }
+        public File getFile() {
+            return new File(getDir());
+        }
+
+        public File buildPath(final File rootDir) {
+            return new File(rootDir, getDir());
+        }
+    }
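+
+    // Example (illustrative): with rootDir ".../Oekologie", UInfoDirName.SALIX.buildPath(rootDir)
+    // yields ".../Oekologie/Salix-Linie_Fluss-Aue-Konnektivitaet/Salix-Linie" on a Unix-like system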
+
+    /**
+     * Active parsers for the river's U-INFO import files.
+     */
+    private final List<ImportParser> parsers;
+
+    /**
+     * Path of the U-INFO data directory of the river being imported.
+     */
+    private File rootDir;
+
+
+    /***** CONSTRUCTOR *****/
+
+    public UInfoImporter() {
+        this.parsers = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    /**
+     * Inits the parser list regarding the skip flags.
+     */
+    public void setup(final File riverDir, final ImportRiver river) {
+        this.rootDir = new File(riverDir, UINFO_DIR);
+        log.info("Parse U-INFO files from " + this.rootDir);
+        this.parsers.clear();
+        if (!SalixParser.shallSkip()) {
+            if (!this.parsers.addAll(SalixParser.createParsers(UInfoDirName.SALIX.buildPath(this.rootDir), UInfoDirName.SALIX.getFile(), river)))
+                log.info("Salix: no files found");
+        }
+        else {
+            log.info("Salix: skipped");
+        }
+    }
+
+    /**
+     * Imports the files according to the active parser list
+     */
+    public void parse() throws IOException {
+        for (final ImportParser parser : this.parsers)
+            parser.parse();
+    }
+
+    /**
+     * Stores all pending import objects
+     */
+    public void store() {
+        for (final ImportParser parser : this.parsers)
+            parser.store();
+    }
+
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/uinfo/importitem/SalixKmLineImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/uinfo/importitem/SalixKmLineImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,65 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.uinfo.importitem;
+
+import java.util.List;
+
+import org.dive4elements.river.importer.common.AbstractKmLineImport;
+import org.dive4elements.river.model.uinfo.Salix;
+import org.dive4elements.river.model.uinfo.SalixValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported salix values of a river station.
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class SalixKmLineImport extends AbstractKmLineImport<Salix, SalixValue> {
+
+    /***** FIELDS *****/
+
+    private final double factor;
+
+    private final double mnw_mw_diff;
+
+
+    /***** CONSTRUCTOR *****/
+
+    public SalixKmLineImport(final Double km, final double factor, final double mnw_mw_diff) {
+        super(km.doubleValue());
+        this.factor = factor;
+        this.mnw_mw_diff = mnw_mw_diff;
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    public SalixValue queryValueItem(final Session session, final Salix parent) {
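+        // Look up the value at this station within a small tolerance (±0.0001 km, i.e. ±0.1 m)
+        // so that rounding differences do not prevent a match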
+        final Query query = session.createQuery("FROM SalixValue WHERE (salix=:parent)"
+                + " AND (station BETWEEN (:station-0.0001) AND (:station+0.0001))");
+        query.setParameter("parent", parent);
+        query.setParameter("station", this.station);
+        final List rows = query.list();
+        if (!rows.isEmpty())
+            return (SalixValue) rows.get(0);
+        else
+            return null;
+    }
+
+
+    @Override
+    public SalixValue createValueItem(final Salix parent) {
+        return new SalixValue(parent, this.station, this.factor, this.mnw_mw_diff);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/uinfo/importitem/SalixSeriesImport.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/uinfo/importitem/SalixSeriesImport.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,69 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.uinfo.importitem;
+
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.common.AbstractSeriesImport;
+import org.dive4elements.river.model.River;
+import org.dive4elements.river.model.uinfo.Salix;
+import org.dive4elements.river.model.uinfo.SalixValue;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+/**
+ * Imported salix data series of a river
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class SalixSeriesImport extends AbstractSeriesImport<Salix, SalixValue, SalixKmLineImport> {
+
+    /***** FIELDS *****/
+
+    private static Logger log = Logger.getLogger(SalixSeriesImport.class);
+
+    private String evaluation_by;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public SalixSeriesImport(final String filename) {
+        super(filename);
+    }
+
+
+    /***** METHODS *****/
+
+    public void setEvaluationBy(final String evaluation_by) {
+        this.evaluation_by = evaluation_by;
+    }
+
+    @Override
+    public Logger getLog() {
+        return log;
+    }
+
+    @Override
+    public List<Salix> querySeriesItem(final Session session, final River river) {
+        final Query query = session.createQuery("FROM Salix WHERE river=:river AND lower(filename)=:filename");
+        query.setParameter("river", river);
+        query.setParameter("filename", this.filename.toLowerCase());
+        return query.list();
+    }
+
+
+    @Override
+    public Salix createSeriesItem(final River river) {
+        return new Salix(river, this.filename, this.kmrange_info, this.comment, this.evaluation_by);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/importer/uinfo/parsers/SalixParser.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/importer/uinfo/parsers/SalixParser.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,152 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.importer.uinfo.parsers;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.importer.Config;
+import org.dive4elements.river.importer.ImportRiver;
+import org.dive4elements.river.importer.common.AbstractParser;
+import org.dive4elements.river.importer.common.ParsingState;
+import org.dive4elements.river.importer.uinfo.importitem.SalixKmLineImport;
+import org.dive4elements.river.importer.uinfo.importitem.SalixSeriesImport;
+import org.dive4elements.river.model.uinfo.Salix;
+import org.dive4elements.river.model.uinfo.SalixValue;
+
+/**
+ * Reads and parses a salix file
+ *
+ * @author Matthias Schäfer
+ *
+ */
+public class SalixParser extends AbstractParser<Salix, SalixValue, SalixKmLineImport, SalixSeriesImport> {
+
+    /***** FIELDS *****/
+
+    private static final Logger log = Logger.getLogger(SalixParser.class);
+
+    private static final String IMPORT_FILENAME = "Salix-Linie.csv";
+
+    private static final Pattern META_EVALUATOR = Pattern.compile("^#\\sAuswerter:\\s*([^;]*).*", Pattern.CASE_INSENSITIVE);
+
+    private enum ColTitlePattern {
+        FACTOR("Salix.Fa.tor\\s*\\[(.*)\\].*"), //
+        MWMNW("\\(MW-MNW\\).*\\[(.*)\\].*");
+
+        private final Pattern pattern;
+
+        ColTitlePattern(final String regexp) {
+            this.pattern = Pattern.compile(regexp, Pattern.CASE_INSENSITIVE);
+        }
+
+        public Pattern getPattern() {
+            return this.pattern;
+        }
+    }
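+
+    // Illustrative column titles the patterns are meant to match (sample units):
+    //   "Salix-Faktor [m]"  -> FACTOR, unit group = "m"
+    //   "(MW-MNW) [m]"      -> MWMNW, unit group = "m"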
+
+    private final EnumMap<ColTitlePattern, Integer> cols = new EnumMap<>(ColTitlePattern.class);
+
+
+    /***** CONSTRUCTORS *****/
+
+    public SalixParser(final File importPath, final File rootRelativePath, final ImportRiver river) {
+        super(importPath, rootRelativePath, river);
+    }
+
+
+    /***** METHODS *****/
+
+    @Override
+    protected Logger getLog() {
+        return log;
+    }
+
+    /**
+     * Whether this import type shall be skipped
+     */
+    public static boolean shallSkip() {
+        return Config.INSTANCE.skipUInfoSalix();
+    }
+
+    /**
+     * Creates a list of parsers for all salix import files in a directory
+     */
+    public static List<SalixParser> createParsers(final File importDir, final File relativeDir, final ImportRiver river) {
+        final List<SalixParser> parsers = new ArrayList<>();
+        parsers.add(new SalixParser(new File(importDir, IMPORT_FILENAME), new File(relativeDir, IMPORT_FILENAME), river));
+        return parsers;
+    }
+
+    @Override
+    protected SalixSeriesImport createSeriesImport(final String filename) {
+        return new SalixSeriesImport(filename);
+    }
+
+    @Override
+    protected boolean handleMetaOther() {
+        if (handleMetaEvaluator())
+            return true;
+        else
+            return false;
+    }
+
+    private boolean handleMetaEvaluator() {
+        final Matcher m = META_EVALUATOR.matcher(this.currentLine);
+        if (m.matches()) {
+            this.metaPatternsMatched.add(META_EVALUATOR);
+            this.seriesHeader.setEvaluationBy(parseMetaInfo(m.group(1).trim()));
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    protected boolean handleMetaColumnTitles() {
+        if (!super.handleMetaColumnTitles())
+            return false;
+        for (final ColTitlePattern col : ColTitlePattern.values())
+            this.cols.put(col, -1);
+        for (int i = 1; i <= this.columnTitles.size() - 1; i++) {
+            for (final ColTitlePattern col : ColTitlePattern.values()) {
+                if (col.getPattern().matcher(this.columnTitles.get(i)).matches()) {
+                    this.cols.put(col, i);
+                    break;
+                }
+            }
+        }
+        if ((this.cols.get(ColTitlePattern.FACTOR) < 0) || (this.cols.get(ColTitlePattern.MWMNW) < 0)) {
+            logError("Column of the salix factor and/or mnw-mw-diff could not be identified");
+            this.headerParsingState = ParsingState.STOP;
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    protected SalixKmLineImport createKmLineImport(final Double km, final String[] values) {
+        final Double factor = parseDoubleWithNull(values[this.cols.get(ColTitlePattern.FACTOR)]);
+        if (factor == null) {
+            logError("Salix factor not found in line " + this.in.getLineNumber());
+            return null;
+        }
+        final Double mwMnwDiff = parseDoubleWithNull(values[this.cols.get(ColTitlePattern.MWMNW)]);
+        if (mwMnwDiff == null) {
+            logError("MNW-MW-diff not found in line " + this.in.getLineNumber());
+            return null;
+        }
+        return new SalixKmLineImport(km, factor.doubleValue(), mwMnwDiff.doubleValue());
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/River.java
--- a/backend/src/main/java/org/dive4elements/river/model/River.java	Tue Apr 03 10:02:01 2018 +0200
+++ b/backend/src/main/java/org/dive4elements/river/model/River.java	Tue Apr 03 10:18:30 2018 +0200
@@ -8,8 +8,6 @@
 
 package org.dive4elements.river.model;
 
-import org.dive4elements.river.backend.SessionHolder;
-
 import java.io.Serializable;
 import java.math.BigDecimal;
 import java.math.MathContext;
@@ -29,12 +27,12 @@
 import javax.persistence.SequenceGenerator;
 import javax.persistence.Table;
 
+import org.apache.log4j.Logger;
+import org.dive4elements.river.backend.SessionHolder;
 import org.hibernate.Query;
 import org.hibernate.Session;
 import org.hibernate.annotations.Type;
 
-import org.apache.log4j.Logger;
-
 @Entity
 @Table(name = "rivers")
 public class River
@@ -51,8 +49,8 @@
 
     public static final Comparator<Double> KM_CMP = new Comparator<Double>() {
         @Override
-        public int compare(Double a, Double b) {
-            double diff = a - b;
+        public int compare(final Double a, final Double b) {
+            final double diff = a - b;
             if (diff < -EPSILON) return -1;
             if (diff >  EPSILON) return +1;
             return 0;
@@ -77,46 +75,46 @@
 
     @Id
     @SequenceGenerator(
-        name           = "SEQUENCE_RIVERS_ID_SEQ",
-        sequenceName   = "RIVERS_ID_SEQ",
-        allocationSize = 1)
+            name           = "SEQUENCE_RIVERS_ID_SEQ",
+            sequenceName   = "RIVERS_ID_SEQ",
+            allocationSize = 1)
     @GeneratedValue(
-        strategy  = GenerationType.SEQUENCE,
-        generator = "SEQUENCE_RIVERS_ID_SEQ")
+            strategy  = GenerationType.SEQUENCE,
+            generator = "SEQUENCE_RIVERS_ID_SEQ")
     @Column(name = "id")
     public Integer getId() {
-        return id;
+        return this.id;
     }
 
-    public void setId(Integer id) {
+    public void setId(final Integer id) {
         this.id = id;
     }
 
     @Column(name = "official_number")
     public Long getOfficialNumber() {
-        return officialNumber;
+        return this.officialNumber;
     }
 
-    public void setOfficialNumber(Long officialNumber) {
+    public void setOfficialNumber(final Long officialNumber) {
         this.officialNumber = officialNumber;
     }
 
     @Column(name = "name")
     public String getName() {
-        return name;
+        return this.name;
     }
 
-    public void setName(String name) {
+    public void setName(final String name) {
         this.name = name;
     }
 
     @Type(type="numeric_boolean")
     @Column(name = "km_up")
     public boolean getKmUp() {
-        return kmUp;
+        return this.kmUp;
     }
 
-    public void setKmUp(boolean kmUp) {
+    public void setKmUp(final boolean kmUp) {
         this.kmUp = kmUp;
     }
 
@@ -125,14 +123,14 @@
         return this.modelUuid;
     }
 
-    public void setModelUuid(String modelUuid) {
+    public void setModelUuid(final String modelUuid) {
         this.modelUuid = modelUuid;
     }
 
     public River() {
     }
 
-    public River(String name, Unit wstUnit, String modelUuid) {
+    public River(final String name, final Unit wstUnit, final String modelUuid) {
         this.name      = name;
         this.modelUuid = modelUuid;
         this.wstUnit   = wstUnit;
@@ -141,20 +139,20 @@
     @OneToMany
     @JoinColumn(name="river_id")
     public List<Gauge> getGauges() {
-        return gauges;
+        return this.gauges;
     }
 
-    public void setGauges(List<Gauge> gauges) {
+    public void setGauges(final List<Gauge> gauges) {
         this.gauges = gauges;
     }
 
     @OneToOne
     @JoinColumn(name = "wst_unit_id" )
     public Unit getWstUnit() {
-        return wstUnit;
+        return this.wstUnit;
     }
 
-    public void setWstUnit(Unit wstUnit) {
+    public void setWstUnit(final Unit wstUnit) {
         this.wstUnit = wstUnit;
     }
 
@@ -168,7 +166,7 @@
      * @return The name River in the seddb.
      */
     public String nameForSeddb() {
-        SeddbName alt = getSeddbName();
+        final SeddbName alt = getSeddbName();
         if (alt == null) {
             return getName();
         }
@@ -179,16 +177,16 @@
     @OneToOne
     @JoinColumn(name = "seddb_name_id" )
     public SeddbName getSeddbName() {
-        return seddbName;
+        return this.seddbName;
     }
 
-    public void setSeddbName(SeddbName name) {
+    public void setSeddbName(final SeddbName name) {
         this.seddbName = name;
     }
 
     @Override
     public String toString() {
-        return name != null ? name : "";
+        return this.name != null ? this.name : "";
     }
 
 
@@ -202,16 +200,16 @@
      * @return the intersecting gauges.
      */
     public List<Gauge> determineGauges(double a, double b) {
-        Session session = SessionHolder.HOLDER.get();
+        final Session session = SessionHolder.HOLDER.get();
 
-        if (a > b) { double t = a; a = b; b = t; }
+        if (a > b) { final double t = a; a = b; b = t; }
 
-        Query query = session.createQuery(
-            "from Gauge where river=:river " +
-            "and not " +
-            "((:b < least(range.a, range.b)) or" +
-            " (:a > greatest(range.a, range.b)))" +
-            "order by a");
+        final Query query = session.createQuery(
+                "from Gauge where river=:river " +
+                        "and not " +
+                        "((:b < least(range.a, range.b)) or" +
+                        " (:a > greatest(range.a, range.b)))" +
+                "order by a");
         query.setParameter("river", this);
         query.setParameter("a", new BigDecimal(a, PRECISION));
         query.setParameter("b", new BigDecimal(b, PRECISION));
@@ -220,28 +218,28 @@
     }
 
     public Gauge maxOverlap(double a, double b) {
-        List<Gauge> gauges = determineGauges(a, b);
+        final List<Gauge> gauges = determineGauges(a, b);
         if (gauges == null) {
             return null;
         }
 
-        if (a > b) { double t = a; a = b; b = t; }
+        if (a > b) { final double t = a; a = b; b = t; }
 
         double max = -Double.MAX_VALUE;
 
         Gauge result = null;
 
-        for (Gauge gauge: gauges) {
-            Range  r = gauge.getRange();
+        for (final Gauge gauge: gauges) {
+            final Range  r = gauge.getRange();
             double c = r.getA().doubleValue();
             double d = r.getB().doubleValue();
 
-            if (c > d) { double t = c; c = d; d = t; }
+            if (c > d) { final double t = c; c = d; d = t; }
 
-            double start = c >= a ? c : a;
-            double stop  = d <= b ? d : b;
+            final double start = c >= a ? c : a;
+            final double stop  = d <= b ? d : b;
 
-            double length = stop - start;
+            final double length = stop - start;
 
             if (length > max) {
                 max = length;
@@ -252,17 +250,17 @@
         return result;
     }
 
-    public Gauge determineGaugeByName(String name) {
-        Session session = SessionHolder.HOLDER.get();
-        Query query = session.createQuery(
-            "from Gauge where river=:river and name=:name");
+    public Gauge determineGaugeByName(final String name) {
+        final Session session = SessionHolder.HOLDER.get();
+        final Query query = session.createQuery(
+                "from Gauge where river=:river and name=:name");
         query.setParameter("river", this);
         query.setParameter("name", name);
-        List<Gauge> gauges = query.list();
+        final List<Gauge> gauges = query.list();
         return gauges.isEmpty() ? null : gauges.get(0);
     }
 
-    public Gauge determineGaugeByPosition(double p) {
+    public Gauge determineGaugeByPosition(final double p) {
         // Per default, we prefer the gauge downstream
         return determineGaugeByPosition(p, getKmUp());
     }
@@ -272,16 +270,16 @@
      * @param kmLower At boundary of two gauge ranges, should gauge at lower
      * km be returned?
      */
-    public Gauge determineGaugeByPosition(double p, boolean kmLower) {
-        Session session = SessionHolder.HOLDER.get();
-        Query query = session.createQuery(
-            "from Gauge g where river=:river "  +
-            "and :p between " +
-            "least(g.range.a, g.range.b) and " +
-            "greatest(g.range.a, g.range.b)");
+    public Gauge determineGaugeByPosition(final double p, final boolean kmLower) {
+        final Session session = SessionHolder.HOLDER.get();
+        final Query query = session.createQuery(
+                "from Gauge g where river=:river "  +
+                        "and :p between " +
+                        "least(g.range.a, g.range.b) and " +
+                "greatest(g.range.a, g.range.b)");
         query.setParameter("river", this);
         query.setParameter("p", new BigDecimal(p, PRECISION));
-        List<Gauge> gauges = query.list();
+        final List<Gauge> gauges = query.list();
         if (gauges.isEmpty()) {
             return null;
         }
@@ -291,19 +289,19 @@
         if (gauges.size() > 2) {
             // TODO: database schema should prevent this.
             log.warn("More than two gauge ranges overlap km " + p +
-                ". Returning arbitrary result.");
+                    ". Returning arbitrary result.");
         }
-        Gauge g0 = gauges.get(0);
-        Gauge g1 = gauges.get(1);
+        final Gauge g0 = gauges.get(0);
+        final Gauge g1 = gauges.get(1);
         if (kmLower) {
             return
-                g0.getStation().doubleValue() < g1.getStation().doubleValue()
-                ? g0
-                : g1;
+                    g0.getStation().doubleValue() < g1.getStation().doubleValue()
+                    ? g0
+                            : g1;
         }
         return g0.getStation().doubleValue() > g1.getStation().doubleValue()
-            ? g0
-            : g1;
+                ? g0
+                        : g1;
     }
 
 
@@ -311,46 +309,46 @@
      * @param s station at which the gauge is requested.
      * @return Gauge within tolerance at given station. null if there is none.
      */
-    public Gauge determineGaugeAtStation(double s) {
-        Session session = SessionHolder.HOLDER.get();
+    public Gauge determineGaugeAtStation(final double s) {
+        final Session session = SessionHolder.HOLDER.get();
 
-        Query query = session.createQuery(
-            "from Gauge where river.id=:river " +
-            "and station between :a and :b");
+        final Query query = session.createQuery(
+                "from Gauge where river.id=:river " +
+                "and station between :a and :b");
         query.setParameter("river", getId());
         query.setParameter("a", new BigDecimal(s - GAUGE_EPSILON));
         query.setParameter("b", new BigDecimal(s + GAUGE_EPSILON));
 
-        List<Gauge> gauges = query.list();
+        final List<Gauge> gauges = query.list();
         if (gauges.size() > 1) {
             log.warn("More than one gauge found at km " + s +
-                " within +-" + GAUGE_EPSILON +
-                ". Returning arbitrary result.");
+                    " within +-" + GAUGE_EPSILON +
+                    ". Returning arbitrary result.");
         }
         return gauges.isEmpty() ? null : gauges.get(0);
     }
 
     public double[] determineMinMaxQ() {
-        Session session = SessionHolder.HOLDER.get();
+        final Session session = SessionHolder.HOLDER.get();
 
-        Query query = session.createQuery(
-            "select min(wqr.q) as min, max(wqr.q) as max " +
-            "from Wst as w " +
-            "join w.columns as wc " +
-            "join wc.columnQRanges as wcqr " +
-            "join wcqr.wstQRange as wqr " +
-            "where w.kind = 0 and river_id = :river");
+        final Query query = session.createQuery(
+                "select min(wqr.q) as min, max(wqr.q) as max " +
+                        "from Wst as w " +
+                        "join w.columns as wc " +
+                        "join wc.columnQRanges as wcqr " +
+                        "join wcqr.wstQRange as wqr " +
+                "where w.kind = 0 and river_id = :river");
 
         query.setParameter("river", getId());
 
-        double minmax[] = new double[] { Double.MAX_VALUE, -Double.MAX_VALUE };
+        final double minmax[] = new double[] { Double.MAX_VALUE, -Double.MAX_VALUE };
 
-        List<Object> results = query.list();
+        final List<Object> results = query.list();
 
         if (!results.isEmpty()) {
-            Object[] arr = (Object[]) results.get(0);
-            BigDecimal minq = (BigDecimal)arr[0];
-            BigDecimal maxq = (BigDecimal)arr[1];
+            final Object[] arr = (Object[]) results.get(0);
+            final BigDecimal minq = (BigDecimal)arr[0];
+            final BigDecimal maxq = (BigDecimal)arr[1];
             minmax[0] = minq.doubleValue();
             minmax[1] = maxq.doubleValue();
         }
@@ -362,11 +360,11 @@
      * Determine reference gauge dependent on direction of calculation
      * for a range calculation, otherwise dependent on flow direction.
      */
-    public Gauge determineRefGauge(double[] range, boolean isRange) {
+    public Gauge determineRefGauge(final double[] range, final boolean isRange) {
         if (isRange) {
             return determineGaugeByPosition(
-                range[0],
-                range[0] > range[1]);
+                    range[0],
+                    range[0] > range[1]);
         }
         else {
             return determineGaugeByPosition(range[0]);
@@ -381,21 +379,21 @@
      * @return the min and max distance of this river.
      */
     public double[] determineMinMaxDistance() {
-        Session session = SessionHolder.HOLDER.get();
+        final Session session = SessionHolder.HOLDER.get();
 
-        Query query = session.createQuery(
-            "select min(range.a), max(range.b) from Gauge "
-            + "where river=:river "
-            + "and range is not null");
+        final Query query = session.createQuery(
+                "select min(range.a), max(range.b) from Gauge "
+                        + "where river=:river "
+                        + "and range is not null");
         query.setParameter("river", this);
 
-        List<Object[]> result = query.list();
+        final List<Object[]> result = query.list();
 
         if (!result.isEmpty()) {
-            Object[] minMax = result.get(0);
+            final Object[] minMax = result.get(0);
             if (minMax[0] != null && minMax[1] != null) {
                 return new double[] { ((BigDecimal)minMax[0]).doubleValue(),
-                    ((BigDecimal)minMax[1]).doubleValue() };
+                        ((BigDecimal)minMax[1]).doubleValue() };
             }
         }
 
@@ -403,12 +401,12 @@
     }
 
     public Map<Double, Double> queryGaugeDatumsKMs() {
-        List<Gauge> gauges = getGauges();
-        Map<Double, Double> result = new TreeMap<Double, Double>(KM_CMP);
+        final List<Gauge> gauges = getGauges();
+        final Map<Double, Double> result = new TreeMap<>(KM_CMP);
 
-        for (Gauge gauge: gauges) {
-            BigDecimal km    = gauge.getStation();
-            BigDecimal datum = gauge.getDatum();
+        for (final Gauge gauge: gauges) {
+            final BigDecimal km    = gauge.getStation();
+            final BigDecimal datum = gauge.getDatum();
             if (km != null && datum != null) {
                 result.put(km.doubleValue(), datum.doubleValue());
             }
@@ -417,5 +415,17 @@
         return result;
     }
 
+    /**
+     * Searches the river's gauges for one with the given official number or, failing that, the given name; returns null if none matches.
+     */
+    public Gauge findGauge(final long number, final String name) {
+        for (final Gauge gauge : getGauges()) {
+            if ((gauge.getOfficialNumber() != null) && (gauge.getOfficialNumber().longValue() == number))
+                return gauge;
+            if ((name != null) && name.equalsIgnoreCase(gauge.getName()))
+                return gauge;
+        }
+        return null;
+    }
 }
 // vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
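Illustrative sketch (not part of the applied diff): how importer code might use
the new River.findGauge() helper; the gauge number, the gauge name and the
surrounding "river" and "log" variables are placeholders.

    // The official gauge number is checked before the name; null means no match.
    final Gauge gauge = river.findGauge(9999999L, "Beispielpegel");
    if (gauge == null) {
        log.warn("No matching gauge found, skipping the series.");
    }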
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/BedMobility.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/BedMobility.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,137 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+import org.dive4elements.river.model.River;
+
+/**
+ * Hibernate binding for the DB table bed_mobility
+ *
+ * @author Matthias Schäfer
+ *
+ */
+
+
+@Entity
+@Table(name = "bed_mobility")
+public class BedMobility implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -2315936652180822551L;
+
+    private Integer id;
+
+    private String kmrange_info;
+
+    private String filename;
+
+    private String comment;
+
+    private River river;
+
+    private List<BedMobilityValue> values;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public BedMobility() {
+    }
+
+
+    public BedMobility(final River river, final String filename, final String kmrange_info, final String comment) {
+        this.river = river;
+        this.filename = filename;
+        this.kmrange_info = kmrange_info;
+        this.comment = comment;
+        this.values = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_BED_MOBILITY_ID_SEQ", sequenceName = "BED_MOBILITY_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_BED_MOBILITY_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return this.river;
+    }
+
+    public void setRiver(final River river) {
+        this.river = river;
+    }
+
+    @Column(name = "filename")
+    public String getFilename() {
+        return this.filename;
+    }
+
+    public void setFilename(final String filename) {
+        this.filename = filename;
+    }
+
+    @Column(name = "kmrange_info")
+    public String getKmrange_info() {
+        return this.kmrange_info;
+    }
+
+    public void setKmrange_info(final String kmrange_info) {
+        this.kmrange_info = kmrange_info;
+    }
+
+    @Column(name = "comment")
+    public String getComment() {
+        return this.comment;
+    }
+
+    public void setComment(final String comment) {
+        this.comment = comment;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "bed_mobility_id")
+    public List<BedMobilityValue> getValues() {
+        return this.values;
+    }
+
+    public void setValues(final List<BedMobilityValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(final BedMobilityValue value) {
+        this.values.add(value);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/BedMobilityValue.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/BedMobilityValue.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,109 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+
+/**
+ * Hibernate binding for the DB table bed_mobility_values
+ *
+ * @author Matthias Schäfer
+ *
+ */
+@Entity
+@Table(name = "bed_mobility_values")
+public class BedMobilityValue implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = 1784382276688203055L;
+
+    private Integer id;
+
+    private BedMobility bedMobility;
+
+    private Double station;
+
+    private int moving; /* 0 or 1 */
+
+
+    /***** CONSTRUCTORS *****/
+
+    public BedMobilityValue() {
+    }
+
+    public BedMobilityValue(final BedMobility bedMobility, final Double station, final int bedMoving) {
+        this.bedMobility = bedMobility;
+        this.station = station;
+        this.moving = bedMoving;
+    }
+
+    /**
+     * Parameter constructor with primitive double km and boolean
+     */
+    public BedMobilityValue(final BedMobility bedMobility, final double km, final boolean bedMoving) {
+        this(bedMobility, Double.valueOf(km), (bedMoving ? 1 : 0));
+    }
+
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_BED_MOBILITY_VALUE_ID_SEQ", sequenceName = "BED_MOBILITY_VALUES_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_BED_MOBILITY_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "bed_mobility_id")
+    public BedMobility getBedMobility() {
+        return this.bedMobility;
+    }
+
+    public void setBedMobility(final BedMobility bedMobility) {
+        this.bedMobility = bedMobility;
+    }
+
+    @Column(name = "station")
+    public Double getStation() {
+        return this.station;
+    }
+
+    public void setStation(final Double station) {
+        this.station = station;
+    }
+
+    @Column(name = "moving")
+    public int getMoving() {
+        return this.moving;
+    }
+
+    public void setMoving(final int bedMoving) {
+        this.moving = bedMoving;
+    }
+}
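Illustrative sketch (not part of the applied diff): persisting a bed mobility
series with these bindings, assuming an open Hibernate session as elsewhere in
the backend; file name, km range and values are placeholders. Because the
@OneToMany mapping declares no cascade, each value is saved explicitly.

    final Session session = SessionHolder.HOLDER.get();
    final BedMobility series = new BedMobility(river, "Beweglichkeit.csv", "0,0 - 10,0", null);
    series.addValue(new BedMobilityValue(series, 1.5, true));
    session.save(series);
    for (final BedMobilityValue value : series.getValues())
        session.save(value);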
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/Channel.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/Channel.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,160 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+import org.dive4elements.river.model.River;
+
+/**
+ * Hibernate binding for the DB table channel
+ *
+ * @author Matthias Schäfer
+ *
+ */
+
+
+@Entity
+@Table(name = "channel")
+public class Channel implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = 4845405421049630551L;
+
+    private Integer id;
+
+    private River river;
+
+    private String kmrange_info;
+
+    private String filename;
+
+    private String comment;
+
+    private Integer year_from;
+
+    private Integer year_to;
+
+    private List<ChannelValue> values;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public Channel() {
+    }
+
+    public Channel(final River river, final String filename, final String kmrange_info, final String comment, final Integer year_from, final Integer year_to) {
+        this.river = river;
+        this.filename = filename;
+        this.kmrange_info = kmrange_info;
+        this.comment = comment;
+        this.setYear_from(year_from);
+        this.setYear_to(year_to);
+        this.values = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_CHANNEL_ID_SEQ", sequenceName = "CHANNEL_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_CHANNEL_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return this.river;
+    }
+
+    public void setRiver(final River river) {
+        this.river = river;
+    }
+
+    @Column(name = "filename")
+    public String getFilename() {
+        return this.filename;
+    }
+
+    public void setFilename(final String filename) {
+        this.filename = filename;
+    }
+
+    @Column(name = "kmrange_info")
+    public String getKmrange_info() {
+        return this.kmrange_info;
+    }
+
+    public void setKmrange_info(final String kmrange_info) {
+        this.kmrange_info = kmrange_info;
+    }
+
+    @Column(name = "comment")
+    public String getComment() {
+        return this.comment;
+    }
+
+    public void setComment(final String comment) {
+        this.comment = comment;
+    }
+
+    @Column(name = "year_from")
+    public Integer getYear_from() {
+        return this.year_from;
+    }
+
+    public void setYear_from(final Integer year_from) {
+        this.year_from = year_from;
+    }
+
+    @Column(name = "year_to")
+    public Integer getYear_to() {
+        return this.year_to;
+    }
+
+    public void setYear_to(final Integer year_to) {
+        this.year_to = year_to;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "channel_id")
+    public List<ChannelValue> getValues() {
+        return this.values;
+    }
+
+    public void setValues(final List<ChannelValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(final ChannelValue value) {
+        this.values.add(value);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/ChannelValue.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/ChannelValue.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,121 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+
+/**
+ * Hibernate binding for the DB table channel_values
+ *
+ * @author Matthias Schäfer
+ *
+ */
+@Entity
+@Table(name = "channel_values")
+public class ChannelValue implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -3793243094875530557L;
+
+    private Integer id;
+
+    private Channel channel;
+
+    private Double station;
+
+    private Double width;
+
+    private Double depth;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public ChannelValue() {
+    }
+
+    public ChannelValue(final Channel channel, final Double station, final Double width, final Double depth) {
+        this.channel = channel;
+        this.station = station;
+        this.width = width;
+        this.depth = depth;
+    }
+
+    /**
+     * Parameter constructor with primitive parameter types
+     */
+    public ChannelValue(final Channel channel, final double km, final double width, final double depth) {
+        this(channel, Double.valueOf(km), Double.valueOf(width), Double.valueOf(depth));
+    }
+
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_CHANNEL_VALUE_ID_SEQ", sequenceName = "CHANNEL_VALUES_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_CHANNEL_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "channel_id")
+    public Channel getChannel() {
+        return this.channel;
+    }
+
+    public void setChannel(final Channel channel) {
+        this.channel = channel;
+    }
+
+    @Column(name = "station")
+    public Double getStation() {
+        return this.station;
+    }
+
+    public void setStation(final Double station) {
+        this.station = station;
+    }
+
+    @Column(name = "width")
+    public Double getWidth() {
+        return this.width;
+    }
+
+    public void setWidth(final Double width) {
+        this.width = width;
+    }
+
+    @Column(name = "depth")
+    public Double getDepth() {
+        return this.depth;
+    }
+
+    public void setDepth(final Double depth) {
+        this.depth = depth;
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/Collision.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/Collision.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,148 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+import org.dive4elements.river.model.River;
+
+/**
+ * Hibernate binding for the DB table collision
+ *
+ * @author Matthias Schäfer
+ *
+ */
+
+@Entity
+@Table(name = "collision")
+public class Collision implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -6950745885816251322L;
+
+    private Integer id;
+
+    private River river;
+
+    private String filename;
+
+    private String kmrange_info;
+
+    private String comment;
+
+    private Integer year;
+
+    private List<CollisionValue> values;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public Collision() {
+    }
+
+
+    public Collision(final River river, final String filename, final String kmrange_info, final String comment, final Integer year) {
+        this.river = river;
+        this.filename = filename;
+        this.kmrange_info = kmrange_info;
+        this.comment = comment;
+        this.year = year;
+        this.values = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_COLLISION_ID_SEQ", sequenceName = "COLLISION_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_COLLISION_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return this.river;
+    }
+
+    public void setRiver(final River river) {
+        this.river = river;
+    }
+
+    @Column(name = "filename")
+    public String getFilename() {
+        return this.filename;
+    }
+
+    public void setFilename(final String filename) {
+        this.filename = filename;
+    }
+
+    @Column(name = "kmrange_info")
+    public String getKmrange_info() {
+        return this.kmrange_info;
+    }
+
+    public void setKmrange_info(final String kmrange_info) {
+        this.kmrange_info = kmrange_info;
+    }
+
+    @Column(name = "comment")
+    public String getComment() {
+        return this.comment;
+    }
+
+    public void setComment(final String comment) {
+        this.comment = comment;
+    }
+
+    @Column(name = "year")
+    public Integer getYear() {
+        return this.year;
+    }
+
+    public void setYear(final Integer year) {
+        this.year = year;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "collision_id")
+    public List<CollisionValue> getValues() {
+        return this.values;
+    }
+
+    public void setValues(final List<CollisionValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(final CollisionValue value) {
+        this.values.add(value);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/CollisionType.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/CollisionType.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,82 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+import org.dive4elements.river.backend.SessionHolder;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
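+/**
+ * Hibernate binding for the DB table collision_type
+ *
+ * @author Matthias Schäfer
+ *
+ */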
+@Entity
+@Table(name = "collision_type")
+public class CollisionType implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -865783384903507910L;
+
+    private Integer id;
+
+    private String  name;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public CollisionType() {
+    }
+
+    public CollisionType(final String name) {
+        this.name = name;
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_COLLISION_TYPE_ID_SEQ", sequenceName = "COLLISION_TYPE_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_COLLISION_TYPE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(final String name) {
+        this.name = name;
+    }
+
+    /**
+     * Queries all collision types from the database, ordered by name.
+     */
+    public static List<CollisionType> getTypes() {
+        final Session session = SessionHolder.HOLDER.get();
+        final Query query = session.createQuery("FROM CollisionType ORDER BY name");
+        return new ArrayList<>(query.list());
+    }
+}
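Illustrative sketch (not part of the applied diff): resolving a collision type
by name and creating it on demand via the static getTypes() helper; the
"typeName" variable is a placeholder.

    CollisionType type = null;
    for (final CollisionType candidate : CollisionType.getTypes()) {
        if (candidate.getName().equalsIgnoreCase(typeName)) {
            type = candidate;
            break;
        }
    }
    if (type == null) {
        type = new CollisionType(typeName);
        SessionHolder.HOLDER.get().save(type);
    }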
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/CollisionValue.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/CollisionValue.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,149 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.Date;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+
+/**
+ * Hibernate binding for the DB table collision_values
+ *
+ * @author Matthias Schäfer
+ *
+ */
+@Entity
+@Table(name = "collision_values")
+public class CollisionValue implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -1157324854470346513L;
+
+    private Integer id;
+
+    private Collision collision;
+
+    private Double station;
+
+    private CollisionType collisionType;
+
+    private Date eventDate;
+
+    private String gaugeName;
+
+    private Double gaugeW;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public CollisionValue() {
+    }
+
+    public CollisionValue(final Collision collision, final Double station, final CollisionType collisionType, final Date eventDate, final String gaugeName,
+            final Double gaugeW) {
+        this.collision = collision;
+        this.station = station;
+        this.collisionType = collisionType;
+        this.eventDate = eventDate;
+        this.gaugeName = gaugeName;
+        this.gaugeW = gaugeW;
+    }
+
+    /**
+     * Constructor with primitive parameter types
+     */
+    public CollisionValue(final Collision collision, final double km, final CollisionType collisionType, final Date eventDate, final String gaugeName,
+            final double gaugeW) {
+        this(collision, Double.valueOf(km), collisionType, eventDate, gaugeName, Double.valueOf(gaugeW));
+    }
+
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_COLLISION_VALUE_ID_SEQ", sequenceName = "COLLISION_VALUES_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_COLLISION_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "collision_id")
+    public Collision getCollision() {
+        return this.collision;
+    }
+
+    public void setCollision(final Collision collision) {
+        this.collision = collision;
+    }
+
+    @Column(name = "station")
+    public Double getStation() {
+        return this.station;
+    }
+
+    public void setStation(final Double station) {
+        this.station = station;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "collision_type_id")
+    public CollisionType getCollisionType() {
+        return this.collisionType;
+    }
+
+    public void setCollisionType(final CollisionType collisionType) {
+        this.collisionType = collisionType;
+    }
+
+    @Column(name = "event_date")
+    public Date getEventDate() {
+        return this.eventDate;
+    }
+
+    public void setEventDate(final Date eventDate) {
+        this.eventDate = eventDate;
+    }
+
+    @Column(name = "gauge_name")
+    public String getGaugeName() {
+        return this.gaugeName;
+    }
+
+    public void setGaugeName(final String gaugeName) {
+        this.gaugeName = gaugeName;
+    }
+
+    @Column(name = "gauge_w")
+    public Double getGaugeW() {
+        return this.gaugeW;
+    }
+
+    public void setGaugeW(final Double gaugeW) {
+        this.gaugeW = gaugeW;
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/DailyDischarge.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/DailyDischarge.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,147 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+import org.dive4elements.river.model.Gauge;
+
+/**
+ * Hibernate binding for the DB table daily_discharge
+ *
+ * @author Matthias Schäfer
+ *
+ */
+
+
+@Entity
+@Table(name = "daily_discharge")
+public class DailyDischarge implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -3687154040155547884L;
+
+    private Integer id;
+
+    // private String kmrange_info;
+
+    private String filename;
+
+    // private String comment;
+
+    private Gauge gauge;
+
+    private List<DailyDischargeValue> values;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public DailyDischarge() {
+    }
+
+
+    public DailyDischarge(final Gauge gauge, final String filename, final String comment) {
+        this.gauge = gauge;
+        // this.river = river;
+        this.filename = filename;
+        // this.comment = comment;
+        this.values = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_DAILY_DISCHARGE_ID_SEQ", sequenceName = "DAILY_DISCHARGE_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_DAILY_DISCHARGE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    // @OneToOne
+    // @JoinColumn(name = "river_id")
+    // public River getRiver() {
+    // return this.river;
+    // }
+
+    // public void setRiver(final River river) {
+    // this.river = river;
+    // }
+
+    @OneToOne
+    @JoinColumn(name = "gauge_id")
+    public Gauge getGauge() {
+        return this.gauge;
+    }
+
+    public void setGauge(final Gauge gauge) {
+        this.gauge = gauge;
+    }
+
+    @Column(name = "filename")
+    public String getFilename() {
+        return this.filename;
+    }
+
+    public void setFilename(final String filename) {
+        this.filename = filename;
+    }
+
+    // @Column(name = "kmrange_info")
+    // public String getKmrange_info() {
+    // return this.kmrange_info;
+    // }
+
+    // public void setKmrange_info(final String kmrange_info) {
+    // this.kmrange_info = kmrange_info;
+    // }
+
+    // @Column(name = "comment")
+    // public String getComment() {
+    // return this.comment;
+    // }
+
+    // public void setComment(final String comment) {
+    // this.comment = comment;
+    // }
+
+    @OneToMany
+    @JoinColumn(name = "daily_discharge_id")
+    public List<DailyDischargeValue> getValues() {
+        return this.values;
+    }
+
+    public void setValues(final List<DailyDischargeValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(final DailyDischargeValue value) {
+        this.values.add(value);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/DailyDischargeValue.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/DailyDischargeValue.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,110 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.Date;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+
+/**
+ * Hibernate binding for the DB table daily_discharge_values
+ *
+ * @author Matthias Schäfer
+ *
+ */
+@Entity
+@Table(name = "daily_discharge_values")
+public class DailyDischargeValue implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -6192738825193230784L;
+
+    private Integer id;
+
+    private DailyDischarge dailyDischarge;
+
+    private Date day;
+
+    private Double discharge;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public DailyDischargeValue() {
+    }
+
+    public DailyDischargeValue(final DailyDischarge dailyDischarge, final Date day, final Double discharge) {
+        this.dailyDischarge = dailyDischarge;
+        this.day = day;
+        this.discharge = discharge;
+    }
+
+    /**
+     * Parameter constructor with primitive parameter types
+     */
+    public DailyDischargeValue(final DailyDischarge dailyDischarge, final Date day, final double discharge) {
+        this(dailyDischarge, day, Double.valueOf(discharge));
+    }
+
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_DAILY_DISCHARGE_VALUE_ID_SEQ", sequenceName = "DAILY_DISCHARGE_VALUES_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_DAILY_DISCHARGE_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "daily_discharge_id")
+    public DailyDischarge getDailyDischarge() {
+        return this.dailyDischarge;
+    }
+
+    public void setDailyDischarge(final DailyDischarge dailyDischarge) {
+        this.dailyDischarge = dailyDischarge;
+    }
+
+    @Column(name = "discharge")
+    public Double getDischarge() {
+        return this.discharge;
+    }
+
+    public void setDischarge(final Double discharge) {
+        this.discharge = discharge;
+    }
+
+    @Column(name = "day")
+    public Date getDay() {
+        return this.day;
+    }
+
+    public void setDay(final Date day) {
+        this.day = day;
+    }
+}
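Illustrative sketch (not part of the applied diff): building a daily discharge
series for a gauge resolved via River.findGauge(); the number, names, date and
discharge value are placeholders.

    final Gauge gauge = river.findGauge(9999999L, "Beispielpegel");
    final DailyDischarge series = new DailyDischarge(gauge, "Q-Tagesmittel.csv", null);
    series.addValue(new DailyDischargeValue(series, new Date(), 123.4));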
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/DepthEvolution.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/DepthEvolution.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,208 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+import org.dive4elements.river.model.River;
+
+/**
+ * Hibernate binding for the DB table depth_evolution
+ *
+ * @author Matthias Schäfer
+ *
+ */
+
+@Entity
+@Table(name = "depth_evolution")
+public class DepthEvolution implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = 2665718161356165179L;
+
+    private Integer id;
+
+    private River river;
+
+    private String kmrange_info;
+
+    private String filename;
+
+    private String comment;
+
+    private Integer start_year;
+
+    private Integer reference_year;
+
+    private String curr_sounding;
+
+    private String old_sounding;
+
+    private String curr_glw;
+
+    private String old_glw;
+
+    private List<DepthEvolutionValue> values;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public DepthEvolution() {
+    }
+
+    public DepthEvolution(final River river, final String filename, final String kmrange_info, final String comment, final Integer start_year,
+            final Integer reference_year, final String curr_sounding, final String old_sounding, final String curr_glw, final String old_glw) {
+        this.river = river;
+        this.filename = filename;
+        this.kmrange_info = kmrange_info;
+        this.comment = comment;
+        this.setStart_year(start_year);
+        this.setReference_year(reference_year);
+        this.curr_sounding = curr_sounding;
+        this.old_sounding = old_sounding;
+        this.curr_glw = curr_glw;
+        this.old_glw = old_glw;
+        this.values = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_DEPTH_EVOLUTION_ID_SEQ", sequenceName = "DEPTH_EVOLUTION_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_DEPTH_EVOLUTION_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return this.river;
+    }
+
+    public void setRiver(final River river) {
+        this.river = river;
+    }
+
+    @Column(name = "filename")
+    public String getFilename() {
+        return this.filename;
+    }
+
+    public void setFilename(final String filename) {
+        this.filename = filename;
+    }
+
+    @Column(name = "kmrange_info")
+    public String getKmrange_info() {
+        return this.kmrange_info;
+    }
+
+    public void setKmrange_info(final String kmrange_info) {
+        this.kmrange_info = kmrange_info;
+    }
+
+    @Column(name = "comment")
+    public String getComment() {
+        return this.comment;
+    }
+
+    public void setComment(final String comment) {
+        this.comment = comment;
+    }
+
+    @Column(name = "start_year")
+    public Integer getStart_year() {
+        return this.start_year;
+    }
+
+    public void setStart_year(final Integer start_year) {
+        this.start_year = start_year;
+    }
+
+    @Column(name = "reference_year")
+    public Integer getReference_year() {
+        return this.reference_year;
+    }
+
+    public void setReference_year(final Integer reference_year) {
+        this.reference_year = reference_year;
+    }
+
+    @Column(name = "curr_sounding")
+    public String getCurr_sounding() {
+        return this.curr_sounding;
+    }
+
+    public void setCurr_sounding(final String curr_sounding) {
+        this.curr_sounding = curr_sounding;
+    }
+
+    @Column(name = "old_sounding")
+    public String getOld_sounding() {
+        return this.old_sounding;
+    }
+
+    public void setOld_sounding(final String old_sounding) {
+        this.old_sounding = old_sounding;
+    }
+
+    @Column(name = "curr_glw")
+    public String getCurr_glw() {
+        return this.curr_glw;
+    }
+
+    public void setCurr_glw(final String curr_glw) {
+        this.curr_glw = curr_glw;
+    }
+
+    @Column(name = "old_glw")
+    public String getOld_glw() {
+        return this.old_glw;
+    }
+
+    public void setOld_glw(final String old_glw) {
+        this.old_glw = old_glw;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "depth_evolution_id")
+    public List<DepthEvolutionValue> getValues() {
+        return this.values;
+    }
+
+    public void setValues(final List<DepthEvolutionValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(final DepthEvolutionValue value) {
+        this.values.add(value);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/DepthEvolutionValue.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/DepthEvolutionValue.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,121 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+
+/**
+ * Hibernate binding for the DB table depth_evolution_values
+ *
+ * @author Matthias Schäfer
+ *
+ */
+@Entity
+@Table(name = "depth_evolution_values")
+public class DepthEvolutionValue implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = 3164888119107103560L;
+
+    private Integer id;
+
+    private DepthEvolution depth_evolution;
+
+    private Double station;
+
+    private Double total_change;
+
+    private Double change_per_year;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public DepthEvolutionValue() {
+    }
+
+    public DepthEvolutionValue(final DepthEvolution depth_evolution, final Double station, final Double total_change, final Double change_per_year) {
+        this.depth_evolution = depth_evolution;
+        this.station = station;
+        this.total_change = total_change;
+        this.change_per_year = change_per_year;
+    }
+
+    /**
+     * Parameter constructor with primitive parameter types
+     */
+    public DepthEvolutionValue(final DepthEvolution depth_evolution, final double km, final double total_change, final double change_per_year) {
+        this(depth_evolution, Double.valueOf(km), Double.valueOf(total_change), Double.valueOf(change_per_year));
+    }
+
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_DEPTH_EVOLUTION_VALUE_ID_SEQ", sequenceName = "DEPTH_EVOLUTION_VALUES_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_DEPTH_EVOLUTION_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "depth_evolution_id")
+    public DepthEvolution getDepthEvolution() {
+        return this.depth_evolution;
+    }
+
+    public void setDepthEvolution(final DepthEvolution depth_evolution) {
+        this.depth_evolution = depth_evolution;
+    }
+
+    @Column(name = "station")
+    public Double getStation() {
+        return this.station;
+    }
+
+    public void setStation(final Double station) {
+        this.station = station;
+    }
+
+    @Column(name = "total_change")
+    public Double getTotal_change() {
+        return this.total_change;
+    }
+
+    public void setTotal_change(final Double total_change) {
+        this.total_change = total_change;
+    }
+
+    @Column(name = "change_per_year")
+    public Double getChange_per_year() {
+        return this.change_per_year;
+    }
+
+    public void setChange_per_year(final Double change_per_year) {
+        this.change_per_year = change_per_year;
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/FlowDepth.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/FlowDepth.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,173 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+import org.dive4elements.river.model.River;
+
+/**
+ * Hibernate binding for the DB table flow_depth
+ *
+ * @author Matthias Schäfer
+ *
+ */
+
+@Entity
+@Table(name = "flow_depth")
+public class FlowDepth implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -1643015200580514950L;
+
+    private Integer id;
+
+    private River river;
+
+    private String filename;
+
+    private String kmrange_info;
+
+    private String comment;
+
+    private Integer year;
+
+    private String sounding_info;
+
+    private String evaluation_by;
+
+    private List<FlowDepthColumn> columns;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public FlowDepth() {
+    }
+
+
+    public FlowDepth(final River river, final String filename, final String kmrange_info, final String comment, final Integer year, final String sounding_info,
+            final String evaluation_by) {
+        this.river = river;
+        this.filename = filename;
+        this.kmrange_info = kmrange_info;
+        this.comment = comment;
+        this.year = year;
+        this.setSounding_info(sounding_info);
+        this.setEvaluation_by(evaluation_by);
+        this.columns = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_FLOW_DEPTH_ID_SEQ", sequenceName = "FLOW_DEPTH_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_FLOW_DEPTH_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return this.river;
+    }
+
+    public void setRiver(final River river) {
+        this.river = river;
+    }
+
+    @Column(name = "filename")
+    public String getFilename() {
+        return this.filename;
+    }
+
+    public void setFilename(final String filename) {
+        this.filename = filename;
+    }
+
+    @Column(name = "kmrange_info")
+    public String getKmrange_info() {
+        return this.kmrange_info;
+    }
+
+    public void setKmrange_info(final String kmrange_info) {
+        this.kmrange_info = kmrange_info;
+    }
+
+    @Column(name = "comment")
+    public String getComment() {
+        return this.comment;
+    }
+
+    public void setComment(final String comment) {
+        this.comment = comment;
+    }
+
+    @Column(name = "year")
+    public Integer getYear() {
+        return this.year;
+    }
+
+    public void setYear(final Integer year) {
+        this.year = year;
+    }
+
+    @Column(name = "sounding_info")
+    public String getSounding_info() {
+        return this.sounding_info;
+    }
+
+    public void setSounding_info(final String sounding_info) {
+        this.sounding_info = sounding_info;
+    }
+
+    @Column(name = "evaluation_by")
+    public String getEvaluation_by() {
+        return this.evaluation_by;
+    }
+
+    public void setEvaluation_by(final String evaluation_by) {
+        this.evaluation_by = evaluation_by;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "flow_depth_id")
+    public List<FlowDepthColumn> getColumns() {
+        return this.columns;
+    }
+
+    public void setColumns(final List<FlowDepthColumn> values) {
+        this.columns = values;
+    }
+
+    public void addColumn(final FlowDepthColumn value) {
+        this.columns.add(value);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/FlowDepthColumn.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/FlowDepthColumn.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,106 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+/**
+ * Hibernate binding for the DB table flow_depth_column
+ *
+ * @author Matthias Schäfer
+ *
+ */
+@Entity
+@Table(name = "flow_depth_column")
+public class FlowDepthColumn implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -8164345503234852700L;
+
+    private Integer id;
+
+    private FlowDepth parent;
+
+    private String  name;
+
+    private List<FlowDepthValue> values;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public FlowDepthColumn() {
+    }
+
+    public FlowDepthColumn(final FlowDepth parent, final String name) {
+        this.parent = parent;
+        this.name = name;
+        this.values = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_FLOW_DEPTH_COLUMN_ID_SEQ", sequenceName = "FLOW_DEPTH_COLUMN_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_FLOW_DEPTH_COLUMN_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "flow_depth_id")
+    public FlowDepth getFlowDepth() {
+        return this.parent;
+    }
+
+    public void setFlowDepth(final FlowDepth flow_depth) {
+        this.parent = flow_depth;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(final String name) {
+        this.name = name;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "flow_depth_column_id")
+    public List<FlowDepthValue> getValues() {
+        return this.values;
+    }
+
+    public void setValues(final List<FlowDepthValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(final FlowDepthValue value) {
+        this.values.add(value);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/FlowDepthValue.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/FlowDepthValue.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,112 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+
+/**
+ * Hibernate binding for the DB table flow_depth_values
+ *
+ * @author Matthias Schäfer
+ *
+ */
+@Entity
+@Table(name = "flow_depth_values")
+public class FlowDepthValue implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -7465899199734466657L;
+
+    private Integer id;
+
+    private FlowDepthColumn flowDepthColumn;
+
+    private Double station;
+
+    /**
+     * Flow depth in m
+     */
+    private Double depth;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public FlowDepthValue() {
+    }
+
+    public FlowDepthValue(final FlowDepthColumn flowDepthColumn, final Double station, final Double depth) {
+        this.flowDepthColumn = flowDepthColumn;
+        this.station = station;
+        this.depth = depth;
+    }
+
+    /**
+     * Constructor with primitive parameter types
+     */
+    public FlowDepthValue(final FlowDepthColumn flow_depthColumn, final double km, final double depth) {
+        this(flow_depthColumn, Double.valueOf(km), Double.valueOf(depth));
+    }
+
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_FLOW_DEPTH_VALUE_ID_SEQ", sequenceName = "FLOW_DEPTH_VALUES_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_FLOW_DEPTH_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "flow_depth_column_id")
+    public FlowDepthColumn getFlowDepthColumn() {
+        return this.flowDepthColumn;
+    }
+
+    public void setFlowDepthColumn(final FlowDepthColumn flow_depth) {
+        this.flowDepthColumn = flow_depth;
+    }
+
+    @Column(name = "station")
+    public Double getStation() {
+        return this.station;
+    }
+
+    public void setStation(final Double station) {
+        this.station = station;
+    }
+
+    @Column(name = "depth")
+    public Double getDepth() {
+        return this.depth;
+    }
+
+    public void setDepth(final Double depth) {
+        this.depth = depth;
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/Infrastructure.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/Infrastructure.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,187 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+import org.dive4elements.river.model.AnnotationType;
+import org.dive4elements.river.model.River;
+
+/**
+ * Hibernate binding for the DB table infrastructure
+ *
+ * @author Matthias Schäfer
+ *
+ */
+
+@Entity
+@Table(name = "infrastructure")
+public class Infrastructure implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -5505080011501611034L;
+
+    private Integer id;
+
+    private River river;
+
+    private String filename;
+
+    private String kmrange_info;
+
+    private String comment;
+
+    private AnnotationType type;
+
+    private Integer year;
+
+    private String dataprovider;
+
+    private String evaluation_by;
+
+    private List<InfrastructureValue> values;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public Infrastructure() {
+    }
+
+
+    public Infrastructure(final River river, final String filename, final String kmrange_info, final String comment, final AnnotationType type, final Integer year,
+            final String dataprovider, final String evaluation_by) {
+        this.river = river;
+        this.filename = filename;
+        this.kmrange_info = kmrange_info;
+        this.comment = comment;
+        this.type = type;
+        this.year = year;
+        this.dataprovider = dataprovider;
+        this.evaluation_by = evaluation_by;
+        this.values = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_INFRASTRUCTURE_ID_SEQ", sequenceName = "INFRASTRUCTURE_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_INFRASTRUCTURE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return this.river;
+    }
+
+    public void setRiver(final River river) {
+        this.river = river;
+    }
+
+    @Column(name = "filename")
+    public String getFilename() {
+        return this.filename;
+    }
+
+    public void setFilename(final String filename) {
+        this.filename = filename;
+    }
+
+    @Column(name = "kmrange_info")
+    public String getKmrange_info() {
+        return this.kmrange_info;
+    }
+
+    public void setKmrange_info(final String kmrange_info) {
+        this.kmrange_info = kmrange_info;
+    }
+
+    @Column(name = "comment")
+    public String getComment() {
+        return this.comment;
+    }
+
+    public void setComment(final String comment) {
+        this.comment = comment;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "annotation_type_id")
+    public AnnotationType getType() {
+        return this.type;
+    }
+
+    public void setType(final AnnotationType type) {
+        this.type = type;
+    }
+
+    @Column(name = "year")
+    public Integer getYear() {
+        return this.year;
+    }
+
+    public void setYear(final Integer year) {
+        this.year = year;
+    }
+
+    @Column(name = "dataprovider")
+    public String getDataprovider() {
+        return this.dataprovider;
+    }
+
+    public void setDataprovider(final String dataprovider) {
+        this.dataprovider = dataprovider;
+    }
+
+    @Column(name = "evaluation_by")
+    public String getEvaluation_by() {
+        return this.evaluation_by;
+    }
+
+    public void setEvaluation_by(final String evaluation_by) {
+        this.evaluation_by = evaluation_by;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "infrastructure_id")
+    public List<InfrastructureValue> getValues() {
+        return this.values;
+    }
+
+    public void setValues(final List<InfrastructureValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(final InfrastructureValue value) {
+        this.values.add(value);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/InfrastructureValue.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/InfrastructureValue.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,124 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+import org.dive4elements.river.model.Attribute;
+
+
+/**
+ * Hibernate binding for the DB table infrastructure_values
+ *
+ * @author Matthias Schäfer
+ *
+ */
+@Entity
+@Table(name = "infrastructure_values")
+public class InfrastructureValue implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -3887269325288851829L;
+
+    private Integer id;
+
+    private Infrastructure infrastructure;
+
+    private Double station;
+
+    private Attribute attribute;
+
+    private Double height;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public InfrastructureValue() {
+    }
+
+    public InfrastructureValue(final Infrastructure infrastructure, final Double station, final Attribute attribute, final Double height) {
+        this.infrastructure = infrastructure;
+        this.station = station;
+        this.attribute = attribute;
+        this.height = height;
+    }
+
+    /**
+     * Constructor with primitive double parameters for km and height
+     */
+    public InfrastructureValue(final Infrastructure infrastructure, final double km, final Attribute attribute, final double height) {
+        this(infrastructure, Double.valueOf(km), attribute, Double.valueOf(height));
+    }
+
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_INFRASTRUCTURE_VALUE_ID_SEQ", sequenceName = "INFRASTRUCTURE_VALUES_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_INFRASTRUCTURE_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "infrastructure_id")
+    public Infrastructure getInfrastructure() {
+        return this.infrastructure;
+    }
+
+    public void setInfrastructure(final Infrastructure infrastructure) {
+        this.infrastructure = infrastructure;
+    }
+
+    @Column(name = "station")
+    public Double getStation() {
+        return this.station;
+    }
+
+    public void setStation(final Double station) {
+        this.station = station;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "attribute_id")
+    public Attribute getAttribute() {
+        return this.attribute;
+    }
+
+    public void setAttribute(final Attribute attribute) {
+        this.attribute = attribute;
+    }
+
+    @Column(name = "height")
+    public Double getHeight() {
+        return this.height;
+    }
+
+    public void setHeight(final Double height) {
+        this.height = height;
+    }
+}
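
To show how the two bindings above fit together, here is a sketch (not part of the changeset): the Infrastructure series is saved first and each value afterwards, since the @OneToMany mapping declares no cascade. The helper class and all literals are example data; river, type and attribute stand in for entities looked up during the import.

import org.dive4elements.river.backend.SessionHolder;
import org.dive4elements.river.model.AnnotationType;
import org.dive4elements.river.model.Attribute;
import org.dive4elements.river.model.River;
import org.dive4elements.river.model.sinfo.Infrastructure;
import org.dive4elements.river.model.sinfo.InfrastructureValue;
import org.hibernate.Session;
import org.hibernate.Transaction;

// Hypothetical helper illustrating the infrastructure -> infrastructure_values pair.
public final class InfrastructureSketch {

    public static void store(final River river, final AnnotationType type, final Attribute attribute) {
        final Session session = SessionHolder.HOLDER.get();
        final Transaction tx = session.beginTransaction();
        final Infrastructure series = new Infrastructure(river, "bauwerke.csv", "km 0.0 - 10.0",
                "example import", type, 2017, "example provider", "example evaluator");
        session.save(series);
        // one value: structure at km 3.4 with a height of 112.25 m (example numbers)
        final InfrastructureValue value = new InfrastructureValue(series, 3.4, attribute, 112.25);
        series.addValue(value);
        session.save(value);
        tx.commit();
    }
}
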
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/Tkh.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/Tkh.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,173 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+import org.dive4elements.river.model.River;
+
+/**
+ * Hibernate binding for the DB table tkh
+ *
+ * @author Matthias Schäfer
+ *
+ */
+
+@Entity
+@Table(name = "tkh")
+public class Tkh implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = 5891445651697221261L;
+
+    private Integer id;
+
+    private River river;
+
+    private String filename;
+
+    private String kmrange_info;
+
+    private String comment;
+
+    private Integer year;
+
+    private String sounding_info;
+
+    private String evaluation_by;
+
+    private List<TkhColumn> columns;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public Tkh() {
+    }
+
+
+    public Tkh(final River river, final String filename, final String kmrange_info, final String comment, final Integer year, final String sounding_info,
+            final String evaluation_by) {
+        this.river = river;
+        this.filename = filename;
+        this.kmrange_info = kmrange_info;
+        this.comment = comment;
+        this.year = year;
+        this.sounding_info = sounding_info;
+        this.evaluation_by = evaluation_by;
+        this.columns = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_TKH_ID_SEQ", sequenceName = "TKH_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_TKH_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return this.river;
+    }
+
+    public void setRiver(final River river) {
+        this.river = river;
+    }
+
+    @Column(name = "filename")
+    public String getFilename() {
+        return this.filename;
+    }
+
+    public void setFilename(final String filename) {
+        this.filename = filename;
+    }
+
+    @Column(name = "kmrange_info")
+    public String getKmrange_info() {
+        return this.kmrange_info;
+    }
+
+    public void setKmrange_info(final String kmrange_info) {
+        this.kmrange_info = kmrange_info;
+    }
+
+    @Column(name = "comment")
+    public String getComment() {
+        return this.comment;
+    }
+
+    public void setComment(final String comment) {
+        this.comment = comment;
+    }
+
+    @Column(name = "year")
+    public Integer getYear() {
+        return this.year;
+    }
+
+    public void setYear(final Integer year) {
+        this.year = year;
+    }
+
+    @Column(name = "sounding_info")
+    public String getSounding_info() {
+        return this.sounding_info;
+    }
+
+    public void setSounding_info(final String sounding_info) {
+        this.sounding_info = sounding_info;
+    }
+
+    @Column(name = "evaluation_by")
+    public String getEvaluation_by() {
+        return this.evaluation_by;
+    }
+
+    public void setEvaluation_by(final String evaluation_by) {
+        this.evaluation_by = evaluation_by;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "tkh_id")
+    public List<TkhColumn> getColumns() {
+        return this.columns;
+    }
+
+    public void setColumns(final List<TkhColumn> values) {
+        this.columns = values;
+    }
+
+    public void addColumn(final TkhColumn value) {
+        this.columns.add(value);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/TkhColumn.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/TkhColumn.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,108 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+/**
+ * Hibernate binding for the DB table tkh_column
+ *
+ * @author Matthias Schäfer
+ *
+ */
+@Entity
+@Table(name = "tkh_column")
+public class TkhColumn implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -6567598780584866314L;
+
+    private Integer id;
+
+    private Tkh parent;
+
+    private String name;
+
+    private List<TkhValue> values;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public TkhColumn() {
+    }
+
+    public TkhColumn(final Tkh parent, final String name) {
+        this.parent = parent;
+        this.name = name;
+        this.values = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_TKH_COLUMN_ID_SEQ", sequenceName = "TKH_COLUMN_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_TKH_COLUMN_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "tkh_id")
+    public Tkh getTkh() {
+        return this.parent;
+    }
+
+    public void setTkh(final Tkh tkh) {
+        this.parent = tkh;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(final String name) {
+        this.name = name;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "tkh_column_id")
+    public List<TkhValue> getValues() {
+        return this.values;
+    }
+
+    public void setValues(final List<TkhValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(final TkhValue value) {
+        this.values.add(value);
+    }
+}
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/sinfo/TkhValue.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/sinfo/TkhValue.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,112 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.sinfo;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+
+/**
+ * Hibernate binding for the DB table tkh_values
+ *
+ * @author Matthias Schäfer
+ *
+ */
+@Entity
+@Table(name = "tkh_values")
+public class TkhValue implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = 4514054828340199384L;
+
+    private Integer id;
+
+    private TkhColumn tkhColumn;
+
+    private Double station;
+
+    /**
+     * TKH in m
+     */
+    private Double tkheight;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public TkhValue() {
+    }
+
+    public TkhValue(final TkhColumn tkhColumn, final Double station, final Double tkheight) {
+        this.tkhColumn = tkhColumn;
+        this.station = station;
+        this.tkheight = tkheight;
+    }
+
+    /**
+     * Constructor with primitive parameter types
+     */
+    public TkhValue(final TkhColumn tkhColumn, final double km, final double tkheight) {
+        this(tkhColumn, Double.valueOf(km), Double.valueOf(tkheight));
+    }
+
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_TKH_VALUE_ID_SEQ", sequenceName = "TKH_VALUES_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_TKH_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "tkh_column_id")
+    public TkhColumn getTkhColumn() {
+        return this.tkhColumn;
+    }
+
+    public void setTkhColumn(final TkhColumn tkh) {
+        this.tkhColumn = tkh;
+    }
+
+    @Column(name = "station")
+    public Double getStation() {
+        return this.station;
+    }
+
+    public void setStation(final Double station) {
+        this.station = station;
+    }
+
+    @Column(name = "tkheight")
+    public Double getTkheight() {
+        return this.tkheight;
+    }
+
+    public void setTkheight(final Double tkheight) {
+        this.tkheight = tkheight;
+    }
+}
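
The tkh/tkh_column/tkh_values bindings form the same parent-column-value pattern; a rough sketch of how an importer might assemble and store one column (again not part of the changeset, with made-up class name and literals):

import org.dive4elements.river.backend.SessionHolder;
import org.dive4elements.river.model.River;
import org.dive4elements.river.model.sinfo.Tkh;
import org.dive4elements.river.model.sinfo.TkhColumn;
import org.dive4elements.river.model.sinfo.TkhValue;
import org.hibernate.Session;
import org.hibernate.Transaction;

// Hypothetical helper illustrating the tkh -> tkh_column -> tkh_values hierarchy.
public final class TkhSketch {

    public static void store(final River river) {
        final Session session = SessionHolder.HOLDER.get();
        final Transaction tx = session.beginTransaction();
        final Tkh tkh = new Tkh(river, "tkh.csv", "km 0.0 - 5.0", "example import",
                2016, "example sounding", "example evaluator");
        session.save(tkh);
        final TkhColumn column = new TkhColumn(tkh, "GlQ2012"); // example column name
        tkh.addColumn(column);
        session.save(column);
        final TkhValue value = new TkhValue(column, 1.2, 0.15); // km 1.2, TKH 0.15 m
        column.addValue(value);
        session.save(value);
        tx.commit();
    }
}
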
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/uinfo/Salix.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/uinfo/Salix.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,148 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.uinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+import org.dive4elements.river.model.River;
+
+/**
+ * Hibernate binding for the DB table salix
+ *
+ * @author Matthias Schäfer
+ *
+ */
+
+@Entity
+@Table(name = "salix")
+public class Salix implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = -2765661962829259970L;
+
+    private Integer id;
+
+    private River river;
+
+    private String filename;
+
+    private String kmrange_info;
+
+    private String comment;
+
+    private String evaluation_by;
+
+    private List<SalixValue> values;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public Salix() {
+    }
+
+
+    public Salix(final River river, final String filename, final String kmrange_info, final String comment, final String evaluation_by) {
+        this.river = river;
+        this.filename = filename;
+        this.kmrange_info = kmrange_info;
+        this.comment = comment;
+        this.evaluation_by = evaluation_by;
+        this.values = new ArrayList<>();
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_SALIX_ID_SEQ", sequenceName = "SALIX_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_SALIX_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "river_id")
+    public River getRiver() {
+        return this.river;
+    }
+
+    public void setRiver(final River river) {
+        this.river = river;
+    }
+
+    @Column(name = "filename")
+    public String getFilename() {
+        return this.filename;
+    }
+
+    public void setFilename(final String filename) {
+        this.filename = filename;
+    }
+
+    @Column(name = "kmrange_info")
+    public String getKmrange_info() {
+        return this.kmrange_info;
+    }
+
+    public void setKmrange_info(final String kmrange_info) {
+        this.kmrange_info = kmrange_info;
+    }
+
+    @Column(name = "comment")
+    public String getComment() {
+        return this.comment;
+    }
+
+    public void setComment(final String comment) {
+        this.comment = comment;
+    }
+
+    @Column(name = "evaluation_by")
+    public String getEvaluation_by() {
+        return this.evaluation_by;
+    }
+
+    public void setEvaluation_by(final String evaluation_by) {
+        this.evaluation_by = evaluation_by;
+    }
+
+    @OneToMany
+    @JoinColumn(name = "salix_id")
+    public List<SalixValue> getValues() {
+        return this.values;
+    }
+
+    public void setValues(final List<SalixValue> values) {
+        this.values = values;
+    }
+
+    public void addValue(final SalixValue value) {
+        this.values.add(value);
+    }
+}
\ No newline at end of file
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/uinfo/SalixRank.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/uinfo/SalixRank.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,99 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.uinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+
+import org.dive4elements.river.backend.SessionHolder;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+@Entity
+@Table(name = "salix_rank")
+public class SalixRank implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = 7459539781698086683L;
+
+    private Integer id;
+
+    private String name;
+
+    private Double min_value;
+
+    private Double max_value;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public SalixRank() {
+    }
+
+    public SalixRank(final String name) {
+        this.name = name;
+    }
+
+    /***** METHODS *****/
+
+    @Id
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @Column(name = "name")
+    public String getName() {
+        return this.name;
+    }
+
+    public void setName(final String name) {
+        this.name = name;
+    }
+
+    @Column(name = "min_value")
+    public Double getMin_value() {
+        return this.min_value;
+    }
+
+    public void setMin_value(final Double min_value) {
+        this.min_value = min_value;
+    }
+
+    @Column(name = "max_value")
+    public Double getMax_value() {
+        return this.max_value;
+    }
+
+    public void setMax_value(final Double max_value) {
+        this.max_value = max_value;
+    }
+
+    /**
+     * Queries all salix ranks from the database, ordered by id
+     */
+    public static List<SalixRank> getTypes() {
+        final Session session = SessionHolder.HOLDER.get();
+        final Query query = session.createQuery("FROM SalixRank ORDER BY id");
+        return new ArrayList<>(query.list());
+    }
+}
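
getTypes() suggests the salix ranks serve as a lookup table. A sketch of how an importer could map a computed salix factor to its rank, assuming min_value and max_value delimit the value range of each class (the helper below is illustrative only, not part of the changeset):

import java.util.List;

import org.dive4elements.river.model.uinfo.SalixRank;

// Hypothetical helper: resolve the rank class whose range contains the given factor.
public final class SalixRankSketch {

    public static SalixRank rankFor(final double factor) {
        final List<SalixRank> ranks = SalixRank.getTypes();
        for (final SalixRank rank : ranks) {
            if ((rank.getMin_value() != null) && (rank.getMax_value() != null)
                    && (factor >= rank.getMin_value()) && (factor <= rank.getMax_value()))
                return rank;
        }
        return null; // no matching class
    }
}
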
diff -r da5dc7446652 -r 50416a0df385 backend/src/main/java/org/dive4elements/river/model/uinfo/SalixValue.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/src/main/java/org/dive4elements/river/model/uinfo/SalixValue.java	Tue Apr 03 10:18:30 2018 +0200
@@ -0,0 +1,121 @@
+/* Copyright (C) 2017 by Bundesanstalt für Gewässerkunde
+ * Software engineering by
+ *  Björnsen Beratende Ingenieure GmbH
+ *  Dr. Schumacher Ingenieurbüro für Wasser und Umwelt
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+
+package org.dive4elements.river.model.uinfo;
+
+import java.io.Serializable;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+
+
+/**
+ * Hibernate binding for the DB table salix_values
+ *
+ * @author Matthias Schäfer
+ *
+ */
+@Entity
+@Table(name = "salix_values")
+public class SalixValue implements Serializable {
+
+    /***** FIELDS *****/
+
+    private static final long serialVersionUID = 7775536956084391338L;
+
+    private Integer id;
+
+    private Salix salix;
+
+    private Double station;
+
+    private Double factor;
+
+    private Double mnw_mw_diff;
+
+
+    /***** CONSTRUCTORS *****/
+
+    public SalixValue() {
+    }
+
+    public SalixValue(final Salix salix, final Double station, final Double factor, final Double mnw_mw_diff) {
+        this.salix = salix;
+        this.station = station;
+        this.factor = factor;
+        this.mnw_mw_diff = mnw_mw_diff;
+    }
+
+    /**
+     * Constructor with primitive parameter types
+     */
+    public SalixValue(final Salix salix, final double km, final double factor, final double mnw_mw_diff) {
+        this(salix, Double.valueOf(km), Double.valueOf(factor), Double.valueOf(mnw_mw_diff));
+    }
+
+
+    /***** METHODS *****/
+
+    @Id
+    @SequenceGenerator(name = "SEQUENCE_SALIX_VALUE_ID_SEQ", sequenceName = "SALIX_VALUES_ID_SEQ", allocationSize = 1)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQUENCE_SALIX_VALUE_ID_SEQ")
+    @Column(name = "id")
+    public Integer getId() {
+        return this.id;
+    }
+
+    public void setId(final Integer id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(name = "salix_id")
+    public Salix getSalix() {
+        return this.salix;
+    }
+
+    public void setSalix(final Salix salix) {
+        this.salix = salix;
+    }
+
+    @Column(name = "station")
+    public Double getStation() {
+        return this.station;
+    }
+
+    public void setStation(final Double station) {
+        this.station = station;
+    }
+
+    @Column(name = "factor")
+    public Double getFactor() {
+        return this.factor;
+    }
+
+    public void setFactor(final Double factor) {
+        this.factor = factor;
+    }
+
+    @Column(name = "mnw_mw_diff")
+    public Double getMnwMwDiff() {
+        return this.mnw_mw_diff;
+    }
+
+    public void setMnwMwDiff(final Double mnw_mw_diff) {
+        this.mnw_mw_diff = mnw_mw_diff;
+    }
+}
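
For completeness, reading the data back can follow the same SessionHolder/HQL style as SalixRank.getTypes() above; a sketch (not part of the changeset, helper class made up for the example):

import java.util.List;

import org.dive4elements.river.backend.SessionHolder;
import org.dive4elements.river.model.River;
import org.dive4elements.river.model.uinfo.Salix;
import org.dive4elements.river.model.uinfo.SalixValue;
import org.hibernate.Query;
import org.hibernate.Session;

// Hypothetical helper: load all salix series of a river and walk their values.
public final class SalixQuerySketch {

    public static List<Salix> forRiver(final River river) {
        final Session session = SessionHolder.HOLDER.get();
        final Query query = session.createQuery("FROM Salix WHERE river=:river");
        query.setParameter("river", river);
        @SuppressWarnings("unchecked")
        final List<Salix> result = query.list();
        for (final Salix salix : result) {
            for (final SalixValue value : salix.getValues())
                System.out.println(value.getStation() + ": " + value.getFactor());
        }
        return result;
    }
}
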

