[PATCH] New way to load sediment loads from database and cache it. The data structure is way more straightforward. TODO: Implement calculation on this basis
Wald Commits
scm-commit at wald.intevation.org
Tue Jul 15 12:48:03 CEST 2014
# HG changeset patch
# User Sascha L. Teichmann <teichmann at intevation.de>
# Date 1405421272 -7200
# Node ID 5e3f4b4fcb28301bc48477e1c1e022384d947724
# Parent fd3a24336e6a9f454e2bf21b78decf00119b70cc
New way to load sediment loads from database and cache it. The data structure is way more straightforward. TODO: Implement calculation on this basis.
diff -r fd3a24336e6a -r 5e3f4b4fcb28 artifacts/doc/conf/cache.xml
--- a/artifacts/doc/conf/cache.xml Mon Jul 14 15:36:44 2014 +0200
+++ b/artifacts/doc/conf/cache.xml Tue Jul 15 12:47:52 2014 +0200
@@ -161,6 +161,12 @@
timeToLiveSeconds="14400"
/>
+ <!-- This one is used to cache sediment loads of rivers. -->
+ <cache name="sediment-load-data"
+ maxElementsInMemory="3"
+ timeToLiveSeconds="86400"
+ />
+
<!-- This one is used for the cross section lookup
Because of lazy fetching and relatively big amount of data, disabled
diff -r fd3a24336e6a -r 5e3f4b4fcb28 artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadData.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadData.java Tue Jul 15 12:47:52 2014 +0200
@@ -0,0 +1,207 @@
+/* Copyright (C) 2014 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+package org.dive4elements.river.artifacts.model.minfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+import java.util.NavigableMap;
+import java.util.TreeMap;
+
+import org.dive4elements.river.utils.EpsilonComparator;
+
+/**
+ * In-memory representation of the sediment loads of a river:
+ * measurement stations keyed by their km position, each station
+ * holding one list of load values per grain fraction. Instances
+ * are built and cached per river by SedimentLoadDataFactory.
+ */
+public class SedimentLoadData implements Serializable
+{
+ /** Epsilon used to treat two station km positions as equal. */
+ public static final double STATION_EPS = 0.0001;
+
+ // Indices of the supported grain fractions.
+ public static final int GF_COARSE = 0;
+ public static final int GF_FINE_MIDDLE = 1;
+ public static final int GF_SAND = 2;
+ public static final int GF_SUSP_SAND = 3;
+ public static final int GF_SUSP_SAND_BED = 4;
+ public static final int GF_SUSP_SEDIMENT = 5;
+ /** Highest valid grain fraction index. */
+ public static final int GF_MAX = 5;
+
+ /** Maps a grain fraction database name to its index; -1 if unknown. */
+ public static final int grainFractionIndex(String name) {
+ if ("coarse".equals(name)) return GF_COARSE;
+ if ("fine_middle".equals(name)) return GF_FINE_MIDDLE;
+ if ("sand".equals(name)) return GF_SAND;
+ if ("susp_sand".equals(name)) return GF_SUSP_SAND;
+ if ("susp_sand_bed".equals(name)) return GF_SUSP_SAND_BED;
+ if ("suspended_sediment".equals(name)) return GF_SUSP_SEDIMENT;
+ return -1;
+ }
+
+ /** A single measured load value together with the load it belongs to. */
+ public static class Value implements Serializable {
+
+ private double value;
+
+ private Load load;
+
+ public Value() {
+ }
+
+ public Value(Load load, double value) {
+ this.load = load;
+ this.value = value;
+ }
+
+ public double getValue() {
+ return value;
+ }
+
+ public Load getLoad() {
+ return load;
+ }
+ } // class Value
+
+
+ /** A sediment load data set: id, description and time range. */
+ public static class Load implements Serializable {
+
+ private int id;
+
+ private String description;
+
+ private Date startTime;
+ private Date stopTime;
+
+ public Load() {
+ }
+
+ public Load(int id, String description, Date startTime, Date stopTime) {
+ this.id = id;
+ this.description = description;
+ this.startTime = startTime;
+ this.stopTime = stopTime;
+ }
+
+ public int getId() {
+ return id;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public Date getStartTime() {
+ return startTime;
+ }
+
+ public Date getStopTime() {
+ return stopTime;
+ }
+
+ /** A load with both ends of its time range known is an epoch. */
+ public boolean isEpoch() {
+ return startTime != null && stopTime != null;
+ }
+ } // class Load
+
+ /** A measurement station with its per-grain-fraction value lists. */
+ public static class Station implements Serializable {
+
+ // Measurement types of a station.
+ public static final int BED_LOAD = 0;
+ public static final int SUSPENDED = 1;
+
+ /** Callback interface used by visit(Visitor). */
+ public interface Visitor {
+ boolean accept(Station station);
+ boolean accept(int grainFraction);
+ boolean visit(Value value, int grainFraction);
+ }
+
+ private double station;
+
+ private int type;
+
+ // One value list per grain fraction index (0..GF_MAX).
+ private List<List<Value>> grainFractions;
+
+ public Station() {
+ this(BED_LOAD, 0.0);
+ }
+
+ public Station(int type, double station) {
+ grainFractions = new ArrayList<List<Value>>(GF_MAX+1);
+ for (int i = 0; i < GF_MAX+1; ++i) {
+ grainFractions.add(new ArrayList<Value>());
+ }
+ this.type = type;
+ this.station = station;
+ }
+
+ public double getStation() {
+ return station;
+ }
+
+ public int getType() {
+ return type;
+ }
+
+ public void addValue(int grainFraction, Value value) {
+ grainFractions.get(grainFraction).add(value);
+ }
+
+ public boolean hasGrainFraction(String grainFraction) {
+ return hasGrainFraction(grainFractionIndex(grainFraction));
+ }
+
+ public boolean hasGrainFraction(int grainFraction) {
+ List<Value> values = grainFractions.get(grainFraction);
+ return !values.isEmpty();
+ }
+
+ /**
+ * Walks all non-empty grain fractions accepted by the visitor.
+ * A false return from Visitor.visit stops iteration of the
+ * current fraction only.
+ */
+ public void visit(Visitor visitor) {
+ if (!visitor.accept(this)) {
+ return;
+ }
+
+ for (int i = 0; i < GF_MAX+1; ++i) {
+ List<Value> values = grainFractions.get(i);
+ if (values.isEmpty() || !visitor.accept(i)) {
+ continue;
+ }
+ for (Value value: values) {
+ if (!visitor.visit(value, i)) {
+ break;
+ }
+ }
+ }
+ }
+
+ /** True if this station lies in the closed range [a, b]. */
+ public boolean inside(double a, double b) {
+ return station >= a && station <= b;
+ }
+ } // class Station
+
+
+ // Stations by km position; positions within STATION_EPS collapse
+ // to one key (see EpsilonComparator).
+ private TreeMap<Double, List<Station>> stations;
+
+ public SedimentLoadData() {
+ stations = new TreeMap<Double, List<Station>>(EpsilonComparator.CMP);
+ }
+
+ /** Registers a station under its km position. */
+ public void addStation(Station station) {
+ Double key = station.getStation();
+ List<Station> sts = stations.get(key);
+ if (sts == null) {
+ sts = new ArrayList<Station>(2);
+ stations.put(key, sts);
+ }
+ sts.add(station);
+ }
+
+
+ /** Stations within [from, to], both inclusive; swaps the bounds if needed. */
+ public NavigableMap<Double, List<Station>> range(double from, double to) {
+
+ if (from > to) {
+ double t = from; from = to; to = t;
+ }
+
+ return stations.subMap(from, true, to, true);
+ }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r fd3a24336e6a -r 5e3f4b4fcb28 artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadDataFactory.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadDataFactory.java Tue Jul 15 12:47:52 2014 +0200
@@ -0,0 +1,169 @@
+/* Copyright (C) 2014 by Bundesanstalt für Gewässerkunde
+ * Software engineering by Intevation GmbH
+ *
+ * This file is Free Software under the GNU AGPL (>=v3)
+ * and comes with ABSOLUTELY NO WARRANTY! Check out the
+ * documentation coming with Dive4Elements River for details.
+ */
+package org.dive4elements.river.artifacts.model.minfo;
+
+import java.sql.Timestamp;
+import java.util.HashMap;
+import java.util.Iterator;
+
+import net.sf.ehcache.Cache;
+import net.sf.ehcache.Element;
+
+import org.apache.log4j.Logger;
+import org.dive4elements.river.artifacts.cache.CacheFactory;
+import org.dive4elements.river.backend.SessionHolder;
+import org.hibernate.SQLQuery;
+import org.hibernate.Session;
+import org.hibernate.type.StandardBasicTypes;
+
+/**
+ * Loads the sediment load data of a river from the database and
+ * caches the resulting SedimentLoadData structure in the ehcache
+ * named "sediment-load-data".
+ */
+public class SedimentLoadDataFactory
+{
+ private static Logger log = Logger.getLogger(SedimentLoadDataFactory.class);
+
+ public static final String CACHE_NAME = "sediment-load-data";
+
+ /** measurement_type value marking a suspended-load station. */
+ public static final String SUSPENDED_STRING = "Schwebstoff";
+
+ // NOTE: the aliases here must match the addScalar() names below.
+ public static final String SQL_LOAD_RIVER_SEDIMENT_LOADS =
+ "SELECT " +
+ "sl.id AS sl_id, " +
+ "sl.description AS sl_description, " +
+ "ti.start_time AS ti_start_time, " +
+ "ti.stop_time AS ti_stop_time, " +
+ "slv.value AS slv_value, " +
+ "gf.name AS gf_name, " +
+ "ms.id AS ms_id, " +
+ "ms.station AS ms_station, " +
+ "ms.measurement_type AS ms_type " +
+ "FROM sediment_load_values slv " +
+ "JOIN sediment_load sl ON slv.sediment_load_id = sl.id " +
+ "JOIN time_intervals ti ON sl.time_interval_id = ti.id " +
+ "JOIN grain_fraction gf ON sl.grain_fraction_id = gf.id " +
+ "JOIN measurement_station ms ON slv.measurement_station_id = ms.id " +
+ "JOIN rivers r ON ms.river_id = r.id " +
+ "WHERE r.name = :river " +
+ "ORDER BY sl.id";
+
+ /** Shared instance; the constructor is private. */
+ public static final SedimentLoadDataFactory INSTANCE =
+ new SedimentLoadDataFactory();
+
+ private SedimentLoadDataFactory() {
+ }
+
+ /**
+ * Returns the sediment load data for the given river, serving it
+ * from the cache when configured and already loaded.
+ */
+ public SedimentLoadData getSedimentLoadData(String river) {
+ boolean debug = log.isDebugEnabled();
+
+ if (debug) {
+ log.debug(
+ "Looking for sediment load data for river '" + river + "'");
+ }
+
+ Cache cache = CacheFactory.getCache(CACHE_NAME);
+
+ if (cache == null) {
+ if (debug) {
+ log.debug("Cache not configured.");
+ }
+ return getUncached(river);
+ }
+
+ String key = "sediment-load-" + river;
+
+ Element element = cache.get(key);
+
+ if (element != null) {
+ if (debug) {
+ log.debug("Sediment load data found in cache");
+ }
+ return (SedimentLoadData)element.getValue();
+ }
+
+ SedimentLoadData sedimentLoad = getUncached(river);
+
+ if (sedimentLoad != null) {
+ if (debug) {
+ log.debug("Store sediment load data in cache.");
+ }
+ cache.put(new Element(key, sedimentLoad));
+ }
+
+ return sedimentLoad;
+ }
+
+ /** Loads the sediment load data directly from the database. */
+ public SedimentLoadData getUncached(String river) {
+
+ Session session = SessionHolder.HOLDER.get();
+
+ SQLQuery sqlQuery = session.createSQLQuery(SQL_LOAD_RIVER_SEDIMENT_LOADS)
+ .addScalar("sl_id", StandardBasicTypes.INTEGER)
+ .addScalar("sl_description", StandardBasicTypes.STRING)
+ .addScalar("ti_start_time", StandardBasicTypes.TIMESTAMP)
+ .addScalar("ti_stop_time", StandardBasicTypes.TIMESTAMP)
+ .addScalar("slv_value", StandardBasicTypes.DOUBLE)
+ .addScalar("gf_name", StandardBasicTypes.STRING)
+ .addScalar("ms_id", StandardBasicTypes.INTEGER)
+ .addScalar("ms_station", StandardBasicTypes.DOUBLE)
+ .addScalar("ms_type", StandardBasicTypes.STRING);
+
+ sqlQuery.setString("river", river);
+
+ SedimentLoadData.Load load = null;
+ int grainFractionIndex = -1;
+
+ HashMap<Integer, SedimentLoadData.Station> id2station
+ = new HashMap<Integer, SedimentLoadData.Station>();
+
+ for (Iterator iter = sqlQuery.iterate(); iter.hasNext();) {
+ Object [] row = (Object [])iter.next();
+
+ Integer sl_id = (Integer)row[0];
+ String sl_description = (String)row[1];
+ Timestamp ti_start_time = (Timestamp)row[2];
+ Timestamp ti_stop_time = (Timestamp)row[3];
+ Double slv_value = (Double)row[4];
+ String gf_name = (String)row[5];
+ Integer ms_id = (Integer)row[6];
+ Double ms_station = (Double)row[7];
+ String ms_type = (String)row[8];
+
+ // Rows are ordered by sl.id, so a change in id starts a new load.
+ if (load == null || load.getId() != sl_id) {
+ load = new SedimentLoadData.Load(
+ sl_id, sl_description, ti_start_time, ti_stop_time);
+
+ // Grain fractions only change when a new sediment load starts.
+ grainFractionIndex =
+ SedimentLoadData.grainFractionIndex(gf_name);
+
+ if (grainFractionIndex == -1) {
+ log.error("Unknown grain fraction type: " + gf_name);
+ break;
+ }
+ }
+
+ SedimentLoadData.Station station = id2station.get(ms_id);
+ if (station == null) {
+ int type = ms_type.equalsIgnoreCase(SUSPENDED_STRING)
+ ? SedimentLoadData.Station.SUSPENDED
+ : SedimentLoadData.Station.BED_LOAD;
+
+ station = new SedimentLoadData.Station(type, ms_station);
+ id2station.put(ms_id, station);
+ }
+
+ station.addValue(
+ grainFractionIndex,
+ new SedimentLoadData.Value(load, slv_value));
+ }
+
+ SedimentLoadData sld = new SedimentLoadData();
+
+ for (SedimentLoadData.Station station: id2station.values()) {
+ sld.addStation(station);
+ }
+
+ return sld;
+ }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r fd3a24336e6a -r 5e3f4b4fcb28 artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFacet.java
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFacet.java Mon Jul 14 15:36:44 2014 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFacet.java Tue Jul 15 12:47:52 2014 +0200
@@ -176,6 +176,7 @@
values[0][i*3+2] = endValue;
values[1][i*3+2] = kmLoad.getValue();
matchFound = true;
+ break;
}
}
// Store points without match for later assessment.
diff -r fd3a24336e6a -r 5e3f4b4fcb28 artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFactory.java
--- a/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFactory.java Mon Jul 14 15:36:44 2014 +0200
+++ b/artifacts/src/main/java/org/dive4elements/river/artifacts/model/minfo/SedimentLoadFactory.java Tue Jul 15 12:47:52 2014 +0200
@@ -243,7 +243,7 @@
SedimentLoadLSData[] values =
getSedimentLoadsUncached(river, type, startKm, endKm);
- if (values != null && key != null) {
+ if (values != null) {
log.debug("Store static sediment load values in cache.");
element = new Element(key, values);
cache.put(element);
@@ -296,7 +296,7 @@
syear,
eyear);
- if (values != null && key != null) {
+ if (values != null) {
log.debug("Store sediment loads in cache.");
element = new Element(key, values);
cache.put(element);
@@ -316,7 +316,7 @@
SQLQuery sqlQuery = session.createSQLQuery(SQL_SELECT_SINGLE_TIMES_BY_ID)
.addScalar("starttime", StandardBasicTypes.DATE)
.addScalar("stoptime", StandardBasicTypes.DATE);
- sqlQuery.setDouble("id", id);
+ sqlQuery.setInteger("id", id);
List<Object[]> results = sqlQuery.list();
Object[] row = results.get(0);
More information about the Dive4Elements-commits
mailing list