[Dive4elements-commits] [PATCH 3 of 3] Merged
Wald Commits
scm-commit@wald.intevation.org
Wed Apr 3 16:00:26 CEST 2013
# HG changeset patch
# User Christian Lins <christian.lins@intevation.de>
# Date 1364997621 -7200
# Node ID 25c2505df28fc6b7de1c1c5b0141fa8774136162
# Parent 8d0af912351c2c33cefb31707f35156dd5622591
# Parent b5912ad9926c8dcab9fe88578fa2bc83c9776af2
Merged
diff -r 8d0af912351c -r 25c2505df28f .hgtags
--- a/.hgtags Wed Apr 03 15:59:01 2013 +0200
+++ b/.hgtags Wed Apr 03 16:00:21 2013 +0200
@@ -37,3 +37,4 @@
88e3473a38467e8b5bb7d99e92c3f1a795515bf5 2.9.12
7fa94b793cbe0133503741e142832c8f2ff1aa4b 2.9.13
a5f5599f6fd5e37975d71b8a323aadfeb8d017e6 2.9.14
+437109b3cf49ce83d5bd4d005c71509e966b8cf7 2.9.15
diff -r 8d0af912351c -r 25c2505df28f contrib/make_flys_release/README
--- a/contrib/make_flys_release/README Wed Apr 03 15:59:01 2013 +0200
+++ b/contrib/make_flys_release/README Wed Apr 03 16:00:21 2013 +0200
@@ -7,6 +7,7 @@
FLYS_SOURCE_DIR
TOMCAT_PORT
MAPSERVER_URL
+FONT_PATH
WIKI_URL
LOG_DIR
DEVELOPER
diff -r 8d0af912351c -r 25c2505df28f contrib/make_flys_release/bin/make-importer-package.sh
--- a/contrib/make_flys_release/bin/make-importer-package.sh Wed Apr 03 15:59:01 2013 +0200
+++ b/contrib/make_flys_release/bin/make-importer-package.sh Wed Apr 03 16:00:21 2013 +0200
@@ -4,11 +4,9 @@
# See README for more information
# The working directory. Resulting tarball will be placed in the directory above.
-PKG_DIR=/tmp/flys-importer
-# Default conf
-CONF_DIR=/path/to/conf/dir
+PKG_DIR=~/tmp/flys-importer
# Path to the flys checkout
-FLYS_DIR=/path/to/flys/root
+FLYS_DIR=~/flys/flys/
# Tarball that will be extracted into flys-importer/opt
EXTRAS=$1
@@ -48,12 +46,15 @@
fi
cp ${FLYS_DIR}/flys-backend/doc/schema/*.sql $PKG_DIR/schema
-cp ${FLYS_DIR}/flys-backend/doc/documentation/de/importer-manual.pdf $PKG_DIR
+
+cd ${FLYS_DIR}/flys-backend/doc/documentation/de/
+make importer-manual.pdf
+cp importer-manual.pdf $PKG_DIR
sed -i 's/shpimporter\/shp/geodaesie\/shp/' $PKG_DIR/run_geo.sh
cd $PKG_DIR/..
-DATE=$(date +%Y%m%d%H%m)
+DATE=$(date +%Y%m%d%H%M)
tar -czf flys-importer_${DATE}.tar.gz flys-importer
sha1sum flys-importer_${DATE}.tar.gz > flys-importer_${DATE}.tar.gz.sha1
echo Package is at: `readlink -f flys-importer_${DATE}.tar.gz`
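
Note on the timestamp hunk above: in strftime, %m is the month while %M is the minute, so the old pattern %Y%m%d%H%m repeated the month where the minute was intended. A small Java sketch, illustrative only and not part of the script, that yields the same timestamp as the fixed `date +%Y%m%d%H%M` (in SimpleDateFormat the letters are reversed: MM is the month, mm the minute):

    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class ReleaseTimestamp {
        public static void main(String[] args) {
            // year, month, day, hour, minute -- the corrected ordering
            String stamp = new SimpleDateFormat("yyyyMMddHHmm").format(new Date());
            System.out.println("flys-importer_" + stamp + ".tar.gz");
        }
    }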
diff -r 8d0af912351c -r 25c2505df28f contrib/make_flys_release/make_release.sh
--- a/contrib/make_flys_release/make_release.sh Wed Apr 03 15:59:01 2013 +0200
+++ b/contrib/make_flys_release/make_release.sh Wed Apr 03 16:00:21 2013 +0200
@@ -50,7 +50,7 @@
Default: $DEFAULT_WD
-t Tag the current default branch as "VERSION"
-o, --oracle Release is for oracle.
- VERSION must be in the format MAJOR.MINOR.PATCH
+ VERSION must be in the format MAJOR.MINOR.PATCH or default
EOF
exit 0
}
@@ -67,6 +67,8 @@
TOMCAT_PORT=${TOMCAT_PORT:-8282}
MAPSERVER_URL=${MAPSERVER_URL:-flys-devel.intevation.de}
WIKI_URL=${WIKI_URL:-https://flys-intern.intevation.de/Flys-3.0}
+# Prefix to FreeSans.ttf
+FONT_PATH=${FONT_PATH:-/opt/flys}
# Seddb Configuration
SEDDBURL=${SEDDBURL:-czech-republic.atlas.intevation.de}
@@ -74,11 +76,13 @@
SEDDBBACK=${SEDDBBACK:-XE}
SEDDBUSER=${SEDDBUSER:-seddb}
SEDDBPASS=${SEDDBPASS:-seddbpass}
+SEDINITSQLS=${SEDINITSQLS:-}
+
# Backend configuration
BACKENDURL=${BACKENDURL:-czech-republic.atlas.intevation.de}
BACKENDPORT=${BACKENDPORT:-5432}
-BACKENDBACK=${BACKENDBACK:-flys_2913}
+BACKENDBACK=${BACKENDBACK:-flys_devel}
BACKENDUSER=${BACKENDUSER:-flys_dami}
BACKENDPASS=${BACKENDPASS:-flys_dami}
INITSQLS=${INITSQLS:-}
@@ -117,7 +121,11 @@
fi
VERSION=$1
-ARTIFACT_PORT=${ARTIFACT_PORT:-`echo 1$VERSION | sed 's/\.//g'`}
+if [ "$VERSION" = "default" ]; then
+ ARTIFACT_PORT=${ARTIFACT_PORT:-29999}
+else
+ ARTIFACT_PORT=${ARTIFACT_PORT:-`echo 1$VERSION | sed 's/\.//g'`}
+fi
if [ -z $WORK_DIR ]; then
WORK_DIR=$DEFAULT_WD
@@ -261,6 +269,9 @@
sed -i -e "s at http://example.com/@http://${MAPSERVER_URL}/@g" \
$WORK_DIR/server/conf/rivermap.xml
+sed -i -e "s@/usr/share/fonts/truetype/freefont@${FONT_PATH}@g" \
+ $WORK_DIR/server/conf/mapserver/fontset.txt
+
sed -i -e "s@/tmp/flys-rivers-wms.log@${LOG_DIR}/rivers-wms-${VERSION}.log at g" \
$WORK_DIR/server/conf/mapserver/river-mapfile.vm
@@ -279,7 +290,8 @@
<password>$SEDDBPASS</password>
<dialect>org.hibernate.dialect.Oracle9iDialect</dialect>
<driver>oracle.jdbc.driver.OracleDriver</driver>
- <url>jdbc:oracle:thin:@//$SEDDBURL:$SEDDBPORT/$SEDDBBACK </url>
+ <url>jdbc:oracle:thin:@//$SEDDBURL:$SEDDBPORT/$SEDDBBACK</url>
+ <connection-init-sqls>$SEDINITSQLS</connection-init-sqls>
</seddb-database>
EOF
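
The ARTIFACT_PORT hunk derives the port by prefixing the dot-free version with 1 (2.9.15 becomes 12915) and now falls back to 29999 when building from the default branch. A standalone Java sketch of that mapping, purely illustrative:

    public class ArtifactPort {

        /** Mirrors the shell logic: "default" -> 29999, otherwise "1" + version without dots. */
        static int artifactPort(String version) {
            if ("default".equals(version)) {
                return 29999;
            }
            return Integer.parseInt("1" + version.replace(".", ""));
        }

        public static void main(String[] args) {
            System.out.println(artifactPort("2.9.15"));  // 12915
            System.out.println(artifactPort("default")); // 29999
        }
    }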
diff -r 8d0af912351c -r 25c2505df28f flys-aft/contrib/dips2html.xsl
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-aft/contrib/dips2html.xsl Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,162 @@
+<?xml version="1.0"?>
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:html="http://www.w3.org/1999/xhtml" version="1.0" exclude-result-prefixes="html">
+ <xsl:output method="html"/>
+ <xsl:template name="out">
+ <xsl:param name="value"/>
+ <xsl:choose>
+ <xsl:when test="$value = ''">-/-</xsl:when>
+ <xsl:otherwise>
+ <xsl:value-of select="$value"/>
+ </xsl:otherwise>
+ </xsl:choose>
+ </xsl:template>
+ <xsl:template match="PEGELSTATION">
+ <tr>
+ <td>
+ <xsl:value-of select="@NAME"/>
+ </td>
+ <td>
+ <xsl:value-of select="@NUMMER"/>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@HOCHWERT"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@TK_BLATT"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@BETREIBER"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@GEOBREITE"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@GEOLAENGE"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@GEWAESSER"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@GUELTIGAB"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@LAGESTATUS"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@RECHTSWERT"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@ABLESUNGBIS"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@ABLESUNGSEIT"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@STATIONIERUNG"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@EINGERICHTETAM"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@EINZUGSGEBIET_AEO"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@GEBIETSKENNZIFFER"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@KILOMETRIERUNG_AB"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@LAGE_AM_GEWAESSER"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@ENTFERNUNGMUENDUNG"/>
+ </xsl:call-template>
+ </td>
+ <td>
+ <xsl:call-template name="out">
+ <xsl:with-param name="value" select="@KILOMETRIERUNGSRICHTUNG"/>
+ </xsl:call-template>
+ </td>
+ </tr>
+ <xsl:if test="string-length(@BESCHREIBUNG) > 0">
+ <tr>
+ <td/>
+ <td colspan="21">
+ <xsl:value-of select="@BESCHREIBUNG"/>
+ </td>
+ </tr>
+ </xsl:if>
+ </xsl:template>
+ <xsl:template match="/">
+ <html>
+ <head>
+ <title>DiPs</title>
+ </head>
+ <body>
+ <table border="1" cellspacing="0" width="50%">
+ <tr>
+ <th>Name</th>
+ <th>Nummer</th>
+ <th>Hochwert</th>
+ <th>TK-Blatt</th>
+ <th>Betreiber</th>
+ <th>Geo-Breite</th>
+ <th>Geo-Länge</th>
+ <th>Gewässer</th>
+ <th>Gültig ab</th>
+ <th>Lagestatus</th>
+ <th>Rechtswert</th>
+ <th>Ablesung seit</th>
+ <th>Ablesung bis</th>
+ <th>Stationierung</th>
+ <th>Eingerichtet am</th>
+ <th>Einzugsgebiet AEO</th>
+ <th>Gebietskennziffer</th>
+ <th>Kilometrierung ab</th>
+ <th>Lage am Gewässer</th>
+ <th>Entfernung zu Mündung</th>
+ <th>Kilometrierungsrichtung</th>
+ </tr>
+ <xsl:apply-templates select="/DIPSFLYS/STATIONEN"/>
+ </table>
+ </body>
+ </html>
+ </xsl:template>
+ <xsl:template match="text()"/>
+</xsl:stylesheet>
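
The new dips2html.xsl renders a DiPs gauge export (PEGELSTATION records) as a single HTML table, substituting -/- for empty attributes. A usage sketch with plain JAXP; the file names are placeholders, not part of the repository:

    import java.io.File;
    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.stream.StreamResult;
    import javax.xml.transform.stream.StreamSource;

    public class Dips2Html {
        public static void main(String[] args) throws Exception {
            // Apply the contrib stylesheet to a DiPs XML export.
            Transformer t = TransformerFactory.newInstance()
                .newTransformer(new StreamSource(new File("dips2html.xsl")));
            t.transform(new StreamSource(new File("dips.xml")),
                        new StreamResult(new File("dips.html")));
        }
    }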
diff -r 8d0af912351c -r 25c2505df28f flys-aft/src/main/java/de/intevation/aft/River.java
--- a/flys-aft/src/main/java/de/intevation/aft/River.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-aft/src/main/java/de/intevation/aft/River.java Wed Apr 03 16:00:21 2013 +0200
@@ -196,7 +196,7 @@
int flysId;
try {
- if (rs.next()) {
+ if (!rs.next()) {
log.error(
"FLYS: No master discharge table found for gauge '" +
gauge.getAftName() + "'");
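
The River.java hunk inverts a broken condition: the error must be logged when the query returns no row at all, and the id may only be read after rs.next() has succeeded. A minimal plain-JDBC sketch of the corrected pattern; it mirrors the discharge_tables query in flys-common.properties but is not the project's SymbolicStatement API, and the helper name is illustrative:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    public class MasterDischargeTableLookup {

        /** Returns the id of the master discharge table of a gauge, or -1 if none exists. */
        static int findMasterTableId(Connection conn, int gaugeId) throws SQLException {
            String sql = "SELECT id FROM discharge_tables WHERE gauge_id = ? AND kind = 0";
            try (PreparedStatement stmt = conn.prepareStatement(sql)) {
                stmt.setInt(1, gaugeId);
                try (ResultSet rs = stmt.executeQuery()) {
                    if (!rs.next()) {            // the fixed check: no master table found
                        System.err.println("No master discharge table for gauge " + gaugeId);
                        return -1;
                    }
                    return rs.getInt("id");      // safe: cursor is positioned on the row
                }
            }
        }
    }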
diff -r 8d0af912351c -r 25c2505df28f flys-aft/src/main/java/de/intevation/db/SymbolicStatement.java
--- a/flys-aft/src/main/java/de/intevation/db/SymbolicStatement.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-aft/src/main/java/de/intevation/db/SymbolicStatement.java Wed Apr 03 16:00:21 2013 +0200
@@ -143,14 +143,23 @@
}
public boolean execute() throws SQLException {
+ if (log.isDebugEnabled()) {
+ log.debug("execute: " + compiled);
+ }
return stmnt.execute();
}
public ResultSet executeQuery() throws SQLException {
+ if (log.isDebugEnabled()) {
+ log.debug("query: " + compiled);
+ }
return stmnt.executeQuery();
}
public int executeUpdate() throws SQLException {
+ if (log.isDebugEnabled()) {
+ log.debug("update: " + compiled);
+ }
return stmnt.executeUpdate();
}
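
The SymbolicStatement hunks guard each debug message with log.isDebugEnabled() so the compiled statement is only turned into a string when debug output is actually enabled. A generic sketch of the same pattern with the log4j 1.x API (assumed here; any logging facade with isDebugEnabled() works the same way, and the class name is illustrative):

    import org.apache.log4j.Logger;

    public class GuardedDebugLogging {

        private static final Logger log = Logger.getLogger(GuardedDebugLogging.class);

        static void runQuery(String compiled) {
            // Concatenating "query: " + compiled has a cost even when DEBUG is off,
            // hence the level check before building the message.
            if (log.isDebugEnabled()) {
                log.debug("query: " + compiled);
            }
            // ... execute the statement here ...
        }
    }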
diff -r 8d0af912351c -r 25c2505df28f flys-aft/src/main/resources/sql/aft-common.properties
--- a/flys-aft/src/main/resources/sql/aft-common.properties Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-aft/src/main/resources/sql/aft-common.properties Wed Apr 03 16:00:21 2013 +0200
@@ -17,9 +17,9 @@
SELECT TAFELWERT_NR AS id, WASSERSTAND AS w, ABFLUSS AS q FROM TAFELWERT \
WHERE ABFLUSSTAFEL_NR = :number
select.bfg.id.current = \
- SELECT BFG_ID AS BFG_ID FROM ABFLUSSTAFEL \
+ SELECT ABT.BFG_ID AS BFG_ID FROM ABFLUSSTAFEL ABT \
WHERE GUELTIG_VON IN ( \
SELECT min(GUELTIG_VON) FROM ABFLUSSTAFEL \
WHERE GUELTIG_VON IS NOT NULL AND GUELTIG_BIS IS NULL \
AND MESSSTELLE_NR LIKE :number) \
- AND MESSSTELLE_NR :number
+ AND MESSSTELLE_NR LIKE :number
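
The last line of this hunk completes a predicate that previously lacked its comparison operator: "AND MESSSTELLE_NR :number" is not valid SQL and now reads "AND MESSSTELLE_NR LIKE :number". For illustration, the same query bound with plain JDBC, where each :number placeholder becomes a "?" (a sketch only; the project executes such statements through its own wrapper):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    public class CurrentBfgIdLookup {

        /** Fetches the current BFG_ID of a gauge number from the AFT ABFLUSSTAFEL table. */
        static Long currentBfgId(Connection conn, String number) throws SQLException {
            String sql =
                "SELECT ABT.BFG_ID FROM ABFLUSSTAFEL ABT"
                + " WHERE GUELTIG_VON IN ("
                + "   SELECT min(GUELTIG_VON) FROM ABFLUSSTAFEL"
                + "   WHERE GUELTIG_VON IS NOT NULL AND GUELTIG_BIS IS NULL"
                + "   AND MESSSTELLE_NR LIKE ?)"
                + " AND MESSSTELLE_NR LIKE ?";   // the repaired predicate
            try (PreparedStatement stmt = conn.prepareStatement(sql)) {
                stmt.setString(1, number);
                stmt.setString(2, number);
                try (ResultSet rs = stmt.executeQuery()) {
                    if (rs.next()) {
                        return rs.getLong("BFG_ID");
                    }
                    return null;
                }
            }
        }
    }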
diff -r 8d0af912351c -r 25c2505df28f flys-aft/src/main/resources/sql/aft-oracle-jdbc-oracledriver.properties
--- a/flys-aft/src/main/resources/sql/aft-oracle-jdbc-oracledriver.properties Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-aft/src/main/resources/sql/aft-oracle-jdbc-oracledriver.properties Wed Apr 03 16:00:21 2013 +0200
@@ -4,6 +4,6 @@
GUELTIG_VON, \
GUELTIG_BIS, \
PEGELNULLPUNKT, \
- BFG_ID \
- FROM ABFLUSSTAFEL \
+ AT.BFG_ID \
+ FROM ABFLUSSTAFEL AT \
WHERE MESSSTELLE_NR LIKE :number
diff -r 8d0af912351c -r 25c2505df28f flys-aft/src/main/resources/sql/flys-common.properties
--- a/flys-aft/src/main/resources/sql/flys-common.properties Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-aft/src/main/resources/sql/flys-common.properties Wed Apr 03 16:00:21 2013 +0200
@@ -52,6 +52,6 @@
dt.id AS id, \
dt.bfg_id AS bfg_id \
FROM discharge_tables dt JOIN gauges g ON dt.gauge_id = g.id \
- WHERE g.id = :gauge_id AND g.kind = 0
+ WHERE g.id = :gauge_id AND dt.kind = 0
update.bfg.id.discharge.table = \
UPDATE discharge_tables SET bfg_id = :bfg_id WHERE id = :id
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/contrib/inline-dc-attribute.xsl
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-artifacts/contrib/inline-dc-attribute.xsl Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,71 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ inline-dc-attribute.xsl
+ =======================
+ Transforms datacage templates from:
+
+ <foo>
+ <dc:attribute name="bar" value="${baz}"/>
+ <dc:attribute name="bla" value="${blub}-${urgs}"/>
+ </foo>
+
+ to:
+
+ <foo bar="{$baz}" bla="{$blub}-{$urgs}"/>
+-->
+<xsl:stylesheet version="1.0"
+ xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ xmlns:dc="http://www.intevation.org/2011/Datacage">
+
+ <xsl:output method="xml" encoding="UTF-8" indent="yes"/>
+
+ <xsl:template name="string-replace-all">
+ <xsl:param name="text"/>
+ <xsl:param name="replace"/>
+ <xsl:param name="by"/>
+ <xsl:choose>
+ <xsl:when test="contains($text, $replace)">
+ <xsl:value-of select="substring-before($text,$replace)"/>
+ <xsl:value-of select="$by"/>
+ <xsl:call-template name="string-replace-all">
+ <xsl:with-param name="text" select="substring-after($text,$replace)"/>
+ <xsl:with-param name="replace" select="$replace"/>
+ <xsl:with-param name="by" select="$by"/>
+ </xsl:call-template>
+ </xsl:when>
+ <xsl:otherwise>
+ <xsl:value-of select="$text"/>
+ </xsl:otherwise>
+ </xsl:choose>
+ </xsl:template>
+
+ <xsl:template
+ match="node()[count(dc:attribute) > 0 and namespace-uri() != 'http://www.intevation.org/2011/Datacage']">
+ <xsl:copy>
+ <xsl:for-each select="./dc:attribute">
+ <xsl:attribute name="{@name}">
+ <xsl:call-template name="string-replace-all">
+ <xsl:with-param name="text" select="@value"/>
+ <xsl:with-param name="replace">${</xsl:with-param>
+ <xsl:with-param name="by">{$</xsl:with-param>
+ </xsl:call-template>
+ </xsl:attribute>
+ </xsl:for-each>
+ <xsl:apply-templates select="@*|node()" mode="ignore-text"/>
+ </xsl:copy>
+ </xsl:template>
+
+ <xsl:template match="dc:attribute|text()" mode="ignore-text"/>
+ <xsl:template match="@*|node()" mode="ignore-text">
+ <xsl:copy>
+ <xsl:apply-templates select="@*|node()"/>
+ </xsl:copy>
+ </xsl:template>
+
+ <xsl:template match="@*|node()">
+ <xsl:copy>
+ <xsl:apply-templates select="@*|node()"/>
+ </xsl:copy>
+ </xsl:template>
+
+</xsl:stylesheet>
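
XSLT 1.0 has no replace() function, which is why this stylesheet carries the recursive string-replace-all template: it rewrites the old ${var} placeholders of the datacage templates into attribute-value templates {$var}. For reference, the equivalent of one such rewrite in Java, shown only to document what the template computes:

    public class DcAttributeRewrite {
        public static void main(String[] args) {
            String value = "base_data-wstv-${prot_rel_pos}-${prot_id}";
            // string-replace-all with replace='${' and by='{$':
            String rewritten = value.replace("${", "{$");
            System.out.println(rewritten);  // base_data-wstv-{$prot_rel_pos}-{$prot_id}
        }
    }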
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/doc/conf/artifacts/manualpoints.xml
--- a/flys-artifacts/doc/conf/artifacts/manualpoints.xml Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/doc/conf/artifacts/manualpoints.xml Wed Apr 03 16:00:21 2013 +0200
@@ -17,6 +17,7 @@
<facet name="reference_curve.manualpoints" description="User-provided points *yawn*"/>
<facet name="reference_curve_normalized.manualpoints" description="points"/>
<facet name="historical_discharge.manualpoints" description="Points provided by user." />
+ <facet name="historical_discharge_wq.manualpoints" description="Points provided by user." />
<facet name="cross_section.manualline" description="Lines provided by user." />
<facet name="fix_wq_curve.manualpoints" description="Points provided by user." />
<facet name="fix_deltawt_curve.manualpoints" description="Points provided by user." />
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/doc/conf/artifacts/map.xml
--- a/flys-artifacts/doc/conf/artifacts/map.xml Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/doc/conf/artifacts/map.xml Wed Apr 03 16:00:21 2013 +0200
@@ -32,6 +32,7 @@
<facet name="floodmap.floodmaps"/>
<facet name="floodmap.gauge_location"/>
<facet name="floodmap.externalwms"/>
+ <facet name="floodmap.jetties"/>
</facets>
</outputmode>
</outputmodes>
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/doc/conf/artifacts/sqrelation.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-artifacts/doc/conf/artifacts/sqrelation.xml Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,58 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<artifact name="staticsqrelation">
+ <states>
+ <state id="state.sqrelation.static" description="state.sqrelation.static" state="de.intevation.flys.artifacts.states.sq.SQStaticState">
+ <data name="river" type="String" />
+ <data name="station" type="String" />
+ <outputmodes>
+ <outputmode name="sq_relation_a" description="output.sq_relation" type="chart">
+ <facets>
+ <facet name="sq_a_curve" description="A facet for sq curve"/>
+ <facet name="sq_relation_a.manualpoints" />
+ </facets>
+ </outputmode>
+ <outputmode name="sq_relation_b" description="output.sq_relation" type="chart">
+ <facets>
+ <facet name="sq_b_curve" description="A facet for sq curve"/>
+ <facet name="sq_relation_b.manualpoints" />
+ </facets>
+ </outputmode>
+ <outputmode name="sq_relation_c" description="output.sq_relation" type="chart">
+ <facets>
+ <facet name="sq_c_curve" description="A facet for sq curve"/>
+ <facet name="sq_relation_c.manualpoints" />
+ </facets>
+ </outputmode>
+ <outputmode name="sq_relation_d" description="output.sq_relation" type="chart">
+ <facets>
+ <facet name="sq_d_curve" description="A facet for sq curve"/>
+ <facet name="sq_relation_d.manualpoints" />
+ </facets>
+ </outputmode>
+ <outputmode name="sq_relation_e" description="output.sq_relation" type="chart">
+ <facets>
+ <facet name="sq_e_curve" description="A facet for sq curve"/>
+ <facet name="sq_relation_e.manualpoints" />
+ </facets>
+ </outputmode>
+ <outputmode name="sq_relation_f" description="output.sq_relation" type="chart">
+ <facets>
+ <facet name="sq_relation_f.manualpoints" />
+ <facet name="sq_f_curve" description="A facet for sq curve"/>
+ </facets>
+ </outputmode>
+ <outputmode name="sq_overview" description="output.sq_overview" type="overview">
+ <facets>
+ <facet name="sq_chart_overview" description="A facet for sq chart overview"/>
+ </facets>
+ </outputmode>
+ <outputmode name="sqrelation_export" description="output.sqrelation_export" mime-type="text/plain" type="export">
+ <facets>
+ <facet name="csv" description="facet.sqrelation_export.csv" />
+ <facet name="pdf" description="facet.sqrelation_export.pdf" />
+ </facets>
+ </outputmode>
+ </outputmodes>
+ </state>
+ </states>
+</artifact>
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/doc/conf/artifacts/winfo.xml
--- a/flys-artifacts/doc/conf/artifacts/winfo.xml Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/doc/conf/artifacts/winfo.xml Wed Apr 03 16:00:21 2013 +0200
@@ -566,6 +566,7 @@
<facet name="floodmap.floodmaps"/>
<facet name="floodmap.gauge_location"/>
<facet name="floodmap.externalwms"/>
+ <facet name="floodmap.jetties"/>
</facets>
</outputmode>
<outputmode name="wsplgen_report" description="output.wsplgen_report" mime-type="text/xml" type="report">
@@ -624,6 +625,7 @@
<facet name="historical_discharge.wq.curve"/>
<facet name="historical_discharge.mainvalues.q"/>
<facet name="historical_discharge.mainvalues.w"/>
+ <facet name="historical_discharge_wq.manualpoints"/>
</facets>
</outputmode>
<outputmode name="historical_discharge_export" description="output.historical_discharge.export" mime-type="text/plain" type="export">
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/doc/conf/conf.xml
--- a/flys-artifacts/doc/conf/conf.xml Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/doc/conf/conf.xml Wed Apr 03 16:00:21 2013 +0200
@@ -111,6 +111,9 @@
<artifact-factory name="wmshwspointsfactory" description="Factory to create an artifact that generates WMS facets for HWS Points"
ttl="3600000"
artifact="de.intevation.flys.artifacts.WMSHWSPointsArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
+ <artifact-factory name="wmsjettiesfactory" description="Factory to create an artifact to be used in WINFO"
+ ttl="3600000"
+ artifact="de.intevation.flys.artifacts.WMSJettiesArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
<!-- MINFO specific Artifacts -->
<artifact-factory name="minfo" description="Factory to create an artifact to be used in module minfo."
@@ -119,6 +122,9 @@
<artifact-factory name="bedheight" description="Factory to create an artifact used in minfo datacage."
ttl="3600000"
artifact="de.intevation.flys.artifacts.BedHeightsArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
+ <artifact-factory name="staticsqrelation" description="Factory to create an artifact that generates sq relations from db."
+ ttl="3600000"
+ artifact="de.intevation.flys.artifacts.SQRelationArtifact">de.intevation.artifactdatabase.DefaultArtifactFactory</artifact-factory>
<artifact-factory name="gaugedischargecurve" description="Factory to create an artifact to show a discharge curve for a gauge."
ttl="3600000"
@@ -248,6 +254,7 @@
<artifact name="gaugedischarge" xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="${artifacts.config.dir}/artifacts/gaugedischarge.xml" />
<artifact name="gaugedischargecurve" xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="${artifacts.config.dir}/artifacts/gaugedischargecurve.xml" />
<artifact name="qsector" xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="${artifacts.config.dir}/artifacts/qsector.xml" />
+ <artifact name="staticsqrelation" xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="${artifacts.config.dir}/artifacts/sqrelation.xml" />
</artifacts>
<modules>
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/doc/conf/meta-data.xml
--- a/flys-artifacts/doc/conf/meta-data.xml Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/doc/conf/meta-data.xml Wed Apr 03 16:00:21 2013 +0200
@@ -1,4 +1,4 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<?xml version="1.0" encoding="UTF-8"?>
<dc:template xmlns:dc="http://www.intevation.org/2011/Datacage">
<datacage>
<dc:comment>
@@ -36,13 +36,13 @@
COALESCE(ld_from, '') AS ldf,
COALESCE(ld_to, '') AS ldt
FROM master_artifacts_range
- WHERE gid = CAST(${artifact-id} as uuid)
+ WHERE gid = CAST(${artifact-id} AS uuid)
</dc:statement>
- <dc:elements>
+ <dc:for-each>
<dc:variable name="fromkm" type="number" expr="dc:fromValue($ldm, $ldl, $ldf)"/>
<dc:variable name="tokm" type="number" expr="dc:toValue($ldm, $ldl, $ldt)"/>
<dc:macro-body/>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</dc:when>
<dc:otherwise>
@@ -59,10 +59,10 @@
<dc:macro name="load-system">
<dc:context connection="system">
<dc:statement>
- SELECT id AS river_id, name as river_name FROM rivers
+ SELECT id AS river_id, name AS river_name FROM rivers
WHERE lower(name) LIKE lower(${river})
</dc:statement>
- <dc:elements>
+ <dc:for-each>
<dc:comment>
Base-data macros (mostly data imported from wst-files).
@@ -77,22 +77,19 @@
description AS prot_description
FROM wsts WHERE kind = 0 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <basedata>
- <dc:attribute name="name" value="${prot_description}"/>
+ <dc:for-each>
+ <basedata name="{$prot_description}">
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="base_data-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="staticwkms"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="base_data-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="staticwkms"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</basedata>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</basedata>
</dc:call-macro>
@@ -108,22 +105,19 @@
description AS prot_description
FROM wsts WHERE kind = 0 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <basedata>
- <dc:attribute name="name" value="${prot_description}"/>
+ <dc:for-each>
+ <basedata name="{$prot_description}">
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="base_data-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="wqinterpol"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="base_data-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="wqinterpol"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</basedata>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</basedata>
</dc:call-macro>
@@ -139,22 +133,18 @@
description AS prot_description
FROM wsts WHERE kind = 1 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <additional>
- <dc:attribute name="name" value="${prot_description}"/>
+ <dc:for-each>
+ <additional name="{$prot_description}">
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="additionalsmarks-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="staticwkms"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
- </dc:context>
- </additional>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="additionalsmarks-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="staticwkms"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
+ </dc:context></additional>
+ </dc:for-each>
</dc:context>
</additionals>
</dc:call-macro>
@@ -170,22 +160,19 @@
description AS prot_description
FROM wsts WHERE kind = 1 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <additional>
- <dc:attribute name="name" value="${prot_description}"/>
+ <dc:for-each>
+ <additional name="{$prot_description}">
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="additionals-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="staticwkms"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="additionals-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="staticwkms"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</additional>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</additionals>
</dc:call-macro>
@@ -201,22 +188,19 @@
description AS prot_description
FROM wsts WHERE kind = 1 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <relativepoint>
- <dc:attribute name="name" value="${prot_description}"/>
+ <dc:for-each>
+ <relativepoint name="{$prot_description}">
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="additionals-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="staticwkms"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="additionals-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="staticwkms"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</relativepoint>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</additionals>
</dc:call-macro>
@@ -231,23 +215,19 @@
description AS prot_description
FROM wsts WHERE kind = 2 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <fixation>
- <dc:attribute name="name" value="${prot_description}"/>
- <!--dc:attribute name="ids" value="fixations-wstv-A-${prot_id}"/-->
- <dc:context>
+ <dc:for-each>
+ <fixation name="{$prot_description}">
+ <!--dc:attribute name="ids" value="fixations-wstv-A-${prot_id}"/--><dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="fixations-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="wqinterpol"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="fixations-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="wqinterpol"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</fixation>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</fixations>
</dc:call-macro>
@@ -262,22 +242,19 @@
description AS prot_description
FROM wsts WHERE kind = 2 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <fixation>
- <dc:attribute name="name" value="${prot_description}"/>
+ <dc:for-each>
+ <fixation name="{$prot_description}">
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="fixations-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="wqinterpol"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="fixations-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="wqinterpol"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</fixation>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</fixations>
</dc:call-macro>
@@ -292,22 +269,19 @@
description AS prot_description
FROM wsts WHERE kind = 2 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <fixation>
- <dc:attribute name="name" value="${prot_description}"/>
+ <dc:for-each>
+ <fixation name="{$prot_description}">
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="fixations-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="staticwkms"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="fixations-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="staticwkms"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</fixation>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</fixations>
</dc:call-macro>
@@ -322,22 +296,19 @@
description AS prot_description
FROM wsts WHERE kind = 2 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <relativepoint>
- <dc:attribute name="name" value="${prot_description}"/>
+ <dc:for-each>
+ <relativepoint name="{$prot_description}">
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="fixations-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="staticwkms"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="fixations-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="staticwkms"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</relativepoint>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</fixations>
</dc:call-macro>
@@ -353,22 +324,19 @@
description AS prot_description
FROM wsts WHERE kind = 3 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <official>
- <dc:attribute name="name" value="${prot_description}"/>
+ <dc:for-each>
+ <official name="{$prot_description}">
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="additionals-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="staticwqkms"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="additionals-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="staticwqkms"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</official>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</officiallines>
</dc:call-macro>
@@ -383,22 +351,19 @@
description AS prot_description
FROM wsts WHERE kind = 4 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <relativepoint>
- <dc:attribute name="name" value="${prot_description}"/>
+ <dc:for-each>
+ <relativepoint name="{$prot_description}">
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="heightmarks_points-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="staticwkms"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="heightmarks_points-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="staticwkms"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</relativepoint>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</heightmarks>
</dc:call-macro>
@@ -413,22 +378,19 @@
description AS prot_description
FROM wsts WHERE kind = 4 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <heightmark>
- <dc:attribute name="name" value="${prot_description}"/>
+ <dc:for-each>
+ <heightmark name="{$prot_description}">
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="heightmarks_points-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="staticwkms"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="heightmarks_points-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="staticwkms"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</heightmark>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</heightmarks>
</dc:call-macro>
@@ -443,22 +405,18 @@
description AS prot_description
FROM wsts WHERE kind = 4 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <heightmark>
- <dc:attribute name="name" value="${prot_description}"/>
- <dc:context>
+ <dc:for-each>
+ <heightmark name="{$prot_description}"><dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="heightmarks_annotations-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="wqinterpol"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="heightmarks_annotations-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="wqinterpol"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</heightmark>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</heightmarks>
</dc:call-macro>
@@ -466,129 +424,98 @@
<dc:macro name="basedata_5_flood-protections_relative_points">
<dc:call-macro name="user-range">
- <flood_protections>
- <dc:attribute name="id" value="flood-protections-${river_id}"/>
+ <flood_protections id="flood-protections-{$river_id}">
<dc:context connection="system">
<dc:statement>
SELECT id AS prot_id,
description AS prot_description
FROM wsts WHERE kind = 5 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <relativepoint>
- <dc:attribute name="name" value="${prot_description}"/>
- <dc:attribute name="db-id" value="${prot_id}"/>
- <dc:attribute name="factory" value="staticwkms"/>
- <columns>
+ <dc:for-each>
+ <relativepoint name="{$prot_description}"
+ db-id="{$prot_id}"
+ factory="staticwkms">
+ <columns>
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="flood_protection-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="staticwkms"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="flood_protection-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="staticwkms"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</columns>
</relativepoint>
- </dc:elements>
- </dc:context>
- </flood_protections>
+ </dc:for-each>
+ </dc:context></flood_protections>
</dc:call-macro>
</dc:macro>
<dc:macro name="basedata_5_flood-protections">
<dc:call-macro name="user-range">
- <flood_protections>
- <dc:attribute name="id" value="flood-protections-${river_id}"/>
+ <flood_protections id="flood-protections-{$river_id}">
<dc:context connection="system">
<dc:statement>
SELECT id AS prot_id,
description AS prot_description
FROM wsts WHERE kind = 5 AND river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <flood_protection>
- <dc:attribute name="name" value="${prot_description}"/>
- <dc:attribute name="db-id" value="${prot_id}"/>
- <dc:attribute name="factory" value="staticwkms"/>
+ <dc:for-each>
+ <flood_protection name="{$prot_description}"
+ db-id="{$prot_id}"
+ factory="staticwkms">
<dc:context>
<dc:call-macro name="SQL-wst_columns_statement"/>
- <dc:elements>
- <column>
- <dc:attribute name="name" value="${prot_column_name}"/>
- <dc:attribute name="ids" value="flood_protection-wstv-${prot_rel_pos}-${prot_id}"/>
- <dc:attribute name="factory" value="staticwkms"/>
- <dc:attribute name="info" value="${info} [km ${deffrom} - ${defto}]"/>
- </column>
- </dc:elements>
+ <dc:for-each>
+ <column name="{$prot_column_name}"
+ ids="flood_protection-wstv-{$prot_rel_pos}-{$prot_id}"
+ factory="staticwkms"
+ info="{$info} [km {$deffrom} - {$defto}]"/>
+ </dc:for-each>
</dc:context>
</flood_protection>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</flood_protections>
</dc:call-macro>
</dc:macro>
<dc:macro name="mainvalues">
- <mainvalue>
- <dc:attribute name="factory" value="mainvalue"/>
- <dc:attribute name="ids" value="${river_id}"/>
- </mainvalue>
- <wmainvalue>
- <dc:attribute name="factory" value="mainvalue"/>
- <dc:attribute name="ids" value="${river_id}:w"/>
- </wmainvalue>
- <qmainvalue>
- <dc:attribute name="factory" value="mainvalue"/>
- <dc:attribute name="ids" value="${river_id}:q"/>
- </qmainvalue>
+ <mainvalue factory="mainvalue" ids="{$river_id}"/>
+ <wmainvalue factory="mainvalue" ids="{$river_id}:w"/>
+ <qmainvalue factory="mainvalue" ids="{$river_id}:q"/>
</dc:macro>
<dc:macro name="qsectors">
- <qsector>
- <dc:attribute name="factory" value="qsectors"/>
- <dc:attribute name="ids" value="${river_id}"/>
- </qsector>
+ <qsector factory="qsectors" ids="{$river_id}"/>
</dc:macro>
<dc:macro name="annotations">
- <annotation>
- <dc:attribute name="factory" value="annotations"/>
- <dc:attribute name="ids" value="${river_id}"/>
- </annotation>
+ <annotation factory="annotations" ids="{$river_id}"/>
</dc:macro>
<dc:macro name="annotations_per_type">
<annotations>
<dc:context>
- <annotation>
- <dc:attribute name="name" value="all_annotations"/>
- <dc:attribute name="factory" value="annotations"/>
- <dc:attribute name="ids" value="${river_id}"/>
- </annotation>
+ <annotation name="all_annotations" factory="annotations" ids="{$river_id}"/>
<dc:statement>
SELECT id AS anno_id,
name AS anno_description
FROM annotation_types
</dc:statement>
- <dc:elements>
- <annotation>
- <dc:attribute name="name" value="${anno_description}"/>
- <dc:attribute name="factory" value="annotations"/>
- <dc:attribute name="ids" value="${river_id}:${anno_description}"/>
- </annotation>
- </dc:elements>
+ <dc:for-each>
+ <annotation name="{$anno_description}"
+ factory="annotations"
+ ids="{$river_id}:{$anno_description}"/>
+ </dc:for-each>
</dc:context>
</annotations>
</dc:macro>
<dc:macro name="cross_sections">
<dc:call-macro name="user-range">
- <cross_sections>
- <dc:attribute name="id" value="flood-protections-${river_id}"/>
+ <cross_sections id="flood-protections-{$river_id}">
<dc:context connection="system">
<dc:statement>
SELECT DISTINCT
@@ -599,13 +526,11 @@
WHERE cs.river_id = ${river_id}
AND csl.km BETWEEN ${fromkm} AND ${tokm}
</dc:statement>
- <dc:elements>
- <cross_section>
- <dc:attribute name="name" value="${prot_description}"/>
- <dc:attribute name="ids" value="${prot_id}"/>
- <dc:attribute name="factory" value="crosssections"/>
- </cross_section>
- </dc:elements>
+ <dc:for-each>
+ <cross_section name="{$prot_description}"
+ ids="{$prot_id}"
+ factory="crosssections"/>
+ </dc:for-each>
</dc:context>
</cross_sections>
</dc:call-macro>
@@ -613,8 +538,7 @@
<dc:macro name="hyks">
<dc:call-macro name="user-range">
- <hyks>
- <dc:attribute name="id" value="hyk-${river_id}"/>
+ <hyks id="hyk-{$river_id}">
<dc:context connection="system">
<dc:statement>
SELECT DISTINCT
@@ -625,52 +549,44 @@
WHERE river_id = ${river_id}
AND he.km BETWEEN ${fromkm} AND ${tokm}
</dc:statement>
- <dc:elements>
- <hyk>
- <dc:attribute name="name" value="${hyk_description}"/>
- <dc:attribute name="ids" value="${hyk_id}"/>
- <dc:attribute name="factory" value="hyk"/>
- </hyk>
- </dc:elements>
- </dc:context>
- </hyks>
+ <dc:for-each>
+ <hyk name="{$hyk_description}" ids="{$hyk_id}" factory="hyk"/>
+ </dc:for-each>
+ </dc:context></hyks>
</dc:call-macro>
</dc:macro>
<dc:macro name="flow_velocity_measurements">
<dc:call-macro name="user-range">
<flowvelocitymeasurement>
- <dc:context connection="system">
+ <dc:context connection="system">
<dc:statement>
SELECT id AS fvmid,
description AS fvmd
FROM flow_velocity_measurements WHERE river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <flow_velocity_measurement>
- <dc:attribute name="name" value="${fvmd}"/>
- <dc:attribute name="ids" value="${fvmid}"/>
- <dc:attribute name="factory" value="flowvelocity"/>
+ <dc:for-each>
+ <flow_velocity_measurement name="{$fvmd}"
+ ids="{$fvmid}"
+ factory="flowvelocity">
<dc:context>
- <dc:statement>
- SELECT id, description, station, datetime, v, w, q
- FROM flow_velocity_measure_values
- WHERE measurements_id = ${fvmid}
- AND station BETWEEN ${fromkm} AND ${tokm}
- </dc:statement>
- <dc:elements>
- <measurement_value>
- <dc:attribute name="name" value="${id}-${description}-${station}-${datetime}"/>
- <dc:attribute name="ids" value="${id}"/>
- <dc:attribute name="factory" value="flowvelocity"/>
- </measurement_value>
- </dc:elements>
+ <dc:statement>
+ SELECT id, description, station, datetime, v, w, q
+ FROM flow_velocity_measure_values
+ WHERE measurements_id = ${fvmid}
+ AND station BETWEEN ${fromkm} AND ${tokm}
+ </dc:statement>
+ <dc:for-each>
+ <measurement_value name="{$id}-{$description}-{$station}-{$datetime}"
+ ids="{$id}"
+ factory="flowvelocity"/>
+ </dc:for-each>
</dc:context>
</flow_velocity_measurement>
- </dc:elements>
- </dc:context>
- </flowvelocitymeasurement>
- </dc:call-macro>
+ </dc:for-each>
+ </dc:context>
+ </flowvelocitymeasurement>
+ </dc:call-macro>
</dc:macro>
<dc:macro name="sounding-width">
@@ -682,13 +598,11 @@
description AS bedh_descr
FROM bed_height_single WHERE river_id = ${river_id}
</dc:statement>
- <dc:elements>
- <height>
- <dc:attribute name="factory" value="bedheight"/>
- <dc:attribute name="ids" value="bedheight-singlevalues-${bedh_id}-${bedh_year}"/>
- <dc:attribute name="description" value="${bedh_descr}"/>
- </height>
- </dc:elements>
+ <dc:for-each>
+ <height factory="bedheight"
+ ids="bedheight-singlevalues-{$bedh_id}-{$bedh_year}"
+ description="{$bedh_descr}"/>
+ </dc:for-each>
</dc:context>
</soundings_width>
</dc:macro>
@@ -709,1047 +623,1182 @@
<dc:comment>
+ River-Node
</dc:comment>
- <river>
- <dc:attribute name="name" value="${river_name}"/>
+ <dc:attribute name="name" value="${river_name}"/>
- <dc:choose>
- <dc:when test="dc:contains($parameters, 'recommended')">
- <dc:comment>
- Recommendations (client shall load immediately).
- </dc:comment>
- <dc:if test="dc:contains($artifact-outs, 'w_differences') or (dc:contains($artifact-outs, 'discharge_longitudinal_section'))">
- <dc:call-macro name="annotations"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'historical_discharge_wq')">
- <dc:call-macro name="mainvalues"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'cross_section')">
- <dc:call-macro name="cross_sections"/>
- <dc:call-macro name="hyks"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'computed_discharge_curve')">
- <dc:call-macro name="mainvalues"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'duration_curve')">
- <dc:call-macro name="mainvalues"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'reference_curve')">
- <dc:call-macro name="annotations"/>
- <dc:call-macro name="mainvalues"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'fix_wq_curve')">
- <dc:call-macro name="qsectors"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'longitudinal_section')">
- <dc:call-macro name="annotations"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'fix_longitudinal_section_curve')">
- <dc:call-macro name="annotations"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'bed_difference_epoch')">
- <dc:call-macro name="annotations"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'bed_difference_year')">
- <dc:call-macro name="annotations"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'bed_difference_height_year')">
- <dc:call-macro name="annotations"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'flow_velocity')">
- <dc:call-macro name="annotations"/>
- </dc:if>
- </dc:when>
- <dc:otherwise>
- <dc:comment>
- Non - Recommendations.
- </dc:comment>
- <dc:if test="dc:contains($artifact-outs, 'cross_section')">
- <dc:call-macro name="basedata_0"/>
- <dc:call-macro name="basedata_1_additionals"/>
- <dc:call-macro name="basedata_2_fixations"/>
- <dc:call-macro name="basedata_3_officials"/>
- <dc:call-macro name="basedata_4_heightmarks-points"/>
- <dc:call-macro name="cross_sections"/>
- <dc:call-macro name="hyks"/>
- </dc:if>
- <!--dc:if test="dc:contains($artifact-outs, 'computed_discharge_curve')">
- <dc:call-macro name="basedata_0_wq"/>
- <dc:call-macro name="basedata_4_heightmarks-wq"/>
- </dc:if-->
- <dc:if test="dc:contains($artifact-outs, 'longitudinal_section') or (dc:contains($artifact-outs, 'w_differences')) or (dc:contains($artifact-outs, 'discharge_longitudinal_section'))">
- <dc:call-macro name="longitudinal-section-prototype"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'duration_curve')">
- <dc:call-macro name="mainvalues"/>
- <dc:call-macro name="basedata_2_fixations_relative_point"/>
- <dc:call-macro name="basedata_4_heightmarks-points-relative_points"/>
- <dc:call-macro name="basedata_5_flood-protections_relative_points"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'reference_curve')">
- <dc:call-macro name="annotations"/>
- <!--dc:call-macro name="basedata_0"/-->
- <dc:call-macro name="basedata_1_additionals-relative_point"/>
- <dc:comment comment=" FIXATIONS ---------------------------"/>
+ <dc:choose>
+ <dc:when test="dc:contains($parameters, 'recommended')">
+ <dc:comment>
+ Recommendations (client shall load immediately).
+ </dc:comment>
+ <dc:if test="dc:contains($artifact-outs, 'w_differences') or (dc:contains($artifact-outs, 'discharge_longitudinal_section'))">
+ <dc:call-macro name="annotations"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'historical_discharge_wq')">
+ <dc:call-macro name="mainvalues"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'cross_section')">
+ <dc:call-macro name="cross_sections"/>
+ <dc:call-macro name="hyks"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'computed_discharge_curve')">
+ <dc:call-macro name="mainvalues"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'duration_curve')">
+ <dc:call-macro name="mainvalues"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'reference_curve')">
+ <dc:call-macro name="annotations"/>
+ <dc:call-macro name="mainvalues"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'fix_wq_curve')">
+ <dc:call-macro name="qsectors"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'longitudinal_section')">
+ <dc:call-macro name="annotations"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'fix_longitudinal_section_curve')">
+ <dc:call-macro name="annotations"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'bed_difference_epoch')">
+ <dc:call-macro name="annotations"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'bed_difference_year')">
+ <dc:call-macro name="annotations"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'bed_difference_height_year')">
+ <dc:call-macro name="annotations"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'flow_velocity')">
+ <dc:call-macro name="annotations"/>
+ </dc:if>
+ </dc:when>
+ <dc:otherwise>
+ <dc:comment>
+ Non - Recommendations.
+ </dc:comment>
+ <dc:if test="dc:contains($artifact-outs, 'cross_section')">
+ <dc:call-macro name="basedata_0"/>
+ <dc:call-macro name="basedata_1_additionals"/>
+ <dc:call-macro name="basedata_2_fixations"/>
+ <dc:call-macro name="basedata_3_officials"/>
+ <dc:call-macro name="basedata_4_heightmarks-points"/>
+ <dc:call-macro name="cross_sections"/>
+ <dc:call-macro name="hyks"/>
+ </dc:if>
+ <!--dc:if test="dc:contains($artifact-outs, 'computed_discharge_curve')">
+ <dc:call-macro name="basedata_0_wq"/>
+ <dc:call-macro name="basedata_4_heightmarks-wq"/>
+ </dc:if-->
+ <dc:if test="dc:contains($artifact-outs, 'longitudinal_section') or (dc:contains($artifact-outs, 'w_differences')) or (dc:contains($artifact-outs, 'discharge_longitudinal_section'))">
+ <dc:call-macro name="longitudinal-section-prototype"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'duration_curve')">
+ <dc:call-macro name="mainvalues"/>
<dc:call-macro name="basedata_2_fixations_relative_point"/>
- <dc:comment comment=" HOEHENMARKEN ---------------------------"/>
<dc:call-macro name="basedata_4_heightmarks-points-relative_points"/>
<dc:call-macro name="basedata_5_flood-protections_relative_points"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'fix_wq_curve')">
- <dc:call-macro name="basedata_0_wq"/>
- <dc:call-macro name="basedata_1_additionals_marks"/>
- <dc:call-macro name="basedata_2_fixations_wqkms"/>
- <dc:call-macro name="basedata_3_officials"/>
- <dc:call-macro name="basedata_4_heightmarks-points"/>
- <dc:call-macro name="basedata_5_flood-protections_relative_points"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'fix_deltawt_curve')">
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'fix_longitudinal_section_curve')">
- <dc:call-macro name="annotations"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'map')">
- <map>
- <dc:call-macro name="flood-map-complete"/>
- </map>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'flow_velocity')">
- <dc:call-macro name="annotations"/>
- <dc:call-macro name="flow_velocity_measurements"/>
- </dc:if>
- <dc:comment>
- MINFO bedheight middle
- </dc:comment>
- <dc:if test="dc:contains($artifact-outs, 'bedheight_middle')">
- <dc:call-macro name="sounding-width"/>
- </dc:if>
- <dc:comment comment="--- non-recommendations---"/>
- </dc:otherwise>
- </dc:choose>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'reference_curve')">
+ <dc:call-macro name="annotations"/>
+ <!--dc:call-macro name="basedata_0"/-->
+ <dc:call-macro name="basedata_1_additionals-relative_point"/>
+ <dc:comment comment=" FIXATIONS ---------------------------"/>
+ <dc:call-macro name="basedata_2_fixations_relative_point"/>
+ <dc:comment comment=" HOEHENMARKEN ---------------------------"/>
+ <dc:call-macro name="basedata_4_heightmarks-points-relative_points"/>
+ <dc:call-macro name="basedata_5_flood-protections_relative_points"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'fix_wq_curve')">
+ <dc:call-macro name="basedata_0_wq"/>
+ <dc:call-macro name="basedata_1_additionals_marks"/>
+ <dc:call-macro name="basedata_2_fixations_wqkms"/>
+ <dc:call-macro name="basedata_3_officials"/>
+ <dc:call-macro name="basedata_4_heightmarks-points"/>
+ <dc:call-macro name="basedata_5_flood-protections_relative_points"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'fix_deltawt_curve')">
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'fix_longitudinal_section_curve')">
+ <dc:call-macro name="annotations"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'map')">
+ <dc:call-macro name="flood-map-complete"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'flow_velocity')">
+ <dc:call-macro name="annotations"/>
+ <dc:call-macro name="flow_velocity_measurements"/>
+ </dc:if>
+ <dc:comment>
+ MINFO bedheight middle
+ </dc:comment>
+ <dc:if test="dc:contains($artifact-outs, 'bedheight_middle')">
+ <dc:call-macro name="sounding-width"/>
+ </dc:if>
+ <dc:comment comment="--- non-recommendations---"/>
+ </dc:otherwise>
+ </dc:choose>
+ <dc:if test="dc:contains($artifact-outs, 'waterlevels')">
- <dc:if test="dc:contains($artifact-outs, 'waterlevels')">
+ <!-- base data -->
+ <dc:call-macro name="basedata_0"/>
- <!-- base data -->
- <dc:call-macro name="basedata_0"/>
+ <!-- extra-longitudinal-sections -->
+ <dc:call-macro name="basedata_1_additionals"/>
- <!-- extra-longitudinal-sections -->
- <dc:call-macro name="basedata_1_additionals"/>
+ <!-- fixations -->
+ <dc:call-macro name="basedata_2_fixations"/>
- <!-- fixations -->
- <dc:call-macro name="basedata_2_fixations"/>
+ <!-- flood water marks-->
+ <dc:call-macro name="basedata_4_heightmarks-points"/>
- <!-- flood water marks-->
- <dc:call-macro name="basedata_4_heightmarks-points"/>
+ <!-- flood protection -->
+ <dc:call-macro name="basedata_5_flood-protections"/>
- <!-- flood protection -->
- <dc:call-macro name="basedata_5_flood-protections"/>
+ </dc:if>
- </dc:if>
+ <dc:comment><!-- XXX: Why is this taken out?
<dc:if test="dc:contains($artifact-outs, 'computed_discharge_curve') and (dc:contains($parameters, 'recommended'))">
- <!--dc:call-macro name="basedata_2_fixations_wst"/-->
+ <dc:call-macro name="basedata_2_fixations_wst"/>
+ </dc:if>
+ -->
+ </dc:comment>
+
+ <dc:if test="dc:contains($artifact-outs, 'computed_discharge_curve') and not (dc:contains($parameters, 'recommended'))">
+ <discharge_table_nn>
+ <discharge_table_gauge>
+ <dc:context>
+ <dc:statement>
+ SELECT id AS gauge_id,
+ name AS gauge_name
+ FROM gauges WHERE river_id = ${river_id}
+ </dc:statement>
+ <dc:for-each>
+ <gauge name="{$gauge_name}"
+ db-id="{$gauge_id}"
+ factory="gaugedischarge"
+ from="{$g_start}"
+ to="{$g_stop}"
+ ids="{$gauge_name}"/>
+ <dc:comment>
+ <!--
+ <gauge>
+ <dc:attribute name="name" value="${gauge_name}"/>
+ <dc:attribute name="db-id" value="${gauge_id}"/>
+ <dc:context>
+ <dc:statement>
+ SELECT description AS gauge_desc,
+ d.id AS discharge_id,
+ ti.start_time AS g_start,
+ ti.stop_time AS g_stop
+ FROM discharge_tables d JOIN time_intervals ti
+ ON d.time_interval_id = ti.id
+ WHERE d.gauge_id = ${gauge_id} AND d.kind = 1
+ </dc:statement>
+ <dc:for-each>
+ <historical>
+ <dc:attribute name="name" value="${gauge_desc}"/>
+ <dc:attribute name="factory" value="gaugedischarge"/>
+ <dc:attribute name="from" value="${g_start}"/>
+ <dc:attribute name="to" value="${g_stop}"/>
+ <dc:attribute name="ids" value="${discharge_id}-${g_start}-${g_stop}"/>
+ </historical>
+ </dc:for-each>
+ </dc:context>
+ </gauge>
+ -->
+ </dc:comment>
+ </dc:for-each>
+ </dc:context>
+ </discharge_table_gauge>
+ </discharge_table_nn>
+
+ <dc:call-macro name="basedata_2_fixations_wst"/>
+
+ <dc:call-macro name="basedata_5_flood-protections"/>
+
+ <!-- former waterlevels -->
+ <dc:call-macro name="basedata_0"/>
+
+ <dc:call-macro name="basedata_1_additionals"/>
+
+ <!-- former flood-water-marks -->
+ <dc:call-macro name="basedata_4_heightmarks-points"/>
+ <computed_discharge_curve>
+ <dc:call-macro name="mainvalues"/>
+ </computed_discharge_curve>
+ </dc:if>
+
+ <dc:if test="dc:contains($artifact-outs, 'fix_wq_curve') and not (dc:contains($parameters, 'recommended'))">
+ <discharge_table_nn>
+ <discharge_table_gauge>
+ <dc:context>
+ <dc:statement>
+ SELECT id AS gauge_id,
+ name AS gauge_name
+ FROM gauges WHERE river_id = ${river_id}
+ </dc:statement>
+ <dc:for-each>
+ <gauge name="{$gauge_name}"
+ db-id="{$gauge_id}"
+ factory="gaugedischarge"
+ from="{$g_start}"
+ to="{$g_stop}"
+ ids="{$gauge_name}"/>
+ </dc:for-each>
+ </dc:context>
+ </discharge_table_gauge>
+ </discharge_table_nn>
+ </dc:if>
+
+ <dc:if test="dc:contains($artifact-outs, 'floodmap') or dc:contains($artifact-outs, 'floodmap-hws')">
+ <floodmap>
+ <dc:choose>
+ <dc:when test="dc:contains($parameters, 'recommended')">
+ <dc:call-macro name="flood-map-recommended"/>
+ </dc:when>
+ <dc:when test="dc:contains($parameters, 'dem')">
+ <dc:call-macro name="flood-map-dem"/>
+ </dc:when>
+ <dc:otherwise>
+ <dc:call-macro name="flood-map-complete"/>
+ </dc:otherwise>
+ </dc:choose>
+ </floodmap>
+
+ <dc:if test="dc:contains($parameters, 'hws')">
+ <hws>
+ <dc:call-macro name="flood-map-hws-lines"/>
+ <dc:call-macro name="flood-map-hws-points"/>
+ </hws>
</dc:if>
- <dc:if test="dc:contains($artifact-outs, 'computed_discharge_curve') and not (dc:contains($parameters, 'recommended'))">
- <discharge_table_nn>
- <discharge_table_gauge>
- <dc:context>
- <dc:statement>
- SELECT id AS gauge_id,
- name AS gauge_name
- FROM gauges WHERE river_id = ${river_id}
- </dc:statement>
- <dc:elements>
- <gauge>
- <dc:attribute name="name" value="${gauge_name}"/>
- <dc:attribute name="db-id" value="${gauge_id}"/>
- <dc:attribute name="factory" value="gaugedischarge"/>
- <dc:attribute name="from" value="${g_start}"/>
- <dc:attribute name="to" value="${g_stop}"/>
- <dc:attribute name="ids" value="${gauge_name}"/>
- </gauge>
- <!--
- <gauge>
- <dc:attribute name="name" value="${gauge_name}"/>
- <dc:attribute name="db-id" value="${gauge_id}"/>
- <dc:context>
- <dc:statement>
- SELECT description AS gauge_desc,
- d.id AS discharge_id,
- ti.start_time AS g_start,
- ti.stop_time AS g_stop
- FROM discharge_tables d JOIN time_intervals ti
- ON d.time_interval_id = ti.id
- WHERE d.gauge_id = ${gauge_id} AND d.kind = 1
- </dc:statement>
- <dc:elements>
- <historical>
- <dc:attribute name="name" value="${gauge_desc}"/>
- <dc:attribute name="factory" value="gaugedischarge"/>
- <dc:attribute name="from" value="${g_start}"/>
- <dc:attribute name="to" value="${g_stop}"/>
- <dc:attribute name="ids" value="${discharge_id}-${g_start}-${g_stop}"/>
- </historical>
- </dc:elements>
- </dc:context>
- </gauge>
- -->
- </dc:elements>
- </dc:context>
- </discharge_table_gauge>
+ <dc:macro name="flood-map-recommended">
+ <dc:comment>
+        FIXME: The following two macros look identical to me.
+ </dc:comment>
+ <kilometrage>
+ <riveraxis factory="riveraxis" ids="{$river_id}"/>
+ </kilometrage>
+ <rastermap>
+ <background factory="wmsbackground" ids="{$river_id}"/>
+ </rastermap>
+ </dc:macro>
- </discharge_table_nn>
+ <dc:macro name="flood-map-dem">
+ <dems>
+ <dc:context>
+ <dc:statement>
+ SELECT d.id AS dem_id,
+ r.a AS dem_lower,
+ r.b AS dem_upper,
+ d.name AS name,
+ d.projection || ' | ' || t.start_time || ' - ' || t.stop_time AS info
+ FROM dem d
+ JOIN ranges r ON d.range_id = r.id
+ LEFT JOIN time_intervals t ON d.time_interval_id = t.id
+ WHERE d.river_id = ${river_id}
+ </dc:statement>
+ <dc:for-each>
+ <dem factory="demfactory" ids="{$dem_id}" name="{$name}" info="{$info}"/>
+ </dc:for-each>
+ </dc:context>
+ </dems>
+ </dc:macro>
- <dc:call-macro name="basedata_2_fixations_wst"/>
+ <dc:macro name="filter_hws_ddg">
+ <dc:macro name="durchlass_damm_graben">
+ <dc:macro name="ddg_factory">
+ <dc:for-each>
+ <hws factory="hwsfactory" name="{$hws_name}"/>
+ </dc:for-each>
+ </dc:macro>
- <dc:call-macro name="basedata_5_flood-protections"/>
+    <dc:filter expr="$hws_kind=1">
+ <dc:if test="dc:has-result()">
+ <Durchlass><dc:call-macro name="ddg_factory"/></Durchlass>
+ </dc:if>
+ </dc:filter>
- <!-- former waterlevels -->
- <dc:call-macro name="basedata_0"/>
+    <dc:filter expr="$hws_kind=2">
+ <dc:if test="dc:has-result()">
+ <Damm><dc:call-macro name="ddg_factory"/></Damm>
+ </dc:if>
+ </dc:filter>
- <dc:call-macro name="basedata_1_additionals"/>
+    <dc:filter expr="$hws_kind=3">
+ <dc:if test="dc:has-result()">
+ <Graben><dc:call-macro name="ddg_factory"/></Graben>
+ </dc:if>
+ </dc:filter>
+ </dc:macro>
- <!-- former flood-water-marks -->
- <dc:call-macro name="basedata_4_heightmarks-points"/>
- <computed_discharge_curve>
- <dc:call-macro name="mainvalues"/>
- </computed_discharge_curve>
- </dc:if>
+ <dc:filter expr="$hws_official=1">
+ <dc:if test="dc:has-result()">
+ <official>
+ <dc:call-macro name="durchlass_damm_graben"/>
+ </official>
+ </dc:if>
+ </dc:filter>
- <dc:if test="dc:contains($artifact-outs, 'fix_wq_curve') and not (dc:contains($parameters, 'recommended'))">
- <discharge_table_nn>
- <discharge_table_gauge>
- <dc:context>
- <dc:statement>
- SELECT id AS gauge_id,
- name AS gauge_name
- FROM gauges WHERE river_id = ${river_id}
- </dc:statement>
- <dc:elements>
- <gauge>
- <dc:attribute name="name" value="${gauge_name}"/>
- <dc:attribute name="db-id" value="${gauge_id}"/>
- <dc:attribute name="factory" value="gaugedischarge"/>
- <dc:attribute name="from" value="${g_start}"/>
- <dc:attribute name="to" value="${g_stop}"/>
- <dc:attribute name="ids" value="${gauge_name}"/>
- </gauge>
- </dc:elements>
- </dc:context>
- </discharge_table_gauge>
- </discharge_table_nn>
- </dc:if>
+ <dc:filter expr="$hws_official=0">
+ <dc:if test="dc:has-result()">
+ <inofficial>
+ <dc:call-macro name="durchlass_damm_graben"/>
+ </inofficial>
+ </dc:if>
+ </dc:filter>
+ </dc:macro>
- <dc:if test="dc:contains($artifact-outs, 'floodmap') or dc:contains($artifact-outs, 'floodmap-hws')">
- <floodmap>
- <dc:choose>
- <dc:when test="dc:contains($parameters, 'recommended')">
- <dc:call-macro name="flood-map-recommended"/>
- </dc:when>
- <dc:when test="dc:contains($parameters, 'dem')">
- <dc:call-macro name="flood-map-dem"/>
- </dc:when>
- <dc:otherwise>
- <dc:call-macro name="flood-map-complete"/>
- </dc:otherwise>
- </dc:choose>
- </floodmap>
- <dc:if test="dc:contains($parameters, 'hws')">
- <hws>
- <dc:call-macro name="flood-map-hws-lines" />
- <dc:call-macro name="flood-map-hws-points" />
- </hws>
+ <dc:macro name="flood-map-hws-lines">
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ name AS hws_name,
+ official AS hws_official,
+ kind_id AS hws_kind
+ FROM hws_lines
+ WHERE river_id = ${river_id}
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <lines>
+ <dc:call-macro name="filter_hws_ddg"/>
+ </lines>
</dc:if>
+ </dc:context>
+ </dc:macro>
- <dc:macro name="flood-map-recommended">
- <dc:comment>
- FIXME: Following two macros look identical to me.
- </dc:comment>
- <kilometrage>
- <riveraxis>
- <dc:attribute name="factory" value="riveraxis"/>
- <dc:attribute name="ids" value="${river_id}"/>
- </riveraxis>
- </kilometrage>
- <rastermap>
- <background>
- <dc:attribute name="factory" value="wmsbackground"/>
- <dc:attribute name="ids" value="${river_id}"/>
- </background>
- </rastermap>
- </dc:macro>
- <dc:macro name="flood-map-dem">
- <dems>
- <dc:context>
- <dc:statement>
- SELECT d.id AS dem_id,
- r.a AS dem_lower,
- r.b AS dem_upper,
- d.name AS name,
- d.projection || ' | ' || t.start_time || ' - ' || t.stop_time AS info
- FROM dem d
- JOIN ranges r ON d.range_id = r.id
- JOIN time_intervals t ON d.time_interval_id = t.id
- WHERE d.river_id = ${river_id}
- </dc:statement>
- <dc:elements>
- <dem>
- <dc:attribute name="factory" value="demfactory"/>
- <dc:attribute name="ids" value="${dem_id}"/>
- <dc:attribute name="name" value="${name}"/>
- <dc:attribute name="info" value="${info}"/>
- </dem>
- </dc:elements>
- </dc:context>
- </dems>
- </dc:macro>
- <dc:macro name="flood-map-hws-lines">
+ <dc:macro name="flood-map-hws-points">
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ name AS hws_name,
+ official AS hws_official,
+ kind_id AS hws_kind
+ FROM hws_points
+ WHERE river_id = ${river_id}
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <points>
+ <dc:call-macro name="filter_hws_ddg"/>
+ </points>
+ </dc:if>
+ </dc:context>
+ </dc:macro>
+
+ <dc:macro name="flood-map-km">
+ <dc:context>
+ <dc:statement>
+ SELECT id FROM river_axes_km WHERE river_id = ${river_id}
+ </dc:statement>
+ <dc:for-each>
+ <kilometrage factory="wmskmfactory" ids="{$river_id}"/>
+ </dc:for-each>
+ </dc:context>
+ </dc:macro>
+
+ <dc:macro name="flood-map-qps">
+ <dc:context>
+      <dc:comment>Grab only the actual cross section tracks (kind_id = 1) first</dc:comment>
+ <dc:statement>
+ SELECT DISTINCT
+ cs.kind_id AS kind_id,
+ ck.name AS kind_name
+ FROM cross_section_tracks cs
+ JOIN cross_section_track_kinds ck on cs.kind_id = ck.id
+ WHERE river_id = ${river_id}
+ AND kind_id=1
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <dc:for-each>
+ <actual description="{$kind_name}"
+ factory="wmsqpsfactory"
+ ids="{$river_id};{$kind_name};{$kind_id}"/>
+ </dc:for-each>
+ </dc:if>
+ </dc:context>
+ <dc:context>
+ <dc:comment>Now the other tracks</dc:comment>
+ <dc:statement>
+ SELECT DISTINCT
+ cs.kind_id AS kind_id,
+ ck.name AS kind_name,
+ cs.name AS layer_name
+ FROM cross_section_tracks cs
+ JOIN cross_section_track_kinds ck on cs.kind_id = ck.id
+ WHERE river_id = ${river_id}
+ AND kind_id=0
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <other>
+ <dc:for-each>
+ <misc-qps description="{$layer_name}"
+ factory="wmsqpsfactory"
+ ids="{$river_id};{$layer_name};{$kind_id}"/>
+ </dc:for-each>
+ </other>
+ </dc:if>
+ </dc:context>
+ </dc:macro>
+
+ <dc:macro name="flood-map-riveraxis">
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ ax.kind_id AS kind_id,
+ ak.name AS kind_name
+ FROM river_axes ax
+ JOIN axis_kinds ak on ax.kind_id = ak.id
+ WHERE river_id = ${river_id}
+ AND kind_id=1
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <dc:for-each>
+ <actual description="{$kind_name}"
+ ids="{$river_id};{$kind_name};{$kind_id}"
+ factory="riveraxis"/>
+ </dc:for-each>
+ </dc:if>
+ </dc:context>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ ak.name AS kind_name,
+ ax.kind_id AS kind_id,
+ ax.name AS layer_name
+ FROM river_axes ax
+ JOIN axis_kinds ak on ax.kind_id = ak.id
+ WHERE river_id = ${river_id}
+ AND kind_id <> 1
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <other>
+ <dc:for-each>
+ <misc-axis description="{$layer_name}"
+ ids="{$river_id};{$layer_name};{$kind_id}"
+ factory="riveraxis"/>
+ </dc:for-each>
+ </other>
+ </dc:if>
+ </dc:context>
+ </dc:macro>
+
+ <dc:macro name="flood-map-hydr-boundaries-state">
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ name
+ FROM hydr_boundaries
+ WHERE river_id = ${river_id}
+ AND kind = 2
+ </dc:statement>
+ <dc:for-each>
+ <line factory="wmshydrboundariesfactory"
+ ids="{$river_id};{$name};2"
+ name="{$name}"/>
+ </dc:for-each>
+ </dc:context>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ name
+ FROM hydr_boundaries_poly
+ WHERE river_id = ${river_id}
+ AND kind = 2
+ </dc:statement>
+ <dc:for-each>
+ <line factory="wmshydrboundariespolyfactory"
+ ids="{$river_id};{$name};2"
+ name="{$name}"/>
+ </dc:for-each>
+ </dc:context>
+ </dc:macro>
+
+ <dc:macro name="flood-map-hydr-boundaries-lines">
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ name
+ FROM hydr_boundaries
+ WHERE river_id = ${river_id}
+ AND kind = 1
+ </dc:statement>
+ <dc:comment> What about all other line kinds?</dc:comment>
+ <dc:if test="dc:has-result()">
+ <lines>
+ <dc:for-each>
+ <line factory="wmshydrboundariesfactory"
+ ids="{$river_id};{$name};1"
+ name="{$name}"/>
+ </dc:for-each>
+ </lines>
+ </dc:if>
+ </dc:context>
+ </dc:macro>
+
+ <dc:macro name="flood-map-hydr-boundaries-poly">
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ b.sectie AS sectie_id,
+ sk.name AS sectie
+ FROM hydr_boundaries_poly b
+ JOIN sectie_kinds sk ON b.sectie = sk.id
+ WHERE b.river_id = ${river_id}
+ AND b.kind = 1
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <sobek_areas>
+ <dc:for-each>
+ <boundary name="{$sectie}"
+ factory="wmshydrboundariespolyfactory"
+ ids="{$river_id};{$sectie};1;{$sectie_id};-1"/>
+ </dc:for-each>
+ </sobek_areas>
+ </dc:if>
+ </dc:context>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ b.sobek AS sobek_id,
+ sk.name AS sobek
+ FROM hydr_boundaries_poly b
+ JOIN sobek_kinds sk ON b.sobek = sk.id
+ WHERE b.river_id = ${river_id}
+ AND b.kind = 1
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <sobek_flooded>
+ <dc:for-each>
+ <boundary name="{$sobek}"
+ factory="wmshydrboundariespolyfactory"
+ ids="{$river_id};{$sobek};1;-1;{$sobek_id}"/>
+ </dc:for-each>
+ </sobek_flooded>
+ </dc:if>
+ </dc:context>
+ </dc:macro>
+
+ <dc:macro name="flood-map-hydr-boundaries">
+ <bfg_model>
+ <areas>
+ <dc:call-macro name="flood-map-hydr-boundaries-poly"/>
+ </areas>
+ <dc:call-macro name="flood-map-hydr-boundaries-lines"/>
+ </bfg_model>
+ <federal>
+ <dc:call-macro name="flood-map-hydr-boundaries-state"/>
+ </federal>
+ </dc:macro>
+
+ <dc:macro name="flood-map-floodplain">
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ fp.kind_id AS kind_id,
+ flk.name AS kind_name
+ FROM floodplain fp
+ JOIN floodplain_kinds flk on fp.kind_id = flk.id
+ WHERE river_id = ${river_id}
+ AND kind_id=1
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <dc:for-each>
+ <floody factory="wmsfloodplainfactory"
+ description="{$kind_name}"
+ ids="{$river_id};{$kind_name};{$kind_id}"/>
+ </dc:for-each>
+ </dc:if>
+ </dc:context>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ flk.name AS kind_name,
+ fp.kind_id AS kind_id,
+ fp.name AS layer_name
+ FROM floodplain fp
+ JOIN floodplain_kinds flk on fp.kind_id = flk.id
+ WHERE river_id = ${river_id}
+ AND kind_id <> 1
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <other>
+ <dc:for-each>
+ <floody factory="wmsfloodplainfactory"
+ description="{$layer_name}"
+ ids="{$river_id};{$layer_name};{$kind_id}"/>
+ </dc:for-each>
+ </other>
+ </dc:if>
+ </dc:context>
+ </dc:macro>
+
+ <dc:macro name="hwslines_by_kind">
+ <dc:comment>
+      Call from a context where fed_name, hws_kind, hws_name and river_id
+      are available.
+ </dc:comment>
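+    <dc:comment>
+      Illustrative sketch only (the aliases are assumptions, the real call
+      sites follow in the hwslines macro): a context that provides these
+      variables before calling the macro could look like this:
+      <!--
+      <dc:context>
+        <dc:statement>
+          SELECT name AS hws_name, kind_id AS hws_kind
+          FROM hws_lines WHERE river_id = ${river_id}
+        </dc:statement>
+        <dc:call-macro name="hwslines_by_kind"/>
+      </dc:context>
+      -->
+    </dc:comment>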
+
+ <dc:macro name="hwslines_by_kind_factory">
+ <dc:for-each>
+ <hws factory="wmshwslinesfactory"
+ ids="{$river_id};{$hws_name}"
+ name="{$hws_name}"/>
+ </dc:for-each>
+ </dc:macro>
+
+ <dc:filter expr="$hws_kind=1">
+ <dc:if test="dc:has-result()">
+ <Durchlass>
+ <dc:call-macro name="hwslines_by_kind_factory"/>
+ </Durchlass>
+ </dc:if>
+ </dc:filter>
+
+ <dc:filter expr="$hws_kind=2">
+ <dc:if test="dc:has-result()">
+ <Damm>
+ <dc:call-macro name="hwslines_by_kind_factory"/>
+ </Damm>
+ </dc:if>
+ </dc:filter>
+
+ <dc:filter expr="$hws_kind=3">
+ <dc:if test="dc:has-result()">
+ <Graben>
+ <dc:call-macro name="hwslines_by_kind_factory"/>
+ </Graben>
+ </dc:if>
+ </dc:filter>
+ </dc:macro>
+
+ <dc:macro name="hwslines">
+ <hws_lines>
+ <official>
<dc:context>
<dc:statement>
SELECT DISTINCT
- name AS hws_name,
- official AS hws_official,
- kind_id AS hws_kind
- FROM hws_lines
+ fs.name AS fed_name,
+ fs.id AS fed_id
+ FROM hws_lines hws
+ JOIN fed_states fs ON hws.fed_state_id = fs.id
WHERE river_id = ${river_id}
+ AND hws.official=1
</dc:statement>
- <lines>
- <official>
- <Durchlass>
- <dc:elements filter="$hws_kind=1 and $hws_official=1">
- <hws>
- <dc:attribute name="factory" value="hwsfactory"/>
- <dc:attribute name="name" value="${hws_name}"/>
- </hws>
- </dc:elements>
- </Durchlass>
- <Damm>
- <dc:elements filter="$hws_kind=2 and $hws_official=1">
- <hws>
- <dc:attribute name="factory" value="hwsfactory"/>
- <dc:attribute name="name" value="${hws_name}"/>
- </hws>
- </dc:elements>
- </Damm>
- <Graben>
- <dc:elements filter="$hws_kind=3 and $hws_official=1">
- <hws>
- <dc:attribute name="factory" value="hwsfactory"/>
- <dc:attribute name="name" value="${hws_name}"/>
- </hws>
- </dc:elements>
- </Graben>
- </official>
- <inofficial>
- <Durchlass>
- <dc:elements filter="$hws_kind=1 and $hws_official=0">
- <hws>
- <dc:attribute name="factory" value="hwsfactory"/>
- <dc:attribute name="name" value="${hws_name}"/>
- </hws>
- </dc:elements>
- </Durchlass>
- <Damm>
- <dc:elements filter="$hws_kind=2 and $hws_official=0">
- <hws>
- <dc:attribute name="factory" value="hwsfactory"/>
- <dc:attribute name="name" value="${hws_name}"/>
- </hws>
- </dc:elements>
- </Damm>
- <Graben>
- <dc:elements filter="$hws_kind=3 and $hws_official=0">
- <hws>
- <dc:attribute name="factory" value="hwsfactory"/>
- <dc:attribute name="name" value="${hws_name}"/>
- </hws>
- </dc:elements>
- </Graben>
- </inofficial>
- </lines>
- </dc:context>
- </dc:macro>
- <dc:macro name="flood-map-hws-points">
- <dc:context>
- <dc:statement>
- SELECT DISTINCT
- name AS hws_points_name,
- official AS hws_points_official,
- kind_id AS hws_points_kind
- FROM hws_points
- WHERE river_id = ${river_id}
- </dc:statement>
- <points>
- <official>
- <Durchlass>
- <dc:elements filter="$hws_points_kind=1 and $hws_points_official=1">
- <hws>
- <dc:attribute name="factory" value="hwsfactory"/>
- <dc:attribute name="name" value="${hws_points_name}"/>
- </hws>
- </dc:elements>
- </Durchlass>
- <Damm>
- <dc:elements filter="$hws_points_kind=2 and $hws_points_official=1">
- <hws>
- <dc:attribute name="factory" value="hwsfactory"/>
- <dc:attribute name="name" value="${hws_points_name}"/>
- </hws>
- </dc:elements>
- </Damm>
- <Graben>
- <dc:elements filter="$hws_kind=3 and $hws_official=1">
- <hws>
- <dc:attribute name="factory" value="hwsfactory"/>
- <dc:attribute name="name" value="${hws_points_name}"/>
- </hws>
- </dc:elements>
- </Graben>
- </official>
- <inofficial>
- <Durchlass>
- <dc:elements filter="$hws_points_kind=1 and $hws_points_official=0">
- <hws>
- <dc:attribute name="factory" value="hwsfactory"/>
- <dc:attribute name="name" value="${hws_points_name}"/>
- </hws>
- </dc:elements>
- </Durchlass>
- <Damm>
- <dc:elements filter="$hws_points_kind=2 and $hws_points_official=0">
- <hws>
- <dc:attribute name="factory" value="hwsfactory"/>
- <dc:attribute name="name" value="${hws_points_name}"/>
- </hws>
- </dc:elements>
- </Damm>
- <Graben>
- <dc:elements filter="$hws_points_kind=3 and $hws_points_official=0">
- <hws>
- <dc:attribute name="factory" value="hwsfactory"/>
- <dc:attribute name="name" value="${hws_points_name}"/>
- </hws>
- </dc:elements>
- </Graben>
- </inofficial>
- </points>
- </dc:context>
- </dc:macro>
- <dc:macro name="flood-map-km">
- <dc:context>
- <dc:statement>
- SELECT count(*) as km_exists
- FROM river_axes_km WHERE river_id = ${river_id}
- </dc:statement>
- <dc:elements>
- <dc:if test="$km_exists>0">
- <km>
- <dc:attribute name="factory" value="wmskmfactory"/>
- <dc:attribute name="ids" value="${river_id}"/>
- </km>
- </dc:if>
- </dc:elements>
- </dc:context>
- </dc:macro>
- <dc:macro name="flood-map-qps">
- <dc:context>
- <dc:statement>
- SELECT count(*) as km_exists
- FROM cross_section_tracks WHERE river_id = ${river_id}
- </dc:statement>
- <dc:elements>
- <dc:if test="$km_exists>0">
- <qps>
- <dc:attribute name="factory" value="wmsqpsfactory"/>
- <dc:attribute name="ids" value="${river_id}"/>
- </qps>
- </dc:if>
- </dc:elements>
- </dc:context>
- </dc:macro>
- <dc:macro name="flood-map-hydr-boundaries">
- <hydr_boundaries_lines>
- <dc:call-macro name="flood-map-hydr-boundaries-lines"/>
- </hydr_boundaries_lines>
- <hydr_boundaries_polygons>
- <dc:call-macro name="flood-map-hydr-boundaries-poly"/>
- </hydr_boundaries_polygons>
- </dc:macro>
- <dc:macro name="flood-map-hydr-boundaries-lines">
- <bfg>
- <dc:context>
- <dc:statement>
- SELECT count(*) as km_exists, name as name
- FROM hydr_boundaries WHERE river_id = ${river_id} AND kind = 1 GROUP BY name
- </dc:statement>
- <dc:elements>
- <dc:if test="$km_exists>0">
- <hydrboundary>
- <dc:attribute name="factory" value="wmshydrboundariesfactory"/>
- <dc:attribute name="ids" value="${river_id};${name}"/>
- <dc:attribute name="name" value="${name}"/>
- </hydrboundary>
- </dc:if>
- </dc:elements>
- </dc:context>
- </bfg>
- <land>
- <dc:context>
- <dc:statement>
- SELECT count(*) as km_exists, name as name
- FROM hydr_boundaries WHERE river_id = ${river_id} AND kind = 2 GROUP BY name
- </dc:statement>
- <dc:elements>
- <dc:if test="$km_exists>0">
- <hydrboundary>
- <dc:attribute name="factory" value="wmshydrboundariesfactory"/>
- <dc:attribute name="ids" value="${river_id};${name}"/>
- <dc:attribute name="name" value="${name}"/>
- </hydrboundary>
- </dc:if>
- </dc:elements>
- </dc:context>
- </land>
- </dc:macro>
- <dc:macro name="flood-map-hydr-boundaries-poly">
- <bfg>
- <dc:context>
- <dc:statement>
- SELECT count(*) as km_exists, name as name
- FROM hydr_boundaries_poly WHERE river_id = ${river_id} AND kind = 1 GROUP BY name
- </dc:statement>
- <dc:elements>
- <dc:if test="$km_exists>0">
- <hws>
- <dc:attribute name="factory" value="wmshydrboundariespolyfactory"/>
- <dc:attribute name="ids" value="${river_id};${name}"/>
- <dc:attribute name="name" value="${name}"/>
- </hws>
- </dc:if>
- </dc:elements>
- </dc:context>
- </bfg>
- <land>
- <dc:context>
- <dc:statement>
- SELECT count(*) as km_exists, name as name
- FROM hydr_boundaries_poly WHERE river_id = ${river_id} AND kind = 2 GROUP BY name
- </dc:statement>
- <dc:elements>
- <dc:if test="$km_exists>0">
- <hws>
- <dc:attribute name="factory" value="wmshydrboundariespolyfactory"/>
- <dc:attribute name="ids" value="${river_id};${name}"/>
- <dc:attribute name="name" value="${name}"/>
- </hws>
- </dc:if>
- </dc:elements>
- </dc:context>
- </land>
- </dc:macro>
- <dc:macro name="flood-map-floodplain">
- <dc:context>
- <dc:statement>
- SELECT count(*) as km_exists
- FROM floodplain WHERE river_id = ${river_id}
- </dc:statement>
- <dc:elements>
- <dc:if test="$km_exists>0">
- <floodplain>
- <dc:attribute name="factory" value="wmsfloodplainfactory"/>
- <dc:attribute name="ids" value="${river_id}"/>
- </floodplain>
- </dc:if>
- </dc:elements>
- </dc:context>
- </dc:macro>
-
- <dc:macro name="hwslines_by_kind">
- <dc:comment>
- Call from a context where fed_name hws_kind hws_name and river_id is
- availble
- </dc:comment>
- <Durchlass>
- <dc:elements filter="$hws_kind=1">
- <hws>
- <dc:attribute name="factory" value="wmshwslinesfactory"/>
- <dc:attribute name="ids" value="${river_id};${hws_name}"/>
- <dc:attribute name="name" value="${hws_name}"/>
- </hws>
- </dc:elements>
- </Durchlass>
- <Damm>
- <dc:elements filter="$hws_kind=2">
- <hws>
- <dc:attribute name="factory" value="wmshwslinesfactory"/>
- <dc:attribute name="ids" value="${river_id};${hws_name}"/>
- <dc:attribute name="name" value="${hws_name}"/>
- </hws>
- </dc:elements>
- </Damm>
- <Graben>
- <dc:elements filter="$hws_kind=3">
- <hws>
- <dc:attribute name="factory" value="wmshwslinesfactory"/>
- <dc:attribute name="ids" value="${river_id};${hws_name}"/>
- <dc:attribute name="name" value="${hws_name}"/>
- </hws>
- </dc:elements>
- </Graben>
- </dc:macro>
-
- <dc:macro name="hwslines">
- <hws_lines>
- <official>
+ <dc:for-each>
<dc:context>
<dc:statement>
SELECT DISTINCT
- fs.name AS fed_name,
- fs.id AS fed_id
- FROM hws_lines hws
- JOIN fed_states fs ON hws.fed_state_id = fs.id
- WHERE river_id = ${river_id}
- AND hws.official=1
- </dc:statement>
- <dc:elements>
- <dc:context>
- <dc:statement>
- SELECT DISTINCT
- name AS hws_name,
- kind_id AS hws_kind
- FROM hws_lines
- WHERE river_id = ${river_id}
- AND official=1
- AND fed_state_id = ${fed_id} ORDER BY name
- </dc:statement>
- <fedstate>
- <dc:attribute name="description" value="${fed_name}"/>
- <dc:call-macro name="hwslines_by_kind"/>
- </fedstate>
- </dc:context>
- </dc:elements>
- </dc:context>
- <dc:context>
- <dc:statement>
- SELECT distinct
name AS hws_name,
kind_id AS hws_kind
FROM hws_lines
WHERE river_id = ${river_id}
AND official=1
- AND fed_state_id IS NULL
- ORDER BY name
+ AND fed_state_id = ${fed_id} ORDER BY name
</dc:statement>
- <hws_fed_unknown>
+ <fedstate description="{$fed_name}">
<dc:call-macro name="hwslines_by_kind"/>
- </hws_fed_unknown>
+ </fedstate>
</dc:context>
- </official>
- <inofficial>
+ </dc:for-each>
+ </dc:context>
+ <dc:context>
+ <dc:statement>
+ SELECT distinct
+ name AS hws_name,
+ kind_id AS hws_kind
+ FROM hws_lines
+ WHERE river_id = ${river_id}
+ AND official=1
+ AND fed_state_id IS NULL
+ ORDER BY name
+ </dc:statement>
+ <hws_fed_unknown>
+ <dc:call-macro name="hwslines_by_kind"/>
+ </hws_fed_unknown>
+ </dc:context>
+ </official>
+ <inofficial>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ fs.name AS fed_name,
+ fs.id AS fed_id
+ FROM hws_lines hws
+ JOIN fed_states fs ON hws.fed_state_id = fs.id
+ WHERE river_id = ${river_id}
+ AND hws.official=0
+ </dc:statement>
+ <dc:for-each>
<dc:context>
<dc:statement>
SELECT DISTINCT
- fs.name AS fed_name,
- fs.id AS fed_id
- FROM hws_lines hws
- JOIN fed_states fs ON hws.fed_state_id = fs.id
- WHERE river_id = ${river_id}
- AND hws.official=0
- </dc:statement>
- <dc:elements>
- <dc:context>
- <dc:statement>
- SELECT DISTINCT
- name AS hws_name,
- kind_id AS hws_kind
- FROM hws_lines
- WHERE river_id = ${river_id}
- AND official=0
- AND fed_state_id = ${fed_id} ORDER BY name
- </dc:statement>
- <fedstate>
- <dc:attribute name="description" value="${fed_name}"/>
- <dc:call-macro name="hwslines_by_kind"/>
- </fedstate>
- </dc:context>
- </dc:elements>
- </dc:context>
- <dc:context>
- <dc:statement>
- SELECT distinct
name AS hws_name,
kind_id AS hws_kind
FROM hws_lines
WHERE river_id = ${river_id}
AND official=0
- AND fed_state_id IS NULL ORDER BY name
+ AND fed_state_id = ${fed_id} ORDER BY name
</dc:statement>
- <hws_fed_unknown>
+ <fedstate description="{$fed_name}">
<dc:call-macro name="hwslines_by_kind"/>
- </hws_fed_unknown>
+ </fedstate>
</dc:context>
- </inofficial>
- </hws_lines>
+ </dc:for-each>
+ </dc:context>
+ <dc:context>
+ <dc:statement>
+ SELECT distinct
+ name AS hws_name,
+ kind_id AS hws_kind
+ FROM hws_lines
+ WHERE river_id = ${river_id}
+ AND official=0
+ AND fed_state_id IS NULL ORDER BY name
+ </dc:statement>
+ <hws_fed_unknown>
+ <dc:call-macro name="hwslines_by_kind"/>
+ </hws_fed_unknown>
+ </dc:context>
+ </inofficial>
+ </hws_lines>
+ </dc:macro>
+
+ <dc:macro name="hwspoints_by_kind">
+ <dc:comment>
+      Call from a context where fed_name, hws_kind, hws_name and river_id
+      are available.
+ </dc:comment>
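+    <dc:comment>
+      Illustrative sketch only (aliases are assumptions): fed_name is
+      expected from an enclosing per federal state context, roughly as in
+      the hwspoints macro below:
+      <!--
+      <dc:context>
+        <dc:statement>
+          SELECT name AS hws_name, kind_id AS hws_kind
+          FROM hws_points
+          WHERE river_id = ${river_id} AND fed_state_id = ${fed_id}
+        </dc:statement>
+        <fedstate description="{$fed_name}">
+          <dc:call-macro name="hwspoints_by_kind"/>
+        </fedstate>
+      </dc:context>
+      -->
+    </dc:comment>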
+
+ <dc:macro name="hwspoints_by_kind_factory">
+ <dc:for-each>
+ <hws factory="wmshwspointsfactory"
+ ids="{$river_id};{$hws_name}"
+ name="{$hws_name}"/>
+ </dc:for-each>
</dc:macro>
- <dc:macro name="hwspoints_by_kind">
- <dc:comment>
- Call from a context where fed_name hws_kind hws_name and river_id is
- availble
- </dc:comment>
+ <dc:filter expr="$hws_kind=1">
+ <dc:if test="dc:has-result()">
<Durchlass>
- <dc:elements filter="$hws_kind=1">
- <hws>
- <dc:attribute name="factory" value="wmshwspointsfactory"/>
- <dc:attribute name="ids" value="${river_id};${hws_name}"/>
- <dc:attribute name="name" value="${hws_name}"/>
- </hws>
- </dc:elements>
+ <dc:call-macro name="hwspoints_by_kind_factory"/>
</Durchlass>
+ </dc:if>
+ </dc:filter>
+
+ <dc:filter expr="$hws_kind=2">
+ <dc:if test="dc:has-result()">
<Damm>
- <dc:elements filter="$hws_kind=2">
- <hws>
- <dc:attribute name="factory" value="wmshwspointsfactory"/>
- <dc:attribute name="ids" value="${river_id};${hws_name}"/>
- <dc:attribute name="name" value="${hws_name}"/>
- </hws>
- </dc:elements>
+ <dc:call-macro name="hwspoints_by_kind_factory"/>
</Damm>
+ </dc:if>
+ </dc:filter>
+
+ <dc:filter expr="$hws_kind=3">
+ <dc:if test="dc:has-result()">
<Graben>
- <dc:elements filter="$hws_kind=3">
- <hws>
- <dc:attribute name="factory" value="wmshwspointsfactory"/>
- <dc:attribute name="ids" value="${river_id};${hws_name}"/>
- <dc:attribute name="name" value="${hws_name}"/>
- </hws>
- </dc:elements>
+ <dc:call-macro name="hwspoints_by_kind_factory"/>
</Graben>
- </dc:macro>
+ </dc:if>
+ </dc:filter>
+ </dc:macro>
- <dc:macro name="hwspoints">
- <hws_points>
- <official>
+ <dc:macro name="hwspoints">
+ <hws_points>
+ <official>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ fs.name AS fed_name,
+ fs.id AS fed_id
+ FROM hws_points hws
+ JOIN fed_states fs ON hws.fed_state_id = fs.id
+ WHERE river_id = ${river_id}
+ AND hws.official=1
+ </dc:statement>
+ <dc:for-each>
<dc:context>
<dc:statement>
SELECT DISTINCT
- fs.name AS fed_name,
- fs.id AS fed_id
- FROM hws_points hws
- JOIN fed_states fs ON hws.fed_state_id = fs.id
- WHERE river_id = ${river_id}
- AND hws.official=1
- </dc:statement>
- <dc:elements>
- <dc:context>
- <dc:statement>
- SELECT DISTINCT
- name AS hws_name,
- kind_id AS hws_kind
- FROM hws_points
- WHERE river_id = ${river_id}
- AND official=1
- AND fed_state_id = ${fed_id} ORDER BY name
- </dc:statement>
- <fedstate>
- <dc:attribute name="description" value="${fed_name}"/>
- <dc:call-macro name="hwspoints_by_kind"/>
- </fedstate>
- </dc:context>
- </dc:elements>
- </dc:context>
- <dc:context>
- <dc:statement>
- SELECT distinct
name AS hws_name,
kind_id AS hws_kind
FROM hws_points
WHERE river_id = ${river_id}
AND official=1
- AND fed_state_id IS NULL
- ORDER BY name
+ AND fed_state_id = ${fed_id} ORDER BY name
</dc:statement>
- <hws_fed_unknown>
+ <fedstate description="{$fed_name}">
<dc:call-macro name="hwspoints_by_kind"/>
- </hws_fed_unknown>
+ </fedstate>
</dc:context>
- </official>
- <inofficial>
+ </dc:for-each>
+ </dc:context>
+ <dc:context>
+ <dc:statement>
+ SELECT distinct
+ name AS hws_name,
+ kind_id AS hws_kind
+ FROM hws_points
+ WHERE river_id = ${river_id}
+ AND official=1
+ AND fed_state_id IS NULL
+ ORDER BY name
+ </dc:statement>
+ <hws_fed_unknown>
+ <dc:call-macro name="hwspoints_by_kind"/>
+ </hws_fed_unknown>
+ </dc:context>
+ </official>
+ <inofficial>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ fs.name AS fed_name,
+ fs.id AS fed_id
+ FROM hws_points hws
+ JOIN fed_states fs ON hws.fed_state_id = fs.id
+ WHERE river_id = ${river_id}
+ AND hws.official=0
+ </dc:statement>
+ <dc:for-each>
<dc:context>
<dc:statement>
SELECT DISTINCT
- fs.name AS fed_name,
- fs.id AS fed_id
- FROM hws_points hws
- JOIN fed_states fs ON hws.fed_state_id = fs.id
- WHERE river_id = ${river_id}
- AND hws.official=0
- </dc:statement>
- <dc:elements>
- <dc:context>
- <dc:statement>
- SELECT DISTINCT
- name AS hws_name,
- kind_id AS hws_kind
- FROM hws_points
- WHERE river_id = ${river_id}
- AND official=0
- AND fed_state_id = ${fed_id} ORDER BY name
- </dc:statement>
- <fedstate>
- <dc:attribute name="description" value="${fed_name}"/>
- <dc:call-macro name="hwspoints_by_kind"/>
- </fedstate>
- </dc:context>
- </dc:elements>
- </dc:context>
- <dc:context>
- <dc:statement>
- SELECT distinct
name AS hws_name,
kind_id AS hws_kind
FROM hws_points
WHERE river_id = ${river_id}
AND official=0
- AND fed_state_id IS NULL ORDER BY name
+ AND fed_state_id = ${fed_id} ORDER BY name
</dc:statement>
- <hws_fed_unknown>
+ <fedstate description="{$fed_name}">
<dc:call-macro name="hwspoints_by_kind"/>
- </hws_fed_unknown>
+ </fedstate>
</dc:context>
- </inofficial>
- </hws_points>
- </dc:macro>
-
-
- <dc:macro name="flood-map-buildings">
+ </dc:for-each>
+ </dc:context>
<dc:context>
<dc:statement>
- SELECT count(*) as km_exists, name as name
- FROM buildings WHERE river_id = ${river_id} GROUP BY name
+ SELECT distinct
+ name AS hws_name,
+ kind_id AS hws_kind
+ FROM hws_points
+ WHERE river_id = ${river_id}
+ AND official=0
+ AND fed_state_id IS NULL ORDER BY name
</dc:statement>
- <dc:elements>
- <dc:if test="$km_exists>0">
- <building>
- <dc:attribute name="factory" value="wmsbuildingsfactory"/>
- <dc:attribute name="ids" value="${river_id};${name}"/>
- <dc:attribute name="name" value="${name}"/>
- </building>
+ <hws_fed_unknown>
+ <dc:call-macro name="hwspoints_by_kind"/>
+ </hws_fed_unknown>
+ </dc:context>
+ </inofficial>
+ </hws_points>
+ </dc:macro>
+
+ <dc:macro name="flood-map-buildings">
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ b.kind_id AS building_kind_id,
+ bk.name AS building_kind
+ FROM buildings b
+ JOIN building_kinds bk ON b.kind_id = bk.id
+ WHERE b.river_id = ${river_id}
+ AND b.kind_id <> 0
+ </dc:statement>
+ <dc:for-each>
+ <buildings description="{$building_kind}"
+ factory="wmsbuildingsfactory"
+ ids="{$river_id};{$building_kind};{$building_kind_id}"/>
+ </dc:for-each>
+ </dc:context>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ b.kind_id AS building_kind_id,
+ b.name AS building_name,
+ bk.name AS building_kind
+ FROM buildings b
+ JOIN building_kinds bk ON b.kind_id = bk.id
+ WHERE river_id = ${river_id}
+            AND (kind_id = 0 OR
+              kind_id IS NULL)
+ </dc:statement>
+ <dc:for-each>
+ <dc:element name="${building_kind}">
+ <dc:for-each>
+ <buildings description="{$building_name}"
+ factory="wmsbuildingsfactory"
+ ids="{$river_id};{$building_name}"/>
+ </dc:for-each>
+ </dc:element>
+ </dc:for-each>
+ </dc:context>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ j.kind_id AS jetty_kind_id,
+ jk.name AS jetty_kind
+ FROM jetties j
+ JOIN jetty_kinds jk ON j.kind_id = jk.id
+ WHERE river_id = ${river_id}
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <jetties>
+ <dc:for-each>
+ <jetty description="{$jetty_kind}"
+ factory="wmsjettiesfactory"
+ ids="{$river_id};{$jetty_kind};{$jetty_kind_id}"/>
+ </dc:for-each>
+ </jetties>
+ </dc:if>
+ </dc:context>
+ </dc:macro>
+
+ <dc:macro name="flood-map-fixpoints">
+ <dc:context>
+ <dc:statement>
+ SELECT name AS name
+ FROM fixpoints WHERE river_id = ${river_id} GROUP BY name
+ </dc:statement>
+ <dc:for-each>
+ <fixpoint factory="wmsfixpointsfactory"
+ ids="{$river_id};{$name}"
+ name="{$name}"/>
+ </dc:for-each>
+ </dc:context>
+ </dc:macro>
+
+ <dc:macro name="flood-map-gaugelocations">
+ <dc:attribute name="factory" value="externalwmsfactory"/>
+ <dc:attribute name="ids" value="http://www.pegelonline.wsv.de/webservices/gis/wms;Pegelpunkte;Pegelonline-Pegelpunkte"/>
+ </dc:macro>
+
+ <dc:macro name="flood-map-uesk">
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT 1 from floodmaps where river_id = ${river_id}
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <uesk>
+ <calculations>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT 1 from floodmaps where river_id = ${river_id}
+ AND (kind = 112 OR kind = 111)
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <current>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT name AS name
+ FROM floodmaps
+ WHERE river_id = ${river_id} AND kind = 111
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <bfg>
+ <dc:for-each>
+ <floodmaps factory="wmsfloodmapsfactory"
+ ids="{$river_id};{$name}"
+ name="{$name}"/>
+ </dc:for-each>
+ </bfg>
+ </dc:if>
+ </dc:context>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT name AS name
+ FROM floodmaps
+ WHERE river_id = ${river_id} AND kind = 112
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <federal>
+ <dc:for-each>
+ <floodmaps factory="wmsfloodmapsfactory"
+ ids="{$river_id};{$name}"
+ name="{$name}"/>
+ </dc:for-each>
+ </federal>
+ </dc:if>
+ </dc:context>
+ </current>
</dc:if>
- </dc:elements>
- </dc:context>
- </dc:macro>
- <dc:macro name="flood-map-fixpoints">
+ </dc:context>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT 1 from floodmaps where river_id = ${river_id}
+ AND (kind = 122 OR kind = 121)
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <potential>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT name AS name
+ FROM floodmaps
+ WHERE river_id = ${river_id} AND kind = 121
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <bfg>
+ <dc:for-each>
+ <floodmaps factory="wmsfloodmapsfactory"
+ ids="{$river_id};{$name}"
+ name="{$name}"/>
+ </dc:for-each>
+ </bfg>
+ </dc:if>
+ </dc:context>
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ name AS name
+ FROM floodmaps
+ WHERE river_id = ${river_id} AND kind = 122
+ </dc:statement>
+ <dc:if test="dc:has-result()">
+ <federal>
+ <dc:for-each>
+ <floodmaps factory="wmsfloodmapsfactory"
+ ids="{$river_id};{$name}"
+ name="{$name}"/>
+ </dc:for-each>
+ </federal>
+ </dc:if>
+ </dc:context>
+ </potential>
+ </dc:if>
+ </dc:context>
+ </calculations>
<dc:context>
<dc:statement>
- SELECT count(*) as km_exists, name as name
- FROM fixpoints WHERE river_id = ${river_id} GROUP BY name
+ SELECT DISTINCT
+ source AS source
+ FROM floodmaps
+ WHERE river_id = ${river_id} AND kind = 200
</dc:statement>
- <dc:elements>
- <dc:if test="$km_exists>0">
- <fixpoint>
- <dc:attribute name="factory" value="wmsfixpointsfactory"/>
- <dc:attribute name="ids" value="${river_id};${name}"/>
- <dc:attribute name="name" value="${name}"/>
- </fixpoint>
- </dc:if>
- </dc:elements>
+ <dc:if test="dc:has-result()">
+ <measurements>
+ <dc:for-each>
+ <year name="{$source}">
+ <dc:context>
+ <dc:statement>
+ SELECT DISTINCT
+ name AS name
+ FROM floodmaps
+                    WHERE river_id = ${river_id} AND kind = 200 AND source = ${source}
+ </dc:statement>
+ <dc:for-each>
+ <floodmaps factory="wmsfloodmapsfactory"
+ ids="{$river_id};{$name}"
+ name="{$name}"/>
+ </dc:for-each>
+ </dc:context>
+ </year>
+ </dc:for-each>
+ </measurements>
+ </dc:if>
</dc:context>
- </dc:macro>
- <dc:macro name="flood-map-gaugelocations">
- <dc:context>
- <dc:statement>
- SELECT count(*) as km_exists, name as name
- FROM gauge_location WHERE river_id = ${river_id} GROUP BY name
- </dc:statement>
- <dc:elements>
- <dc:if test="$km_exists>0">
- <gaugelocation>
- <dc:attribute name="factory" value="wmsgaugelocationfactory"/>
- <dc:attribute name="ids" value="${river_id};${name}"/>
- <dc:attribute name="name" value="${name}"/>
- </gaugelocation>
- </dc:if>
- </dc:elements>
- </dc:context>
- </dc:macro>
- <dc:macro name="flood-map-uesk">
- <uesk>
- <calculations>
- <current>
- <bfg>
- <dc:context>
- <dc:statement>
- SELECT count(*) as uesg_exist, name as name
- FROM floodmaps
- WHERE river_id = ${river_id} AND kind = 111
- GROUP BY name, kind
- </dc:statement>
- <dc:elements>
- <dc:if test="$uesg_exist>0">
- <floodmaps>
- <dc:attribute name="factory" value="wmsfloodmapsfactory"/>
- <dc:attribute name="ids" value="${river_id};${name}"/>
- <dc:attribute name="name" value="${name}"/>
- </floodmaps>
- </dc:if>
- </dc:elements>
- </dc:context>
- </bfg>
- <land>
- <dc:context>
- <dc:statement>
- SELECT count(*) as uesg_exist, name as name
- FROM floodmaps
- WHERE river_id = ${river_id} AND kind = 112
- GROUP BY name, kind
- </dc:statement>
- <dc:elements>
- <dc:if test="$uesg_exist>0">
- <floodmaps>
- <dc:attribute name="factory" value="wmsfloodmapsfactory"/>
- <dc:attribute name="ids" value="${river_id};${name}"/>
- <dc:attribute name="name" value="${name}"/>
- </floodmaps>
- </dc:if>
- </dc:elements>
- </dc:context>
- </land>
- </current>
- <potentiel>
- <bfg>
- <dc:context>
- <dc:statement>
- SELECT count(*) as uesg_exist, name as name
- FROM floodmaps
- WHERE river_id = ${river_id} AND kind = 121
- GROUP BY name, kind
- </dc:statement>
- <dc:elements>
- <dc:if test="$uesg_exist>0">
- <floodmaps>
- <dc:attribute name="factory" value="wmsfloodmapsfactory"/>
- <dc:attribute name="ids" value="${river_id};${name}"/>
- <dc:attribute name="name" value="${name}"/>
- </floodmaps>
- </dc:if>
- </dc:elements>
- </dc:context>
- </bfg>
- <land>
- <dc:context>
- <dc:statement>
- SELECT count(*) as uesg_exist, name as name
- FROM floodmaps
- WHERE river_id = ${river_id} AND kind = 122
- GROUP BY name, kind
- </dc:statement>
- <dc:elements>
- <dc:if test="$uesg_exist>0">
- <floodmaps>
- <dc:attribute name="factory" value="wmsfloodmapsfactory"/>
- <dc:attribute name="ids" value="${river_id};${name}"/>
- <dc:attribute name="name" value="${name}"/>
- </floodmaps>
- </dc:if>
- </dc:elements>
- </dc:context>
- </land>
- </potentiel>
- </calculations>
- </uesk>
- </dc:macro>
- <dc:macro name="flood-map-complete">
- <buildings>
- <dc:call-macro name="flood-map-buildings"/>
- </buildings>
- <catchments>
- <dc:call-macro name="flood-map-catchments"/>
- </catchments>
- <fixpoints>
- <dc:call-macro name="flood-map-fixpoints"/>
- </fixpoints>
- <hydrboundaries>
- <dc:call-macro name="flood-map-hydr-boundaries"/>
- <dc:call-macro name="flood-map-floodplain"/>
- </hydrboundaries>
- <kilometrage>
- <riveraxis>
- <dc:attribute name="factory" value="riveraxis"/>
- <dc:attribute name="ids" value="${river_id}"/>
- </riveraxis>
- <dc:call-macro name="flood-map-km"/>
- <dc:call-macro name="flood-map-qps"/>
- </kilometrage>
- <hws>
- <dc:call-macro name="hwslines"/>
- <dc:call-macro name="hwspoints"/>
- </hws>
- <dc:call-macro name="flood-map-uesk"/>
- <gaugelocations>
- <dc:call-macro name="flood-map-gaugelocations"/>
- </gaugelocations>
- <rastermap>
- <background>
- <dc:attribute name="factory" value="wmsbackground"/>
- <dc:attribute name="ids" value="${river_id}"/>
- </background>
- </rastermap>
- </dc:macro>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'minfo-heights')">
- <dc:call-macro name="minfo-heights"/>
- <dc:macro name="minfo-heights">
- <bedheights>
- <dc:call-macro name="bed-heights-single"/>
- <dc:call-macro name="bed-heights-epoch"/>
- </bedheights>
- </dc:macro>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'minfo-heights-epoch')">
+ </uesk>
+ </dc:if>
+ </dc:context>
+ </dc:macro>
+
+ <dc:macro name="flood-map-catchments">
+ <dc:attribute name="factory" value="externalwmsfactory"/>
+ <dc:attribute name="ids" value="http://geoportal.bafg.de/wmsproxy/INSPIRE/DrainageBasin;HY.PHYSICALWATERS.CATCHMENTS;Einzugsgebiet"/>
+ </dc:macro>
+
+ <dc:macro name="flood-map-routing">
+ <qps>
+ <dc:call-macro name="flood-map-qps"/>
+ </qps>
+ <dc:call-macro name="flood-map-fixpoints"/>
+ <dc:call-macro name="flood-map-km"/>
+ <axis>
+ <dc:call-macro name="flood-map-riveraxis"/>
+ </axis>
+ </dc:macro>
+
+ <dc:macro name="flood-map-complete">
+ <buildings>
+ <dc:call-macro name="flood-map-buildings"/>
+ </buildings>
+ <catchments>
+ <dc:call-macro name="flood-map-catchments"/>
+ </catchments>
+      <dc:comment><!-- TODO: HW-Marken (flood water marks) --></dc:comment>
+ <hws>
+ <dc:call-macro name="hwslines"/>
+ <dc:call-macro name="hwspoints"/>
+ </hws>
+ <route_data>
+ <dc:call-macro name="flood-map-routing"/>
+ </route_data>
+ <hydrboundaries>
+ <dc:call-macro name="flood-map-floodplain"/>
+ <dc:call-macro name="flood-map-hydr-boundaries"/>
+ </hydrboundaries>
+ <dc:call-macro name="flood-map-uesk"/>
+ <gaugelocations>
+ <dc:call-macro name="flood-map-gaugelocations"/>
+ </gaugelocations>
+ <background factory="wmsbackground" ids="{$river_id}"/>
+ </dc:macro>
+
+ </dc:if>
+
+ <dc:if test="dc:contains($artifact-outs, 'minfo-heights')">
+ <dc:call-macro name="minfo-heights"/>
+ <dc:macro name="minfo-heights">
<bedheights>
+ <dc:call-macro name="bed-heights-single"/>
<dc:call-macro name="bed-heights-epoch"/>
</bedheights>
- </dc:if>
- <dc:macro name="bed-heights-single">
- <single>
- <dc:context>
- <dc:statement>
- SELECT id AS bedh_id,
- year AS bedh_year,
- description AS bedh_descr
- FROM bed_height_single WHERE river_id = ${river_id}
- </dc:statement>
- <dc:elements>
- <height>
- <dc:attribute name="factory" value="bedheight"/>
- <dc:attribute name="ids" value="bedheight-single-${bedh_id}-${bedh_year}"/>
- <dc:attribute name="description" value="${bedh_descr}"/>
- </height>
- </dc:elements>
- </dc:context>
- </single>
</dc:macro>
- <dc:macro name="bed-heights-epoch">
- <epoch>
- <dc:context>
- <dc:statement>
- SELECT id AS bedh_id,
- time_interval_id AS bedh_interval_id,
- description AS bedh_descr
- FROM bed_height_epoch WHERE river_id = ${river_id}
- </dc:statement>
- <dc:elements>
- <height>
- <dc:attribute name="factory" value="bedheight"/>
- <dc:attribute name="ids" value="bedheight-epoch-${bedh_id}-${bedh_interval_id}"/>
- <dc:attribute name="description" value="${bedh_descr}"/>
- </height>
- </dc:elements>
- </dc:context>
- </epoch>
- </dc:macro>
- </river>
- </dc:elements>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'minfo-heights-epoch')">
+ <bedheights>
+ <dc:call-macro name="bed-heights-epoch"/>
+ </bedheights>
+ </dc:if>
+
+ <dc:macro name="bed-heights-single">
+ <single>
+ <dc:context>
+ <dc:statement>
+ SELECT id AS bedh_id,
+ year AS bedh_year,
+ description AS bedh_descr
+ FROM bed_height_single WHERE river_id = ${river_id}
+ </dc:statement>
+ <dc:for-each>
+ <height factory="bedheight"
+ ids="bedheight-single-{$bedh_id}-{$bedh_year}"
+ description="{$bedh_descr}"/>
+ </dc:for-each>
+ </dc:context>
+ </single>
+ </dc:macro>
+
+ <dc:macro name="bed-heights-epoch">
+ <epoch>
+ <dc:context>
+ <dc:statement>
+ SELECT id AS bedh_id,
+ time_interval_id AS bedh_interval_id,
+ description AS bedh_descr
+ FROM bed_height_epoch WHERE river_id = ${river_id}
+ </dc:statement>
+ <dc:for-each>
+ <height factory="bedheight"
+ ids="bedheight-epoch-{$bedh_id}-{$bedh_interval_id}"
+ description="{$bedh_descr}"/>
+ </dc:for-each>
+ </dc:context>
+ </epoch>
+ </dc:macro>
+
+ </dc:for-each>
</dc:context>
</dc:macro>
@@ -1760,718 +1809,741 @@
</dc:comment>
<dc:when test="dc:contains($parameters, 'user-id')">
+ <old_calculations>
- <old_calculations>
- <!-- <dc:macro name="load-user">-->
+ <dc:comment><!-- <dc:macro name="load-user">--></dc:comment>
<dc:call-macro name="user-range">
- <dc:context connection="user">
- <dc:comment>
- Get the user and collection-id.
- </dc:comment>
- <dc:statement>
- SELECT u.id AS user_id, c.id AS collection_id, c.name as collection_name
- FROM collections c JOIN users u ON c.user_id = u.id
- WHERE u.gid = CAST(${user-id} AS uuid)
- ORDER BY c.creation DESC
- </dc:statement>
+ <dc:context connection="user">
+ <dc:comment> Get the user and collection-id. </dc:comment>
+ <dc:statement>
+ SELECT u.id AS user_id, c.id AS collection_id, c.name AS collection_name
+ FROM collections c JOIN users u ON c.user_id = u.id
+ WHERE u.gid = CAST(${user-id} AS uuid)
+ ORDER BY c.creation DESC
+ </dc:statement>
-
- <dc:macro name="range-filter">
- <dc:statement>
- SELECT m.id AS a_id,
- m.state AS a_state,
- m.gid AS a_gid,
- m.creation AS a_creation,
- COALESCE(ld_mode, '') AS ld_m,
- COALESCE(ld_locations, '') AS ld_l,
- COALESCE(ld_from, '') AS ld_f,
- COALESCE(ld_to, '') AS ld_t
- FROM master_artifacts_range m
- WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid)
- AND EXISTS (
- SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river})
- </dc:statement>
- <dc:elements>
- <dc:variable name="from" type="number" expr="dc:fromValue($ld_m, $ld_l, $ld_f)"/>
- <dc:variable name="to" type="number" expr="dc:toValue($ld_m, $ld_l, $ld_t)"/>
- <dc:if test="($from >= $fromkm and $from <= $tokm) or ($to <= $tokm and $to >= $fromkm) or ($from <= $fromkm and $to >= $tokm)">
- <dc:macro-body/>
- </dc:if>
- </dc:elements>
- </dc:macro>
-
- <!-- OFFICIAL LINES -->
- <dc:if test="dc:contains($artifact-outs, 'longitudinal_section')">
- <dc:comment comment=".wst -------------------------------"/>
- <officiallines>
- <dc:elements>
- <dc:context>
- <dc:statement>
- SELECT m.id AS a_id,
- m.state AS a_state,
- m.gid AS a_gid,
- m.creation AS a_creation,
- ardg.v AS gaugy,
- arv.v AS wqsingle
- FROM master_artifacts m,
- artifact_data ardg,
- artifact_data arv
- WHERE m.collection_id = ${collection_id}
- AND m.gid = CAST(${artifact-id} AS uuid)
- AND ardg.artifact_id = m.id
- AND ardg.k = 'ld_gaugename'
- AND arv.artifact_id = m.id
- AND arv.k = 'wq_single'
- AND EXISTS (
- SELECT id
- FROM artifact_data ad
- WHERE ad.artifact_id = m.id
- AND k = 'river'
- AND v = ${river})
- </dc:statement>
- <dc:elements>
+ <dc:macro name="range-filter">
+ <dc:statement>
+ SELECT m.id AS a_id,
+ m.state AS a_state,
+ m.gid AS a_gid,
+ m.creation AS a_creation,
+ COALESCE(ld_mode, '') AS ld_m,
+ COALESCE(ld_locations, '') AS ld_l,
+ COALESCE(ld_from, '') AS ld_f,
+ COALESCE(ld_to, '') AS ld_t
+ FROM master_artifacts_range m
+ WHERE m.collection_id = ${collection_id} AND m.gid <> CAST(${artifact-id} AS uuid)
+ AND EXISTS (
+ SELECT id FROM artifact_data ad WHERE ad.artifact_id = m.id AND k = 'river' AND v = ${river})
+ </dc:statement>
+ <dc:for-each>
+ <dc:variable name="from" type="number" expr="dc:fromValue($ld_m, $ld_l, $ld_f)"/>
+ <dc:variable name="to" type="number" expr="dc:toValue($ld_m, $ld_l, $ld_t)"/>
+ <dc:if test="($from >= $fromkm and $from <= $tokm) or ($to <= $tokm and $to >= $fromkm) or ($from <= $fromkm and $to >= $tokm)">
+ <dc:macro-body/>
+ </dc:if>
+ </dc:for-each>
+ </dc:macro>
+
+ <!-- OFFICIAL LINES -->
+ <dc:if test="dc:contains($artifact-outs, 'longitudinal_section')">
+ <dc:comment comment=".wst -------------------------------"/>
+ <officiallines>
+ <dc:for-each>
+ <dc:context>
+ <dc:statement>
+ SELECT m.id AS a_id,
+ m.state AS a_state,
+ m.gid AS a_gid,
+ m.creation AS a_creation,
+ ardg.v AS gaugy,
+ arv.v AS wqsingle
+ FROM master_artifacts m,
+ artifact_data ardg,
+ artifact_data arv
+ WHERE m.collection_id = ${collection_id}
+ AND m.gid = CAST(${artifact-id} AS uuid)
+ AND ardg.artifact_id = m.id
+ AND ardg.k = 'ld_gaugename'
+ AND arv.artifact_id = m.id
+ AND arv.k = 'wq_single'
+ AND EXISTS (
+ SELECT id
+ FROM artifact_data ad
+ WHERE ad.artifact_id = m.id
+ AND k = 'river'
+ AND v = ${river})
+ </dc:statement>
+ <dc:for-each>
<dc:context connection="system">
<dc:statement>
SELECT ol.wst_id AS wstid, ol.wst_column_pos AS wstcolpos, ol.name AS olname, ol.value AS oval
FROM official_q_values ol
WHERE ol.value = CAST(${wqsingle} AS NUMERIC(10,2)) AND ol.gauge_name = ${gaugy}
</dc:statement>
- <dc:elements>
+ <dc:for-each>
<dc:element name="${olname}">
<dc:attribute name="name" value="${olname}"/>
<dc:attribute name="ids" value="additionals-wstv-${wstcolpos}-${wstid}"/>
<dc:attribute name="factory" value="staticwkms"/>
</dc:element>
- </dc:elements>
+ </dc:for-each>
</dc:context>
- </dc:elements>
- </dc:context>
- </dc:elements>
- </officiallines>
- </dc:if>
- <!-- END OFFICIAL LINES -->
+ </dc:for-each>
+ </dc:context>
+ </dc:for-each>
+ </officiallines>
+ </dc:if>
+ <!-- END OFFICIAL LINES -->
- <dc:comment>
- SHOW W-DIFFERENCES
- </dc:comment>
+ <dc:comment>
+ SHOW W-DIFFERENCES
+ </dc:comment>
- <dc:macro name="differences">
- <differences>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
+ <dc:macro name="differences">
+ <differences>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="range-filter">
<dc:context>
<dc:statement>
- SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description
- FROM outs as o, facets as f, artifacts as a
+ SELECT a.gid AS aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description AS facet_description
+ FROM outs AS o, facets AS f, artifacts AS a
WHERE f.name = 'w_differences' and f.out_id = o.id and o.artifact_id = ${a_id} and a.id = ${a_id}
</dc:statement>
- <dc:elements>
+ <dc:for-each>
<dc:element name="${facet_name}">
<dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="factory" value="winfo"/>
+ <dc:attribute name="factory" value="winfo"/>
<dc:attribute name="artifact-id" value="${aid}"/>
- <dc:attribute name="ids" value="${aid}"/>
- <dc:attribute name="out" value="w_differences"/>
+ <dc:attribute name="ids" value="${aid}"/>
+ <dc:attribute name="out" value="w_differences"/>
</dc:element>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</dc:call-macro>
- </dc:context>
- </dc:elements>
- </differences>
- </dc:macro>
+ </dc:context>
+ </dc:for-each>
+ </differences>
+ </dc:macro>
- <dc:comment>
- SHOW REFERENCE CURVE
- </dc:comment>
+ <dc:comment>
+ SHOW REFERENCE CURVE
+ </dc:comment>
-
- <dc:macro name="reference-curves">
- <reference_curves>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="user-range">
+ <dc:macro name="reference-curves">
+ <reference_curves>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="user-range">
<dc:context>
<dc:statement>
- SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description
- FROM outs as o, facets as f, artifacts as a
+ SELECT a.gid AS aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description AS facet_description
+ FROM outs AS o, facets AS f, artifacts AS a
WHERE f.name = 'reference_curve' and f.out_id = o.id and o.artifact_id = ${a_id} and a.id = ${a_id}
</dc:statement>
- <dc:elements>
+ <dc:for-each>
<dc:element name="${facet_name}">
<dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="factory" value="winfo"/>
+ <dc:attribute name="factory" value="winfo"/>
<dc:attribute name="artifact-id" value="${aid}"/>
- <dc:attribute name="ids" value="${aid}"/>
- <dc:attribute name="out" value="reference_curve"/>
+ <dc:attribute name="ids" value="${aid}"/>
+ <dc:attribute name="out" value="reference_curve"/>
</dc:element>
- </dc:elements>
+ </dc:for-each>
+ </dc:context>
+ </dc:call-macro>
+ </dc:context>
+ </dc:for-each>
+ </reference_curves>
+ </dc:macro>
+
+ <dc:comment>
+ SHOW COMPUTED DISCHARGE CURVES
+ </dc:comment>
+
+ <dc:macro name="computed-discharge-curve">
+ <computed_discharge_curves>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="range-filter">
+ <dc:context>
+ <dc:statement>
+ SELECT a.gid AS aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description AS facet_description
+ FROM outs AS o, facets AS f, artifacts AS a
+ WHERE f.name = 'computed_discharge_curve.q' and f.out_id = o.id and o.artifact_id = ${a_id} and a.id = ${a_id}
+ </dc:statement>
+ <dc:for-each>
+ <dc:element name="${facet_name}">
+ <dc:attribute name="description" value="${facet_description}"/>
+ <dc:attribute name="factory" value="winfo"/>
+ <dc:attribute name="artifact-id" value="${aid}"/>
+ <dc:attribute name="ids" value="${aid}"/>
+ <dc:attribute name="out" value="computed_discharge_curve"/>
+ </dc:element>
+ </dc:for-each>
</dc:context>
</dc:call-macro>
- </dc:context>
- </dc:elements>
- </reference_curves>
- </dc:macro>
+ </dc:context>
+ </dc:for-each>
+ </computed_discharge_curves>
+ </dc:macro>
- <dc:comment>
- SHOW COMPUTED DISCHARGE CURVES
- </dc:comment>
+ <dc:comment>
+ CROSS SECTION
+ </dc:comment>
- <dc:macro name="computed-discharge-curve">
- <computed_discharge_curves>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
+ <dc:macro name="waterlevels">
+ <waterlevels>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="range-filter">
<dc:context>
<dc:statement>
- SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description
- FROM outs as o, facets as f, artifacts as a
- WHERE f.name = 'computed_discharge_curve.q' and f.out_id = o.id and o.artifact_id = ${a_id} and a.id = ${a_id}
+ SELECT id AS out_id
+ FROM outs
+ WHERE artifact_id = ${a_id} AND name = 'cross_section'
</dc:statement>
- <dc:elements>
+ <dc:for-each>
+ <dc:context>
+ <dc:statement>
+ SELECT name AS facet_name, num AS facet_num, description AS facet_description
+ FROM facets
+ WHERE out_id = ${out_id}
+ ORDER BY num ASC, name DESC
+ </dc:statement>
+ <longitudinal_section_columns description="{$river} {$a_creation}">
+ <dc:for-each>
+ <dc:element name="${facet_name}">
+ <dc:attribute name="description" value="${facet_description}"/>
+ <dc:attribute name="ids" value="${facet_num}"/>
+ <dc:attribute name="factory" value="winfo"/>
+ <dc:attribute name="artifact-id" value="${a_gid}"/>
+ <dc:attribute name="out" value="cross_section"/>
+ </dc:element>
+ </dc:for-each>
+ </longitudinal_section_columns>
+ </dc:context>
+ </dc:for-each>
+ </dc:context>
+ </dc:call-macro>
+ </dc:context>
+ </dc:for-each>
+ </waterlevels>
+ </dc:macro>
+
+ <dc:macro name="longitudinal">
+ <waterlevels>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="range-filter">
+ <dc:context>
+ <dc:statement>
+ SELECT id AS out_id
+ FROM outs
+ WHERE artifact_id = ${a_id} AND name = 'longitudinal_section'
+ </dc:statement>
+ <dc:for-each>
+ <dc:context>
+ <dc:statement>
+ SELECT name AS facet_name, num AS facet_num, description AS facet_description
+ FROM facets
+ WHERE out_id = ${out_id}
+ ORDER BY num ASC, name DESC
+ </dc:statement>
+ <longitudinal_section_columns description="{$river} {$a_creation}">
+ <dc:for-each>
+ <dc:element name="${facet_name}">
+ <dc:attribute name="description" value="${facet_description}"/>
+ <dc:attribute name="ids" value="${facet_num}"/>
+ <dc:attribute name="factory" value="winfo"/>
+ <dc:attribute name="artifact-id" value="${a_gid}"/>
+ <dc:attribute name="out" value="longitudinal_section"/>
+ </dc:element>
+ </dc:for-each>
+ </longitudinal_section_columns>
+ </dc:context>
+ </dc:for-each>
+ </dc:context>
+ </dc:call-macro>
+ </dc:context>
+ </dc:for-each>
+ </waterlevels>
+ </dc:macro>
+
+ <dc:macro name="longitudinal-section">
+ <waterlevels>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="range-filter">
+ <dc:context>
+ <dc:statement>
+ SELECT id AS out_id
+ FROM outs
+ WHERE artifact_id = ${a_id} AND name = 'fix_longitudinal_section_curve'
+ </dc:statement>
+ <dc:for-each>
+ <dc:context>
+                  <dc:comment><!-- average and deviation: ls_0, ls_1, ... --></dc:comment>
+ <dc:statement>
+ SELECT name AS facet_name, num AS facet_num, description AS facet_description
+ FROM facets
+ WHERE out_id = ${out_id} AND (
+ name LIKE 'fix_deviation_ls%' OR
+ name LIKE 'fix_sector_average_ls%' OR
+ name LIKE 'fix_analysis_events_ls%' OR
+ name LIKE 'fix_reference_events_ls%' )
+ ORDER BY num ASC, name DESC
+ </dc:statement>
+ <waterlevels description="{$river} {$a_creation} {$collection_name}">
+ <dc:for-each>
+ <dc:element name="${facet_name}">
+ <dc:attribute name="description" value="${facet_description}"/>
+ <dc:attribute name="ids" value="${facet_num}"/>
+ <dc:attribute name="factory" value="fixanalysis"/>
+ <dc:attribute name="artifact-id" value="${a_gid}"/>
+ <dc:attribute name="out" value="fix_longitudinal_section_curve"/>
+ </dc:element>
+ </dc:for-each>
+ </waterlevels>
+ </dc:context>
+ </dc:for-each>
+ </dc:context>
+ </dc:call-macro>
+ </dc:context>
+ </dc:for-each>
+ </waterlevels>
+ </dc:macro>
+
+ <dc:macro name="delta-wt">
+ <waterlevels>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="range-filter">
+ <dc:context>
+ <dc:statement>
+ SELECT id AS out_id
+ FROM outs
+ WHERE artifact_id = ${a_id} AND name = 'fix_deltawt_curve'
+ </dc:statement>
+ <dc:for-each>
+ <dc:context>
+ <dc:statement>
+ SELECT name AS facet_name, num AS facet_num, description AS facet_description
+ FROM facets
+ WHERE out_id = ${out_id} and (
+ name LIKE 'fix_sector_average_dwt%' OR
+ name LIKE 'fix_deviation_dwt%' OR
+ name = 'fix_analysis_events_dwt' OR
+ name = 'fix_reference_events_dwt' OR
+ name = 'fix_analysis_periods_dwt' )
+ ORDER BY num ASC, name DESC
+ </dc:statement>
+ <waterlevels description="{$river} {$a_creation} {$collection_name}">
+ <dc:for-each>
+ <dc:element name="${facet_name}">
+ <dc:attribute name="description" value="${facet_description}"/>
+ <dc:attribute name="ids" value="${facet_num}"/>
+ <dc:attribute name="factory" value="fixanalysis"/>
+ <dc:attribute name="artifact-id" value="${a_gid}"/>
+ <dc:attribute name="out" value="fix_deltawt_curve"/>
+ </dc:element>
+ </dc:for-each>
+ </waterlevels>
+ </dc:context>
+ </dc:for-each>
+ </dc:context>
+ </dc:call-macro>
+ </dc:context>
+ </dc:for-each>
+ </waterlevels>
+ </dc:macro>
+
+ <dc:macro name="fix-derivate-curve">
+ <waterlevels>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="range-filter">
+ <dc:context>
+ <dc:statement>
+ SELECT id AS out_id
+ FROM outs
+ WHERE artifact_id = ${a_id} AND name = 'fix_derivate_curve'
+ </dc:statement>
+ <dc:for-each>
+ <dc:context>
+ <dc:statement>
+ SELECT name AS facet_name, num AS facet_num, description AS facet_description
+ FROM facets
+ WHERE out_id = ${out_id} and name = 'fix_derivate_curve'
+ ORDER BY num ASC, name DESC
+ </dc:statement>
+ <waterlevels description="{$river} {$a_creation} {$collection_name}">
+ <dc:for-each>
+ <dc:element name="${facet_name}">
+ <dc:attribute name="description" value="${facet_description}"/>
+ <dc:attribute name="ids" value="${facet_num}"/>
+ <dc:attribute name="factory" value="fixanalysis"/>
+ <dc:attribute name="artifact-id" value="${a_gid}"/>
+ <dc:attribute name="out" value="fix_derivate_curve"/>
+ </dc:element>
+ </dc:for-each>
+ </waterlevels>
+ </dc:context>
+ </dc:for-each>
+ </dc:context>
+ </dc:call-macro>
+ </dc:context>
+ </dc:for-each>
+ </waterlevels>
+ </dc:macro>
+
+ <dc:macro name="fix-wq-curve">
+ <waterlevels>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="range-filter">
+ <dc:context>
+ <dc:statement>
+ SELECT id AS out_id
+ FROM outs
+ WHERE artifact_id = ${a_id} AND name = 'fix_wq_curve'
+ </dc:statement>
+ <dc:for-each>
+ <dc:context>
+ <dc:statement>
+ SELECT name AS facet_name, num AS facet_num, description AS facet_description
+ FROM facets
+ WHERE out_id = ${out_id} and (
+ name LIKE 'fix_sector_average_wq%' OR
+ name = 'fix_wq_curve' OR
+ name LIKE 'fix_analysis_events_wq%' OR
+ name LIKE 'fix_reference_events_wq%' )
+ ORDER BY num ASC, name DESC
+ </dc:statement>
+ <waterlevels description="{$river} {$a_creation} {$collection_name}">
+ <dc:for-each>
+ <dc:element name="${facet_name}">
+ <dc:attribute name="description" value="${facet_description}"/>
+ <dc:attribute name="ids" value="${facet_num}"/>
+ <dc:attribute name="factory" value="fixanalysis"/>
+ <dc:attribute name="artifact-id" value="${a_gid}"/>
+ <dc:attribute name="out" value="fix_wq_curve"/>
+ </dc:element>
+ </dc:for-each>
+ </waterlevels>
+ </dc:context>
+ </dc:for-each>
+ </dc:context>
+ </dc:call-macro>
+ </dc:context>
+ </dc:for-each>
+ </waterlevels>
+ </dc:macro>
+
+ <dc:macro name="duration-curve">
+ <computed_discharge_curves>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="range-filter">
+ <dc:context>
+ <dc:statement>
+ SELECT a.gid AS aid,
+ f.id AS fid,
+ f.name AS facet_name,
+ f.num AS facet_num,
+ f.description AS facet_description
+ FROM outs AS o, facets AS f, artifacts AS a
+ WHERE
+ (f.name = 'duration_curve.q' OR f.name = 'duration_curve.w') AND
+ f.out_id = o.id AND
+ o.artifact_id = ${a_id} AND
+ a.id = ${a_id}
+ </dc:statement>
+ <dc:for-each>
<dc:element name="${facet_name}">
<dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="factory" value="winfo"/>
+ <dc:attribute name="factory" value="winfo"/>
<dc:attribute name="artifact-id" value="${aid}"/>
- <dc:attribute name="ids" value="${aid}"/>
- <dc:attribute name="out" value="computed_discharge_curve"/>
+ <dc:attribute name="ids" value="${aid}"/>
+ <dc:attribute name="out" value="duration_curve"/>
</dc:element>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</dc:call-macro>
- </dc:context>
- </dc:elements>
- </computed_discharge_curves>
- </dc:macro>
+ </dc:context>
+ </dc:for-each>
+ </computed_discharge_curves>
+ </dc:macro>
- <dc:comment>
- CROSS SECTION
- </dc:comment>
+ <dc:comment>
+ WATERLEVELS - ONLY SHOW Ws
+ </dc:comment>
+      <dc:comment><!-- TODO doesn't work nicely for fix/wq-diags. --></dc:comment>
- <dc:macro name="waterlevels">
- <waterlevels>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
- <dc:context>
- <dc:statement>
- SELECT id AS out_id
- FROM outs
- WHERE artifact_id = ${a_id} AND name = 'cross_section'
- </dc:statement>
- <dc:elements>
- <dc:context>
- <dc:statement>
- SELECT name AS facet_name, num as facet_num, description AS facet_description
- FROM facets
- WHERE out_id = ${out_id}
- ORDER BY num ASC, name DESC
- </dc:statement>
- <longitudinal_section_columns>
- <dc:attribute name="description" value="${river} ${a_creation}"/>
- <dc:elements>
- <dc:element name="${facet_name}">
- <dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="ids" value="${facet_num}"/>
- <dc:attribute name="factory" value="winfo"/>
- <dc:attribute name="artifact-id" value="${a_gid}"/>
- <dc:attribute name="out" value="cross_section"/>
- </dc:element>
- </dc:elements>
- </longitudinal_section_columns>
- </dc:context>
- </dc:elements>
- </dc:context>
- </dc:call-macro>
- </dc:context>
- </dc:elements>
- </waterlevels>
- </dc:macro>
-
-
- <dc:macro name="longitudinal">
- <waterlevels>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
+ <dc:macro name="waterlevels-fix">
+ <waterlevels>
+ <dc:for-each>
<dc:context>
- <dc:statement>
- SELECT id AS out_id
- FROM outs
- WHERE artifact_id = ${a_id} AND name = 'longitudinal_section'
- </dc:statement>
- <dc:elements>
- <dc:context>
- <dc:statement>
- SELECT name AS facet_name, num as facet_num, description AS facet_description
- FROM facets
- WHERE out_id = ${out_id}
- ORDER BY num ASC, name DESC
- </dc:statement>
- <longitudinal_section_columns>
- <dc:attribute name="description" value="${river} ${a_creation}"/>
- <dc:elements>
- <dc:element name="${facet_name}">
- <dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="ids" value="${facet_num}"/>
- <dc:attribute name="factory" value="winfo"/>
- <dc:attribute name="artifact-id" value="${a_gid}"/>
- <dc:attribute name="out" value="longitudinal_section"/>
- </dc:element>
- </dc:elements>
- </longitudinal_section_columns>
- </dc:context>
- </dc:elements>
- </dc:context>
- </dc:call-macro>
- </dc:context>
- </dc:elements>
- </waterlevels>
- </dc:macro>
-
-
- <dc:macro name="longitudinal-section">
- <waterlevels>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
- <dc:context>
- <dc:statement>
- SELECT id AS out_id
- FROM outs
- WHERE artifact_id = ${a_id} AND name = 'fix_longitudinal_section_curve'
- </dc:statement>
- <dc:elements>
- <dc:context>
- <!-- average und deviation ls_0 . ls_1 ...-->
- <dc:statement>
- SELECT name AS facet_name, num as facet_num, description AS facet_description
- FROM facets
- WHERE out_id = ${out_id} AND ( name LIKE 'fix_deviation_ls%' OR name LIKE 'fix_sector_average_ls%' OR name LIKE 'fix_analysis_events_ls%' OR name LIKE 'fix_reference_events_ls%' )
- ORDER BY num ASC, name DESC
- </dc:statement>
- <waterlevels>
- <dc:attribute name="description" value="${river} ${a_creation} ${collection_name}"/>
- <dc:elements>
- <dc:element name="${facet_name}">
- <dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="ids" value="${facet_num}"/>
- <dc:attribute name="factory" value="fixanalysis"/>
- <dc:attribute name="artifact-id" value="${a_gid}"/>
- <dc:attribute name="out" value="fix_longitudinal_section_curve"/>
- </dc:element>
- </dc:elements>
- </waterlevels>
- </dc:context>
- </dc:elements>
- </dc:context>
- </dc:call-macro>
- </dc:context>
- </dc:elements>
- </waterlevels>
- </dc:macro>
-
- <dc:macro name="delta-wt">
- <waterlevels>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
- <dc:context>
- <dc:statement>
- SELECT id AS out_id
- FROM outs
- WHERE artifact_id = ${a_id} AND name = 'fix_deltawt_curve'
- </dc:statement>
- <dc:elements>
- <dc:context>
- <dc:statement>
- SELECT name AS facet_name, num as facet_num, description AS facet_description
- FROM facets
- WHERE out_id = ${out_id} and ( name LIKE 'fix_sector_average_dwt%' OR name LIKE 'fix_deviation_dwt%' OR name = 'fix_analysis_events_dwt' OR name = 'fix_reference_events_dwt' OR name = 'fix_analysis_periods_dwt' )
- ORDER BY num ASC, name DESC
- </dc:statement>
- <waterlevels>
- <dc:attribute name="description" value="${river} ${a_creation} ${collection_name}"/>
- <dc:elements>
- <dc:element name="${facet_name}">
- <dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="ids" value="${facet_num}"/>
- <dc:attribute name="factory" value="fixanalysis"/>
- <dc:attribute name="artifact-id" value="${a_gid}"/>
- <dc:attribute name="out" value="fix_deltawt_curve"/>
- </dc:element>
- </dc:elements>
- </waterlevels>
- </dc:context>
- </dc:elements>
- </dc:context>
- </dc:call-macro>
- </dc:context>
- </dc:elements>
- </waterlevels>
- </dc:macro>
-
-
- <dc:macro name="fix-derivate-curve">
- <waterlevels>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
- <dc:context>
- <dc:statement>
- SELECT id AS out_id
- FROM outs
- WHERE artifact_id = ${a_id} AND name = 'fix_derivate_curve'
- </dc:statement>
- <dc:elements>
- <dc:context>
- <dc:statement>
- SELECT name AS facet_name, num as facet_num, description AS facet_description
- FROM facets
- WHERE out_id = ${out_id} and name = 'fix_derivate_curve'
- ORDER BY num ASC, name DESC
- </dc:statement>
- <waterlevels>
- <dc:attribute name="description" value="${river} ${a_creation} ${collection_name}"/>
- <dc:elements>
- <dc:element name="${facet_name}">
- <dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="ids" value="${facet_num}"/>
- <dc:attribute name="factory" value="fixanalysis"/>
- <dc:attribute name="artifact-id" value="${a_gid}"/>
- <dc:attribute name="out" value="fix_derivate_curve"/>
- </dc:element>
- </dc:elements>
- </waterlevels>
- </dc:context>
- </dc:elements>
- </dc:context>
- </dc:call-macro>
- </dc:context>
- </dc:elements>
- </waterlevels>
- </dc:macro>
-
-
- <dc:macro name="fix-wq-curve">
- <waterlevels>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
- <dc:context>
- <dc:statement>
- SELECT id AS out_id
- FROM outs
- WHERE artifact_id = ${a_id} AND name = 'fix_wq_curve'
- </dc:statement>
- <dc:elements>
- <dc:context>
- <dc:statement>
- SELECT name AS facet_name, num as facet_num, description AS facet_description
- FROM facets
- WHERE out_id = ${out_id} and ( name LIKE 'fix_sector_average_wq%' OR name = 'fix_wq_curve' OR name LIKE 'fix_analysis_events_wq%' OR name LIKE 'fix_reference_events_wq%' )
- ORDER BY num ASC, name DESC
- </dc:statement>
- <waterlevels>
- <dc:attribute name="description" value="${river} ${a_creation} ${collection_name}"/>
- <dc:elements>
- <dc:element name="${facet_name}">
- <dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="ids" value="${facet_num}"/>
- <dc:attribute name="factory" value="fixanalysis"/>
- <dc:attribute name="artifact-id" value="${a_gid}"/>
- <dc:attribute name="out" value="fix_wq_curve"/>
- </dc:element>
- </dc:elements>
- </waterlevels>
- </dc:context>
- </dc:elements>
- </dc:context>
- </dc:call-macro>
- </dc:context>
- </dc:elements>
- </waterlevels>
- </dc:macro>
-
-
- <dc:macro name="duration-curve">
- <computed_discharge_curves>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
+ <dc:call-macro name="range-filter">
<dc:context>
<dc:statement>
- SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description
- FROM outs as o, facets as f, artifacts as a
- WHERE (f.name = 'duration_curve.q' or f.name = 'duration_curve.w') and f.out_id = o.id and o.artifact_id = ${a_id} and a.id = ${a_id}
+ SELECT id AS out_id
+ FROM outs
+ WHERE artifact_id = ${a_id} AND name = 'longitudinal_section'
</dc:statement>
- <dc:elements>
+ <dc:for-each>
+ <dc:context>
+ <dc:statement>
+ SELECT name AS facet_name, num AS facet_num, description AS facet_description
+ FROM facets
+ WHERE out_id = ${out_id} and name = 'longitudinal_section.w'
+ ORDER BY num ASC, name DESC
+ </dc:statement>
+ <waterlevels description="{$river} {$a_creation} {$collection_name}">
+ <dc:for-each>
+ <dc:element name="${facet_name}">
+ <dc:attribute name="description" value="${facet_description}"/>
+ <dc:attribute name="ids" value="${facet_num}"/>
+ <dc:attribute name="factory" value="winfo"/>
+ <dc:attribute name="artifact-id" value="${a_gid}"/>
+ <dc:attribute name="out" value="longitudinal_section"/>
+ </dc:element>
+ </dc:for-each>
+ </waterlevels>
+ </dc:context>
+ </dc:for-each>
+ </dc:context>
+ </dc:call-macro>
+ </dc:context>
+ </dc:for-each>
+ </waterlevels>
+ </dc:macro>
+
+ <dc:comment>
+ SHOW FLOODMAPS
+ </dc:comment>
+
+ <dc:macro name="flood-map">
+ <floodmap>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="range-filter">
+ <dc:context>
+ <dc:statement>
+ SELECT a.gid AS aid,
+ f.id AS fid,
+ f.name AS facet_name,
+ f.num AS facet_num,
+ f.description AS facet_description
+ FROM outs AS o, facets AS f, artifacts AS a
+ WHERE f.name = 'floodmap.wsplgen' AND
+ f.out_id = o.id AND
+ o.artifact_id = ${a_id} AND
+ a.id = ${a_id}
+ </dc:statement>
+ <dc:for-each>
<dc:element name="${facet_name}">
<dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="factory" value="winfo"/>
+ <dc:attribute name="factory" value="winfo"/>
<dc:attribute name="artifact-id" value="${aid}"/>
- <dc:attribute name="ids" value="${aid}"/>
- <dc:attribute name="out" value="duration_curve"/>
+ <dc:attribute name="ids" value="${aid}"/>
+ <dc:attribute name="out" value="floodmap"/>
</dc:element>
- </dc:elements>
+ </dc:for-each>
</dc:context>
</dc:call-macro>
- </dc:context>
- </dc:elements>
- </computed_discharge_curves>
- </dc:macro>
+ </dc:context>
+ </dc:for-each>
+ </floodmap>
+ </dc:macro>
- <dc:comment>
- WATERLEVELS - ONLY SHOW Ws
- </dc:comment>
+ <dc:comment>
+ MINFO bedheight difference
+ </dc:comment>
- <!-- TODO doesnt work nicely for fix/wq-diags. -->
-
- <dc:macro name="waterlevels-fix">
- <waterlevels>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
- <dc:context>
- <dc:statement>
- SELECT id AS out_id
- FROM outs
- WHERE artifact_id = ${a_id} AND name = 'longitudinal_section'
- </dc:statement>
- <dc:elements>
- <dc:context>
- <dc:statement>
- SELECT name AS facet_name, num as facet_num, description AS facet_description
- FROM facets
- WHERE out_id = ${out_id} and name = 'longitudinal_section.w'
- ORDER BY num ASC, name DESC
- </dc:statement>
- <waterlevels>
- <dc:attribute name="description" value="${river} ${a_creation} ${collection_name}"/>
- <dc:elements>
- <dc:element name="${facet_name}">
- <dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="ids" value="${facet_num}"/>
- <dc:attribute name="factory" value="winfo"/>
- <dc:attribute name="artifact-id" value="${a_gid}"/>
- <dc:attribute name="out" value="longitudinal_section"/>
- </dc:element>
- </dc:elements>
- </waterlevels>
- </dc:context>
- </dc:elements>
- </dc:context>
- </dc:call-macro>
- </dc:context>
- </dc:elements>
- </waterlevels>
- </dc:macro>
-
- <dc:comment>
- SHOW FLOODMAPS
- </dc:comment>
-
-
- <dc:macro name="flood-map">
- <floodmap>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
+ <dc:macro name="bed-difference">
+ <fix_longitudinal_section_curve>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="range-filter">
<dc:context>
<dc:statement>
- SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description
- FROM outs as o, facets as f, artifacts as a
- WHERE f.name = 'floodmap.wsplgen' and f.out_id = o.id and o.artifact_id = ${a_id} and a.id = ${a_id}
+ SELECT a.gid AS aid,
+ f.id AS fid,
+ f.name AS facet_name,
+ f.num AS facet_num,
+ f.description AS facet_description
+ FROM outs AS o, facets AS f, artifacts AS a
+ WHERE (
+ f.name = 'fix_sector_average_ls_0' OR
+ f.name = 'fix_sector_average_ls_1' OR
+ f.name = 'fix_sector_average_ls_2' OR
+ f.name = 'fix_sector_average_ls_3' OR
+ f.name = 'fix_analysis_events_ls' OR
+ f.name = 'fix_reference_events_ls'
+ ) AND f.out_id = o.id AND o.artifact_id = ${a_id} AND a.id = ${a_id}
</dc:statement>
- <dc:elements>
- <dc:element name="${facet_name}">
- <dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="factory" value="winfo"/>
- <dc:attribute name="artifact-id" value="${aid}"/>
- <dc:attribute name="ids" value="${aid}"/>
- <dc:attribute name="out" value="floodmap"/>
- </dc:element>
- </dc:elements>
+ <fix_longitudinal_section_curve description="{$river} {$a_creation} {$collection_name}">
+ <dc:for-each>
+ <dc:element name="${facet_name}">
+ <dc:attribute name="description" value="${facet_description}"/>
+ <dc:attribute name="factory" value="fixanalysis"/>
+ <dc:attribute name="artifact-id" value="${aid}"/>
+ <dc:attribute name="ids" value="${facet_num}"/>
+ <dc:attribute name="out" value="fix_longitudinal_section_curve"/>
+ </dc:element>
+ </dc:for-each>
+ </fix_longitudinal_section_curve>
</dc:context>
- </dc:call-macro>
- </dc:context>
- </dc:elements>
- </floodmap>
- </dc:macro>
+ </dc:call-macro>
+ </dc:context>
+ </dc:for-each>
+ </fix_longitudinal_section_curve>
+ </dc:macro>
- <dc:comment>
- MINFO bedheight difference
- </dc:comment>
+ <dc:comment>
+ MINFO bedheight middle
+ </dc:comment>
- <dc:macro name="bed-difference">
- <fix_longitudinal_section_curve>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
+ <dc:macro name="bed-height">
+ <fix_vollmer_wq_curve>
+ <dc:for-each>
+ <dc:context>
+ <dc:call-macro name="range-filter">
<dc:context>
<dc:statement>
- SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description
- FROM outs as o, facets as f, artifacts as a
- WHERE (f.name = 'fix_sector_average_ls_0' or f.name = 'fix_sector_average_ls_1' or f.name = 'fix_sector_average_ls_2'
- or f.name = 'fix_sector_average_ls_3' or f.name = 'fix_analysis_events_ls' or f.name = 'fix_reference_events_ls')
- and f.out_id = o.id and o.artifact_id = ${a_id} and a.id = ${a_id}
+ SELECT a.gid AS aid,
+ f.id AS fid,
+ f.name AS facet_name,
+ f.num AS facet_num,
+ f.description AS facet_description
+ FROM outs AS o, facets AS f, artifacts AS a
+ WHERE (
+ f.name = 'longitudinal_section.w' OR
+ f.name = 'heightmarks_points'
+ ) AND f.out_id = o.id AND o.artifact_id = ${a_id} AND a.id = ${a_id}
</dc:statement>
- <fix_longitudinal_section_curve>
- <dc:attribute name="description" value="${river} ${a_creation} ${collection_name}"/>
- <dc:elements>
+ <fix_vollmer_wq_curve description="{$river} {$a_creation} {$collection_name}">
+ <dc:for-each>
<dc:element name="${facet_name}">
- <dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="factory" value="fixanalysis"/>
- <dc:attribute name="artifact-id" value="${aid}"/>
- <dc:attribute name="ids" value="${facet_num}"/>
- <dc:attribute name="out" value="fix_longitudinal_section_curve"/>
+ <dc:attribute name="description" value="${facet_description}"/>
+ <dc:attribute name="factory" value="fixanalysis"/>
+ <dc:attribute name="artifact-id" value="${aid}"/>
+ <dc:attribute name="ids" value="${facet_num}"/>
+ <dc:attribute name="out" value="longitudinal_section"/>
</dc:element>
- </dc:elements>
- </fix_longitudinal_section_curve>
- </dc:context>
+ </dc:for-each>
+ </fix_vollmer_wq_curve>
+ </dc:context>
</dc:call-macro>
- </dc:context>
- </dc:elements>
- </fix_longitudinal_section_curve>
- </dc:macro>
+ </dc:context>
+ </dc:for-each>
+ </fix_vollmer_wq_curve>
+ </dc:macro>
- <dc:comment>
- MINFO bedheight middle
- </dc:comment>
-
- <dc:macro name="bed-height">
- <fix_vollmer_wq_curve>
- <dc:elements>
- <dc:context>
- <dc:call-macro name="range-filter">
- <dc:context>
- <dc:statement>
- SELECT a.gid as aid, f.id AS fid, f.name AS facet_name, f.num AS facet_num, f.description as facet_description
- FROM outs as o, facets as f, artifacts as a
- WHERE (f.name = 'longitudinal_section.w' or f.name = 'heightmarks_points')
- and f.out_id = o.id and o.artifact_id = ${a_id} and a.id = ${a_id}
- </dc:statement>
- <fix_vollmer_wq_curve>
- <dc:attribute name="description" value="${river} ${a_creation} ${collection_name}"/>
- <dc:elements>
- <dc:element name="${facet_name}">
- <dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="factory" value="fixanalysis"/>
- <dc:attribute name="artifact-id" value="${aid}"/>
- <dc:attribute name="ids" value="${facet_num}"/>
- <dc:attribute name="out" value="longitudinal_section"/>
- </dc:element>
- </dc:elements>
- </fix_vollmer_wq_curve>
- </dc:context>
- </dc:call-macro>
- </dc:context>
- </dc:elements>
- </fix_vollmer_wq_curve>
- </dc:macro>
-
- <dc:macro name="floodmap-hws-user">
+ <dc:macro name="floodmap-hws-user">
+ <dc:context>
+ <dc:statement>
+ SELECT id AS out_id
+ FROM outs
+ WHERE artifact_id = ${a_id} AND name = 'floodmap'
+ </dc:statement>
+ <dc:for-each>
<dc:context>
<dc:statement>
- SELECT id AS out_id
- FROM outs
- WHERE artifact_id = ${a_id} AND name = 'floodmap'
+ SELECT name AS facet_name,
+ num AS facet_num,
+ description AS facet_description
+ FROM facets
+ WHERE out_id = ${out_id} AND name = 'floodmap.usershape'
+ ORDER BY num ASC, name DESC
</dc:statement>
- <dc:elements>
- <dc:context>
- <dc:statement>
- SELECT name AS facet_name, num as facet_num, description AS facet_description
- FROM facets
- WHERE out_id = ${out_id} and name = 'floodmap.usershape'
- ORDER BY num ASC, name DESC
- </dc:statement>
- <own-hws>
- <dc:elements>
- <dc:element name="${facet_name}">
- <dc:attribute name="description" value="${facet_description}"/>
- <dc:attribute name="ids" value="${facet_num}"/>
- <dc:attribute name="factory" value="winfo"/>
- <dc:attribute name="artifact-id" value="${a_gid}"/>
- <dc:attribute name="out" value="floodmap"/>
- </dc:element>
- </dc:elements>
- </own-hws>
- </dc:context>
- </dc:elements>
+ <own-hws>
+ <dc:for-each>
+ <dc:element name="${facet_name}">
+ <dc:attribute name="description" value="${facet_description}"/>
+ <dc:attribute name="ids" value="${facet_num}"/>
+ <dc:attribute name="factory" value="winfo"/>
+ <dc:attribute name="artifact-id" value="${a_gid}"/>
+ <dc:attribute name="out" value="floodmap"/>
+ </dc:element>
+ </dc:for-each>
+ </own-hws>
</dc:context>
- </dc:macro>
- <dc:if test="dc:contains($artifact-outs, 'longitudinal_section') or (dc:contains($artifact-outs, 'discharge_longitudinal_section') or (dc:contains($artifact-outs, 'w_differences')))">
- <dc:call-macro name="longitudinal"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'fix_deltawt_curve')">
- <dc:call-macro name="delta-wt"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'longitudinal_section') or (dc:contains($artifact-outs, 'w_differences') or (dc:contains($artifact-outs, 'discharge_longitudinal_section')))">
- <dc:call-macro name="differences"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'reference_curve')">
- <dc:call-macro name="reference-curves"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'computed_discharge_curve')">
- <dc:call-macro name="computed-discharge-curve"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'cross_section')">
- <dc:call-macro name="waterlevels"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'fix_longitudinal_section_curve')">
- <dc:call-macro name="longitudinal-section"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'fix_derivate_curve')">
- <dc:call-macro name="fix-derivate-curve"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'fix_wq_curve')">
- <dc:call-macro name="fix-wq-curve"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'duration_curve')">
- <dc:call-macro name="duration-curve"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'waterlevels') or (dc:contains($artifact-outs, 'fix_wq_curve'))">
- <dc:call-macro name="waterlevels-fix"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'floodmap') or dc:contains($artifact-outs, 'map')">
- <dc:call-macro name="flood-map"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'bed_difference_year') or dc:contains($artifact-outs, 'bed_difference_height_year')">
- <dc:call-macro name="bed-difference"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'bedheight_middle')">
- <dc:call-macro name="bed-height"/>
- </dc:if>
- <dc:if test="dc:contains($artifact-outs, 'floodmap-hws')">
- <dc:call-macro name="floodmap-hws-user"/>
- </dc:if>
- </dc:context>
- </dc:call-macro>
+ </dc:for-each>
+ </dc:context>
+ </dc:macro>
+
+ <dc:if test="dc:contains($artifact-outs, 'longitudinal_section') or (dc:contains($artifact-outs, 'discharge_longitudinal_section') or (dc:contains($artifact-outs, 'w_differences')))">
+ <dc:call-macro name="longitudinal"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'fix_deltawt_curve')">
+ <dc:call-macro name="delta-wt"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'longitudinal_section') or (dc:contains($artifact-outs, 'w_differences') or (dc:contains($artifact-outs, 'discharge_longitudinal_section')))">
+ <dc:call-macro name="differences"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'reference_curve')">
+ <dc:call-macro name="reference-curves"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'computed_discharge_curve')">
+ <dc:call-macro name="computed-discharge-curve"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'cross_section')">
+ <dc:call-macro name="waterlevels"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'fix_longitudinal_section_curve')">
+ <dc:call-macro name="longitudinal-section"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'fix_derivate_curve')">
+ <dc:call-macro name="fix-derivate-curve"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'fix_wq_curve')">
+ <dc:call-macro name="fix-wq-curve"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'duration_curve')">
+ <dc:call-macro name="duration-curve"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'waterlevels') or (dc:contains($artifact-outs, 'fix_wq_curve'))">
+ <dc:call-macro name="waterlevels-fix"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'floodmap') or dc:contains($artifact-outs, 'map')">
+ <dc:call-macro name="flood-map"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'bed_difference_year') or dc:contains($artifact-outs, 'bed_difference_height_year')">
+ <dc:call-macro name="bed-difference"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'bedheight_middle')">
+ <dc:call-macro name="bed-height"/>
+ </dc:if>
+ <dc:if test="dc:contains($artifact-outs, 'floodmap-hws')">
+ <dc:call-macro name="floodmap-hws-user"/>
+ </dc:if>
+ </dc:context>
+ </dc:call-macro>
</old_calculations>
-
- <dc:comment>
- Include System specific part when 'load-system' is in parameters.
- -----------------------------------------------------------------
- </dc:comment>
- <dc:choose>
- <dc:when test="dc:contains($parameters,'load-system')">
- <dc:call-macro name="load-system"/>
- </dc:when>
- </dc:choose>
+ <dc:if test="dc:contains($parameters,'load-system')">
+ <dc:comment>
+ Include System specific part when 'load-system' is in parameters.
+ -----------------------------------------------------------------
+ </dc:comment>
+ <dc:call-macro name="load-system"/>
+ </dc:if>
</dc:when>
-
- <dc:comment>
- Include System specific part only if no user ID is given.
- ---------------------------------------------------------
- </dc:comment>
<dc:otherwise>
- <dc:call-macro name="load-system"/>
+ <dc:comment>
+ Include System specific part only if no user ID is given.
+ ---------------------------------------------------------
+ </dc:comment>
+ <dc:call-macro name="load-system"/>
</dc:otherwise>
</dc:choose>
+
</datacage>
</dc:template>
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/doc/conf/themes.xml
--- a/flys-artifacts/doc/conf/themes.xml Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/doc/conf/themes.xml Wed Apr 03 16:00:21 2013 +0200
@@ -214,6 +214,7 @@
<mapping from="floodmap.fixpoints" to="Fixpoints" />
<mapping from="floodmap.floodmaps" to="Floodmaps" />
<mapping from="floodmap.gauge_location" to="GaugeLocation" />
+ <mapping from="floodmap.jetties" to="Jetties" />
<mapping from="other.wq" to="WQPoints" />
<mapping from="other.wkms" to="WKms" />
<mapping from="other.wkms.marks" to="WKmsAnnotation" />
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/doc/conf/themes/default/floodmap.xml
--- a/flys-artifacts/doc/conf/themes/default/floodmap.xml Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/doc/conf/themes/default/floodmap.xml Wed Apr 03 16:00:21 2013 +0200
@@ -121,4 +121,12 @@
<inherit from="Map" />
</inherits>
</theme>
+
+ <theme name="Jetties">
+ <inherits>
+ <inherit from="MapLines"/>
+ <inherit from="Label" />
+ <inherit from="Symbol" />
+ </inherits>
+ </theme>
<!--/themegroup-->
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/doc/conf/themes/second/floodmap.xml
--- a/flys-artifacts/doc/conf/themes/second/floodmap.xml Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/doc/conf/themes/second/floodmap.xml Wed Apr 03 16:00:21 2013 +0200
@@ -121,4 +121,12 @@
<inherit from="Map" />
</inherits>
</theme>
+
+ <theme name="Jetties">
+ <inherits>
+ <inherit from="MapLines"/>
+ <inherit from="Label" />
+ <inherit from="Symbol" />
+ </inherits>
+ </theme>
<!--/themegroup-->
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/doc/howto_wmsartifact.txt
--- a/flys-artifacts/doc/howto_wmsartifact.txt Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/doc/howto_wmsartifact.txt Wed Apr 03 16:00:21 2013 +0200
@@ -19,3 +19,10 @@
- German localization:
flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_de.properties
+You might also want a theme:
+- Add a mapping from your Facet to the theme in:
+ flys-artifacts/doc/conf/themes.xml
+- Define the theme in:
+ flys-artifacts/doc/conf/themes/default/floodmap.xml
+ flys-artifacts/doc/conf/themes/second/floodmap.xml
+
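
As a concrete example, the Jetties layer added elsewhere in this patch follows exactly these two steps: a facet-to-theme mapping in themes.xml plus a matching theme definition in both floodmap.xml files (quoted from the hunks above, not new configuration):

    <!-- themes.xml -->
    <mapping from="floodmap.jetties" to="Jetties" />

    <!-- themes/default/floodmap.xml and themes/second/floodmap.xml -->
    <theme name="Jetties">
      <inherits>
        <inherit from="MapLines"/>
        <inherit from="Label" />
        <inherit from="Symbol" />
      </inherits>
    </theme>
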
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/pom.xml
--- a/flys-artifacts/pom.xml Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/pom.xml Wed Apr 03 16:00:21 2013 +0200
@@ -112,7 +112,7 @@
<dependency>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
- <version>1.2.2</version>
+ <version>1.4</version>
</dependency>
<dependency>
<groupId>org.geotools</groupId>
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/MainValuesArtifact.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/MainValuesArtifact.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/MainValuesArtifact.java Wed Apr 03 16:00:21 2013 +0200
@@ -380,5 +380,15 @@
}
return filteredList;
}
+
+
+ /**
+ * Returns the name of this artifact ('mainvalue').
+ *
+ * @return 'mainvalue'
+ */
+ public String getName() {
+ return ARTIFACT_NAME;
+ }
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/RiverAxisArtifact.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/RiverAxisArtifact.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/RiverAxisArtifact.java Wed Apr 03 16:00:21 2013 +0200
@@ -11,6 +11,7 @@
import de.intevation.flys.artifacts.model.FacetTypes;
import de.intevation.flys.artifacts.model.RiverFactory;
import de.intevation.flys.model.River;
+import de.intevation.flys.model.RiverAxis;
import de.intevation.flys.utils.FLYSUtils;
import de.intevation.flys.utils.GeometryUtils;
@@ -80,23 +81,6 @@
public RiverAxisState(FLYSArtifact artifact) {
super(artifact);
- riverId = 0;
- }
-
- @Override
- public int getRiverId() {
- if (riverId == 0) {
- String ids = artifact.getDataAsString("ids");
-
- try {
- riverId = Integer.parseInt(ids);
- }
- catch (NumberFormatException nfe) {
- logger.error("Cannot parse river id from '" + ids + "'");
- }
- }
-
- return riverId;
}
@Override
@@ -123,23 +107,63 @@
@Override
protected Envelope getExtent(boolean reproject) {
River river = RiverFactory.getRiver(getRiverId());
+ List<RiverAxis> axes;
- if (reproject) {
- logger.debug("Query extent for RiverAxis with Srid: " + getSrid());
- return GeometryUtils.transform(
- GeometryUtils.getRiverBoundary(river.getName()),
- getSrid());
+ String kind = getIdPart(2);
+
+ if (kind != null && kind.equals("1")) {
+ axes = RiverAxis.getRiverAxis(river.getName(),
+ Integer.parseInt(kind));
+ } else if (kind != null) {
+ axes = RiverAxis.getRiverAxis(river.getName(),
+ getName(), Integer.parseInt(kind));
+ } else {
+ if (reproject) {
+ logger.debug("Query extent for RiverAxis with Srid: " + getSrid());
+ return GeometryUtils.transform(
+ GeometryUtils.getRiverBoundary(river.getName()),
+ getSrid());
+ } else {
+ return GeometryUtils.transform(
+ GeometryUtils.getRiverBoundary(river.getName()),
+ "31467");
+ }
}
- else {
- return GeometryUtils.transform(
- GeometryUtils.getRiverBoundary(river.getName()),
- "31467");
+
+ Envelope max = null;
+
+ for (RiverAxis ax: axes) {
+ Envelope env = ax.getGeom().getEnvelopeInternal();
+
+ if (max == null) {
+ max = env;
+ continue;
+ }
+
+ max.expandToInclude(env);
}
+
+ return max != null && reproject
+ ? GeometryUtils.transform(max, getSrid())
+ : max;
}
@Override
protected String getFilter() {
- return "river_id=" + String.valueOf(getRiverId());
+ String kind = getIdPart(2);
+ if (kind != null && kind.equals("1")) {
+ return "river_id=" + String.valueOf(getRiverId()) +
+ " AND kind_id=" + kind;
+ } else if (kind != null) {
+ return "river_id=" + String.valueOf(getRiverId()) +
+ " AND kind_id=" + kind +
+ " AND name='" + getName() + "'";
+ } else if (getIdPart(1) != null) {
+ return "river_id=" + String.valueOf(getRiverId()) +
+ " AND name='" + getName() + "'";
+ }
+ return "river_id=" + String.valueOf(getRiverId()) +
+ " AND kind_id=" + kind;
}
@Override
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/SQRelationArtifact.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/SQRelationArtifact.java Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,158 @@
+package de.intevation.flys.artifacts;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.w3c.dom.Document;
+
+import de.intevation.artifactdatabase.data.DefaultStateData;
+import de.intevation.artifactdatabase.state.DefaultOutput;
+import de.intevation.artifactdatabase.state.Facet;
+import de.intevation.artifacts.ArtifactFactory;
+import de.intevation.artifacts.ArtifactNamespaceContext;
+import de.intevation.artifacts.CallMeta;
+import de.intevation.artifacts.common.utils.XMLUtils;
+import de.intevation.flys.artifacts.model.sq.StaticSQRelation;
+import de.intevation.flys.artifacts.resources.Resources;
+import de.intevation.flys.artifacts.states.StaticState;
+import de.intevation.flys.artifacts.states.sq.SQStaticState;
+
+
+public class SQRelationArtifact
+extends AbstractStaticStateArtifact
+{
+ private static final Logger logger =
+ Logger.getLogger(SQRelationArtifact.class);
+
+ public static final String XPATH_RIVER = "/art:action/art:river/@art:name";
+ public static final String XPATH_STATION =
+ "/art:action/art:measurement_station/@art:number";
+ public static final String NAME = "staticsqrelation";
+ public static final String STATIC_STATE_NAME = "state.sqrelation.static";
+ public static final String UIPROVIDER = "static_sqrelation";
+ public static final String SQ_RELATION_OUT_A = "sq_relation_a";
+ public static final String SQ_RELATION_OUT_B = "sq_relation_b";
+ public static final String SQ_RELATION_OUT_C = "sq_relation_c";
+ public static final String SQ_RELATION_OUT_D = "sq_relation_d";
+ public static final String SQ_RELATION_OUT_E = "sq_relation_e";
+ public static final String SQ_RELATION_OUT_F = "sq_relation_f";
+
+
+ @Override
+ public void setup(
+ String identifier,
+ ArtifactFactory factory,
+ Object context,
+ CallMeta callmeta,
+ Document data
+ ) {
+ logger.debug("SQRelationArtifact.setup()");
+
+ String river = XMLUtils.xpathString(
+ data,
+ XPATH_RIVER,
+ ArtifactNamespaceContext.INSTANCE);
+ String station = XMLUtils.xpathString(
+ data,
+ XPATH_STATION,
+ ArtifactNamespaceContext.INSTANCE);
+
+ addData(
+ "river",
+ new DefaultStateData(
+ "river",
+ Resources.getMsg(callmeta, "static.sq.river", "Rivername"),
+ "String",
+ river));
+ addData(
+ "station",
+ new DefaultStateData(
+ "station",
+ Resources.getMsg(callmeta, "static.sq.station", "Station"),
+ "String",
+ station));
+ super.setup(identifier, factory, context, callmeta, data);
+ }
+
+ @Override
+ protected void initStaticState() {
+ StaticState state = new SQStaticState(STATIC_STATE_NAME);
+
+ List<Facet> fs = new ArrayList<Facet>();
+ state.staticCompute(fs, this);
+
+ if (hasParameter(StaticSQRelation.Parameter.A, fs)) {
+ DefaultOutput outputA = new DefaultOutput(
+ SQ_RELATION_OUT_A,
+ "output.static.sqrelation.a",
+ "image/png",
+ fs,
+ "chart");
+ state.addOutput(outputA);
+ }
+ if (hasParameter(StaticSQRelation.Parameter.B, fs)) {
+ DefaultOutput outputB = new DefaultOutput(
+ SQ_RELATION_OUT_B,
+ "output.static.sqrelation.b",
+ "image/png",
+ fs,
+ "chart");
+ state.addOutput(outputB);
+ }
+ if (hasParameter(StaticSQRelation.Parameter.C, fs)) {
+ DefaultOutput outputC = new DefaultOutput(
+ SQ_RELATION_OUT_C,
+ "output.static.sqrelation.c",
+ "image/png",
+ fs,
+ "chart");
+ state.addOutput(outputC);
+ }
+ if (hasParameter(StaticSQRelation.Parameter.D, fs)) {
+ DefaultOutput outputD = new DefaultOutput(
+ SQ_RELATION_OUT_D,
+ "output.static.sqrelation.d",
+ "image/png",
+ fs,
+ "chart");
+ state.addOutput(outputD);
+ }
+ if (hasParameter(StaticSQRelation.Parameter.E, fs)) {
+ DefaultOutput outputE = new DefaultOutput(
+ SQ_RELATION_OUT_E,
+ "output.static.sqrelation.e",
+ "image/png",
+ fs,
+ "chart");
+ state.addOutput(outputE);
+ }
+ if (hasParameter(StaticSQRelation.Parameter.F, fs)) {
+ DefaultOutput outputF = new DefaultOutput(
+ SQ_RELATION_OUT_F,
+ "output.static.sqrelation.f",
+ "image/png",
+ fs,
+ "chart");
+ state.addOutput(outputF);
+ }
+ addFacets(STATIC_STATE_NAME, fs);
+ state.setUIProvider(UIPROVIDER);
+ setStaticState(state);
+ }
+
+ @Override
+ public String getName() {
+ return NAME;
+ }
+
+ private boolean hasParameter(StaticSQRelation.Parameter p, List<Facet> fs) {
+ for (Facet f : fs) {
+ if (f.getName().equals("sq_" +
+ p.toString().toLowerCase() + "_curve")) {
+ return true;
+ }
+ }
+ return false;
+ }
+}
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/WINFOArtifact.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WINFOArtifact.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WINFOArtifact.java Wed Apr 03 16:00:21 2013 +0200
@@ -85,7 +85,9 @@
private static final String [] INACTIVES = new String[] {
LONGITUDINAL_Q,
- DURATION_Q
+ DURATION_Q,
+ HISTORICAL_DISCHARGE_MAINVALUES_W,
+ HISTORICAL_DISCHARGE_MAINVALUES_Q
};
static {
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSBuildingsArtifact.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSBuildingsArtifact.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSBuildingsArtifact.java Wed Apr 03 16:00:21 2013 +0200
@@ -102,8 +102,14 @@
@Override
protected Envelope getExtent(boolean reproject) {
- List<Building> buildings =
- Building.getBuildings(getRiverId(), getName());
+ List<Building> buildings;
+ String kind = getIdPart(2);
+
+ if (kind != null) {
+ buildings = Building.getBuildings(getRiverId(), Integer.parseInt(kind));
+ } else {
+ buildings = Building.getBuildings(getRiverId(), name);
+ }
Envelope max = null;
@@ -125,6 +131,15 @@
@Override
protected String getFilter() {
+ // The expected id string is:
+ // river_id;layer-name;kind
+
+ String kind = getIdPart(2);
+ if (kind != null) {
+            // If kind is provided, we filter by kind.
+ return "river_id=" + String.valueOf(getRiverId()) +
+ " AND kind_id=" + kind;
+ }
return "river_id=" + String.valueOf(getRiverId()) +
" AND name='" + getName() + "'";
}
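
To make the comment above concrete, here is a minimal sketch of the resulting filter strings (hypothetical class and values, not part of the patch): an id string that carries a kind selects the layer by kind_id, otherwise the layer is selected by its name.

    // Minimal sketch of the buildings filter logic (hypothetical example).
    public class BuildingsFilterExample {

        static String filter(int riverId, String name, String kind) {
            if (kind != null) {
                // kind given in the id string -> select the layer by kind
                return "river_id=" + riverId + " AND kind_id=" + kind;
            }
            // no kind -> fall back to selecting the layer by name
            return "river_id=" + riverId + " AND name='" + name + "'";
        }

        public static void main(String[] args) {
            System.out.println(filter(17, "buildings", "2"));
            // river_id=17 AND kind_id=2
            System.out.println(filter(17, "buildings", null));
            // river_id=17 AND name='buildings'
        }
    }
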
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSDBArtifact.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSDBArtifact.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSDBArtifact.java Wed Apr 03 16:00:21 2013 +0200
@@ -149,17 +149,16 @@
public int getRiverId() {
if (riverId == 0) {
- String ids = artifact.getDataAsString("ids");
- String[] parts = ids.split(";");
+ String rid = getIdPart(0);
try {
- riverId = Integer.parseInt(parts[0]);
+ riverId = Integer.parseInt(rid);
}
catch (NumberFormatException nfe) {
- logger.error("Cannot parse river id from '" + parts[0] + "'");
+ logger.error("Cannot parse river id from '" +
+ artifact.getDataAsString("ids") + "'");
}
}
-
return riverId;
}
@@ -178,18 +177,33 @@
*/
public String getName() {
if (name == null) {
- String ids = artifact.getDataAsString("ids");
-
- String parts[] = ids != null ? ids.split(";") : null;
-
- if (parts != null && parts.length >= 2) {
- name = parts[1];
- }
+ name = getIdPart(1);
}
return name;
}
+ /**
+     * Returns a part of the ID string. This method splits the
+     * 'ids' data string, which is expected to be separated
+     * by ';'.
+ *
+ * @param number the position of the id data string
+ *
+     * @return the part of the id string at position number,
+     *         or null if number is out of bounds.
+ */
+ public String getIdPart(int number) {
+ String ids = artifact.getDataAsString("ids");
+
+ String parts[] = ids != null ? ids.split(";") : null;
+
+ if (parts != null && parts.length >= number + 1) {
+ return parts[number];
+ }
+ return null;
+ }
+
/**
* Returns the name of the layer (returned by getName()) or the layer
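
The getIdPart() contract documented above can be illustrated with a small standalone sketch (hypothetical class and values, not part of the patch); it mirrors the splitting logic, so the expected return values for a three-part id string are easy to see:

    // Standalone sketch of the getIdPart() contract. The 'ids' string is
    // split on ';' and the requested part is returned, or null if the
    // index is out of bounds.
    public class IdPartExample {

        static String getIdPart(String ids, int number) {
            String[] parts = ids != null ? ids.split(";") : null;
            if (parts != null && parts.length >= number + 1) {
                return parts[number];
            }
            return null;
        }

        public static void main(String[] args) {
            String ids = "17;buildings;2";          // hypothetical id string
            System.out.println(getIdPart(ids, 0));  // "17"        (river id)
            System.out.println(getIdPart(ids, 1));  // "buildings" (layer name)
            System.out.println(getIdPart(ids, 2));  // "2"         (kind)
            System.out.println(getIdPart(ids, 3));  // null        (out of bounds)
        }
    }
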
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSFloodplainArtifact.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSFloodplainArtifact.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSFloodplainArtifact.java Wed Apr 03 16:00:21 2013 +0200
@@ -84,21 +84,6 @@
riverId = 0;
}
- public int getRiverId() {
- if (riverId == 0) {
- String ids = artifact.getDataAsString("ids");
-
- try {
- riverId = Integer.parseInt(ids);
- }
- catch (NumberFormatException nfe) {
- logger.error("Cannot parse river id from '" + ids + "'");
- }
- }
-
- return riverId;
- }
-
protected River getRiver() {
return RiverFactory.getRiver(getRiverId());
}
@@ -121,19 +106,46 @@
@Override
protected Envelope getExtent(boolean reproject) {
- River river = getRiver();
- Floodplain plain = Floodplain.getFloodplain(river.getName());
+ River river = getRiver();
+ List<Floodplain> fps;
- Envelope e = plain.getGeom().getEnvelopeInternal();
+ String kind = getIdPart(2);
- return e != null && reproject
- ? GeometryUtils.transform(e, getSrid())
- : e;
+ if (kind != null && ! kind.equals("1")) {
+ fps = Floodplain.getFloodplains(river.getName(),
+ getName(), Integer.parseInt(kind));
+ } else {
+ fps = Floodplain.getFloodplains(river.getName(), 1);
+ }
+
+ Envelope max = null;
+
+ for (Floodplain fp: fps) {
+ Envelope env = fp.getGeom().getEnvelopeInternal();
+
+ if (max == null) {
+ max = env;
+ continue;
+ }
+
+ max.expandToInclude(env);
+ }
+
+ return max != null && reproject
+ ? GeometryUtils.transform(max, getSrid())
+ : max;
}
@Override
protected String getFilter() {
- return "river_id=" + String.valueOf(getRiverId());
+ String kind = getIdPart(2);
+ if (kind != null && ! kind.equals("1")) {
+ return "river_id=" + String.valueOf(getRiverId()) +
+ " AND kind_id=" + kind +
+ " AND name='" + getName() + "'";
+ }
+ return "river_id=" + String.valueOf(getRiverId()) +
+ " AND kind_id=1";
}
@Override
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSHydrBoundaryArtifact.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSHydrBoundaryArtifact.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSHydrBoundaryArtifact.java Wed Apr 03 16:00:21 2013 +0200
@@ -102,8 +102,14 @@
@Override
protected Envelope getExtent(boolean reproject) {
- List<HydrBoundary> boundaries = HydrBoundary.getHydrBoundaries(
- getRiverId(), getName());
+ List<HydrBoundary> boundaries;
+ String kind = getIdPart(2);
+ if (kind != null) {
+ boundaries = HydrBoundary.getHydrBoundaries(getRiverId(),
+ getName(), Integer.parseInt(kind));
+ } else {
+ boundaries = HydrBoundary.getHydrBoundaries(getRiverId(), getName());
+ }
Envelope max = null;
@@ -125,6 +131,12 @@
@Override
protected String getFilter() {
+ String kind = getIdPart(2);
+ if (kind != null) {
+ return "river_id=" + String.valueOf(getRiverId())
+ + " AND name='" + getName() + "'"
+ + " AND kind = " + kind;
+ }
return "river_id=" + String.valueOf(getRiverId())
+ " AND name='" + getName() + "'";
}
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSHydrBoundaryPolyArtifact.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSHydrBoundaryPolyArtifact.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSHydrBoundaryPolyArtifact.java Wed Apr 03 16:00:21 2013 +0200
@@ -102,8 +102,31 @@
@Override
protected Envelope getExtent(boolean reproject) {
- List<HydrBoundaryPoly> boundaries = HydrBoundaryPoly.getHydrBoundaries(
- getRiverId(), getName());
+ String kind = getIdPart(2);
+ String sectie = getIdPart(3);
+ String sobek = getIdPart(4);
+ int kindId = -1;
+ int sectieId = -1;
+ int sobekId = -1;
+
+ if (kind != null) {
+ kindId = Integer.parseInt(kind);
+ }
+ if (sectie != null && !sectie.equals("-1")) {
+ sectieId = Integer.parseInt(sectie);
+ }
+ if (sobek != null && !sobek.equals("-1")) {
+ sobekId = Integer.parseInt(sobek);
+ }
+
+ List<HydrBoundaryPoly> boundaries;
+ if (kindId == -1 && sobekId == -1 && sectieId == -1) {
+ boundaries = HydrBoundaryPoly.getHydrBoundaries(
+ getRiverId(), getName());
+ } else {
+ boundaries = HydrBoundaryPoly.getHydrBoundaries(
+ getRiverId(), kindId, sectieId, sobekId);
+ }
Envelope max = null;
@@ -125,8 +148,29 @@
@Override
protected String getFilter() {
+ // Expected id string:
+ // river_id;layer_name;kind;sectie;sobek
+ String kind = getIdPart(2);
+ String sectie = getIdPart(3);
+ String sobek = getIdPart(4);
+
+ String filter = "";
+ if (kind != null && !kind.equals("-1")) {
+ filter += " AND kind = " + kind;
+ }
+ if (sectie != null && !sectie.equals("-1")) {
+ filter += " AND sectie = " + sectie;
+ }
+ if (sobek != null && !sobek.equals("-1")) {
+ filter += " AND sobek = " + sobek;
+ }
+
+ if (filter.isEmpty()) {
+ filter = " AND name='" + getName() + "'";
+ }
+
return "river_id=" + String.valueOf(getRiverId())
- + " AND name='" + getName() + "'";
+ + filter;
}
@Override
@@ -134,10 +178,10 @@
String srid = getSrid();
if (FLYSUtils.isUsingOracle()) {
- return "geom FROM hydr_boundaries USING SRID " + srid;
+ return "geom FROM hydr_boundaries_poly USING SRID " + srid;
}
else {
- return "geom FROM hydr_boundaries USING UNIQUE id USING SRID " + srid;
+ return "geom FROM hydr_boundaries_poly USING UNIQUE id USING SRID " + srid;
}
}
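
The five-part id string documented in the getFilter() hunk above (river_id;layer_name;kind;sectie;sobek) can be summarized with a minimal sketch (hypothetical class and values, not part of the patch): parts set to "-1" or missing are skipped, and if neither kind, sectie nor sobek is given, the filter falls back to the layer name.

    // Minimal sketch of the poly-boundary filter logic (hypothetical example).
    public class HydrBoundaryPolyFilterExample {

        static String filter(String ids) {
            String[] p = ids.split(";");
            String riverId = p[0];
            String name    = p.length > 1 ? p[1] : null;
            String kind    = p.length > 2 ? p[2] : null;
            String sectie  = p.length > 3 ? p[3] : null;
            String sobek   = p.length > 4 ? p[4] : null;

            // collect the parts that are actually set
            String f = "";
            if (kind != null && !kind.equals("-1"))     f += " AND kind = " + kind;
            if (sectie != null && !sectie.equals("-1")) f += " AND sectie = " + sectie;
            if (sobek != null && !sobek.equals("-1"))   f += " AND sobek = " + sobek;

            // nothing set -> fall back to the layer name
            if (f.isEmpty()) f = " AND name='" + name + "'";

            return "river_id=" + riverId + f;
        }

        public static void main(String[] args) {
            System.out.println(filter("17;boundaries;2;-1;-1"));
            // river_id=17 AND kind = 2
            System.out.println(filter("17;boundaries;-1;-1;-1"));
            // river_id=17 AND name='boundaries'
        }
    }
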
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSQPSArtifact.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSQPSArtifact.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/WMSQPSArtifact.java Wed Apr 03 16:00:21 2013 +0200
@@ -77,26 +77,8 @@
private static final Logger logger =
Logger.getLogger(WMSQPSState.class);
- protected int riverId;
-
public WMSQPSState(WMSDBArtifact artifact) {
super(artifact);
- riverId = 0;
- }
-
- public int getRiverId() {
- if (riverId == 0) {
- String ids = artifact.getDataAsString("ids");
-
- try {
- riverId = Integer.parseInt(ids);
- }
- catch (NumberFormatException nfe) {
- logger.error("Cannot parse river id from '" + ids + "'");
- }
- }
-
- return riverId;
}
@Override
@@ -118,9 +100,20 @@
@Override
protected Envelope getExtent(boolean reproject) {
River river = RiverFactory.getRiver(getRiverId());
+ List<CrossSectionTrack> qps;
- List<CrossSectionTrack> qps =
- CrossSectionTrack.getCrossSectionTrack(river.getName());
+ String kind = getIdPart(2);
+
+ if (kind != null && kind.equals("1")) {
+ qps = CrossSectionTrack.getCrossSectionTrack(river.getName(),
+ Integer.parseInt(kind));
+ } else if (kind != null) {
+ qps = CrossSectionTrack.getCrossSectionTrack(river.getName(),
+ getName(), Integer.parseInt(kind));
+ } else {
+ qps = CrossSectionTrack.getCrossSectionTrack(river.getName(),
+ getName());
+ }
Envelope max = null;
@@ -142,7 +135,18 @@
@Override
protected String getFilter() {
- return "river_id=" + String.valueOf(getRiverId());
+ String kind = getIdPart(2);
+ if (kind != null && kind.equals("1")) {
+ // There can be several layers named qps that differ in kind
+ return "river_id=" + String.valueOf(getRiverId()) +
+ " AND kind_id=" + kind;
+ } else if (kind != null) {
+ return "river_id=" + String.valueOf(getRiverId()) +
+ " AND kind_id=" + kind +
+ " AND name='" + getName() + "'";
+ }
+ return "river_id=" + String.valueOf(getRiverId()) +
+ " AND name='" + getName() + "'";
}
@Override
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/access/StaticSQRelationAccess.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/access/StaticSQRelationAccess.java Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,31 @@
+package de.intevation.flys.artifacts.access;
+
+import org.apache.log4j.Logger;
+
+import de.intevation.flys.artifacts.FLYSArtifact;
+
+
+public class StaticSQRelationAccess
+extends RiverAccess
+{
+ /** The logger that is used in this state. */
+ private static final Logger logger =
+ Logger.getLogger(StaticSQRelationAccess.class);
+
+ private String measurementStation;
+
+ public StaticSQRelationAccess(FLYSArtifact artifact) {
+ super(artifact);
+ }
+
+ /** Get measurement station */
+ public String getMeasurementStation() {
+ if (measurementStation == null) {
+ measurementStation = getString("station");
+ }
+ if (logger.isDebugEnabled()) {
+ logger.debug("measurement station: '" + measurementStation + "'");
+ }
+ return measurementStation;
+ }
+}
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/datacage/Recommendations.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/datacage/Recommendations.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/datacage/Recommendations.java Wed Apr 03 16:00:21 2013 +0200
@@ -29,6 +29,7 @@
import de.intevation.flys.artifacts.FLYSArtifact;
+import de.intevation.flys.backend.SedDBSessionHolder;
import de.intevation.flys.backend.SessionHolder;
import de.intevation.artifactdatabase.data.StateData;
@@ -41,6 +42,12 @@
*/
public class Recommendations
{
+ public static final String CONNECTION_USER = "user";
+ public static final String CONNECTION_SYSTEM = "system";
+ public static final String CONNECTION_SEDDB = "seddb";
+
+ public static final String DEFAULT_CONNECTION_NAME = CONNECTION_SYSTEM;
+
private static Logger log = Logger.getLogger(Recommendations.class);
private static final boolean DEVELOPMENT_MODE =
@@ -198,44 +205,68 @@
final Map<String, Object> parameters,
final String userId,
final Node result,
- Session session
+ Session systemSession
) {
- session.doWork(new Work() {
+ systemSession.doWork(new Work() {
@Override
- public void execute(Connection systemConnection)
+ public void execute(final Connection systemConnection)
throws SQLException
{
- List<Builder.NamedConnection> connections =
- new ArrayList<Builder.NamedConnection>(2);
-
- Connection userConnection = userId != null
- ? DBConfig
- .getInstance()
- .getDBConnection()
- .getDataSource()
- .getConnection()
- : null;
-
- try {
- if (userConnection != null) {
- connections.add(new Builder.NamedConnection(
- Builder.CONNECTION_USER, userConnection, false));
+ SedDBSessionHolder.HOLDER.get().doWork(new Work() {
+ @Override
+ public void execute(Connection sedDBConnection)
+ throws SQLException
+ {
+ recommend(
+ parameters, userId, result,
+ systemConnection,
+ sedDBConnection);
}
-
- connections.add(new Builder.NamedConnection(
- Builder.CONNECTION_SYSTEM, systemConnection, true));
-
- getBuilder().build(connections, result, parameters);
- }
- finally {
- if (userConnection != null) {
- userConnection.close();
- }
- }
+ });
}
});
}
+ public void recommend(
+ Map<String, Object> parameters,
+ String userId,
+ Node result,
+ Connection systemConnection,
+ Connection seddbConnection
+ ) throws SQLException
+ {
+ List<Builder.NamedConnection> connections =
+ new ArrayList<Builder.NamedConnection>(3);
+
+ Connection userConnection = userId != null
+ ? DBConfig
+ .getInstance()
+ .getDBConnection()
+ .getDataSource()
+ .getConnection()
+ : null;
+
+ try {
+ connections.add(new Builder.NamedConnection(
+ CONNECTION_SYSTEM, systemConnection, true));
+
+ connections.add(new Builder.NamedConnection(
+ CONNECTION_SEDDB, seddbConnection, true));
+
+ if (userConnection != null) {
+ connections.add(new Builder.NamedConnection(
+ CONNECTION_USER, userConnection, false));
+ }
+
+
+ getBuilder().build(connections, result, parameters);
+ }
+ finally {
+ if (userConnection != null) {
+ userConnection.close();
+ }
+ }
+ }
/** Get singleton instance. */
public static synchronized Recommendations getInstance() {
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/datacage/templating/Builder.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/datacage/templating/Builder.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/datacage/templating/Builder.java Wed Apr 03 16:00:21 2013 +0200
@@ -21,15 +21,18 @@
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.log4j.Logger;
+import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
+import org.w3c.dom.NamedNodeMap;
/** Handles and evaluate meta-data template against dbs. */
@@ -37,13 +40,12 @@
{
private static Logger log = Logger.getLogger(Builder.class);
- public static final String CONNECTION_USER = "user";
- public static final String CONNECTION_SYSTEM = "system";
- public static final String DEFAULT_CONNECTION_NAME = CONNECTION_SYSTEM;
-
public static final Pattern STRIP_LINE_INDENT =
Pattern.compile("\\s*\\r?\\n\\s*");
+ public static final Pattern BRACKET_XPATH =
+ Pattern.compile("\\{([^}]+)\\}");
+
public static final String DC_NAMESPACE_URI =
"http://www.intevation.org/2011/Datacage";
@@ -57,6 +59,8 @@
protected Map<String, CompiledStatement> compiledStatements;
+ protected Map<String, Element> macros;
+
/** Connection to either of the databases. */
public static class NamedConnection {
@@ -94,6 +98,9 @@
protected Map<String, CompiledStatement.Instance> statements;
protected Deque<Pair<NamedConnection, ResultData>> connectionsStack;
protected Deque<NodeList> macroBodies;
+ protected FunctionResolver functionResolver;
+ protected Map<String, XPathExpression> expressions;
+
public BuildHelper(
Node output,
@@ -110,9 +117,11 @@
this.output = output;
frames = new StackFrames(parameters);
owner = getOwnerDocument(output);
- statements =
+ macroBodies = new ArrayDeque<NodeList>();
+ functionResolver = new FunctionResolver(this);
+ expressions = new HashMap<String, XPathExpression>();
+ statements =
new HashMap<String, CompiledStatement.Instance>();
- macroBodies = new ArrayDeque<NodeList>();
}
public void build() throws SQLException {
@@ -173,7 +182,7 @@
}
/**
- * Handle a \<context\> node.
+ * Handle a dc:context node.
*/
protected void context(Node parent, Element current)
throws SQLException
@@ -236,6 +245,10 @@
}
}
+ public boolean hasResult() {
+ return !connectionsStack.isEmpty()
+ && !connectionsStack.peek().getB().isEmpty();
+ }
protected ResultData createFilteredResultData(ResultData rd, String filter) {
if (filter == null) return rd;
@@ -266,72 +279,82 @@
return new ResultData(rd.getColumnLabels(), filtered);
}
+ protected void filter(Node parent, Element current)
+ throws SQLException
+ {
+ String expr = current.getAttribute("expr");
+
+ if ((expr = expr.trim()).length() == 0) {
+ expr = null;
+ }
+
+ NodeList subs = current.getChildNodes();
+ int S = subs.getLength();
+ if (S == 0) {
+ log.debug("dc:filter has no children");
+ return;
+ }
+
+ ResultData orig = null;
+ Pair<Builder.NamedConnection, ResultData> pair = null;
+
+ if (expr != null && !connectionsStack.isEmpty()) {
+ pair = connectionsStack.peek();
+ orig = pair.getB();
+ pair.setB(createFilteredResultData(orig, expr));
+ }
+
+ try {
+ for (int i = 0; i < S; ++i) {
+ build(parent, subs.item(i));
+ }
+ }
+ finally {
+ if (orig != null) {
+ pair.setB(orig);
+ }
+ }
+ }
+
/**
* Kind of foreach over results of a statement within a context.
*/
- protected void elements(Node parent, Element current)
+ protected void foreach(Node parent, Element current)
throws SQLException
{
- log.debug("dc:elements");
+ log.debug("dc:for-each");
if (connectionsStack.isEmpty()) {
- log.warn("dc:elements without having results");
+ log.debug("dc:for-each without having results");
return;
}
- String filter = current.getAttribute("filter");
-
- if ((filter = filter.trim()).length() == 0) {
- filter = null;
- }
-
NodeList subs = current.getChildNodes();
int S = subs.getLength();
if (S == 0) {
- log.debug("dc:elements has no children");
+ log.debug("dc:for-each has no children");
return;
}
Pair<Builder.NamedConnection, ResultData> pair =
connectionsStack.peek();
- ResultData rd = connectionsStack.peek().getB();
- ResultData orig = rd;
+ ResultData rd = pair.getB();
- if (filter != null) {
- ResultData rdCopy = createFilteredResultData(rd, filter);
- pair.setB(rdCopy);
- rd = rdCopy;
- }
- try {
- String [] columns = rd.getColumnLabels();
+ String [] columns = rd.getColumnLabels();
- //if (log.isDebugEnabled()) {
- // log.debug("pushing vars: "
- // + java.util.Arrays.toString(columns));
- //}
-
- for (Object [] row: rd.getRows()) {
- frames.enter();
- try {
- frames.put(columns, row);
- //if (log.isDebugEnabled()) {
- // log.debug("current vars: " + frames.dump());
- //}
- for (int i = 0; i < S; ++i) {
- build(parent, subs.item(i));
- }
- }
- finally {
- frames.leave();
+ for (Object [] row: rd.getRows()) {
+ frames.enter();
+ try {
+ frames.put(columns, row);
+ for (int i = 0; i < S; ++i) {
+ build(parent, subs.item(i));
}
}
- }
- finally {
- if (filter != null) {
- pair.setB(orig);
- }
+ finally {
+ frames.leave();
+ }
}
}
@@ -403,18 +426,7 @@
return;
}
- NodeList macros = template.getElementsByTagNameNS(
- DC_NAMESPACE_URI, "macro");
-
- Element macro = null;
-
- for (int i = 0, N = macros.getLength(); i < N; ++i) {
- Element m = (Element) macros.item(i);
- if (name.equals(m.getAttribute("name"))) {
- macro = m;
- break;
- }
- }
+ Element macro = macros.get(name);
if (macro != null) {
macroBodies.push(current.getChildNodes());
@@ -534,6 +546,20 @@
}
}
+ protected XPathExpression getXPathExpression(String expr)
+ throws XPathExpressionException
+ {
+ XPathExpression x = expressions.get(expr);
+ if (x == null) {
+ XPath xpath = XPATH_FACTORY.newXPath();
+ xpath.setXPathVariableResolver(frames);
+ xpath.setXPathFunctionResolver(functionResolver);
+ x = xpath.compile(expr);
+ expressions.put(expr, x);
+ }
+ return x;
+ }
+
protected Object evaluateXPath(String expr, QName returnType) {
if (log.isDebugEnabled()) {
@@ -541,10 +567,8 @@
}
try {
- XPath xpath = XPATH_FACTORY.newXPath();
- xpath.setXPathVariableResolver(frames);
- xpath.setXPathFunctionResolver(FunctionResolver.FUNCTIONS);
- return xpath.evaluate(expr, EVAL_DOCUMENT, returnType);
+ XPathExpression x = getXPathExpression(expr);
+ return x.evaluate(EVAL_DOCUMENT, returnType);
}
catch (XPathExpressionException xpee) {
log.error("expression: " + expr, xpee);
@@ -613,6 +637,26 @@
return sb.toString();
}
+ protected void evaluateAttributeValue(Attr attr) {
+ String value = attr.getValue();
+ if (value.indexOf('{') >= 0) {
+ StringBuffer sb = new StringBuffer();
+ Matcher m = BRACKET_XPATH.matcher(value);
+ while (m.find()) {
+ String expr = m.group(1);
+ Object result = evaluateXPath(expr, XPathConstants.STRING);
+ if (result instanceof String) {
+ m.appendReplacement(sb, (String)result);
+ }
+ else {
+ m.appendReplacement(sb, "");
+ }
+ }
+ m.appendTail(sb);
+ attr.setValue(sb.toString());
+ }
+ }
+
protected void build(Node parent, Node current)
throws SQLException
{
@@ -623,45 +667,47 @@
}
else {
String localName = current.getLocalName();
+ Element curr = (Element)current;
if ("attribute".equals(localName)) {
- attribute(parent, (Element)current);
+ attribute(parent, curr);
}
else if ("context".equals(localName)) {
- context(parent, (Element)current);
+ context(parent, curr);
}
else if ("if".equals(localName)) {
- ifClause(parent, (Element)current);
+ ifClause(parent, curr);
}
else if ("choose".equals(localName)) {
- choose(parent, (Element)current);
+ choose(parent, curr);
}
else if ("call-macro".equals(localName)) {
- callMacro(parent, (Element)current);
+ callMacro(parent, curr);
}
else if ("macro-body".equals(localName)) {
- macroBody(parent, (Element)current);
+ macroBody(parent, curr);
}
- else if ("macro".equals(localName)) {
- // Simply ignore the definition.
+ else if ("macro".equals(localName)
+ || "comment".equals(localName)
+ || "statement".equals(localName)) {
+ // Simply ignore them.
}
else if ("element".equals(localName)) {
- element(parent, (Element)current);
+ element(parent, curr);
}
- else if ("elements".equals(localName)) {
- elements(parent, (Element)current);
+ else if ("for-each".equals(localName)) {
+ foreach(parent, curr);
+ }
+ else if ("filter".equals(localName)) {
+ filter(parent, curr);
}
else if ("text".equals(localName)) {
- text(parent, (Element)current);
+ text(parent, curr);
}
else if ("variable".equals(localName)) {
- variable((Element)current);
- }
- else if ("comment".equals(localName)
- || "statement".equals(localName)) {
- // ignore comments and statements in output
+ variable(curr);
}
else if ("convert".equals(localName)) {
- convert((Element)current);
+ convert(curr);
}
else {
log.warn("unknown '" + localName + "' -> ignore");
@@ -688,6 +734,15 @@
for (int i = 0, N = children.getLength(); i < N; ++i) {
build(copy, children.item(i));
}
+ if (copy.getNodeType() == Node.ELEMENT_NODE) {
+ NamedNodeMap nnm = ((Element)copy).getAttributes();
+ for (int i = 0, N = nnm.getLength(); i < N; ++i) {
+ Node n = nnm.item(i);
+ if (n.getNodeType() == Node.ATTRIBUTE_NODE) {
+ evaluateAttributeValue((Attr)n);
+ }
+ }
+ }
parent.appendChild(copy);
}
} // class BuildHelper
@@ -695,11 +750,13 @@
public Builder() {
compiledStatements = new HashMap<String, CompiledStatement>();
+ macros = new HashMap<String, Element>();
}
public Builder(Document template) {
this();
this.template = template;
+ extractMacros();
compileStatements();
}
@@ -731,6 +788,16 @@
}
}
+ protected void extractMacros() {
+ NodeList ms = template.getElementsByTagNameNS(
+ DC_NAMESPACE_URI, "macro");
+
+ for (int i = 0, N = ms.getLength(); i < N; ++i) {
+ Element m = (Element)ms.item(i);
+ macros.put(m.getAttribute("name"), m);
+ }
+ }
+
protected List<Node> rootsToList() {
NodeList roots = template.getElementsByTagNameNS(
@@ -762,22 +829,6 @@
return document != null ? document : (Document)node;
}
- private static final List<NamedConnection> wrap(Connection connection) {
- List<NamedConnection> list = new ArrayList<NamedConnection>(1);
- list.add(new NamedConnection(DEFAULT_CONNECTION_NAME, connection));
- return list;
- }
-
- public void build(
- Connection connection,
- Node output,
- Map<String, Object> parameters
- )
- throws SQLException
- {
- build(wrap(connection), output, parameters);
- }
-
public void build(
List<NamedConnection> connections,
Node output,
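The Builder changes above cache compiled XPath expressions and add {...}-substitution in attribute values via BRACKET_XPATH with appendReplacement/appendTail. A self-contained sketch of that substitution idea, with a plain map standing in for the Builder's XPath evaluation (names here are illustrative only):

    import java.util.Collections;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class BracketSubstitutionSketch {
        private static final Pattern BRACKET = Pattern.compile("\\{([^}]+)\\}");

        // Stand-in for the Builder's evaluateXPath(expr, STRING).
        static String evaluate(String expr, Map<String, String> vars) {
            String v = vars.get(expr);
            return v != null ? v : "";
        }

        public static String substitute(String value, Map<String, String> vars) {
            if (value.indexOf('{') < 0) {
                return value;
            }
            StringBuffer sb = new StringBuffer();
            Matcher m = BRACKET.matcher(value);
            while (m.find()) {
                // quoteReplacement protects '$' and '\' in the evaluated result.
                m.appendReplacement(sb,
                    Matcher.quoteReplacement(evaluate(m.group(1), vars)));
            }
            m.appendTail(sb);
            return sb.toString();
        }

        public static void main(String[] args) {
            Map<String, String> vars = Collections.singletonMap("river", "Mosel");
            System.out.println(substitute("wst-{river}.out", vars)); // wst-Mosel.out
        }
    }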
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/datacage/templating/FunctionResolver.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/datacage/templating/FunctionResolver.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/datacage/templating/FunctionResolver.java Wed Apr 03 16:00:21 2013 +0200
@@ -39,168 +39,48 @@
}
} // class Entry
- public static final FunctionResolver FUNCTIONS = new FunctionResolver();
-
- static {
- /** Implementation of case-ignoring dc:contains. */
- FUNCTIONS.addFunction("contains", 2, new XPathFunction() {
- @Override
- public Object evaluate(List args) throws XPathFunctionException {
- Object haystack = args.get(0);
- Object needle = args.get(1);
-
- if (needle instanceof String) {
- needle = ((String)needle).toUpperCase();
- }
-
- try {
- if (haystack instanceof Collection) {
- return Boolean.valueOf(
- ((Collection)haystack).contains(needle));
- }
-
- if (haystack instanceof Map) {
- return Boolean.valueOf(
- ((Map)haystack).containsKey(needle));
- }
-
- if (haystack instanceof Object []) {
- for (Object straw: (Object [])haystack) {
- if (straw.equals(needle)) {
- return Boolean.TRUE;
- }
- }
- }
-
- return Boolean.FALSE;
- }
- catch (Exception e) {
- log.error(e);
- throw new XPathFunctionException(e);
- }
- }
- });
- }
-
- static {
- /** Implementation for getting the minimum value of location or distance
- * dc:fromValue. */
- FUNCTIONS.addFunction("fromValue", 3, new XPathFunction() {
- @Override
- public Object evaluate(List args) throws XPathFunctionException {
- Object mode = args.get(0);
- Object locations = args.get(1);
- Object from = args.get(2);
-
- if (!(mode instanceof String)){
- return -99999d;
- }
-
- if (mode.equals("locations")) {
- if (!(locations instanceof String)) {
- return -99999d;
- }
- String loc = ((String)locations).replace(" ", "");
- String[] split = loc.split(",");
- if (split.length < 1) {
- return -99999d;
- }
- try {
- double min = Double.parseDouble(split[0]);
- for (int i = 1; i < split.length; ++i) {
- double v = Double.parseDouble(split[i]);
- if (v < min) {
- min = v;
- }
- }
- return min;
- }
- catch (NumberFormatException nfe) {
- return -99999d;
- }
- }
- else if (mode.equals("distance")) {
- if (!(from instanceof String)) {
- return -99999d;
- }
- String f = (String)from;
- try {
- return Double.parseDouble(f);
- }
- catch(NumberFormatException nfe) {
- return -99999d;
- }
- }
- else {
- return -99999d;
- }
- }
- });
- }
-
- static {
- /** Implementation for getting the maximum value of location or distance
- * dc:toValue. */
- FUNCTIONS.addFunction("toValue", 3, new XPathFunction() {
- @Override
- public Object evaluate(List args) throws XPathFunctionException {
- Object mode = args.get(0);
- Object locations = args.get(1);
- Object to = args.get(2);
-
- if (!(mode instanceof String)){
- return 99999d;
- }
-
- if (mode.equals("locations")) {
- if (!(locations instanceof String)) {
- return 99999d;
- }
- try {
- String loc = ((String)locations).replace(" ", "");
- String[] split = loc.split(",");
- if (split.length < 1) {
- return 99999d;
- }
- double max = Double.parseDouble(split[0]);
- for (int i = 1; i < split.length; ++i) {
- double v = Double.parseDouble(split[i]);
- if (v > max) {
- max = v;
- }
- }
- return max;
- }
- catch (NumberFormatException nfe) {
- return 99999d;
- }
- }
- else if (mode.equals("distance")) {
- if (!(to instanceof String)) {
- return 99999d;
- }
- else {
- String t = (String)to;
- try {
- return Double.parseDouble(t);
- }
- catch(NumberFormatException nfe) {
- return 99999d;
- }
- }
- }
- else {
- return 99999d;
- }
- }
- });
- }
-
/** List of functions. */
protected List<Entry> functions;
+ protected Builder.BuildHelper buildHelper;
+
+
public FunctionResolver() {
+ this(null);
+ }
+
+ public FunctionResolver(Builder.BuildHelper buildHelper) {
+ this.buildHelper = buildHelper;
+
functions = new ArrayList<Entry>();
+
+ addFunction("contains", 2, new XPathFunction() {
+ @Override
+ public Object evaluate(List args) throws XPathFunctionException {
+ return contains(args);
+ }
+ });
+
+ addFunction("fromValue", 3, new XPathFunction() {
+ @Override
+ public Object evaluate(List args) throws XPathFunctionException {
+ return fromValue(args);
+ }
+ });
+
+ addFunction("toValue", 3, new XPathFunction() {
+ @Override
+ public Object evaluate(List args) throws XPathFunctionException {
+ return toValue(args);
+ }
+ });
+
+ addFunction("has-result", 0, new XPathFunction() {
+ @Override
+ public Object evaluate(List args) throws XPathFunctionException {
+ return FunctionResolver.this.buildHelper.hasResult();
+ }
+ });
}
/**
@@ -229,5 +109,147 @@
return null;
}
+
+ /** Implementation of case-ignoring dc:contains. */
+ public Object contains(List args) throws XPathFunctionException {
+ Object haystack = args.get(0);
+ Object needle = args.get(1);
+
+ if (needle instanceof String) {
+ needle = ((String)needle).toUpperCase();
+ }
+
+ try {
+ if (haystack instanceof Collection) {
+ return Boolean.valueOf(
+ ((Collection)haystack).contains(needle));
+ }
+
+ if (haystack instanceof Map) {
+ return Boolean.valueOf(
+ ((Map)haystack).containsKey(needle));
+ }
+
+ if (haystack instanceof Object []) {
+ for (Object straw: (Object [])haystack) {
+ if (straw.equals(needle)) {
+ return Boolean.TRUE;
+ }
+ }
+ }
+
+ return Boolean.FALSE;
+ }
+ catch (Exception e) {
+ log.error(e);
+ throw new XPathFunctionException(e);
+ }
+ }
+
+ /** Implementation for getting the minimum value of location or distance
+ * dc:fromValue.
+ */
+ public Object fromValue(List args) throws XPathFunctionException {
+ Object mode = args.get(0);
+ Object locations = args.get(1);
+ Object from = args.get(2);
+
+ if (!(mode instanceof String)){
+ return -99999d;
+ }
+
+ if (mode.equals("locations")) {
+ if (!(locations instanceof String)) {
+ return -99999d;
+ }
+ String loc = ((String)locations).replace(" ", "");
+ String[] split = loc.split(",");
+ if (split.length < 1) {
+ return -99999d;
+ }
+ try {
+ double min = Double.parseDouble(split[0]);
+ for (int i = 1; i < split.length; ++i) {
+ double v = Double.parseDouble(split[i]);
+ if (v < min) {
+ min = v;
+ }
+ }
+ return min;
+ }
+ catch (NumberFormatException nfe) {
+ return -99999d;
+ }
+ }
+ else if (mode.equals("distance")) {
+ if (!(from instanceof String)) {
+ return -99999d;
+ }
+ String f = (String)from;
+ try {
+ return Double.parseDouble(f);
+ }
+ catch(NumberFormatException nfe) {
+ return -99999d;
+ }
+ }
+ else {
+ return -99999d;
+ }
+ }
+
+ /** Implementation for getting the maximum value of location or distance
+ * dc:toValue.
+ */
+ public Object toValue(List args) throws XPathFunctionException {
+ Object mode = args.get(0);
+ Object locations = args.get(1);
+ Object to = args.get(2);
+
+ if (!(mode instanceof String)){
+ return 99999d;
+ }
+
+ if (mode.equals("locations")) {
+ if (!(locations instanceof String)) {
+ return 99999d;
+ }
+ try {
+ String loc = ((String)locations).replace(" ", "");
+ String[] split = loc.split(",");
+ if (split.length < 1) {
+ return 99999d;
+ }
+ double max = Double.parseDouble(split[0]);
+ for (int i = 1; i < split.length; ++i) {
+ double v = Double.parseDouble(split[i]);
+ if (v > max) {
+ max = v;
+ }
+ }
+ return max;
+ }
+ catch (NumberFormatException nfe) {
+ return 99999d;
+ }
+ }
+ else if (mode.equals("distance")) {
+ if (!(to instanceof String)) {
+ return 99999d;
+ }
+ else {
+ String t = (String)to;
+ try {
+ return Double.parseDouble(t);
+ }
+ catch(NumberFormatException nfe) {
+ return 99999d;
+ }
+ }
+ }
+ else {
+ return 99999d;
+ }
+ }
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
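FunctionResolver now registers its functions per instance so that dc:has-result can see the surrounding BuildHelper. For readers unfamiliar with the JAXP hook, here is a compact, stand-alone sketch of wiring a custom function into an XPath; the namespace URI and function name are made up for the example and are not part of the project:

    import java.util.Iterator;
    import java.util.List;
    import javax.xml.XMLConstants;
    import javax.xml.namespace.NamespaceContext;
    import javax.xml.namespace.QName;
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.xpath.XPath;
    import javax.xml.xpath.XPathConstants;
    import javax.xml.xpath.XPathFactory;
    import javax.xml.xpath.XPathFunction;
    import javax.xml.xpath.XPathFunctionException;
    import javax.xml.xpath.XPathFunctionResolver;

    public class FunctionResolverSketch {
        static final String NS = "http://example.org/demo";

        public static void main(String[] args) throws Exception {
            XPath xpath = XPathFactory.newInstance().newXPath();

            // Map the 'demo' prefix so demo:twice(...) resolves to NS.
            xpath.setNamespaceContext(new NamespaceContext() {
                public String getNamespaceURI(String prefix) {
                    return "demo".equals(prefix) ? NS : XMLConstants.NULL_NS_URI;
                }
                public String getPrefix(String uri) {
                    return NS.equals(uri) ? "demo" : null;
                }
                public Iterator getPrefixes(String uri) {
                    return null; // not needed for this sketch
                }
            });

            // Resolve demo:twice/1 to a custom function, everything else to null.
            xpath.setXPathFunctionResolver(new XPathFunctionResolver() {
                public XPathFunction resolveFunction(QName name, int arity) {
                    if (NS.equals(name.getNamespaceURI())
                        && "twice".equals(name.getLocalPart()) && arity == 1) {
                        return new XPathFunction() {
                            public Object evaluate(List fnArgs)
                            throws XPathFunctionException {
                                Object a = fnArgs.get(0);
                                double v = a instanceof Number
                                    ? ((Number) a).doubleValue()
                                    : Double.parseDouble(a.toString());
                                return v * 2d;
                            }
                        };
                    }
                    return null;
                }
            });

            Object result = xpath.evaluate(
                "demo:twice(21)",
                DocumentBuilderFactory.newInstance()
                    .newDocumentBuilder().newDocument(),
                XPathConstants.NUMBER);
            System.out.println(result); // 42.0
        }
    }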
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/FacetTypes.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/FacetTypes.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/FacetTypes.java Wed Apr 03 16:00:21 2013 +0200
@@ -135,7 +135,8 @@
SQD("sq_relation_d"),
SQE("sq_relation_e"),
SQF("sq_relation_f"),
- HD("historical_discharge");
+ HD("historical_discharge"),
+ HDWQ("historical_discharge_wq");
private String chartTypeString;
@@ -173,6 +174,7 @@
String FLOODMAP_FLOODMAPS = "floodmap.floodmaps";
String FLOODMAP_GAUGE_LOCATION = "floodmap.gauge_location";
String FLOODMAP_EXTERNAL_WMS = "floodmap.externalwms";
+ String FLOODMAP_JETTIES = "floodmap.jetties";
String DISCHARGE_LONGITUDINAL_W = "discharge_longitudinal_section.w";
String DISCHARGE_LONGITUDINAL_Q = "discharge_longitudinal_section.q";
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/QRangeTree.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/QRangeTree.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/QRangeTree.java Wed Apr 03 16:00:21 2013 +0200
@@ -269,6 +269,26 @@
return parent;
}
+ public double averageQ() {
+ double sum = 0d;
+ int n = 0;
+ for (Node node = head(); node != null; node = node.next) {
+ sum += node.q;
+ ++n;
+ }
+ return sum/n;
+ }
+
+ public double maxQ() {
+ double max = -Double.MAX_VALUE;
+ for (Node node = head(); node != null; node = node.next) {
+ if (node.q > max) {
+ max = node.q;
+ }
+ }
+ return max;
+ }
+
public double findQ(double pos) {
return root != null ? root.findQ(pos) : Double.NaN;
}
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/WaterlevelFacet.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/WaterlevelFacet.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/WaterlevelFacet.java Wed Apr 03 16:00:21 2013 +0200
@@ -38,7 +38,13 @@
protected WQKms [] getWQKms(CalculationResult res) {
if (res.getData() instanceof ExtremeResult)
return ((ExtremeResult) res.getData()).getWQKms();
- return (WQKms []) res.getData();
+ else if (res.getData() instanceof WQKms[]) {
+ return (WQKms []) res.getData();
+ }
+ else {
+ logger.error("WaterlevelFacet got wrong data type " + res.getData());
+ return null;
+ }
}
/**
@@ -70,47 +76,73 @@
WQKms [] wqkms = getWQKms(res);
Object KM = context.getContextValue("currentKm");
+
+ // Interpolation.
if (KM != null) {
- logger.debug("interpolate at given km");
- // TODO handle exact match.
+ linearInterpolate(wqkms[index], (Double) KM);
+ }
+ else {
+ logger.debug("Do not interpolate.");
+ }
- WQKms wqkmsI = wqkms[index];
- double km = (Double)KM;
+ return wqkms != null ? wqkms[index] : null;
+ }
- // TODO employ DataUtils interface to TDoubleArraList
- int size = wqkmsI.size();
- boolean kmIncreasing = wqkmsI.getKm(0) < wqkmsI.getKm(size-1);
- int mod = kmIncreasing ? +1 : -1;
- int idx = 0;
- if (!kmIncreasing) {
- while (idx < size && wqkmsI.getKm(idx) < km) {
- idx++;
+
+ /**
+ * Linear interpolation of WQKms.
+ * TODO rewrite.
+ * @return [w, q, km]
+ */
+ public WQKms linearInterpolate(WQKms wqkms, double km) {
+ logger.debug("interpolate at given km (" + km + ")");
+
+ WQKms resultWQKms = new WQKms();
+ int size = wqkms.size();
+ boolean kmIncreasing = wqkms.getKm(0) < wqkms.getKm(size-1);
+ int mod = kmIncreasing ? +1 : -1;
+ int idx = 0;
+ // Move idx to closest from one direction, check for match.
+ if (!kmIncreasing) {
+ while (idx < size && wqkms.getKm(idx) < km) {
+ if (Math.abs(wqkms.getKm(idx) - km) < 0.01d) {
+ resultWQKms.add(wqkms.getW(idx), wqkms.getQ(idx), wqkms.getKm(idx));
+ return resultWQKms;
}
+ idx++;
}
- else {
- idx = size-1;
- while (idx > 0 && wqkmsI.getKm(idx) > km) {
- idx--;
+ }
+ else {
+ idx = size-1;
+ while (idx > 0 && wqkms.getKm(idx) > km) {
+ if (Math.abs(wqkms.getKm(idx) - km) < 0.01d) {
+ resultWQKms.add(wqkms.getW(idx), wqkms.getQ(idx), wqkms.getKm(idx));
+ return resultWQKms;
}
+ idx--;
}
-
- WQKms resultWQKms = new WQKms();
- if ((idx != -1) && (idx < size) && (idx - mod != -1) && (idx - mod < size)) {
- double inW = Linear.linear(
- km,
- wqkmsI.getKm(idx), wqkmsI.getKm(idx - mod),
- wqkmsI.getW(idx), wqkmsI.getW(idx - mod));
- double inQ = Linear.linear(
- km,
- wqkmsI.getKm(idx), wqkmsI.getKm(idx - mod),
- wqkmsI.getQ(idx), wqkmsI.getQ(idx - mod));
- resultWQKms.add(inW, inQ, km);
- }
-
+ }
+ if (Math.abs(wqkms.getKm(idx) - km) < 0.01d) {
+ resultWQKms.add(wqkms.getW(idx), wqkms.getQ(idx), wqkms.getKm(idx));
return resultWQKms;
}
- return wqkms != null ? wqkms[index] : null;
+ if ((idx != -1) && (idx < size) && (idx - mod != -1) && (idx - mod < size)) {
+ double inW = Linear.linear(
+ km,
+ wqkms.getKm(idx), wqkms.getKm(idx - mod),
+ wqkms.getW(idx), wqkms.getW(idx - mod));
+ double inQ = Linear.linear(
+ km,
+ wqkms.getKm(idx), wqkms.getKm(idx - mod),
+ wqkms.getQ(idx), wqkms.getQ(idx - mod));
+ resultWQKms.add(inW, inQ, km);
+ }
+ else {
+ logger.debug("waterlevelfacet stuff " + idx + " size " + size + " mod: " + mod);
+ }
+
+ return resultWQKms;
}
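The extracted linearInterpolate() matches an exact km within 0.01 and otherwise interpolates W and Q between the two neighbouring stations via Linear.linear. The underlying two-point formula, shown as a free-standing sketch (parameter order and names are illustrative, not the project's Linear API):

    public class LinearSketch {
        /** Interpolate y at x between (x0, y0) and (x1, y1). */
        static double linear(double x, double x0, double x1, double y0, double y1) {
            if (x0 == x1) {
                return y0; // degenerate segment; avoid division by zero
            }
            double m = (y1 - y0) / (x1 - x0);
            return y0 + m * (x - x0);
        }

        public static void main(String[] args) {
            // W at km 10.5 between stations at km 10.0 (W=112.4) and km 11.0 (W=112.0)
            System.out.println(linear(10.5, 10.0, 11.0, 112.4, 112.0)); // 112.2
        }
    }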
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/WstValueTableFactory.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/WstValueTableFactory.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/WstValueTableFactory.java Wed Apr 03 16:00:21 2013 +0200
@@ -98,8 +98,8 @@
// Fetch data for one column only.
WstValueTable.Column [] columns = loadColumns(session, wst_id);
- loadQRanges(session, columns, wst_id);
- List<WstValueTable.Row> rows = loadRows(session, wst_id, columns.length);
+ int [] map = loadQRangesMap(session, columns, wst_id);
+ List<WstValueTable.Row> rows = loadRows(session, wst_id, columns.length, map);
WstValueTable valueTable = new WstValueTable(columns, rows);
@@ -204,9 +204,10 @@
WstValueTable.Column [] columns = loadColumns(session, wst);
- loadQRanges(session, columns, wst);
+ int map [] = loadQRangesMap(session, columns, wst);
- List<WstValueTable.Row> rows = loadRows(session, wst, columns.length);
+ List<WstValueTable.Row> rows =
+ loadRows(session, wst.getId(), columns.length, map);
return new WstValueTable(columns, rows);
}
@@ -274,6 +275,15 @@
int wst_id,
int numColumns
) {
+ return loadRows(session, wst_id, numColumns, null);
+ }
+
+ protected static List<WstValueTable.Row> loadRows(
+ Session session,
+ int wst_id,
+ int numColumns,
+ int [] map
+ ) {
SQLQuery sqlQuery = session.createSQLQuery(SQL_SELECT_WS)
.addScalar("km", StandardBasicTypes.DOUBLE)
.addScalar("w", StandardBasicTypes.DOUBLE)
@@ -298,7 +308,8 @@
rows.add(row);
}
Double w = (Double)result[1];
- ws[column] = w != null ? w : Double.NaN;
+ int index = map != null ? map[column] : column;
+ ws[index] = w != null ? w : Double.NaN;
lastColumn = column;
}
@@ -387,7 +398,7 @@
columns[0].setQRangeTree(qRangeTree);
}
- protected static void loadQRanges(
+ protected static int [] loadQRangesMap(
Session session,
WstValueTable.Column [] columns,
int wst_id
@@ -426,6 +437,8 @@
columns[lastColumn].setQRangeTree(qRangeTree);
}
+ return sortColumnsByAverageQ(columns);
+
/* This is debug code to visualize the q ranges trees
java.io.PrintWriter out = null;
@@ -454,15 +467,53 @@
}
}
*/
-
}
- protected static void loadQRanges(
+ private static final class QIndex implements Comparable<QIndex> {
+ double q;
+ int index;
+
+ QIndex(double q, int index) {
+ this.q = q;
+ this.index = index;
+ }
+
+ @Override
+ public int compareTo(QIndex other) {
+ double diff = q - other.q;
+ if (diff < 0d) return -1;
+ if (diff > 0d) return +1;
+ return 0;
+ }
+ } // class QIndex
+
+ /** Ensure that the q columns are sorted in ascending order. */
+ protected static int [] sortColumnsByAverageQ(WstValueTable.Column [] columns) {
+ QIndex [] order = new QIndex[columns.length];
+ for (int i = 0; i < order.length; ++i) {
+ QRangeTree tree = columns[i].getQRangeTree();
+ double avg = tree.averageQ();
+ double max = tree.maxQ();
+ double q = (avg+max)*0.5d;
+ order[i] = new QIndex(q, i);
+ }
+ Arrays.sort(order);
+ WstValueTable.Column [] copy = new WstValueTable.Column[order.length];
+ int [] map = new int[order.length];
+ for (int i = 0; i < copy.length; ++i) {
+ copy[i] = columns[order[i].index];
+ map[order[i].index] = i;
+ }
+ System.arraycopy(copy, 0, columns, 0, order.length);
+ return map;
+ }
+
+ protected static int [] loadQRangesMap(
Session session,
WstValueTable.Column [] columns,
Wst wst
) {
- loadQRanges(session, columns, wst.getId());
+ return loadQRangesMap(session, columns, wst.getId());
}
}
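sortColumnsByAverageQ() reorders the WST columns by a key derived from each column's Q range ((avg+max)/2) and returns an old-to-new index map that loadRows() uses to place W values in the reordered columns. The same sort-and-remap trick in a generic, stand-alone sketch:

    import java.util.Arrays;
    import java.util.Comparator;

    public class ReorderSketch {
        public static int[] sortWithMap(final double[] keys, String[] items) {
            Integer[] order = new Integer[keys.length];
            for (int i = 0; i < order.length; ++i) {
                order[i] = i;
            }
            Arrays.sort(order, new Comparator<Integer>() {
                @Override
                public int compare(Integer a, Integer b) {
                    return Double.compare(keys[a], keys[b]);
                }
            });
            String[] copy = new String[items.length];
            int[] map = new int[items.length];          // old index -> new index
            for (int i = 0; i < copy.length; ++i) {
                copy[i] = items[order[i]];
                map[order[i]] = i;
            }
            System.arraycopy(copy, 0, items, 0, items.length);
            return map;
        }

        public static void main(String[] args) {
            String[] cols = {"Q=300", "Q=100", "Q=200"};
            int[] map = sortWithMap(new double[]{300, 100, 200}, cols);
            System.out.println(Arrays.toString(cols)); // [Q=100, Q=200, Q=300]
            System.out.println(Arrays.toString(map));  // [2, 0, 1]
        }
    }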
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/sq/Outlier.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/sq/Outlier.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/sq/Outlier.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,7 +1,5 @@
package de.intevation.flys.artifacts.model.sq;
-import de.intevation.artifacts.common.utils.Config;
-
import de.intevation.flys.artifacts.math.GrubbsOutlier;
import de.intevation.flys.artifacts.math.StdDevOutlier;
@@ -16,9 +14,6 @@
{
private static Logger log = Logger.getLogger(Outlier.class);
- private static final String OUTLIER_METHOD =
- "/artifact-database/options/minfo-sq/outlier-method/@name";
-
private static final String GRUBBS = "outlier.method.grubbs";
//private static final String STD_DEV = "std-dev";
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/sq/StaticSQCacheKey.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/sq/StaticSQCacheKey.java Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,30 @@
+package de.intevation.flys.artifacts.model.sq;
+
+import java.io.Serializable;
+
+
+public class StaticSQCacheKey
+implements Serializable
+{
+ public static final String CACHE_NAME = "static-sq-relation";
+
+ private String river;
+ private int measurementId;
+
+ public StaticSQCacheKey(String river, int measurementId) {
+ this.river = river;
+ this.measurementId = measurementId;
+ }
+
+ public int hashCode() {
+ return this.river.hashCode() | measurementId;
+ }
+
+ public boolean equals(Object other) {
+ if (!(other instanceof StaticSQCacheKey)) {
+ return false;
+ }
+ StaticSQCacheKey o = (StaticSQCacheKey) other;
+ return this.river.equals(o.river) && this.measurementId == o.measurementId;
+ }
+}
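Aside on the cache key above: ehcache lookups only hit when equals/hashCode implement value equality, which is why the river name must be compared with equals() rather than ==. A minimal illustrative variant (not the project's class):

    public final class CacheKeySketch {
        private final String river;
        private final int measurementId;

        public CacheKeySketch(String river, int measurementId) {
            this.river = river;
            this.measurementId = measurementId;
        }

        @Override
        public int hashCode() {
            return 31 * river.hashCode() + measurementId;
        }

        @Override
        public boolean equals(Object other) {
            if (!(other instanceof CacheKeySketch)) {
                return false;
            }
            CacheKeySketch o = (CacheKeySketch) other;
            return river.equals(o.river) && measurementId == o.measurementId;
        }
    }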
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/sq/StaticSQContainer.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/sq/StaticSQContainer.java Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,85 @@
+package de.intevation.flys.artifacts.model.sq;
+
+import java.util.ArrayList;
+import java.util.List;
+
+
+public class StaticSQContainer
+{
+ private String description;
+ private String stationName;
+ private double km;
+
+ private List<StaticSQRelation> relations;
+
+
+ public StaticSQContainer() {
+ relations = new ArrayList<StaticSQRelation>();
+ }
+
+ public StaticSQContainer(
+ String stationName,
+ String description,
+ double km
+ ) {
+ this.stationName = stationName;
+ this.description = description;
+ this.km = km;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ public String getStationName() {
+ return stationName;
+ }
+
+ public void setStationName(String stationName) {
+ this.stationName = stationName;
+ }
+
+ public double getKm() {
+ return km;
+ }
+
+ public void setKm(double km) {
+ this.km = km;
+ }
+
+ public List<StaticSQRelation> getSQRelations() {
+ return relations;
+ }
+
+ public void setSQRelations(List<StaticSQRelation> relations) {
+ this.relations = relations;
+ }
+
+ public void addSQRelation(StaticSQRelation relation) {
+ this.relations.add(relation);
+ }
+
+ public StaticSQRelation getSQRelation(int ndx) {
+ return this.relations.get(ndx);
+ }
+
+ public int size() {
+ return this.relations.size();
+ }
+
+ public List<StaticSQRelation> getRelationsByParameter(
+ StaticSQRelation.Parameter parameter
+ ) {
+ List<StaticSQRelation> result = new ArrayList<StaticSQRelation>();
+ for (StaticSQRelation relation : relations) {
+ if (relation.getParameter() == parameter) {
+ result.add(relation);
+ }
+ }
+ return result;
+ }
+}
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/sq/StaticSQFactory.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/sq/StaticSQFactory.java Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,123 @@
+package de.intevation.flys.artifacts.model.sq;
+
+import java.math.BigDecimal;
+import java.util.Date;
+import java.util.List;
+
+import net.sf.ehcache.Cache;
+import net.sf.ehcache.Element;
+
+import org.apache.log4j.Logger;
+import org.hibernate.Query;
+import org.hibernate.Session;
+
+import de.intevation.flys.artifacts.cache.CacheFactory;
+import de.intevation.flys.backend.SessionHolder;
+
+
+public class StaticSQFactory
+{
+ private static final Logger log =
+ Logger.getLogger(StaticSQFactory.class);
+
+ public static final String SQL_SQ =
+ "SELECT " +
+ "sq.description AS description,"+
+ "ti.start_time AS start_time," +
+ "ti.stop_time AS stop_time, " +
+ "ms.name AS station_name, " +
+ "ms.station AS station_km, " +
+ "ms.measurement_type AS measurement_type, " +
+ "sqv.parameter AS parameter, " +
+ "sqv.a AS a, " +
+ "sqv.b AS b, " +
+ "sqv.qmax AS qmax " +
+ "FROM sq_relation sq " +
+ "JOIN time_intervals ti ON ti.id = sq.time_interval_id " +
+ "JOIN rivers r ON r.id = sq.river_id " +
+ "JOIN sq_relation_value sqv ON sqv.sq_relation_id = sq.id " +
+ "JOIN measurement_station ms ON sqv.measurement_station_id = ms.id " +
+ "WHERE " +
+ "r.name = :river " +
+ "AND ms.id = :ms_id ";
+
+
+ private StaticSQFactory() {
+ }
+
+ public static StaticSQContainer getSQRelations(
+ String river,
+ int measurementStation
+ ) {
+ Cache cache = CacheFactory.getCache(StaticSQCacheKey.CACHE_NAME);
+
+ StaticSQCacheKey cacheKey;
+
+ if (cache != null) {
+ cacheKey = new StaticSQCacheKey(river, measurementStation);
+ Element element = cache.get(cacheKey);
+ if (element != null) {
+ log.debug("Got static bedheight values from cache");
+ return (StaticSQContainer)element.getValue();
+ }
+ }
+ else {
+ cacheKey = null;
+ }
+
+ StaticSQContainer values = getUncached(river, measurementStation);
+
+ if (values != null && cacheKey != null) {
+ log.debug("Store static sq relations in cache.");
+ Element element = new Element(cacheKey, values);
+ cache.put(element);
+ }
+ return values;
+ }
+
+ private static StaticSQContainer getUncached(
+ String river,
+ int measurementStation
+ ) {
+ Session session = SessionHolder.HOLDER.get();
+
+ Query query = session.createSQLQuery(SQL_SQ)
+ .addScalar("description")
+ .addScalar("start_time")
+ .addScalar("stop_time")
+ .addScalar("station_name")
+ .addScalar("station_km")
+ .addScalar("measurement_type")
+ .addScalar("parameter")
+ .addScalar("a")
+ .addScalar("b")
+ .addScalar("qmax");
+
+ query.setParameter("river", river);
+ query.setParameter("ms_id", measurementStation);
+
+ List<Object []> list = query.list();
+
+ if (list.isEmpty()) {
+ return new StaticSQContainer();
+ }
+
+ StaticSQContainer sq = new StaticSQContainer();
+ sq.setDescription((String)list.get(0)[0]);
+ sq.setStationName((String)list.get(0)[3]);
+ sq.setKm(((BigDecimal)list.get(0)[4]).doubleValue());
+
+ for (Object[] row : list) {
+ StaticSQRelation relation = new StaticSQRelation();
+ relation.setStartTime((Date)row[1]);
+ relation.setStopTime((Date)row[2]);
+ relation.setType((String)row[5]);
+ relation.setParameter((String)row[6]);
+ relation.setA(((BigDecimal)row[7]).doubleValue());
+ relation.setB(((BigDecimal)row[8]).doubleValue());
+ relation.setQmax(((BigDecimal)row[9]).doubleValue());
+ sq.addSQRelation(relation);
+ }
+ return sq;
+ }
+}
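getSQRelations() follows the usual cache-aside pattern: look in the ehcache region, fall back to the SQL query on a miss, then cache the result. Sketched generically below, with loadFromDatabase() as a placeholder for getUncached():

    import net.sf.ehcache.Cache;
    import net.sf.ehcache.Element;

    public class CacheAsideSketch {
        public static Object getOrLoad(Cache cache, Object key) {
            if (cache != null) {
                Element element = cache.get(key);
                if (element != null) {
                    return element.getValue();      // cache hit
                }
            }
            Object value = loadFromDatabase(key);   // cache miss: compute
            if (cache != null && value != null) {
                cache.put(new Element(key, value)); // remember for next time
            }
            return value;
        }

        private static Object loadFromDatabase(Object key) {
            return "expensive result for " + key;   // stand-in for the SQL query
        }
    }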
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/sq/StaticSQRelation.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/model/sq/StaticSQRelation.java Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,103 @@
+package de.intevation.flys.artifacts.model.sq;
+
+import java.io.Serializable;
+import java.util.Date;
+
+
+public class StaticSQRelation implements Serializable {
+
+ private Date startTime;
+ private Date stopTime;
+ private String type;
+ private Parameter parameter;
+ private double a;
+ private double b;
+ private double qmax;
+
+ public static enum Parameter {
+ A, B, C, D, E, F
+ }
+
+
+ public StaticSQRelation() {
+ }
+
+ public StaticSQRelation(
+ Date startTime,
+ Date stopTime,
+ String type,
+ Parameter parameter,
+ double a,
+ double b
+ ) {
+ this.startTime = startTime;
+ this.stopTime = stopTime;
+ this.type = type;
+ this.parameter = parameter;
+ this.a = a;
+ this.b = b;
+ }
+
+ public Date getStartTime() {
+ return startTime;
+ }
+
+ public void setStartTime(Date startTime) {
+ this.startTime = startTime;
+ }
+
+ public Date getStopTime() {
+ return stopTime;
+ }
+
+ public void setStopTime(Date stopTime) {
+ this.stopTime = stopTime;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public Parameter getParameter() {
+ return parameter;
+ }
+
+ public void setParameter(Parameter parameter) {
+ this.parameter = parameter;
+ }
+
+ public void setParameter(String parameter) {
+ if (parameter == null) {
+ return;
+ }
+ this.parameter = Parameter.valueOf(parameter);
+ }
+
+ public double getA() {
+ return a;
+ }
+
+ public void setA(double a) {
+ this.a = a;
+ }
+
+ public double getB() {
+ return b;
+ }
+
+ public void setB(double b) {
+ this.b = b;
+ }
+
+ public double getQmax() {
+ return qmax;
+ }
+
+ public void setQmax(double qmax) {
+ this.qmax = qmax;
+ }
+}
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/services/DischargeTablesOverview.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/services/DischargeTablesOverview.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/services/DischargeTablesOverview.java Wed Apr 03 16:00:21 2013 +0200
@@ -25,7 +25,6 @@
import de.intevation.artifacts.CallMeta;
import de.intevation.artifacts.GlobalContext;
import de.intevation.flys.artifacts.model.DischargeTables;
-import de.intevation.flys.artifacts.model.GaugeRange;
import de.intevation.flys.artifacts.model.GaugesFactory;
import de.intevation.flys.artifacts.resources.Resources;
import de.intevation.flys.backend.SessionHolder;
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/services/MeasurementStationInfoService.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/services/MeasurementStationInfoService.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/services/MeasurementStationInfoService.java Wed Apr 03 16:00:21 2013 +0200
@@ -15,7 +15,6 @@
import de.intevation.artifacts.GlobalContext;
import de.intevation.flys.model.MeasurementStation;
-import de.intevation.flys.model.Gauge;
import de.intevation.flys.model.Range;
import de.intevation.flys.model.TimeInterval;
@@ -106,8 +105,7 @@
}
}
- Gauge gauge = mstation.getGauge();
- String gaugename = gauge.getName();
+ String gaugename = mstation.getGaugeName();
if (gaugename != null) {
Element egauge = ec.create("gauge");
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/GaugeTimerangeState.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/GaugeTimerangeState.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/GaugeTimerangeState.java Wed Apr 03 16:00:21 2013 +0200
@@ -19,10 +19,12 @@
*/
public class GaugeTimerangeState extends IntRangeState {
+ /** Private logger. */
private static final Logger logger =
Logger.getLogger(GaugeTimerangeState.class);
+ /** Get 'min' and 'max' times of gauge time intervals. */
protected long[] getLowerUpper(FLYSArtifact flys) {
Gauge gauge = FLYSUtils.getReferenceGauge(flys);
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/WaterlevelSelectState.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/WaterlevelSelectState.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/WaterlevelSelectState.java Wed Apr 03 16:00:21 2013 +0200
@@ -15,6 +15,7 @@
import de.intevation.flys.artifacts.FLYSArtifact;
import de.intevation.flys.artifacts.model.CalculationResult;
import de.intevation.flys.artifacts.model.WQKms;
+import de.intevation.flys.artifacts.model.extreme.ExtremeResult;
import de.intevation.flys.artifacts.resources.Resources;
import de.intevation.flys.utils.FLYSUtils;
import de.intevation.flys.utils.StringUtil;
@@ -123,7 +124,14 @@
ComputeType.ADVANCE,
false);
- WQKms[] wqkms = (WQKms[]) rawData.getData();
+ WQKms[] wqkms;
+
+ if (rawData.getData() instanceof ExtremeResult) {
+ wqkms = ((ExtremeResult) rawData.getData()).getWQKms();
+ }
+ else {
+ wqkms = (WQKms[]) rawData.getData();
+ }
int idx = -1;
try {
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/sq/SQStaticState.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/sq/SQStaticState.java Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,80 @@
+package de.intevation.flys.artifacts.states.sq;
+
+import java.text.DateFormat;
+import java.util.List;
+
+
+import org.apache.log4j.Logger;
+
+import de.intevation.artifactdatabase.state.Facet;
+import de.intevation.flys.artifacts.FLYSArtifact;
+import de.intevation.flys.artifacts.access.StaticSQRelationAccess;
+import de.intevation.flys.artifacts.model.sq.StaticSQContainer;
+import de.intevation.flys.artifacts.model.sq.StaticSQFactory;
+import de.intevation.flys.artifacts.model.sq.StaticSQRelation;
+import de.intevation.flys.artifacts.states.StaticState;
+
+
+public class SQStaticState
+extends StaticState
+{
+ private static final Logger log =
+ Logger.getLogger(SQStaticState.class);
+
+ public SQStaticState() {
+ super();
+ }
+
+ public SQStaticState(String name) {
+ super(name);
+ }
+
+ @Override
+ public Object staticCompute(
+ List<Facet> facets,
+ FLYSArtifact artifact
+ ) {
+ StaticSQRelationAccess access = new StaticSQRelationAccess(artifact);
+
+ String river = access.getRiver();
+ String measurementStation = access.getMeasurementStation();
+
+ int ms = -1;
+ try {
+ ms = Integer.parseInt(measurementStation);
+ }
+ catch (NumberFormatException nfe) {
+ log.error("Unparseable measurement station: " + measurementStation);
+ return null;
+ }
+
+ StaticSQContainer sqRelations =
+ StaticSQFactory.getSQRelations(river, ms);
+
+ for (StaticSQRelation.Parameter p: StaticSQRelation.Parameter.values()) {
+
+ List<StaticSQRelation> relations =
+ sqRelations.getRelationsByParameter(p);
+
+ if (!relations.isEmpty()) {
+ int count = 0;
+
+ for (StaticSQRelation relation : relations) {
+ String name = "sq_" + p.toString().toLowerCase() + "_curve";
+ DateFormat df =
+ DateFormat.getDateInstance(DateFormat.SHORT);
+ String desc = p.toString() + ": " +
+ df.format(relation.getStartTime()) + " - " +
+ df.format(relation.getStopTime());
+ facets.add(new StaticSQRelationFacet(
+ count,
+ name,
+ desc,
+ relation));
+ count++;
+ }
+ }
+ }
+ return null;
+ }
+}
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/sq/StaticSQRelationFacet.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-artifacts/src/main/java/de/intevation/flys/artifacts/states/sq/StaticSQRelationFacet.java Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,49 @@
+package de.intevation.flys.artifacts.states.sq;
+
+import de.intevation.artifactdatabase.state.DefaultFacet;
+import de.intevation.artifactdatabase.state.Facet;
+import de.intevation.artifacts.Artifact;
+import de.intevation.artifacts.CallContext;
+import de.intevation.flys.artifacts.math.fitting.Function;
+import de.intevation.flys.artifacts.math.fitting.FunctionFactory;
+import de.intevation.flys.artifacts.model.sq.SQFunction;
+import de.intevation.flys.artifacts.model.sq.StaticSQRelation;
+
+
+public class StaticSQRelationFacet
+extends DefaultFacet
+implements Facet
+{
+ public static final String FUNCTION = "sq-pow";
+
+ private StaticSQRelation relation;
+
+
+ public StaticSQRelationFacet(
+ int ndx,
+ String name,
+ String description,
+ StaticSQRelation relation) {
+ super(ndx, name, description);
+ this.relation = relation;
+ }
+
+ @Override
+ public Object getData(Artifact artifact, CallContext context) {
+ double qmax = relation.getQmax();
+ double[] coeffs = new double[] {relation.getA(), relation.getB()};
+ Function func = FunctionFactory.getInstance().getFunction(FUNCTION);
+ de.intevation.flys.artifacts.math.Function function =
+ func.instantiate(coeffs);
+ SQFunction sqf = new SQFunction(function, 0, qmax);
+ return sqf;
+ }
+
+ @Override
+ public Facet deepCopy() {
+ StaticSQRelationFacet copy =
+ new StaticSQRelationFacet(index, name, description, relation);
+ copy.set(this);
+ return copy;
+ }
+}
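The facet builds an SQFunction from the stored coefficients via the "sq-pow" function. Assuming the usual power-law form S = a * Q^b (the exact form is defined by FunctionFactory and is only assumed here), a tiny worked sketch of evaluating it up to qmax:

    public class SQPowSketch {
        static double sq(double a, double b, double q) {
            return a * Math.pow(q, b);
        }

        public static void main(String[] args) {
            double a = 0.05, b = 1.3, qmax = 400d;   // example coefficients
            for (double q = 50d; q <= qmax; q += 50d) {
                System.out.printf("Q=%6.1f  S=%8.3f%n", q, sq(a, b, q));
            }
        }
    }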
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/exports/ComputedDischargeCurveExporter.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/ComputedDischargeCurveExporter.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/ComputedDischargeCurveExporter.java Wed Apr 03 16:00:21 2013 +0200
@@ -48,7 +48,6 @@
private static Logger logger =
Logger.getLogger(ComputedDischargeCurveExporter.class);
-
public static final String CSV_W_HEADER =
"export.computed.discharge.curve.csv.header.w";
@@ -119,8 +118,10 @@
protected void writeCSVHeader(CSVWriter writer) {
logger.debug("ComputedDischargeCurveExporter.writeCSVHeader");
+ String unit = FLYSUtils.getRiver((FLYSArtifact) master).getWstUnit().getName();
+
writer.writeNext(new String[] {
- msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER),
+ msg(CSV_W_HEADER, DEFAULT_CSV_W_HEADER, new Object[] { unit }),
msg(CSV_Q_HEADER, DEFAULT_CSV_Q_HEADER)
});
}
@@ -223,7 +224,6 @@
});
}
}
-
}
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/exports/ComputedDischargeCurveGenerator.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/ComputedDischargeCurveGenerator.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/ComputedDischargeCurveGenerator.java Wed Apr 03 16:00:21 2013 +0200
@@ -2,6 +2,7 @@
import de.intevation.artifactdatabase.state.ArtifactAndFacet;
import de.intevation.artifactdatabase.state.Facet;
+import de.intevation.flys.artifacts.FLYSArtifact;
import de.intevation.flys.artifacts.StaticWKmsArtifact;
import de.intevation.flys.artifacts.WINFOArtifact;
import de.intevation.flys.artifacts.model.FacetTypes;
@@ -11,6 +12,8 @@
import de.intevation.flys.jfree.StickyAxisAnnotation;
import de.intevation.flys.jfree.StyledXYSeries;
+import de.intevation.flys.utils.FLYSUtils;
+
import java.util.ArrayList;
import java.util.List;
@@ -74,7 +77,11 @@
@Override
protected String getDefaultYAxisLabel(int pos) {
- return msg(I18N_YAXIS_LABEL, I18N_YAXIS_LABEL_DEFAULT);
+ FLYSArtifact flys = (FLYSArtifact) master;
+
+ String unit = FLYSUtils.getRiver(flys).getWstUnit().getName();
+
+ return msg(I18N_YAXIS_LABEL, I18N_YAXIS_LABEL_DEFAULT, new Object[] { unit });
}
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/exports/HistoricalDischargeCurveExporter.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/HistoricalDischargeCurveExporter.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/HistoricalDischargeCurveExporter.java Wed Apr 03 16:00:21 2013 +0200
@@ -136,7 +136,6 @@
logger.warn("Error generating PDF Report!");
je.printStackTrace();
}
-
}
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/exports/HistoricalDischargeWQCurveGenerator.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/HistoricalDischargeWQCurveGenerator.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/HistoricalDischargeWQCurveGenerator.java Wed Apr 03 16:00:21 2013 +0200
@@ -10,6 +10,8 @@
import de.intevation.flys.artifacts.model.FacetTypes;
import de.intevation.flys.artifacts.model.Timerange;
import de.intevation.flys.artifacts.model.WQKms;
+
+import de.intevation.flys.jfree.FLYSAnnotation;
import de.intevation.flys.jfree.StyledValueMarker;
import de.intevation.flys.jfree.StyledXYSeries;
import de.intevation.flys.utils.FLYSUtils;
@@ -124,6 +126,14 @@
doPoints(artifactFacet.getData(context), artifactFacet, theme,
visible, YAXIS.W.idx);
}
+ else if (HISTORICAL_DISCHARGE_MAINVALUES_Q.equals(name)) {
+ doAnnotations((FLYSAnnotation)
+ artifactFacet.getData(context), artifactFacet, theme, visible);
+ }
+ else if (HISTORICAL_DISCHARGE_MAINVALUES_W.equals(name)) {
+ doAnnotations((FLYSAnnotation)
+ artifactFacet.getData(context), artifactFacet, theme, visible);
+ }
else {
logger.warn("doOut(): unknown facet name: " + name);
return;
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/exports/WaterlevelExporter.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/exports/WaterlevelExporter.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/exports/WaterlevelExporter.java Wed Apr 03 16:00:21 2013 +0200
@@ -567,7 +567,9 @@
if (segments != null) {
for (Segment segment: segments) {
if (segment.inside(result[2])) {
- colDesc = "" + segment.getValues()[0];
+ NumberFormat nf =
+ Formatter.getFormatter(context.getMeta(), 0, 0);
+ colDesc = nf.format(segment.getValues()[0]);
}
}
}
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/utils/GeometryUtils.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/utils/GeometryUtils.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/utils/GeometryUtils.java Wed Apr 03 16:00:21 2013 +0200
@@ -73,9 +73,10 @@
}
}
catch(HibernateException iae) {
- logger.warn("No vaild river axis forund for " + rivername);
+ logger.warn("No vaild river axis found for " + rivername);
return null;
}
+ logger.warn("No vaild river axis found for " + rivername);
return null;
}
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/utils/MapUtils.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/utils/MapUtils.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/utils/MapUtils.java Wed Apr 03 16:00:21 2013 +0200
@@ -64,7 +64,7 @@
String host = m.group(2);
String port = m.group(3);
String backend = m.group(4);
- connection = user + "/" + pass + "@" + host + "/" + backend;
+ connection = user + "/" + pass + "@" + host + ":" + port + "/" + backend;
}
else {
if (groups < 4) {
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/java/de/intevation/flys/utils/RiverMapfileGenerator.java
--- a/flys-artifacts/src/main/java/de/intevation/flys/utils/RiverMapfileGenerator.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/java/de/intevation/flys/utils/RiverMapfileGenerator.java Wed Apr 03 16:00:21 2013 +0200
@@ -116,7 +116,7 @@
} else {
layerInfo.setData("geom FROM river_axes");
}
- layerInfo.setFilter("river_id = " + riverID);
+ layerInfo.setFilter("river_id = " + riverID + " and kind_id = 1");
layerInfo.setTitle(riverName + " RiverAxis");
File layerFile = new File("river-" + riverName + ".map");
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/resources/messages.properties
--- a/flys-artifacts/src/main/resources/messages.properties Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/resources/messages.properties Wed Apr 03 16:00:21 2013 +0200
@@ -169,7 +169,7 @@
chart.discharge.curve.curve.valid.range = {0} (valid from {1,date,short} - {2,date,short})
chart.computed.discharge.curve.title = Discharge Curve
chart.computed.discharge.curve.subtitle = {0}-km: {1,number,#.###}
-chart.computed.discharge.curve.yaxis.label = W [NN + m]
+chart.computed.discharge.curve.yaxis.label = W [{0}]
chart.computed.discharge.curve.curve.label = Discharge Curve {0} km {1}
chart.computed.discharge.curve.gauge = Discharge curve at gauge {0} (km {1})
chart.duration.curve.title = Duration Curve
@@ -260,6 +260,8 @@
facet.longitudinal_section.annotations = POIs
facet.discharge_curves.mainvalues.q = Q (main values)
facet.discharge_curves.mainvalues.w = W (main values)
+historical_discharge.mainvalues.q = Q Main Values
+historical_discharge.mainvalues.w = W Main Values
facet.flow_velocity.mainchannel = v Mainchannel at {0}
facet.flow_velocity.totalchannel = v Totalchannel at {0}
facet.flow_velocity.tauchannel = TAU Mainchannel at {0}
@@ -322,7 +324,7 @@
export.waterlevel.csv.meta.q = # Q (m\u00b3/s): {0}
export.waterlevel.csv.meta.w = # W (NN + m): {0} - {1}
export.waterlevel.csv.not.in.gauge.range = Outside selected gauge
-export.computed.discharge.curve.csv.header.w = W [NN + m]
+export.computed.discharge.curve.csv.header.w = W [{0}]
export.computed.discharge.curve.csv.header.q = Q [m\u00b3/s]
export.duration.curve.csv.header.duration = D [Days]
export.duration.curve.csv.header.w = W [NN + m]
@@ -590,6 +592,8 @@
sq.km.chart.title = Measuring Points
sq.km.chart.km.axis = km
sq.km.chart.date.axis = Date
+static.sq.river = River
+static.sq.station = Station
module.winfo = WINFO
module.minfo = MINFO
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/resources/messages_de.properties
--- a/flys-artifacts/src/main/resources/messages_de.properties Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/resources/messages_de.properties Wed Apr 03 16:00:21 2013 +0200
@@ -169,7 +169,7 @@
chart.discharge.curve.curve.valid.range = {0} (g\u00fcltig ab {1,date,medium} - {2,date,medium})
chart.computed.discharge.curve.title = Abflusskurve
chart.computed.discharge.curve.subtitle = {0}-km: {1,number,#.###}
-chart.computed.discharge.curve.yaxis.label = W [NN + m]
+chart.computed.discharge.curve.yaxis.label = W [{0}]
chart.computed.discharge.curve.curve.label = Abflusskurve {0} km {1}
chart.computed.discharge.curve.gauge = Abflusskurve an Pegel {0} (km {1})
chart.duration.curve.title = Dauerlinie
@@ -261,6 +261,8 @@
facet.longitudinal_section.annotations = Streckenfavoriten
facet.discharge_curves.mainvalues.q = Q (Haupt- und Extremwerte)
facet.discharge_curves.mainvalues.w = W (Haupt- und Extremwerte)
+historical_discharge.mainvalues.q = Q (Haupt- und Extremwerte)
+historical_discharge.mainvalues.w = W (Haupt- und Extremwerte)
facet.flow_velocity.mainchannel = v Hauptgerinne bei {0}
facet.flow_velocity.totalchannel = v Gesamtgerinne bei {0}
facet.flow_velocity.discharge = Abfluss bei {0}
@@ -322,10 +324,10 @@
export.waterlevel.csv.meta.q = # Q (m\u00b3/s): {0}
export.waterlevel.csv.meta.w = # W (NN + m): {0} - {1}
export.waterlevel.csv.not.in.gauge.range = au\u00dferhalb gew\u00e4hlter Bezugspegels
-export.computed.discharge.curve.csv.header.w = W [NN + m]
+export.computed.discharge.curve.csv.header.w = W [{0}]
export.computed.discharge.curve.csv.header.q = Q [m\u00b3/s]
export.duration.curve.csv.header.duration = D [Tagen]
-export.duration.curve.csv.header.w = W [NN + m]
+export.duration.curve.csv.header.w = W [{0}]
export.duration.curve.csv.header.q = Q [m\u00b3/s]
export.discharge.longitudinal.section.csv.header.km = Fluss-Km
export.discharge.longitudinal.section.csv.header.w = W [NN + m]
@@ -593,6 +595,8 @@
sq.km.chart.title = Messstellen
sq.km.chart.km.axis = km
sq.km.chart.date.axis = Datum
+static.sq.river = Gew\u00e4sser
+static.sq.station = Messstelle
module.winfo = WINFO
module.minfo = MINFO
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/resources/messages_de_DE.properties
--- a/flys-artifacts/src/main/resources/messages_de_DE.properties Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/resources/messages_de_DE.properties Wed Apr 03 16:00:21 2013 +0200
@@ -168,7 +168,7 @@
chart.discharge.curve.curve.valid.range = {0} (g\u00fcltig ab {1,date,medium} - {2,date,medium})
chart.computed.discharge.curve.title = Abflusskurve
chart.computed.discharge.curve.subtitle = {0}-km: {1,number,#.###}
-chart.computed.discharge.curve.yaxis.label = W [NN + m]
+chart.computed.discharge.curve.yaxis.label = W [{0}]
chart.computed.discharge.curve.curve.label = Abflusskurve {0} km {1}
chart.computed.discharge.curve.gauge = Abflusskurve an Pegel {0} (km {1})
chart.duration.curve.title = Dauerlinie
@@ -258,6 +258,8 @@
facet.longitudinal_section.annotations = Streckenfavoriten
facet.discharge_curves.mainvalues.q = Q (Haupt- und Extremwerte)
facet.discharge_curves.mainvalues.w = W (Haupt- und Extremwerte)
+historical_discharge.mainvalues.q = Q (Haupt- und Extremwerte)
+historical_discharge.mainvalues.w = W (Haupt- und Extremwerte)
facet.flow_velocity.mainchannel = v Hauptgerinne bei {0}
facet.flow_velocity.totalchannel = v Gesamtgerinne bei {0}
facet.flow_velocity.tauchannel = TAU Hauptgerinne bei {0}
@@ -319,10 +321,10 @@
export.waterlevel.csv.meta.q = # Q (m\u00b3/s): {0}
export.waterlevel.csv.meta.w = # W (NN + m): {0} - {1}
export.waterlevel.csv.not.in.gauge.range = au\u00dferhalb gew\u00e4hlter Bezugspegels
-export.computed.discharge.curve.csv.header.w = W [NN + m]
+export.computed.discharge.curve.csv.header.w = W [{0}]
export.computed.discharge.curve.csv.header.q = Q [m\u00b3/s]
export.duration.curve.csv.header.duration = D [Tagen]
-export.duration.curve.csv.header.w = W [NN + m]
+export.duration.curve.csv.header.w = W [{0}]
export.duration.curve.csv.header.q = Q [m\u00b3/s]
export.discharge.longitudinal.section.csv.header.km = Fluss-Km
export.discharge.longitudinal.section.csv.header.w = W [NN + m]
@@ -591,6 +593,8 @@
sq.km.chart.title = Messstellen
sq.km.chart.km.axis = km
sq.km.chart.date.axis = Datum
+static.sq.river = Gew\u00e4sser
+static.sq.station = Messstelle
module.winfo = WINFO
module.minfo = MINFO
diff -r 8d0af912351c -r 25c2505df28f flys-artifacts/src/main/resources/messages_en.properties
--- a/flys-artifacts/src/main/resources/messages_en.properties Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-artifacts/src/main/resources/messages_en.properties Wed Apr 03 16:00:21 2013 +0200
@@ -169,7 +169,7 @@
chart.discharge.curve.curve.valid.range = {0} (valid from {1,date,short} - {2,date,short})
chart.computed.discharge.curve.title = Discharge Curve
chart.computed.discharge.curve.subtitle = {0}-km: {1,number,#.###}
-chart.computed.discharge.curve.yaxis.label = W [NN + m]
+chart.computed.discharge.curve.yaxis.label = W [{0}]
chart.computed.discharge.curve.curve.label = Discharge Curve {0} km {1}
chart.duration.curve.title = Duration Curve
chart.duration.curve.subtitle = {0}-km: {1,number,#.###}
@@ -263,6 +263,8 @@
facet.longitudinal_section.annotations = POIs
facet.discharge_curves.mainvalues.q = Q (main values)
facet.discharge_curves.mainvalues.w = W (main values)
+historical_discharge.mainvalues.q = Q Main Values
+historical_discharge.mainvalues.w = W Main Values
facet.flow_velocity.mainchannel = v Mainchannel at {0}
facet.flow_velocity.discharge = Discharge at {0}
facet.flow_velocity.totalchannel = v Totalchannel at {0}
@@ -324,10 +326,10 @@
export.waterlevel.csv.meta.q = # Q (m\u00b3/s): {0}
export.waterlevel.csv.meta.w = # W (NN + m): {0} - {1}
export.waterlevel.csv.not.in.gauge.range = Outside selected gauge
-export.computed.discharge.curve.csv.header.w = W [NN + m]
+export.computed.discharge.curve.csv.header.w = W [{0}]
export.computed.discharge.curve.csv.header.q = Q [m\u00b3/s]
export.duration.curve.csv.header.duration = D [Days]
-export.duration.curve.csv.header.w = W [NN + m]
+export.duration.curve.csv.header.w = W [{0}]
export.duration.curve.csv.header.q = Q [m\u00b3/s]
export.discharge.longitudinal.section.csv.header.km = River-Km
export.discharge.longitudinal.section.csv.header.w = W [NN + m]
@@ -591,6 +593,8 @@
sq.km.chart.title = Measuring Points
sq.km.chart.km.axis = km
sq.km.chart.date.axis = Date
+static.sq.river = River
+static.sq.station = Station
module.winfo = WINFO
module.minfo = MINFO
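
Note on the label changes in both message bundles above: the hard-coded height reference "NN + m" in the W-axis and CSV-header entries is replaced by a {0} placeholder, so the unit of the river's elevation reference can be filled in at runtime. The bundles are java.util.Properties resources, so the placeholder is presumably resolved with java.text.MessageFormat; the sketch below only illustrates that mechanism (the pattern and unit values are illustrative, not the project's actual resource-lookup API):

```java
import java.text.MessageFormat;

public class UnitLabelDemo {
    public static void main(String[] args) {
        // Pattern as now stored in messages_*.properties; the unit is assumed
        // to be supplied from the river's elevation model at chart/CSV export time.
        String pattern = "W [{0}]";
        System.out.println(MessageFormat.format(pattern, "NN + m"));  // W [NN + m]
        System.out.println(MessageFormat.format(pattern, "NHN + m")); // W [NHN + m]
    }
}
```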
diff -r 8d0af912351c -r 25c2505df28f flys-backend/contrib/import_river.sh
--- a/flys-backend/contrib/import_river.sh Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/contrib/import_river.sh Wed Apr 03 16:00:21 2013 +0200
@@ -156,9 +156,9 @@
fi
GEW_FILE="$1"
-RIVER_NAME=$(grep "Gew.sser" "$1" | awk '{print $2}')
+RIVER_NAME=$(grep "Gew.sser" "$1" | sed 's/Gew.sser: //')
DATE=$(date +%Y.%m.%d_%H%M)
-LOG_DIR=${LOG}/${RIVER_NAME}-$DATE
+LOG_DIR=${LOG}/`basename $GEW_FILE .gew`-$DATE
mkdir -p ${LOG_DIR}
if [ "$POSTGRES" = "TRUE" ]; then
diff -r 8d0af912351c -r 25c2505df28f flys-backend/contrib/shpimporter/boundaries.py
--- a/flys-backend/contrib/shpimporter/boundaries.py Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/contrib/shpimporter/boundaries.py Wed Apr 03 16:00:21 2013 +0200
@@ -32,7 +32,7 @@
def isShapeRelevant(self, name, path):
shp = ogr.Open(path)
if self.isGeometryValid(shp.GetLayerByName(name).GetGeomType()) and \
- self.getKind(path) > 0:
+ self.getKind(path) > 0 and not "talaue" in path.lower():
return True
else:
return False
@@ -59,9 +59,13 @@
newFeat.SetField("kind", kind)
if self.IsFieldSet(feat, "SECTIE"):
newFeat.SetField("sectie", feat.GetField("SECTIE"))
+ else:
+ newFeat.SetField("sectie", 0)
if self.IsFieldSet(feat, "SOBEK"):
newFeat.SetField("sobek", feat.GetField("SOBEK"))
+ else:
+ newFeat.SetField("sobek", 0)
if self.IsFieldSet(feat, "river_id"):
newFeat.SetField("river_id", feat.GetField("river_id"))
@@ -102,10 +106,13 @@
if self.IsFieldSet(feat, "SECTIE"):
newFeat.SetField("sectie", feat.GetField("SECTIE"))
+ else:
+ newFeat.SetField("sectie", 0)
if self.IsFieldSet(feat, "SOBEK"):
newFeat.SetField("sobek", feat.GetField("SOBEK"))
-
+ else:
+ newFeat.SetField("sobek", 0)
if self.IsFieldSet(feat, "river_id"):
newFeat.SetField("river_id", feat.GetField("river_id"))
diff -r 8d0af912351c -r 25c2505df28f flys-backend/contrib/shpimporter/buildings.py
--- a/flys-backend/contrib/shpimporter/buildings.py Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/contrib/shpimporter/buildings.py Wed Apr 03 16:00:21 2013 +0200
@@ -62,9 +62,13 @@
return BUILDING_KINDS["pegel"]
# Now it gets ugly when we search all attributes
- if self.searchValue(feat, "^br[ueü]{0,2}cke[n]{0,1}$"):
+ ret = self.searchValue(feat, "^br[ueü]{0,2}cke[n]{0,1}$")
+ if ret:
+ self.handled(ret)
return BUILDING_KINDS["brücken"]
- if self.searchValue(feat, "^wehr[e]{0,1}$"):
+ ret = self.searchValue(feat, "^wehr[e]{0,1}$")
+ if ret:
+ self.handled(ret)
return BUILDING_KINDS["wehre"]
return BUILDING_KINDS["sonstige"]
diff -r 8d0af912351c -r 25c2505df28f flys-backend/contrib/shpimporter/importer.py
--- a/flys-backend/contrib/shpimporter/importer.py Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/contrib/shpimporter/importer.py Wed Apr 03 16:00:21 2013 +0200
@@ -47,12 +47,17 @@
def searchValue(self, feat, regex):
"""
- Searches for a value that matches regx in all attribute
+ Searches for a value that matches regex in all attribute
fields of a feature.
+
+ @returns the name of the field where a match was found or None
"""
for val in feat.items():
- match = re.match(regex, val, re.IGNORECASE)
- return match != None
+ if not isinstance(feat.items()[val], basestring):
+ continue
+ match = re.match(regex, feat.items()[val], re.IGNORECASE)
+ if match:
+ return val
def searchField(self, regex):
"""
diff -r 8d0af912351c -r 25c2505df28f flys-backend/contrib/shpimporter/jetties.py
--- a/flys-backend/contrib/shpimporter/jetties.py Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/contrib/shpimporter/jetties.py Wed Apr 03 16:00:21 2013 +0200
@@ -68,6 +68,8 @@
newFeat.SetGeometry(geometry)
+ newFeat.SetField("river_id", self.river_id)
+
artname = self.searchField("^type$")
if self.IsFieldSet(feat, artname):
self.handled(artname)
diff -r 8d0af912351c -r 25c2505df28f flys-backend/doc/schema/oracle-drop-minfo.sql
--- a/flys-backend/doc/schema/oracle-drop-minfo.sql Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/doc/schema/oracle-drop-minfo.sql Wed Apr 03 16:00:21 2013 +0200
@@ -12,9 +12,7 @@
ALTER TABLE bed_height_epoch DROP CONSTRAINT fk_epoch_cur_elevation_model;
ALTER TABLE bed_height_epoch DROP CONSTRAINT fk_epoch_old_elevation_model;
ALTER TABLE bed_height_epoch DROP CONSTRAINT fk_epoch_range;
-ALTER TABLE depths DROP CONSTRAINT fk_depths_unit_id;
ALTER TABLE sediment_density DROP CONSTRAINT fk_sd_depth_id;
-ALTER TABLE sediment_density DROP CONSTRAINT fk_sd_unit_id;
ALTER TABLE sediment_density_values DROP CONSTRAINT fk_sdv_sediment_density_id;
ALTER TABLE morphologic_width DROP CONSTRAINT fk_mw_river_id;
ALTER TABLE morphologic_width DROP CONSTRAINT fk_mw_unit_id;
@@ -40,6 +38,7 @@
ALTER TABLE sq_relation DROP CONSTRAINT fk_sqr_tinterval_id;
ALTER TABLE sq_relation DROP CONSTRAINT fk_sqr_river_id;
ALTER TABLE sq_relation_value DROP CONSTRAINT fk_sqr_id;
+ALTER TABLE sq_relation_value DROP CONSTRAINT fk_mstation_id;
ALTER TABLE measurement_station DROP CONSTRAINT fk_ms_river_id;
ALTER TABLE measurement_station DROP CONSTRAINT fk_ms_range_id;
ALTER TABLE measurement_station DROP CONSTRAINT fk_ms_reference_gauge_id;
diff -r 8d0af912351c -r 25c2505df28f flys-backend/doc/schema/oracle-drop-spatial.sql
--- a/flys-backend/doc/schema/oracle-drop-spatial.sql Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/doc/schema/oracle-drop-spatial.sql Wed Apr 03 16:00:21 2013 +0200
@@ -67,6 +67,16 @@
DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'GAUGE_LOCATION';
DROP SEQUENCE GAUGE_LOCATION_ID_SEQ;
+DROP TRIGGER jetties_trigger;
+DROP TABLE jetties;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'JETTIES';
+DROP SEQUENCE JETTIES_ID_SEQ;
+
+DROP TRIGGER flood_marks_trigger;
+DROP TABLE flood_marks;
+DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = 'FLOOD_MARKS';
+DROP SEQUENCE FLOOD_MARKS_ID_SEQ;
+
DROP TABLE hws_kinds;
DROP TABLE sectie_kinds;
DROP TABLE sobek_kinds;
@@ -75,3 +85,4 @@
DROP TABLE boundary_kinds;
DROP TABLE cross_section_track_kinds;
DROP TABLE floodplain_kinds;
+DROP TABLE building_kinds;
diff -r 8d0af912351c -r 25c2505df28f flys-backend/doc/schema/oracle-minfo.sql
--- a/flys-backend/doc/schema/oracle-minfo.sql Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/doc/schema/oracle-minfo.sql Wed Apr 03 16:00:21 2013 +0200
@@ -40,13 +40,13 @@
CREATE TABLE bed_height_single (
id NUMBER(38,0) NOT NULL,
river_id NUMBER(38,0) NOT NULL,
- year NUMBER(38,0) NOT NULL,
- sounding_width NUMBER(38,0) NOT NULL,
+ year NUMBER(38,0),
+ sounding_width NUMBER(38,0),
type_id NUMBER(38,0) NOT NULL,
location_system_id NUMBER(38,0) NOT NULL,
cur_elevation_model_id NUMBER(38,0) NOT NULL,
old_elevation_model_id NUMBER(38,0),
- range_id NUMBER(38,0) NOT NULL,
+ range_id NUMBER(38,0),
evaluation_by VARCHAR(255),
description VARCHAR(255),
PRIMARY KEY(id),
@@ -69,7 +69,7 @@
-- type_id NUMBER(38,0) NOT NULL,
cur_elevation_model_id NUMBER(38,0) NOT NULL,
old_elevation_model_id NUMBER(38,0),
- range_id NUMBER(38,0) NOT NULL,
+ range_id NUMBER(38,0),
evaluation_by VARCHAR(255),
description VARCHAR(255),
PRIMARY KEY(id),
@@ -114,9 +114,7 @@
id NUMBER(38,0) NOT NULL,
lower NUMBER(38,2) NOT NULL,
upper NUMBER(38,2) NOT NULL,
- unit_id NUMBER(38,0) NOT NULL,
- PRIMARY KEY(id),
- CONSTRAINT fk_depths_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+ PRIMARY KEY(id)
);
@@ -126,12 +124,10 @@
id NUMBER(38,0) NOT NULL,
river_id NUMBER(38,0) NOT NULL,
depth_id NUMBER(38,0) NOT NULL,
- unit_id NUMBER(38,0) NOT NULL,
description VARCHAR(256),
PRIMARY KEY(id),
CONSTRAINT fk_sd_river_id FOREIGN KEY (river_id) REFERENCES rivers(id),
- CONSTRAINT fk_sd_depth_id FOREIGN KEY (depth_id) REFERENCES depths(id),
- CONSTRAINT fk_sd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+ CONSTRAINT fk_sd_depth_id FOREIGN KEY (depth_id) REFERENCES depths(id)
);
@@ -141,6 +137,7 @@
id NUMBER(38,0) NOT NULL,
sediment_density_id NUMBER(38,0) NOT NULL,
station NUMBER(38,2) NOT NULL,
+ shore_offset NUMBER(38,2),
density NUMBER(38,2) NOT NULL,
description VARCHAR(256),
year NUMBER(38,0),
@@ -287,10 +284,12 @@
name VARCHAR(256) NOT NULL,
river_id NUMBER(38) NOT NULL,
station NUMBER(38,3) NOT NULL,
- range_id NUMBER(38) NOT NULL,
+ range_id NUMBER(38),
measurement_type VARCHAR(64) NOT NULL,
riverside VARCHAR(16),
reference_gauge_id NUMBER(38),
+ -- store name of reference gauges here too, as not all are in gauges
+ reference_gauge_name VARCHAR(64),
observation_timerange_id NUMBER(38),
operator VARCHAR(64),
description VARCHAR(512),
@@ -298,8 +297,7 @@
CONSTRAINT fk_ms_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
CONSTRAINT fk_ms_range_id FOREIGN KEY (range_id) REFERENCES ranges(id) ON DELETE CASCADE,
CONSTRAINT fk_ms_reference_gauge_id FOREIGN KEY (reference_gauge_id) REFERENCES gauges(id) ON DELETE CASCADE,
- CONSTRAINT fk_ms_observation_timerange_id FOREIGN KEY (observation_timerange_id) REFERENCES time_intervals(id),
- UNIQUE (river_id, station)
+ CONSTRAINT fk_ms_observation_timerange_id FOREIGN KEY (observation_timerange_id) REFERENCES time_intervals(id)
);
@@ -319,14 +317,19 @@
CREATE SEQUENCE SQ_RELATION_VALUES_ID_SEQ;
CREATE TABLE sq_relation_value (
- id NUMBER(38,0) NOT NULL,
- sq_relation_id NUMBER(38,0) NOT NULL,
- parameter VARCHAR(16) NOT NULL,
- fraction VARCHAR(32) NOT NULL,
- function VARCHAR(32) NOT NULL,
- km NUMBER(38,3) NOT NULL,
- a NUMBER(38, 3) NOT NULL,
- b NUMBER(38,3) NOT NULL,
+ id NUMBER(38,0) NOT NULL,
+ sq_relation_id NUMBER(38,0) NOT NULL,
+ measurement_station_id NUMBER(38,0) NOT NULL,
+ parameter VARCHAR(1) NOT NULL,
+ a NUMBER(38,20) NOT NULL,
+ b NUMBER(38,20) NOT NULL,
+ qmax NUMBER(38,20) NOT NULL,
+ rsq NUMBER(38,3),
+ ntot NUMBER(38,0),
+ noutl NUMBER(38,0),
+ cferguson NUMBER(38,20),
+ cduan NUMBER(38,20),
PRIMARY KEY (id),
- CONSTRAINT fk_sqr_id FOREIGN KEY (sq_relation_id) REFERENCES sq_relation(id)
+ CONSTRAINT fk_sqr_id FOREIGN KEY (sq_relation_id) REFERENCES sq_relation(id),
+ CONSTRAINT fk_mstation_id FOREIGN KEY (measurement_station_id) REFERENCES measurement_station(id)
);
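
The rebuilt sq_relation_value table (here and in the PostgreSQL schema further below) ties each record to a measurement station and stores regression parameters instead of the former per-km fraction/function columns. Assuming the customary power-law form of an S/Q relation (this reading is inferred from the column names and is not stated in the patch):

    S(Q) = a \cdot Q^{b}, \qquad Q \le Q_{max}

where rsq would be the coefficient of determination of the fit, ntot and noutl the number of measurements and of outliers, and cferguson/cduan presumably bias-correction coefficients of the kind applied to log-log regressions.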
diff -r 8d0af912351c -r 25c2505df28f flys-backend/doc/schema/oracle-spatial.sql
--- a/flys-backend/doc/schema/oracle-spatial.sql Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/doc/schema/oracle-spatial.sql Wed Apr 03 16:00:21 2013 +0200
@@ -5,7 +5,7 @@
name VARCHAR(64)
);
INSERT INTO axis_kinds(id, name) VALUES (0, 'Unbekannt');
-INSERT INTO axis_kinds(id, name) VALUES (1, 'Aktuell');
+INSERT INTO axis_kinds(id, name) VALUES (1, 'aktuelle Achse');
INSERT INTO axis_kinds(id, name) VALUES (2, 'Sonstige');
-- Geodaesie/Flussachse+km/achse
@@ -54,7 +54,7 @@
name VARCHAR(64)
);
INSERT INTO cross_section_track_kinds(id, name) VALUES (0, 'Sonstige');
-INSERT INTO cross_section_track_kinds(id, name) VALUES (1, 'Aktuell');
+INSERT INTO cross_section_track_kinds(id, name) VALUES (1, 'aktuelle Querprofilspuren');
CREATE SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
CREATE TABLE cross_section_tracks (
@@ -92,8 +92,8 @@
OGR_FID NUMBER(38),
GEOM MDSYS.SDO_GEOMETRY,
river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
- kind_id NUMBER(38) REFERENCES building_kinds(id) NOT NULL DEFAULT 0,
- km NUMBER(38,11) NOT NULL,
+ kind_id NUMBER(38) DEFAULT 0 NOT NULL REFERENCES building_kinds(id),
+ km NUMBER(38,11),
name VARCHAR2(255), -- The layername
description VARCHAR(256), -- Name taken from attributes
path VARCHAR(256),
@@ -137,7 +137,7 @@
name VARCHAR(64)
);
INSERT INTO floodplain_kinds(id, name) VALUES (0, 'Sonstige');
-INSERT INTO floodplain_kinds(id, name) VALUES (1, 'Aktuell');
+INSERT INTO floodplain_kinds(id, name) VALUES (1, 'aktuelle Talaue');
CREATE SEQUENCE FLOODPLAIN_ID_SEQ;
CREATE TABLE floodplain(
@@ -242,7 +242,7 @@
id NUMBER PRIMARY KEY NOT NULL,
name VARCHAR(64) NOT NULL
);
-INSERT INTO sectie_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO sectie_kinds (id, name) VALUES (0, 'SECTIE Unbekannt');
INSERT INTO sectie_kinds (id, name) VALUES (1, 'Flussschlauch');
INSERT INTO sectie_kinds (id, name) VALUES (2, 'Uferbank');
INSERT INTO sectie_kinds (id, name) VALUES (3, 'Überflutungsbereich');
@@ -251,7 +251,7 @@
id NUMBER PRIMARY KEY NOT NULL,
name VARCHAR(64) NOT NULL
);
-INSERT INTO sobek_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO sobek_kinds (id, name) VALUES (0, 'SOBEK Unbekannt');
INSERT INTO sobek_kinds (id, name) VALUES (1, 'Stromführend');
INSERT INTO sobek_kinds (id, name) VALUES (2, 'Stromspeichernd');
@@ -337,9 +337,9 @@
GEOM MDSYS.SDO_GEOMETRY,
river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
name VARCHAR(255),
- kind NUMBER(38) REFERENCES boundary_kinds(id),
- sectie NUMBER(38) REFERENCES sectie_kinds(id),
- sobek NUMBER(38) REFERENCES sobek_kinds(id),
+ kind NUMBER(38) DEFAULT 0 NOT NULL REFERENCES boundary_kinds(id),
+ sectie NUMBER(38) DEFAULT 0 NOT NULL REFERENCES sectie_kinds(id),
+ sobek NUMBER(38) DEFAULT 0 NOT NULL REFERENCES sobek_kinds(id),
path VARCHAR(256),
id NUMBER PRIMARY KEY NOT NULL
);
@@ -356,9 +356,9 @@
GEOM MDSYS.SDO_GEOMETRY,
river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
name VARCHAR(255),
- kind NUMBER(38) REFERENCES boundary_kinds(id),
- sectie NUMBER(38) REFERENCES sectie_kinds(id),
- sobek NUMBER(38) REFERENCES sobek_kinds(id),
+ kind NUMBER(38) DEFAULT 0 NOT NULL REFERENCES boundary_kinds(id),
+ sectie NUMBER(38) DEFAULT 0 NOT NULL REFERENCES sectie_kinds(id),
+ sobek NUMBER(38) DEFAULT 0 NOT NULL REFERENCES sobek_kinds(id),
path VARCHAR(256),
id NUMBER PRIMARY KEY NOT NULL
);
@@ -412,3 +412,22 @@
SELECT JETTIES_ID_SEQ.nextval INTO :new.id FROM dual;
END;
/
+
+CREATE SEQUENCE FLOOD_MARKS_ID_SEQ;
+CREATE TABLE flood_marks (
+ OGR_FID NUMBER(38),
+ GEOM MDSYS.SDO_GEOMETRY,
+ id NUMBER PRIMARY KEY NOT NULL,
+ river_id NUMBER(38) REFERENCES rivers(id) ON DELETE CASCADE,
+ path VARCHAR(256),
+ km NUMBER(7,3),
+ z NUMBER(38,12),
+ location VARCHAR(64),
+ year NUMBER(38,0)
+);
+INSERT INTO USER_SDO_GEOM_METADATA VALUES ('flood_marks', 'GEOM', MDSYS.SDO_DIM_ARRAY(MDSYS.SDO_DIM_ELEMENT('X',3282450,3912240,0.001),MDSYS.SDO_DIM_ELEMENT('Y',5248260,6100130,0.001)), 31467);
+CREATE OR REPLACE TRIGGER flood_marks_trigger BEFORE INSERT ON flood_marks FOR EACH ROW
+ BEGIN
+ SELECT FLOOD_MARKS_ID_SEQ.nextval INTO :new.id FROM dual;
+ END;
+/
diff -r 8d0af912351c -r 25c2505df28f flys-backend/doc/schema/oracle-spatial_idx.sql
--- a/flys-backend/doc/schema/oracle-spatial_idx.sql Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/doc/schema/oracle-spatial_idx.sql Wed Apr 03 16:00:21 2013 +0200
@@ -22,3 +22,4 @@
CREATE INDEX jetties_idx ON jetties(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
+CREATE INDEX flood_marks_idx ON flood_marks(GEOM) indextype IS MDSYS.SPATIAL_INDEX parameters ('LAYER_GTYPE=POINT');
diff -r 8d0af912351c -r 25c2505df28f flys-backend/doc/schema/postgresql-drop-spatial.sql
--- a/flys-backend/doc/schema/postgresql-drop-spatial.sql Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/doc/schema/postgresql-drop-spatial.sql Wed Apr 03 16:00:21 2013 +0200
@@ -45,5 +45,6 @@
DROP TABLE sectie_kinds;
DROP TABLE boundary_kinds;
DROP TABLE axis_kinds;
+DROP TABLE building_kinds;
COMMIT;
diff -r 8d0af912351c -r 25c2505df28f flys-backend/doc/schema/postgresql-minfo.sql
--- a/flys-backend/doc/schema/postgresql-minfo.sql Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/doc/schema/postgresql-minfo.sql Wed Apr 03 16:00:21 2013 +0200
@@ -40,13 +40,13 @@
CREATE TABLE bed_height_single (
id int NOT NULL,
river_id int NOT NULL,
- year int NOT NULL,
- sounding_width int NOT NULL,
+ year int,
+ sounding_width int,
type_id int NOT NULL,
location_system_id int NOT NULL,
cur_elevation_model_id int NOT NULL,
old_elevation_model_id int,
- range_id int NOT NULL,
+ range_id int,
evaluation_by VARCHAR(255),
description VARCHAR(255),
PRIMARY KEY(id),
@@ -69,7 +69,7 @@
-- type_id int NOT NULL,
cur_elevation_model_id int NOT NULL,
old_elevation_model_id int,
- range_id int NOT NULL,
+ range_id int,
evaluation_by VARCHAR(255),
description VARCHAR(255),
PRIMARY KEY(id),
@@ -114,9 +114,7 @@
id int NOT NULL,
lower NUMERIC NOT NULL,
upper NUMERIC NOT NULL,
- unit_id int NOT NULL,
- PRIMARY KEY(id),
- CONSTRAINT fk_depths_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+ PRIMARY KEY(id)
);
@@ -126,12 +124,10 @@
id int NOT NULL,
river_id int NOT NULL,
depth_id int NOT NULL,
- unit_id int NOT NULL,
description VARCHAR(256),
PRIMARY KEY(id),
CONSTRAINT fk_sd_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
- CONSTRAINT fk_sd_depth_id FOREIGN KEY (depth_id) REFERENCES depths(id),
- CONSTRAINT fk_sd_unit_id FOREIGN KEY (unit_id) REFERENCES units(id)
+ CONSTRAINT fk_sd_depth_id FOREIGN KEY (depth_id) REFERENCES depths(id)
);
@@ -141,6 +137,7 @@
id int NOT NULL,
sediment_density_id int NOT NULL,
station NUMERIC NOT NULL,
+ shore_offset NUMERIC,
density NUMERIC NOT NULL,
description VARCHAR(256),
year int,
@@ -287,10 +284,12 @@
name VARCHAR(256) NOT NULL,
river_id int NOT NULL,
station NUMERIC NOT NULL,
- range_id int NOT NULL,
+ range_id int,
measurement_type VARCHAR(64) NOT NULL,
riverside VARCHAR(16),
reference_gauge_id int,
+ -- store name of reference gauges here too, as not all are in gauges
+ reference_gauge_name VARCHAR(64),
observation_timerange_id int,
operator VARCHAR(64),
description VARCHAR(512),
@@ -298,8 +297,7 @@
CONSTRAINT fk_ms_river_id FOREIGN KEY (river_id) REFERENCES rivers(id) ON DELETE CASCADE,
CONSTRAINT fk_ms_range_id FOREIGN KEY (range_id) REFERENCES ranges(id) ON DELETE CASCADE,
CONSTRAINT fk_ms_reference_gauge_id FOREIGN KEY (reference_gauge_id) REFERENCES gauges(id) ON DELETE CASCADE,
- CONSTRAINT fk_ms_observation_timerange_id FOREIGN KEY (observation_timerange_id) REFERENCES time_intervals(id),
- UNIQUE (river_id, station)
+ CONSTRAINT fk_ms_observation_timerange_id FOREIGN KEY (observation_timerange_id) REFERENCES time_intervals(id)
);
@@ -319,15 +317,20 @@
CREATE SEQUENCE SQ_RELATION_VALUES_ID_SEQ;
CREATE TABLE sq_relation_value (
- id int NOT NULL,
- sq_relation_id int NOT NULL,
- parameter VARCHAR(16) NOT NULL,
- fraction VARCHAR(32) NOT NULL,
- function VARCHAR(32) NOT NULL,
- km NUMERIC NOT NULL,
- a NUMERIC NOT NULL,
- b NUMERIC NOT NULL,
+ id int NOT NULL,
+ sq_relation_id int NOT NULL,
+ measurement_station_id int NOT NULL,
+ parameter VARCHAR(1) NOT NULL,
+ a NUMERIC NOT NULL,
+ b NUMERIC NOT NULL,
+ qmax NUMERIC NOT NULL,
+ rsq NUMERIC,
+ ntot int,
+ noutl int,
+ cferguson NUMERIC,
+ cduan NUMERIC,
PRIMARY KEY (id),
- CONSTRAINT fk_sqr_id FOREIGN KEY (sq_relation_id) REFERENCES sq_relation(id) ON DELETE CASCADE
+ CONSTRAINT fk_sqr_id FOREIGN KEY (sq_relation_id) REFERENCES sq_relation(id) ON DELETE CASCADE,
+ CONSTRAINT fk_mstation_id FOREIGN KEY (measurement_station_id) REFERENCES measurement_station(id)
);
COMMIT;
diff -r 8d0af912351c -r 25c2505df28f flys-backend/doc/schema/postgresql-spatial.sql
--- a/flys-backend/doc/schema/postgresql-spatial.sql Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/doc/schema/postgresql-spatial.sql Wed Apr 03 16:00:21 2013 +0200
@@ -5,7 +5,7 @@
name VARCHAR(64)
);
INSERT INTO axis_kinds(id, name) VALUES (0, 'Unbekannt');
-INSERT INTO axis_kinds(id, name) VALUES (1, 'Aktuell');
+INSERT INTO axis_kinds(id, name) VALUES (1, 'aktuelle Achse');
INSERT INTO axis_kinds(id, name) VALUES (2, 'Sonstige');
-- Geodaesie/Flussachse+km/achse
@@ -41,7 +41,7 @@
name VARCHAR(64)
);
INSERT INTO cross_section_track_kinds(id, name) VALUES (0, 'Sonstige');
-INSERT INTO cross_section_track_kinds(id, name) VALUES (1, 'Aktuell');
+INSERT INTO cross_section_track_kinds(id, name) VALUES (1, 'aktuelle Querprofilspuren');
CREATE SEQUENCE CROSS_SECTION_TRACKS_ID_SEQ;
CREATE TABLE cross_section_tracks (
@@ -102,7 +102,7 @@
name VARCHAR(64)
);
INSERT INTO floodplain_kinds(id, name) VALUES (0, 'Sonstige');
-INSERT INTO floodplain_kinds(id, name) VALUES (1, 'Aktuell');
+INSERT INTO floodplain_kinds(id, name) VALUES (1, 'aktuelle Talaue');
CREATE SEQUENCE FLOODPLAIN_ID_SEQ;
CREATE TABLE floodplain (
@@ -250,7 +250,7 @@
id int PRIMARY KEY NOT NULL,
name VARCHAR(64) NOT NULL
);
-INSERT INTO sectie_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO sectie_kinds (id, name) VALUES (0, 'SECTIE Unbekannt');
INSERT INTO sectie_kinds (id, name) VALUES (1, 'Flussschlauch');
INSERT INTO sectie_kinds (id, name) VALUES (2, 'Uferbank');
INSERT INTO sectie_kinds (id, name) VALUES (3, 'Überflutungsbereich');
@@ -259,7 +259,7 @@
id int PRIMARY KEY NOT NULL,
name VARCHAR(64) NOT NULL
);
-INSERT INTO sobek_kinds (id, name) VALUES (0, 'Unbekannt');
+INSERT INTO sobek_kinds (id, name) VALUES (0, 'SOBEK Unbekannt');
INSERT INTO sobek_kinds (id, name) VALUES (1, 'Stromführend');
INSERT INTO sobek_kinds (id, name) VALUES (2, 'Stromspeichernd');
@@ -277,9 +277,9 @@
id int PRIMARY KEY NOT NULL,
river_id int REFERENCES rivers(id) ON DELETE CASCADE,
name VARCHAR(255),
- kind int REFERENCES boundary_kinds(id),
- sectie int REFERENCES sectie_kinds(id),
- sobek int REFERENCES sobek_kinds(id),
+ kind int REFERENCES boundary_kinds(id) NOT NULL DEFAULT 0,
+ sectie int REFERENCES sectie_kinds(id) NOT NULL DEFAULT 0,
+ sobek int REFERENCES sobek_kinds(id) NOT NULL DEFAULT 0,
path VARCHAR(256)
);
SELECT AddGeometryColumn('hydr_boundaries','geom',31467,'MULTILINESTRING',3);
@@ -291,9 +291,9 @@
id int PRIMARY KEY NOT NULL,
river_id int REFERENCES rivers(id) ON DELETE CASCADE,
name VARCHAR(255),
- kind int REFERENCES boundary_kinds(id),
- sectie int REFERENCES sectie_kinds(id),
- sobek int REFERENCES sobek_kinds(id),
+ kind int REFERENCES boundary_kinds(id) NOT NULL DEFAULT 0,
+ sectie int REFERENCES sectie_kinds(id) NOT NULL DEFAULT 0,
+ sobek int REFERENCES sobek_kinds(id) NOT NULL DEFAULT 0,
path VARCHAR(256)
);
SELECT AddGeometryColumn('hydr_boundaries_poly','geom',31467,'MULTIPOLYGON',3);
@@ -331,5 +331,17 @@
SELECT AddGeometryColumn('jetties','geom',31467,'POINT',2);
ALTER TABLE jetties ALTER COLUMN id SET DEFAULT NEXTVAL('JETTIES_ID_SEQ');
+CREATE SEQUENCE FLOOD_MARKS_ID_SEQ;
+CREATE TABLE flood_marks (
+ id int PRIMARY KEY NOT NULL,
+ river_id int REFERENCES rivers(id) ON DELETE CASCADE,
+ path VARCHAR(256),
+ km FLOAT8,
+ z FLOAT8,
+ location VARCHAR(64),
+ year int
+);
+SELECT AddGeometryColumn('flood_marks','geom',31467,'POINT',2);
+ALTER TABLE flood_marks ALTER COLUMN id SET DEFAULT NEXTVAL('FLOOD_MARKS_ID_SEQ');
COMMIT;
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/backend/FLYSCredentials.java
--- a/flys-backend/src/main/java/de/intevation/flys/backend/FLYSCredentials.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/backend/FLYSCredentials.java Wed Apr 03 16:00:21 2013 +0200
@@ -47,6 +47,7 @@
import de.intevation.flys.model.HYKFormation;
import de.intevation.flys.model.HydrBoundary;
import de.intevation.flys.model.HydrBoundaryPoly;
+import de.intevation.flys.model.Jetty;
import de.intevation.flys.model.LocationSystem;
import de.intevation.flys.model.MainValue;
import de.intevation.flys.model.MainValueType;
@@ -167,6 +168,7 @@
HYKFormation.class,
HYKFlowZoneType.class,
HYKFlowZone.class,
+ Jetty.class,
LocationSystem.class,
MainValueType.class,
MeasurementStation.class,
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightEpoch.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightEpoch.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightEpoch.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,13 +1,11 @@
package de.intevation.flys.importer;
-import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.hibernate.Query;
import org.hibernate.Session;
-import org.hibernate.exception.ConstraintViolationException;
import de.intevation.flys.model.BedHeightEpoch;
import de.intevation.flys.model.ElevationModel;
@@ -101,8 +99,7 @@
}
@Override
- public void storeDependencies(River river) throws SQLException,
- ConstraintViolationException {
+ public void storeDependencies(River river) {
log.info("Store dependencies for epoch: '" + getDescription() + "'");
BedHeightEpoch peer = getPeer(river);
@@ -159,8 +156,7 @@
Range theRange = range != null ? range.getPeer(river) : null;
if (theRange == null) {
- log.warn("BHE: Skip file - invalid km range.");
- return null;
+ log.warn("BHE: invalid km range.");
}
Session session = ImporterSession.getInstance()
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportBedHeightSingle.java Wed Apr 03 16:00:21 2013 +0200
@@ -3,13 +3,10 @@
import java.util.ArrayList;
import java.util.List;
-import java.sql.SQLException;
-
import org.apache.log4j.Logger;
import org.hibernate.Session;
import org.hibernate.Query;
-import org.hibernate.exception.ConstraintViolationException;
import de.intevation.flys.model.BedHeightSingle;
import de.intevation.flys.model.BedHeightType;
@@ -100,9 +97,7 @@
}
@Override
- public void storeDependencies(River river)
- throws SQLException, ConstraintViolationException
- {
+ public void storeDependencies(River river) {
log.info("Store dependencies for single: '" + getDescription() + "'");
if (type != null) {
@@ -153,9 +148,8 @@
}
if (theRange == null) {
- log.warn("BHS: No km-range given. Skip file '" +
+ log.warn("BHS: No km-range given: '" +
description + "'");
- return null;
}
Session session = ImporterSession.getInstance().getDatabaseSession();
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportDepth.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportDepth.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportDepth.java Wed Apr 03 16:00:21 2013 +0200
@@ -22,13 +22,10 @@
protected BigDecimal lower;
protected BigDecimal upper;
- protected ImportUnit unit;
-
- public ImportDepth(BigDecimal lower, BigDecimal upper, ImportUnit unit) {
+ public ImportDepth(BigDecimal lower, BigDecimal upper) {
this.lower = lower;
this.upper = upper;
- this.unit = unit;
}
@@ -48,19 +45,17 @@
Query query = session.createQuery(
"from Depth where " +
" lower=:lower and " +
- " upper=:upper and " +
- " unit=:unit");
+ " upper=:upper");
query.setParameter("lower", lower);
query.setParameter("upper", upper);
- query.setParameter("unit", unit.getPeer());
List<Depth> depths = query.list();
if (depths.isEmpty()) {
log.debug("Create new Depth DB instance.");
- peer = new Depth(lower, upper, unit.getPeer());
+ peer = new Depth(lower, upper);
session.save(peer);
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportDischargeZone.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportDischargeZone.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportDischargeZone.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,14 +1,12 @@
package de.intevation.flys.importer;
import java.math.BigDecimal;
-import java.sql.SQLException;
import java.util.List;
import org.apache.log4j.Logger;
import org.hibernate.Session;
import org.hibernate.Query;
-import org.hibernate.exception.ConstraintViolationException;
import de.intevation.flys.model.DischargeZone;
import de.intevation.flys.model.River;
@@ -43,9 +41,7 @@
}
- public void storeDependencies(River river)
- throws SQLException, ConstraintViolationException
- {
+ public void storeDependencies(River river) {
log.debug("store dependencies");
getPeer(river);
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurement.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurement.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurement.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,6 +1,5 @@
package de.intevation.flys.importer;
-import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
@@ -8,7 +7,6 @@
import org.hibernate.Session;
import org.hibernate.Query;
-import org.hibernate.exception.ConstraintViolationException;
import de.intevation.flys.model.FlowVelocityMeasurement;
import de.intevation.flys.model.River;
@@ -42,8 +40,7 @@
this.values.add(value);
}
- public void storeDependencies(River river) throws SQLException,
- ConstraintViolationException {
+ public void storeDependencies(River river) {
log.debug("store dependencies");
FlowVelocityMeasurement peer = getPeer(river);
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurementValue.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurementValue.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityMeasurementValue.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,7 +1,7 @@
package de.intevation.flys.importer;
import java.math.BigDecimal;
-import java.sql.SQLException;
+
import java.util.Date;
import java.util.List;
@@ -9,7 +9,6 @@
import org.hibernate.Session;
import org.hibernate.Query;
-import org.hibernate.exception.ConstraintViolationException;
import de.intevation.flys.model.FlowVelocityMeasurement;
import de.intevation.flys.model.FlowVelocityMeasurementValue;
@@ -51,9 +50,7 @@
- public void storeDependencies(FlowVelocityMeasurement measurement)
- throws SQLException, ConstraintViolationException
- {
+ public void storeDependencies(FlowVelocityMeasurement measurement) {
log.debug("store dependencies");
getPeer(measurement);
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityModel.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,6 +1,5 @@
package de.intevation.flys.importer;
-import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
@@ -8,7 +7,6 @@
import org.hibernate.Session;
import org.hibernate.Query;
-import org.hibernate.exception.ConstraintViolationException;
import de.intevation.flys.model.DischargeZone;
import de.intevation.flys.model.FlowVelocityModel;
@@ -52,8 +50,7 @@
this.values.add(value);
}
- public void storeDependencies(River river) throws SQLException,
- ConstraintViolationException {
+ public void storeDependencies(River river) {
log.debug("store dependencies");
if (dischargeZone == null) {
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportFlowVelocityModelValue.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,12 +1,11 @@
package de.intevation.flys.importer;
import java.math.BigDecimal;
-import java.sql.SQLException;
+
import java.util.List;
import org.hibernate.Session;
import org.hibernate.Query;
-import org.hibernate.exception.ConstraintViolationException;
import de.intevation.flys.model.FlowVelocityModel;
import de.intevation.flys.model.FlowVelocityModelValue;
@@ -38,9 +37,7 @@
}
- public void storeDependencies(FlowVelocityModel model)
- throws SQLException, ConstraintViolationException
- {
+ public void storeDependencies(FlowVelocityModel model) {
getPeer(model);
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportGrainFraction.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportGrainFraction.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportGrainFraction.java Wed Apr 03 16:00:21 2013 +0200
@@ -8,7 +8,6 @@
import org.hibernate.Query;
import de.intevation.flys.model.GrainFraction;
-import de.intevation.flys.model.Unit;
public class ImportGrainFraction {
@@ -21,8 +20,6 @@
private Double lower;
private Double upper;
- private ImportUnit unit;
-
private GrainFraction peer;
@@ -34,13 +31,11 @@
public ImportGrainFraction(
String name,
Double lower,
- Double upper,
- ImportUnit unit
+ Double upper
) {
this.name = name;
this.lower = lower;
this.upper = upper;
- this.unit = unit;
}
@@ -54,28 +49,24 @@
public GrainFraction getPeer() {
log.debug("get peer");
- Unit u = unit != null ? unit.getPeer() : null;
-
if (peer == null) {
Session session = ImporterSession.getInstance().getDatabaseSession();
Query query = session.createQuery(
"from GrainFraction where " +
" name=:name and " +
" lower=:lower and " +
- " upper=:upper and " +
- " unit=:unit"
+ " upper=:upper"
);
query.setParameter("name", name);
query.setParameter("lower", lower);
query.setParameter("upper", upper);
- query.setParameter("unit", u);
List<GrainFraction> fractions = query.list();
if (fractions.isEmpty()) {
log.info("create new GrainFraction");
- peer = new GrainFraction(name, lower, upper, u);
+ peer = new GrainFraction(name, lower, upper);
session.save(peer);
}
else {
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportMeasurementStation.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportMeasurementStation.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportMeasurementStation.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,11 +1,9 @@
package de.intevation.flys.importer;
-import java.sql.SQLException;
import java.util.List;
import org.apache.log4j.Logger;
import org.hibernate.Session;
-import org.hibernate.exception.ConstraintViolationException;
import de.intevation.flys.model.Gauge;
import de.intevation.flys.model.MeasurementStation;
@@ -29,7 +27,7 @@
public String gauge;
public ImportTimeInterval observationTimerange;
public String operator;
- public String comment;
+ public String description;
public ImportMeasurementStation() {
}
@@ -46,8 +44,7 @@
return gauges.isEmpty() ? null : gauges.get(0);
}
- public boolean storeDependencies(River river) throws SQLException,
- ConstraintViolationException {
+ public boolean storeDependencies(River river) {
getPeer(river);
return peer != null;
@@ -59,40 +56,44 @@
try {
gauge = getGaugeFromDB();
if (gauge == null) {
- log.warn("Skip measurement station '" + name
- + "': unable to find gauge with name '" + this.gauge
+ log.warn("No gauge found for measurement station '" + name
+ "'");
- return null;
}
}
catch (Exception e) {
log.error("Exception: " + e.getMessage());
}
- Range range = this.range.getPeer(river);
+ Range range = null;
+
+ if (this.range != null) {
+ range = this.range.getPeer(river);
+ }
+
if (range == null) {
- log.warn("Skip measurement station '" + name
- + "': unable to get range");
- return null;
+ log.warn("No range found for measurement station '" + name + "'");
}
TimeInterval observationTimerange = this.observationTimerange
.getPeer();
if (observationTimerange == null) {
- log.warn("Skip measurement station '" + name
- + "': unable to get time interval for observation time");
- return null;
+ log.warn("No time range found for measurement station '"
+ + name + "'");
}
Session session = ImporterSession.getInstance()
.getDatabaseSession();
org.hibernate.Query query = session
- .createQuery("FROM MeasurementStation "
- + " WHERE river=:river AND station=:station");
+ .createQuery(
+ "FROM MeasurementStation " +
+ "WHERE river=:river" +
+ " AND station=:station " +
+ " AND measurement_type=:measurement_type ");
query.setParameter("river", river);
query.setParameter("station", station);
+ query.setParameter("measurement_type", measurementType);
List<MeasurementStation> stations = query.list();
@@ -100,8 +101,8 @@
log.info("create new measurement station '" + name + "'");
peer = new MeasurementStation(river, name, measurementType,
- riverside, station, range, gauge, observationTimerange,
- operator, comment);
+ riverside, station, range, gauge, this.gauge,
+ observationTimerange, operator, description);
session.save(peer);
}
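
The lookup above now also matches on measurement_type, which corresponds to dropping the UNIQUE (river_id, station) constraint in the MINFO schemas earlier in this patch: two stations of different type may now share a river and km, while exact duplicates are still reused. A minimal, self-contained sketch of the resulting identity (illustrative names only, not the project's model classes):

```java
import java.util.HashSet;
import java.util.Set;

public class StationKeyDemo {
    // Hypothetical key mirroring the new lookup criteria (river, km, type).
    record StationKey(long riverId, double station, String measurementType) {}

    public static void main(String[] args) {
        Set<StationKey> seen = new HashSet<>();
        // Same river and km, different measurement type: both are kept now.
        System.out.println(seen.add(new StationKey(1L, 47.300, "Geschiebe")));   // true
        System.out.println(seen.add(new StationKey(1L, 47.300, "Schwebstoff"))); // true
        // An exact duplicate still maps to the existing station.
        System.out.println(seen.add(new StationKey(1L, 47.300, "Geschiebe")));   // false
    }
}
```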
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportMorphWidth.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportMorphWidth.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportMorphWidth.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,16 +1,15 @@
package de.intevation.flys.importer;
-import java.sql.SQLException;
+import de.intevation.flys.model.MorphologicalWidth;
+import de.intevation.flys.model.River;
+
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
+
import org.hibernate.Query;
import org.hibernate.Session;
-import org.hibernate.exception.ConstraintViolationException;
-
-import de.intevation.flys.model.MorphologicalWidth;
-import de.intevation.flys.model.River;
public class ImportMorphWidth {
@@ -35,8 +34,7 @@
this.unit = unit;
}
- public void storeDependencies(River river) throws SQLException,
- ConstraintViolationException {
+ public void storeDependencies(River river) {
log.info("store dependencies");
MorphologicalWidth peer = getPeer(river);
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportRiver.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,23 +1,9 @@
package de.intevation.flys.importer;
-import java.io.File;
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.log4j.Logger;
-import org.hibernate.Query;
-import org.hibernate.Session;
-import org.hibernate.exception.ConstraintViolationException;
+import de.intevation.artifacts.common.utils.FileTools.HashedFile;
import de.intevation.artifacts.common.utils.FileTools;
-import de.intevation.artifacts.common.utils.FileTools.HashedFile;
+
import de.intevation.flys.importer.parsers.AnnotationClassifier;
import de.intevation.flys.importer.parsers.AnnotationsParser;
import de.intevation.flys.importer.parsers.BedHeightEpochParser;
@@ -39,9 +25,26 @@
import de.intevation.flys.importer.parsers.WaterlevelDifferencesParser;
import de.intevation.flys.importer.parsers.WaterlevelParser;
import de.intevation.flys.importer.parsers.WstParser;
+
import de.intevation.flys.model.River;
import de.intevation.flys.model.Unit;
+import java.io.File;
+import java.io.IOException;
+
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.log4j.Logger;
+
+import org.hibernate.Query;
+import org.hibernate.Session;
+
/** Import all river-related data (files) that can be found. */
public class ImportRiver
@@ -584,7 +587,7 @@
File coredataFile = new File(minfoBaseDir, MINFO_CORE_DATA_FILE);
if (coredataFile == null || !coredataFile.exists()) {
- log.warn("No core data file '" + MINFO_CORE_DATA_FILE + "' found");
+ log.warn("No core data file '" + coredataFile.getAbsolutePath() + "' found");
return;
}
@@ -1237,15 +1240,8 @@
log.debug("name: " + desc);
- try {
- single.storeDependencies(river);
- }
- catch (SQLException sqle) {
- log.error("File '" + desc + "' is broken!");
- }
- catch (ConstraintViolationException cve) {
- log.error("File '" + desc + "' is broken!");
- }
+ single.storeDependencies(river);
+
}
}
else {
@@ -1265,15 +1261,8 @@
log.debug("name: " + desc);
- try {
- epoch.storeDependencies(river);
- }
- catch (SQLException sqle) {
- log.error("File '" + desc + "' is broken!");
- }
- catch (ConstraintViolationException cve) {
- log.error("File '" + desc + "' is broken!");
- }
+ epoch.storeDependencies(river);
+
}
}
else {
@@ -1292,15 +1281,8 @@
log.debug("name: " + desc);
- try {
- density.storeDependencies(river);
- }
- catch (SQLException sqle) {
- log.error("File '" + desc + "' is broken!");
- }
- catch (ConstraintViolationException cve) {
- log.error("File '" + desc + "' is broken!");
- }
+ density.storeDependencies(river);
+
}
}
}
@@ -1312,15 +1294,9 @@
River river = getPeer();
for (ImportMorphWidth width: morphologicalWidths) {
- try {
- width.storeDependencies(river);
- }
- catch (SQLException sqle) {
- log.error("Error while parsing file for morph. width.", sqle);
- }
- catch (ConstraintViolationException cve) {
- log.error("Error while parsing file for morph. width.", cve);
- }
+
+ width.storeDependencies(river);
+
}
}
}
@@ -1332,27 +1308,15 @@
River river = getPeer();
for (ImportFlowVelocityModel flowVelocityModel: flowVelocityModels){
- try {
- flowVelocityModel.storeDependencies(river);
- }
- catch (SQLException sqle) {
- log.error("Error while storing flow velocity model.", sqle);
- }
- catch (ConstraintViolationException cve) {
- log.error("Error while storing flow velocity model.", cve);
- }
+
+ flowVelocityModel.storeDependencies(river);
+
}
for (ImportFlowVelocityMeasurement m: flowVelocityMeasurements) {
- try {
- m.storeDependencies(river);
- }
- catch (SQLException sqle) {
- log.error("Error while storing flow velocity measurement.", sqle);
- }
- catch (ConstraintViolationException cve) {
- log.error("Error while storing flow velocity measurement.", cve);
- }
+
+ m.storeDependencies(river);
+
}
}
}
@@ -1365,15 +1329,9 @@
River river = getPeer();
for (ImportSedimentYield sedimentYield: sedimentYields) {
- try {
- sedimentYield.storeDependencies(river);
- }
- catch (SQLException sqle) {
- log.error("Error while storing sediment yield.", sqle);
- }
- catch (ConstraintViolationException cve) {
- log.error("Error while storing sediment yield.", cve);
- }
+
+ sedimentYield.storeDependencies(river);
+
}
}
}
@@ -1388,18 +1346,12 @@
int count = 0;
for (ImportMeasurementStation station: measurementStations) {
- try {
- boolean success = station.storeDependencies(river);
- if (success) {
- count++;
- }
- }
- catch (SQLException sqle) {
- log.error("Error while storing measurement station.", sqle);
- }
- catch (ConstraintViolationException cve) {
- log.error("Error while storing measurement station.", cve);
- }
+
+ boolean success = station.storeDependencies(river);
+ if (success) {
+ count++;
+ }
+
}
log.info("stored " + count + " measurement stations.");
@@ -1416,16 +1368,10 @@
int count = 0;
for (ImportSQRelation sqRelation: sqRelations) {
- try {
- sqRelation.storeDependencies(river);
- count++;
- }
- catch (SQLException sqle) {
- log.error("Error while storing sq relation.", sqle);
- }
- catch (ConstraintViolationException cve) {
- log.error("Error while storing sq relation.", cve);
- }
+
+ sqRelation.storeDependencies(river);
+ count++;
+
}
log.info("stored " + count + " sq relations.");
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportSQRelation.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportSQRelation.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSQRelation.java Wed Apr 03 16:00:21 2013 +0200
@@ -31,14 +31,11 @@
this.values = new ArrayList<ImportSQRelationValue>();
}
- public void storeDependencies(River river) throws SQLException,
- ConstraintViolationException {
+ public void storeDependencies(River river) {
log.info("store dependencies");
SQRelation peer = getPeer(river);
- timeInterval.getPeer();
-
if (peer != null) {
int count = 0;
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportSQRelationValue.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportSQRelationValue.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSQRelationValue.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,5 +1,6 @@
package de.intevation.flys.importer;
+import java.math.BigDecimal;
import java.sql.SQLException;
import java.util.List;
@@ -9,6 +10,7 @@
import org.hibernate.Session;
import org.hibernate.exception.ConstraintViolationException;
+import de.intevation.flys.model.MeasurementStation;
import de.intevation.flys.model.SQRelation;
import de.intevation.flys.model.SQRelationValue;
@@ -21,28 +23,40 @@
private SQRelationValue peer;
private String parameter;
- private String fraction;
- private String function;
- private double km;
- private double a;
- private double b;
+ private Double km;
+ private Double a;
+ private Double b;
+ private Double qMax;
+ private Double rSQ;
+ private Integer nTot;
+ private Integer nOutlier;
+ private Double cFerguson;
+ private Double cDuan;
public ImportSQRelationValue(
String parameter,
- String fraction,
- String function,
- double km,
- double a,
- double b
+ Double km,
+ Double a,
+ Double b,
+ Double qMax,
+ Double rSQ,
+ Integer nTot,
+ Integer nOutlier,
+ Double cFerguson,
+ Double cDuan
) {
this.parameter = parameter;
- this.fraction = fraction;
- this.function = function;
this.km = km;
this.a = a;
this.b = b;
+ this.qMax = qMax;
+ this.rSQ = rSQ;
+ this.nTot = nTot;
+ this.nOutlier = nOutlier;
+ this.cFerguson = cFerguson;
+ this.cDuan = cDuan;
}
@@ -58,30 +72,55 @@
Session session = ImporterSession.getInstance().getDatabaseSession();
Query query = session.createQuery(
+ "from MeasurementStation " +
+ " where station between :kml and :kmh");
+ query.setDouble("kml", km - 1e-4);
+ query.setDouble("kmh", km + 1e-4);
+
+ List<MeasurementStation> result = query.list();
+
+ if (result.isEmpty()) {
+ log.error("No measurement stations found at km " + km);
+ return null;
+ }
+
+ Query query2 = session.createQuery(
"from SQRelationValue " +
" where sqRelation=:owner " +
" and parameter=:parameter" +
- " and fraction=:fraction" +
- " and function=:function" +
- " and km=:km");
+ " and measurementStation=:measurementStation" +
+ " and a=:a" +
+ " and b=:b" +
+ " and qMax=:qMax" +
+ " and rSQ=:rSQ" +
+ " and cFerguson=:cFerguson" +
+ " and cDuan=:cDuan");
- query.setParameter("owner", owner);
- query.setString("parameter", parameter);
- query.setString("fraction", fraction);
- query.setString("function", function);
- query.setDouble("km", km);
+ query2.setParameter("owner", owner);
+ query2.setString("parameter", parameter);
+ query2.setParameter("measurementStation", result.get(0));
+ query2.setBigDecimal("a", toBigDecimal(a));
+ query2.setBigDecimal("b", toBigDecimal(b));
+ query2.setBigDecimal("qMax", toBigDecimal(qMax));
+ query2.setBigDecimal("rSQ", toBigDecimal(rSQ));
+ query2.setBigDecimal("cFerguson", toBigDecimal(cFerguson));
+ query2.setBigDecimal("cDuan", toBigDecimal(cDuan));
- List<SQRelationValue> values = query.list();
+ List<SQRelationValue> values = query2.list();
if (values.isEmpty()) {
peer = new SQRelationValue(
owner,
parameter,
- fraction,
- function,
- km,
+ result.get(0),
a,
- b
+ b,
+ qMax,
+ rSQ,
+ nTot,
+ nOutlier,
+ cFerguson,
+ cDuan
);
session.save(peer);
@@ -90,8 +129,12 @@
peer = values.get(0);
}
}
+ return peer;
+ }
- return peer;
+ private static final BigDecimal toBigDecimal(Double x) {
+ if (x == null) return null;
+ return new BigDecimal(x);
}
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
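
Two details of the rewritten getPeer() above may be worth noting: the measurement station is resolved by km with a tolerance of 1e-4 rather than by exact equality, and all Double parameters are bound as BigDecimal through the new toBigDecimal() helper, which uses the new BigDecimal(double) constructor. That constructor keeps the full binary expansion of the double, whereas BigDecimal.valueOf() goes through the canonical decimal string; the sketch below only illustrates that difference and makes no claim about which behaviour the importer requires:

```java
import java.math.BigDecimal;

public class BigDecimalConversionDemo {
    // Null-safe Double -> BigDecimal, mirroring the helper added in the patch.
    static BigDecimal toBigDecimal(Double x) {
        return x == null ? null : new BigDecimal(x);
    }

    public static void main(String[] args) {
        System.out.println(toBigDecimal(0.1));
        // 0.1000000000000000055511151231257827021181583404541015625
        System.out.println(BigDecimal.valueOf(0.1));
        // 0.1
        System.out.println(toBigDecimal(null));
        // null
    }
}
```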
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentDensity.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentDensity.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentDensity.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,13 +1,11 @@
package de.intevation.flys.importer;
-import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.hibernate.Query;
import org.hibernate.Session;
-import org.hibernate.exception.ConstraintViolationException;
import de.intevation.flys.model.River;
import de.intevation.flys.model.SedimentDensity;
@@ -21,8 +19,6 @@
protected ImportDepth depth;
- protected ImportUnit unit;
-
protected String description;
protected List<ImportSedimentDensityValue> values;
@@ -40,16 +36,11 @@
this.depth = depth;
}
- public void setUnit(ImportUnit unit) {
- this.unit = unit;
- }
-
public void addValue(ImportSedimentDensityValue value) {
values.add(value);
}
- public void storeDependencies(River river) throws SQLException,
- ConstraintViolationException {
+ public void storeDependencies(River river) {
log.info("store dependencies");
if (depth != null) {
@@ -75,23 +66,15 @@
return null;
}
- if (unit == null) {
- log.warn("cannot store sediment density '" + description
- + "': no unit");
- return null;
- }
-
if (peer == null) {
Session session = ImporterSession.getInstance()
.getDatabaseSession();
Query query = session.createQuery("from SedimentDensity where "
- + " river=:river and " + " depth=:depth and "
- + " unit=:unit");
+ + " river=:river and " + " depth=:depth");
query.setParameter("river", river);
query.setParameter("depth", depth.getPeer());
- query.setParameter("unit", unit.getPeer());
List<SedimentDensity> density = query.list();
@@ -99,7 +82,7 @@
log.debug("Create new SedimentDensity DB instance.");
peer = new SedimentDensity(river, depth.getPeer(),
- unit.getPeer(), description);
+ description);
session.save(peer);
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentDensityValue.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentDensityValue.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentDensityValue.java Wed Apr 03 16:00:21 2013 +0200
@@ -23,6 +23,8 @@
protected BigDecimal station;
+ protected BigDecimal shoreOffset;
+
protected BigDecimal density;
private BigDecimal year;
@@ -32,11 +34,13 @@
public ImportSedimentDensityValue(
BigDecimal station,
+ BigDecimal shoreOffset,
BigDecimal density,
BigDecimal year,
String description
) {
this.station = station;
+ this.shoreOffset = shoreOffset;
this.density = density;
this.year = year;
this.description = description;
@@ -60,12 +64,14 @@
"from SedimentDensityValue where " +
" sedimentDensity=:sedimentDensity and " +
" station=:station and " +
+ " shoreOffset=:shoreOffset and " +
" density=:density and " +
" year=:year and " +
" description=:description");
query.setParameter("sedimentDensity", sedimentDensity);
query.setParameter("station", station);
+ query.setParameter("shoreOffset", shoreOffset);
query.setParameter("density", density);
query.setParameter("year", year);
query.setParameter("description", description);
@@ -77,6 +83,7 @@
peer = new SedimentDensityValue(
sedimentDensity,
station,
+ shoreOffset,
density,
year,
description);
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentYield.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentYield.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/ImportSedimentYield.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,6 +1,5 @@
package de.intevation.flys.importer;
-import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
@@ -8,7 +7,6 @@
import org.hibernate.Session;
import org.hibernate.Query;
-import org.hibernate.exception.ConstraintViolationException;
import de.intevation.flys.model.GrainFraction;
import de.intevation.flys.model.River;
@@ -54,8 +52,7 @@
this.values.add(value);
}
- public void storeDependencies(River river) throws SQLException,
- ConstraintViolationException {
+ public void storeDependencies(River river) {
log.debug("store dependencies");
if (grainFraction != null) {
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightEpochParser.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightEpochParser.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightEpochParser.java Wed Apr 03 16:00:21 2013 +0200
@@ -45,7 +45,7 @@
km = new BigDecimal(nf.parse(values[0]).doubleValue());
}
catch (ParseException e) {
- log.warn("Error while parsing number from data row: " + line);
+ log.warn("Unparseable number in data row: " + line);
return;
}
@@ -70,7 +70,7 @@
obj.addValue(value);
}
catch (ParseException e) {
- log.warn("Error while parsing number from data row: " + line);
+ log.warn("Unparseable number in data row: " + line);
}
}
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightParser.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightParser.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightParser.java Wed Apr 03 16:00:21 2013 +0200
@@ -67,7 +67,7 @@
Pattern.compile("^ausgewertete Peilbreite: (\\d*).*");
public static final Pattern META_RANGE =
- Pattern.compile("^Strecke:\\D*(\\d++.\\d*)-(\\d++.\\d*).*");
+ Pattern.compile("^Strecke:\\D*(\\d++.?\\d*) ?- ?(\\d++.?\\d*).*");
public static final Pattern META_EVALUATION_BY =
Pattern.compile("^Auswerter: (.*).*");
@@ -235,7 +235,7 @@
obj.setTimeInterval(new ImportTimeInterval(fromYear, toYear));
}
catch (NumberFormatException e) {
- log.warn("BHP: Error while parsing timeinterval!", e);
+ log.warn("BHP: could not parse timeinterval", e);
}
return true;
@@ -256,7 +256,7 @@
return true;
}
catch (NumberFormatException e) {
- log.warn("BHP: Error while parsing sounding width: " + line, e);
+ log.warn("BHP: Could not parse sounding width: " + line, e);
log.warn("-> Set default value '0'");
}
obj.setSoundingWidth(0);
@@ -316,7 +316,7 @@
return true;
}
catch (ParseException e) {
- log.warn("BHP: Error while parsing range!", e);
+ log.warn("BHP: could not parse range", e);
}
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/BedHeightSingleParser.java Wed Apr 03 16:00:21 2013 +0200
@@ -75,15 +75,31 @@
new BigDecimal(nf.parse(values[1]).doubleValue()),
new BigDecimal(nf.parse(values[2]).doubleValue()),
new BigDecimal(nf.parse(values[3]).doubleValue()),
- new BigDecimal(nf.parse(values[4]).doubleValue()),
+ parseBigDecimal(values[4], line),
new BigDecimal(nf.parse(values[5]).doubleValue())
);
obj.addValue(value);
}
catch (ParseException e) {
- log.warn("BSP: Error while parsing data row.", e);
+ log.warn("BSP: unparseable value in data row.", e);
}
}
+
+ private BigDecimal parseBigDecimal(String value, String line) {
+ BigDecimal result = null;
+ try {
+ Double dValue = Double.valueOf(value.replace(",", "."));
+ result = new BigDecimal(dValue.doubleValue());
+ }
+ catch (NumberFormatException nfe) {
+ log.warn(
+ "Could not parse " +
+ value +
+ " in bed heigt single row: "
+ + line);
+ }
+ return result;
+ }
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
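
The new parseBigDecimal() fallback above accepts a decimal comma and returns null instead of discarding the whole data row when column 4 is not numeric. A stripped-down, runnable sketch of that behaviour (the sample inputs are illustrative):

```java
import java.math.BigDecimal;

public class LenientDecimalDemo {
    // Accept German-style decimal commas; return null for unparseable input
    // so the caller can keep the rest of the row.
    static BigDecimal parseLenient(String value) {
        try {
            return new BigDecimal(Double.parseDouble(value.replace(",", ".")));
        }
        catch (NumberFormatException nfe) {
            return null;
        }
    }

    public static void main(String[] args) {
        System.out.println(parseLenient("1,25")); // 1.25
        System.out.println(parseLenient("3.5"));  // 3.5
        System.out.println(parseLenient("n/a"));  // null
    }
}
```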
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/parsers/FlowVelocityMeasurementParser.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/FlowVelocityMeasurementParser.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/FlowVelocityMeasurementParser.java Wed Apr 03 16:00:21 2013 +0200
@@ -96,7 +96,7 @@
));
}
catch (ParseException pe) {
- log.warn("Error while parsing flow velocity values.", pe);
+ log.warn("Unparseable flow velocity values:", pe);
}
}
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/parsers/FlowVelocityModelParser.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/FlowVelocityModelParser.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/FlowVelocityModelParser.java Wed Apr 03 16:00:21 2013 +0200
@@ -200,7 +200,7 @@
new BigDecimal(nf.parse(qStr).doubleValue()) };
}
catch (ParseException pe) {
- log.warn("Error while parsing Q value: '" + qStr + "'");
+ log.warn("Could not parse Q value: '" + qStr + "'");
}
}
@@ -232,7 +232,7 @@
));
}
catch (ParseException pe) {
- log.warn("Error while parsing flow velocity values.", pe);
+ log.warn("Unparseable flow velocity values:", pe);
}
}
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/parsers/MeasurementStationsParser.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/MeasurementStationsParser.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/MeasurementStationsParser.java Wed Apr 03 16:00:21 2013 +0200
@@ -80,7 +80,7 @@
current.gauge = getGauge(cols);
current.observationTimerange = getObservationTimerange(cols);
current.operator = getOperator(cols);
- current.comment = getComment(cols);
+ current.description = getDescription(cols);
log.debug("Found new measurement station '" + current.name + "' at km "
+ current.station);
@@ -112,16 +112,15 @@
}
}
- protected ImportRange getRange(String[] cols)
- throws MeasurementStationParserException {
+ protected ImportRange getRange(String[] cols) {
if (cols[4] == null || cols[4].length() == 0) {
- throw new MeasurementStationParserException("invalid lower range '"
- + cols[4] + "'");
+ log.warn("No upper value for range found in '" + cols[4] + "'");
+ return null;
}
if (cols[5] == null || cols[5].length() == 0) {
- throw new MeasurementStationParserException("invalid lower range '"
- + cols[5] + "'");
+ log.warn("No upper value for range found in '" + cols[5] + "'");
+ return null;
}
try {
@@ -131,8 +130,8 @@
return new ImportRange(new BigDecimal(lower), new BigDecimal(upper));
}
catch (ParseException e) {
- throw new MeasurementStationParserException(
- "unable to parse range: " + e.getMessage());
+ log.warn("unable to parse range: " + e.getMessage());
+ return null;
}
}
@@ -150,21 +149,17 @@
return cols[3];
}
- protected String getGauge(String[] cols)
- throws MeasurementStationParserException {
+ protected String getGauge(String[] cols) {
if (cols[6] == null || cols[6].length() == 0) {
- throw new MeasurementStationParserException("invalid gauge '"
- + cols[6] + "'");
+ log.warn("invalid gauge found: '" + cols[6] + "'");
}
return cols[6];
}
- protected ImportTimeInterval getObservationTimerange(String[] cols)
- throws MeasurementStationParserException {
+ protected ImportTimeInterval getObservationTimerange(String[] cols) {
if (cols[8] == null || cols[8].length() == 0) {
- throw new MeasurementStationParserException(
- "invalid observation time '" + cols[8] + "'");
+ log.warn("Found invalid observation time '" + cols[8] + "'");
}
try {
@@ -173,20 +168,20 @@
if (date != null) {
return new ImportTimeInterval(date);
}
-
- throw new MeasurementStationParserException(
- "invalid observation time '" + cols[8] + "'");
+ log.warn("Observation time date invalid: '" + cols[8] + "'");
}
catch (ParseException pe) {
- throw new MeasurementStationParserException(pe.getMessage());
+ log.warn("Observation time date not parseable: '" + cols[8] + "'");
+ return null;
}
+ return null;
}
protected String getOperator(String[] cols) {
return cols[9];
}
- protected String getComment(String[] cols) {
+ protected String getDescription(String[] cols) {
return cols.length > 10 ? cols[10] : null;
}
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/parsers/MorphologicalWidthParser.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/MorphologicalWidthParser.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/MorphologicalWidthParser.java Wed Apr 03 16:00:21 2013 +0200
@@ -108,7 +108,7 @@
));
}
catch (ParseException pe) {
- log.warn("MWP: Error while parsing numbers in '" + line + "'");
+ log.warn("MWP: unparseable number in data row: " + line);
}
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/parsers/SQRelationParser.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SQRelationParser.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SQRelationParser.java Wed Apr 03 16:00:21 2013 +0200
@@ -105,24 +105,66 @@
protected void handleDataLine(String line) {
String[] cols = line.split(SEPERATOR_CHAR);
- if (cols.length < 8) {
+ if (cols.length < 14) {
log.warn("skip invalid data line: '" + line + "'");
return;
}
+ Double km = parseDouble(cols[3], line);
+ Double a = parseDouble(cols[6], line);
+ Double b = parseDouble(cols[7], line);
+ Double qMax = parseDouble(cols[8], line);
+ Double rSq = parseDouble(cols[9], line);
+ Integer nTot = parseInteger(cols[10], line);
+ Integer nOutlier = parseInteger(cols[11], line);
+ Double cFer = parseDouble(cols[12], line);
+ Double cDuan = parseDouble(cols[13], line);
+ if (km == null || a == null || b == null ||
+ qMax == null || cols[1].length() == 0) {
+ if (km == null) {
+ log.error("No km for measurement station: Can not reference measurement station: "
+ + line);
+ }
+ if ( a == null || b == null ||
+ qMax == null || cols[1].length() == 0) {
+ log.error("Incomplete SQ-relation row (missing a, b, Qmax or parameter): "
+ + line);
+ }
+ return;
+ }
+ current.addValue(new ImportSQRelationValue(
+ cols[1],
+ km,
+ a,
+ b,
+ qMax,
+ rSq,
+ nTot,
+ nOutlier,
+ cFer,
+ cDuan));
+ }
+
+ private Double parseDouble(String value, String line) {
+ Double result = null;
try {
- current.addValue(new ImportSQRelationValue(
- cols[1],
- cols[2],
- cols[4],
- nf.parse(cols[3]).doubleValue(),
- nf.parse(cols[6]).doubleValue(),
- nf.parse(cols[7]).doubleValue()
- ));
+ result = Double.valueOf(value.replace(",", "."));
}
- catch (ParseException pe) {
- log.warn("Error while parsing sq relation row: '" + line + "'", pe);
+ catch (NumberFormatException nfe) {
+ log.warn("Unparseable " + value + " in sq relation row: " + line);
}
+ return result;
+ }
+
+ private Integer parseInteger(String value, String line) {
+ Integer result = null;
+ try {
+ result = Integer.valueOf(value);
+ }
+ catch (NumberFormatException nfe) {
+ log.warn("Unparseable " + value + " in sq relation row: " + line);
+ }
+ return result;
}
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentDensityParser.java Wed Apr 03 16:00:21 2013 +0200
@@ -20,7 +20,6 @@
import de.intevation.flys.importer.ImportDepth;
import de.intevation.flys.importer.ImportSedimentDensity;
import de.intevation.flys.importer.ImportSedimentDensityValue;
-import de.intevation.flys.importer.ImportUnit;
import de.intevation.flys.utils.DateGuesser;
@@ -34,9 +33,6 @@
public static final NumberFormat nf = NumberFormat.getInstance(DEFAULT_LOCALE);
- public static final Pattern META_UNIT =
- Pattern.compile("^Einheit: \\[(.*)\\].*");
-
public static final Pattern META_DEPTH =
Pattern.compile("^Tiefe: (\\w++)-(\\w++)( (\\w++))?.*");
@@ -86,13 +82,7 @@
protected void handleMetaLine(String line) {
- if (handleMetaUnit(line)) {
- return;
- }
- else if (handleMetaDepth(line)) {
- return;
- }
- else if (handleMetaColumns(line)) {
+ if (handleMetaDepth(line)) {
return;
}
else {
@@ -101,48 +91,19 @@
}
- private boolean handleMetaColumns(String line) {
- String[] columns = line.split(";");
- for (int i = 0; i < columns.length; i++) {
- if (columns[i].contains("Sedimentdichte")) {
- this.densitsyColumn = i;
- return true;
- }
- }
- return false;
- }
-
-
- protected boolean handleMetaUnit(String line) {
- Matcher m = META_UNIT.matcher(line);
-
- if (m.matches()) {
- String unit = m.group(1);
-
- current.setUnit(new ImportUnit(unit));
-
- return true;
- }
-
- return false;
- }
-
-
protected boolean handleMetaDepth(String line) {
Matcher m = META_DEPTH.matcher(line);
if (m.matches()) {
String lo = m.group(1);
String up = m.group(2);
- String unit = m.group(4);
- log.info("Found sediment density depth: " + lo + " - " + up + " " + unit);
+ log.info("Found sediment density depth: " + lo + " - " + up + " cm");
try {
ImportDepth depth = new ImportDepth(
new BigDecimal(nf.parse(lo).doubleValue()),
- new BigDecimal(nf.parse(up).doubleValue()),
- new ImportUnit(unit)
+ new BigDecimal(nf.parse(up).doubleValue())
);
current.setDepth(depth);
@@ -150,7 +111,7 @@
return true;
}
catch (ParseException pe) {
- log.warn("Error while parsing numbers in: '" + line + "'");
+ log.warn("Unparseable numbers in: '" + line + "'");
}
}
else {
@@ -169,41 +130,33 @@
return;
}
- BigDecimal km;
- BigDecimal density;
+ BigDecimal km = null;
+ BigDecimal shoreOffset = null;
+ BigDecimal density = null;
try {
- km = new BigDecimal(nf.parse(vals[0]).doubleValue());
- density = new BigDecimal(nf.parse(vals[this.densitsyColumn]).doubleValue());
-
+ km = new BigDecimal(nf.parse(vals[0]).doubleValue());
+ density = new BigDecimal(nf.parse(vals[2]).doubleValue());
+ if (!vals[1].isEmpty()) {
+ shoreOffset = new BigDecimal(nf.parse(vals[1]).doubleValue());
+ }
}
catch (ParseException pe) {
- log.warn("Error while parsing numbers in '" + line + "'");
- return;
+ log.warn("Unparseable numbers in '" + line + "'");
}
+ if (km == null || density == null) {
+ log.warn("SDP: No km nor density given. Skip line");
+ return;
+ }
+
BigDecimal year = null;
- try {
- year =
- new BigDecimal(nf.parse(vals[vals.length - 1]).doubleValue());
- }
- catch(ParseException pe) {
- try {
- Date d = DateGuesser.guessDate(vals[vals.length - 1]);
- Calendar c = Calendar.getInstance();
- c.setTime(d);
- year = new BigDecimal(c.get(Calendar.YEAR));
- }
- catch (IllegalArgumentException iae) {
- log.warn("Error while parsing date in '" + line + "'");
- return;
- }
- }
current.addValue(new ImportSedimentDensityValue(
km,
+ shoreOffset,
density,
year,
- vals[vals.length - 1])
+ currentDescription)
);
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/SedimentYieldParser.java Wed Apr 03 16:00:21 2013 +0200
@@ -228,7 +228,7 @@
}
}
catch (ParseException pe) {
- log.warn("SYP: Error while parsing numbers in '" + line + "':", pe);
+ log.warn("SYP: unparseable number in data row '" + line + "':", pe);
}
}
@@ -269,7 +269,7 @@
log.warn("SYP: Unknown time interval string: '" + column + "'");
}
catch (ParseException pe) {
- log.warn("SYP: Error while parsing years: " + column, pe);
+ log.warn("SYP: Could not parse years: " + column, pe);
}
return null;
@@ -285,7 +285,6 @@
String upperA = a.group(4);
String upperB = a.group(5);
- String unitStr = a.group(7);
String lower = lowerA != null ? lowerA : lowerB;
String upper = upperA != null ? upperA : upperB;
@@ -293,12 +292,11 @@
return new ImportGrainFraction(
getGrainFractionTypeName(this.description),
nf.parse(lower).doubleValue(),
- nf.parse(upper).doubleValue(),
- new ImportUnit(unitStr)
+ nf.parse(upper).doubleValue()
);
}
catch (ParseException pe) {
- log.warn("SYP: Error while parsing ranges of: '" + gfStr + "'");
+ log.warn("SYP: Could not parse ranges of: '" + gfStr + "'");
}
}
@@ -308,7 +306,6 @@
String lowerB = b.group(5);
String upperA = b.group(6);
String upperB = b.group(7);
- String unitStr = b.group(9);
String lower = lowerA != null ? lowerA : lowerB;
String upper = upperA != null ? upperA : upperB;
@@ -317,12 +314,11 @@
return new ImportGrainFraction(
getGrainFractionTypeName(this.description),
nf.parse(lower).doubleValue(),
- nf.parse(upper).doubleValue(),
- new ImportUnit(unitStr)
+ nf.parse(upper).doubleValue()
);
}
catch (ParseException pe) {
- log.warn("SYP: Error while parsing ranges of: '" + gfStr + "'");
+ log.warn("SYP: Could not parse ranges of: '" + gfStr + "'");
}
}
@@ -330,7 +326,6 @@
if (c.matches()) {
String oper = c.group(1);
String valueStr = c.group(3);
- String unitStr = c.group(6);
try {
Double value = nf.parse(valueStr).doubleValue();
@@ -339,21 +334,19 @@
return new ImportGrainFraction(
getGrainFractionTypeName(this.description),
value,
- null,
- new ImportUnit(unitStr)
+ null
);
}
else {
return new ImportGrainFraction(
getGrainFractionTypeName(this.description),
null,
- value,
- new ImportUnit(unitStr)
+ value
);
}
}
catch (ParseException pe) {
- log.warn("SYP: Error while parsing ranges of: '" + gfStr + "'");
+ log.warn("SYP: Could not parse ranges of: '" + gfStr + "'");
}
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelDifferencesParser.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelDifferencesParser.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelDifferencesParser.java Wed Apr 03 16:00:21 2013 +0200
@@ -185,12 +185,12 @@
new BigDecimal(nf.parse(value).doubleValue()));
}
catch (ParseException pe) {
- log.warn("Error while parsing value: '" + value + "'");
+ log.warn("Could not parse value: '" + value + "'");
}
}
}
catch (ParseException pe) {
- log.warn("Error while parsing station: '" + line + "'");
+ log.warn("Could not parse station: '" + line + "'");
}
}
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java
--- a/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/importer/parsers/WaterlevelParser.java Wed Apr 03 16:00:21 2013 +0200
@@ -154,7 +154,7 @@
return true;
}
catch (ParseException pe) {
- log.warn("Error while parsing Q range: '" + line + "'");
+ log.warn("Unparseable Q range: '" + line + "'");
}
}
@@ -184,7 +184,7 @@
}
}
catch (ParseException pe) {
- log.warn("Error while parsing number values: '" + line + "'");
+ log.warn("Unparseable number in data row: " + line);
}
}
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/Building.java
--- a/flys-backend/src/main/java/de/intevation/flys/model/Building.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Building.java Wed Apr 03 16:00:21 2013 +0200
@@ -79,6 +79,16 @@
this.geom = geom;
}
+ public static List<Building> getBuildings(int riverId, int kindId) {
+ Session session = SessionHolder.HOLDER.get();
+
+ Query query = session.createQuery(
+ "from Building where river.id =:river_id and kind_id=:kind_id");
+ query.setParameter("kind_id", kindId);
+ query.setParameter("river_id", riverId);
+
+ return query.list();
+ }
public static List<Building> getBuildings(int riverId, String name) {
Session session = SessionHolder.HOLDER.get();
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/CrossSectionTrack.java
--- a/flys-backend/src/main/java/de/intevation/flys/model/CrossSectionTrack.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/model/CrossSectionTrack.java Wed Apr 03 16:00:21 2013 +0200
@@ -134,6 +134,41 @@
return query.list();
}
+ public static List<CrossSectionTrack> getCrossSectionTrack(
+ String river,
+ int kind_id
+ ) {
+ Session session = SessionHolder.HOLDER.get();
+
+ Query query = session.createQuery(
+ "from CrossSectionTrack as cst " +
+ " where river.name =:river" +
+ " and kind_id=:kind_id");
+ query.setParameter("river", river);
+ query.setParameter("kind_id", kind_id);
+
+ return query.list();
+ }
+
+ public static List<CrossSectionTrack> getCrossSectionTrack(
+ String river,
+ String name,
+ int kind_id
+ ) {
+ Session session = SessionHolder.HOLDER.get();
+
+ Query query = session.createQuery(
+ "from CrossSectionTrack as cst " +
+ " where river.name =:river" +
+ " and cst.name=:name" +
+ " and kind_id=:kind_id");
+ query.setParameter("river", river);
+ query.setParameter("name", name);
+ query.setParameter("kind_id", kind_id);
+
+ return query.list();
+ }
+
/**
* Returns the nearest CrossSectionTrack of <i>river</i> to a given
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/Depth.java
--- a/flys-backend/src/main/java/de/intevation/flys/model/Depth.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Depth.java Wed Apr 03 16:00:21 2013 +0200
@@ -10,8 +10,6 @@
import javax.persistence.Column;
import javax.persistence.SequenceGenerator;
import javax.persistence.GenerationType;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
@Entity
@@ -23,17 +21,14 @@
private BigDecimal lower;
private BigDecimal upper;
- private Unit unit;
-
public Depth() {
}
- public Depth(BigDecimal lower, BigDecimal upper, Unit unit) {
+ public Depth(BigDecimal lower, BigDecimal upper) {
this.lower = lower;
this.upper = upper;
- this.unit = unit;
}
@Id
@@ -71,14 +66,5 @@
this.upper = upper;
}
- @OneToOne
- @JoinColumn(name = "unit_id")
- public Unit getUnit() {
- return unit;
- }
-
- public void setUnit(Unit unit) {
- this.unit = unit;
- }
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/Floodplain.java
--- a/flys-backend/src/main/java/de/intevation/flys/model/Floodplain.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Floodplain.java Wed Apr 03 16:00:21 2013 +0200
@@ -13,6 +13,7 @@
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.annotations.Type;
+import org.hibernate.HibernateException;
import com.vividsolutions.jts.geom.Polygon;
@@ -32,6 +33,8 @@
private Polygon geom;
+ private String name;
+
public Floodplain() {
}
@@ -77,6 +80,42 @@
this.geom = geom;
}
+ @Column(name = "name")
+ public String getName() {
+ return name;
+ }
+
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public static List<Floodplain> getFloodplains(String river, String name, int kind)
+ throws HibernateException {
+ Session session = SessionHolder.HOLDER.get();
+ Query query = session.createQuery(
+ "from Floodplain as fp where river.name =:river" +
+ " and kind.id =:kind" +
+ " and fp.name=:name");
+ query.setParameter("river", river);
+ query.setParameter("kind", kind);
+ query.setParameter("name", name);
+
+ List<Floodplain> list = query.list();
+ return list.isEmpty() ? null : list;
+ }
+
+ public static List<Floodplain> getFloodplains(String river, int kind)
+ throws HibernateException {
+ Session session = SessionHolder.HOLDER.get();
+ Query query = session.createQuery(
+ "from Floodplain where river.name =:river AND kind.id =:kind");
+ query.setParameter("river", river);
+ query.setParameter("kind", kind);
+
+ List<Floodplain> list = query.list();
+ return list.isEmpty() ? null : list;
+ }
public static Floodplain getFloodplain(String river) {
Session session = SessionHolder.HOLDER.get();
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/GrainFraction.java
--- a/flys-backend/src/main/java/de/intevation/flys/model/GrainFraction.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/model/GrainFraction.java Wed Apr 03 16:00:21 2013 +0200
@@ -9,8 +9,6 @@
import javax.persistence.Column;
import javax.persistence.SequenceGenerator;
import javax.persistence.GenerationType;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
import org.apache.log4j.Logger;
@@ -38,17 +36,14 @@
private Double lower;
private Double upper;
- private Unit unit;
-
public GrainFraction() {
}
- public GrainFraction(String name, Double lower, Double upper, Unit unit) {
+ public GrainFraction(String name, Double lower, Double upper) {
this.name = name;
this.lower = lower;
this.upper = upper;
- this.unit = unit;
}
@Id
@@ -95,14 +90,5 @@
this.upper = upper;
}
- @OneToOne
- @JoinColumn(name = "unit_id")
- public Unit getUnit() {
- return unit;
- }
-
- public void setUnit(Unit unit) {
- this.unit = unit;
- }
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java
--- a/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundary.java Wed Apr 03 16:00:21 2013 +0200
@@ -91,6 +91,18 @@
this.geom = geom;
}
+ public static List<HydrBoundary> getHydrBoundaries(int riverId, String name, int kindId) {
+ Session session = SessionHolder.HOLDER.get();
+
+ Query query = session.createQuery(
+ "from HydrBoundary where river.id =:river_id and name=:name" +
+ " and kind.id=:kind_id");
+ query.setParameter("river_id", riverId);
+ query.setParameter("name", name);
+ query.setParameter("kind_id", kindId);
+
+ return query.list();
+ }
public static List<HydrBoundary> getHydrBoundaries(int riverId, String name) {
Session session = SessionHolder.HOLDER.get();
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/HydrBoundaryPoly.java
--- a/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundaryPoly.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/model/HydrBoundaryPoly.java Wed Apr 03 16:00:21 2013 +0200
@@ -104,6 +104,27 @@
return query.list();
}
+ public static List<HydrBoundaryPoly> getHydrBoundaries(int riverId,
+ int kind, int sectie, int sobek) {
+ Session session = SessionHolder.HOLDER.get();
+
+ String filter = " where river.id =:river_id";
+
+ if (kind != -1) {
+ filter += " and kind = " + kind;
+ }
+ if (sectie != -1) {
+ filter += " and sectie = " + sectie;
+ }
+ if (sobek != -1){
+ filter += " and sobek = " + sobek;
+ }
+
+ Query query = session.createQuery("from HydrBoundaryPoly" + filter);
+ query.setParameter("river_id", riverId);
+ return query.list();
+ }
+
/**
* Get sectie.
*
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/Jetty.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-backend/src/main/java/de/intevation/flys/model/Jetty.java Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,79 @@
+package de.intevation.flys.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+
+import com.vividsolutions.jts.geom.Geometry;
+
+import org.hibernate.Session;
+import org.hibernate.Query;
+import org.hibernate.annotations.Type;
+
+import de.intevation.flys.backend.SessionHolder;
+
+@Entity
+@Table(name = "jetties")
+public class Jetty
+implements Serializable
+{
+ private Integer id;
+ private River river;
+ private Geometry geom;
+
+ public Jetty() {
+ }
+
+
+ @Id
+ @Column(name = "id")
+ public Integer getId() {
+ return id;
+ }
+
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+
+ @OneToOne
+ @JoinColumn(name = "river_id")
+ public River getRiver() {
+ return river;
+ }
+
+
+ public void setRiver(River river) {
+ this.river = river;
+ }
+
+ @Column(name = "geom")
+ @Type(type = "org.hibernatespatial.GeometryUserType")
+ public Geometry getGeom() {
+ return geom;
+ }
+
+
+ public void setGeom(Geometry geom) {
+ this.geom = geom;
+ }
+
+ public static List<Jetty> getJetties(int riverId, int kindId) {
+ Session session = SessionHolder.HOLDER.get();
+
+ Query query = session.createQuery(
+ "from Jetty where river.id =:river_id and kind_id=:kind_id");
+ query.setParameter("kind_id", kindId);
+ query.setParameter("river_id", riverId);
+
+ return query.list();
+ }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
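
The new finder methods in this changeset (Building.getBuildings(), CrossSectionTrack.getCrossSectionTrack(), Floodplain.getFloodplains(), HydrBoundary.getHydrBoundaries() and Jetty.getJetties() above) share one pattern: fetch the thread-bound Session from SessionHolder and run an HQL query with named parameters. A minimal usage sketch, assuming a Session has already been bound to SessionHolder by the surrounding importer or artifact code and that a jetty kind id of 0 exists for the river; both are assumptions, not guaranteed by the patch.

import java.util.List;

import de.intevation.flys.model.Jetty;

public class JettyLookupSketch {

    /** Lists the jetties of one river; 0 is a placeholder kind id. */
    public static void listJetties(int riverId) {
        List<Jetty> jetties = Jetty.getJetties(riverId, 0);
        for (Jetty jetty: jetties) {
            System.out.println("jetty " + jetty.getId() + ": " + jetty.getGeom());
        }
    }
}

Named parameters keep the HQL string constant; the HydrBoundaryPoly variant above concatenates kind, sectie and sobek directly into the query, which is harmless only because those values are plain ints.
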
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/MeasurementStation.java
--- a/flys-backend/src/main/java/de/intevation/flys/model/MeasurementStation.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/model/MeasurementStation.java Wed Apr 03 16:00:21 2013 +0200
@@ -29,6 +29,7 @@
private River river;
private Gauge gauge;
+ private String gaugeName;
private TimeInterval observationTimerange;
@@ -37,7 +38,9 @@
public MeasurementStation(River river, String name, String measurementType,
String riverside, Double station, Range range, Gauge gauge,
- TimeInterval observationTimerange, String operator, String description) {
+ String gaugeName, TimeInterval observationTimerange, String operator,
+ String description
+ ) {
this.river = river;
this.name = name;
this.measurementType = measurementType;
@@ -45,6 +48,7 @@
this.station = station;
this.range = range;
this.gauge = gauge;
+ this.gaugeName = gaugeName;
this.observationTimerange = observationTimerange;
this.operator = operator;
this.description = description;
@@ -109,6 +113,15 @@
this.gauge = gauge;
}
+ @Column(name = "reference_gauge_name")
+ public String getGaugeName() {
+ return gaugeName;
+ }
+
+ public void setGaugeName(String gaugeName) {
+ this.gaugeName = gaugeName;
+ }
+
@Column(name = "station")
public Double getStation() {
return station;
@@ -155,5 +168,4 @@
public void setDescription(String description) {
this.description = description;
}
-
}
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/RiverAxis.java
--- a/flys-backend/src/main/java/de/intevation/flys/model/RiverAxis.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/model/RiverAxis.java Wed Apr 03 16:00:21 2013 +0200
@@ -34,6 +34,7 @@
private Integer id;
private AxisKind kind;
private River river;
+ private String name;
private MultiLineString geom;
public static final int KIND_UNKOWN = 0;
@@ -67,6 +68,15 @@
this.river = river;
}
+ @Column(name = "name")
+ public String getName() {
+ return name;
+ }
+
+
+ public void setName(String name) {
+ this.name = name;
+ }
/**
* Get kind.
@@ -106,6 +116,21 @@
return getRiverAxis(river, KIND_CURRENT);
}
+ public static List<RiverAxis> getRiverAxis(String river, String name, int kind)
+ throws HibernateException {
+ Session session = SessionHolder.HOLDER.get();
+ Query query = session.createQuery(
+ "from RiverAxis as ax where river.name =:river" +
+ " and kind.id =:kind" +
+ " and ax.name=:name");
+ query.setParameter("river", river);
+ query.setParameter("kind", kind);
+ query.setParameter("name", name);
+
+ List<RiverAxis> list = query.list();
+ return list.isEmpty() ? null : list;
+ }
+
public static List<RiverAxis> getRiverAxis(String river, int kind)
throws HibernateException {
Session session = SessionHolder.HOLDER.get();
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/SQRelationValue.java
--- a/flys-backend/src/main/java/de/intevation/flys/model/SQRelationValue.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SQRelationValue.java Wed Apr 03 16:00:21 2013 +0200
@@ -22,12 +22,17 @@
private SQRelation sqRelation;
private String parameter;
- private String fraction;
- private String function;
- private double km;
- private double a;
- private double b;
+ private MeasurementStation measurementStation;
+
+ private Double a;
+ private Double b;
+ private Double qMax;
+ private Double rSQ;
+ private Integer nTot;
+ private Integer nOutlier;
+ private Double cFerguson;
+ private Double cDuan;
protected SQRelationValue() {
@@ -35,21 +40,29 @@
public SQRelationValue(
- SQRelation sqRelation,
- String parameter,
- String fraction,
- String function,
- double km,
- double a,
- double b
+ SQRelation sqRelation,
+ String parameter,
+ MeasurementStation measurementStation,
+ Double a,
+ Double b,
+ Double qMax,
+ Double rSQ,
+ Integer nTot,
+ Integer nOutlier,
+ Double cFerguson,
+ Double cDuan
) {
- this.sqRelation = sqRelation;
- this.parameter = parameter;
- this.fraction = fraction;
- this.function = function;
- this.km = km;
- this.a = a;
- this.b = b;
+ this.sqRelation = sqRelation;
+ this.parameter = parameter;
+ this.measurementStation = measurementStation;
+ this.a = a;
+ this.b = b;
+ this.qMax = qMax;
+ this.rSQ = rSQ;
+ this.nTot = nTot;
+ this.nOutlier = nOutlier;
+ this.cFerguson = cFerguson;
+ this.cDuan = cDuan;
}
@@ -91,54 +104,88 @@
this.parameter = parameter;
}
-
- @Column(name = "fraction")
- public String getFraction() {
- return fraction;
+ @OneToOne
+ @JoinColumn(name = "measurement_station_id")
+ public MeasurementStation getMeasurementStation() {
+ return measurementStation;
}
- public void setFraction(String fraction) {
- this.fraction = fraction;
+ public void setMeasurementStation(MeasurementStation measurementStation) {
+ this.measurementStation = measurementStation;
}
- @Column(name = "function")
- public String getFunction() {
- return function;
- }
-
- public void setFunction(String function) {
- this.function = function;
- }
-
-
- @Column(name = "km")
- public double getKm() {
- return km;
- }
-
- public void setKm(double km) {
- this.km = km;
- }
-
-
@Column(name = "a")
- public double getA() {
+ public Double getA() {
return a;
}
- public void setA(double a) {
+ public void setA(Double a) {
this.a = a;
}
@Column(name = "b")
- public double getB() {
+ public Double getB() {
return b;
}
- public void setB(double b) {
+ public void setB(Double b) {
this.b = b;
}
+
+ @Column(name = "qmax")
+ public Double getQMax() {
+ return qMax;
+ }
+
+ public void setQMax(Double qMax) {
+ this.qMax = qMax;
+ }
+
+ @Column(name = "rsq")
+ public Double getRSQ() {
+ return rSQ;
+ }
+
+ public void setRSQ(Double rSQ) {
+ this.rSQ = rSQ;
+ }
+
+ @Column(name = "ntot")
+ public Integer getNTot () {
+ return nTot;
+ }
+
+ public void setNTot(Integer nTot) {
+ this.nTot = nTot;
+ }
+
+ @Column(name = "noutl")
+ public Integer getNOutlier() {
+ return nOutlier;
+ }
+
+ public void setNOutlier(Integer nOutlier) {
+ this.nOutlier = nOutlier;
+ }
+
+ @Column(name = "cferguson")
+ public Double getCFerguson() {
+ return cFerguson;
+ }
+
+ public void setCFerguson(Double cFerguson) {
+ this.cFerguson = cFerguson;
+ }
+
+ @Column(name = "cduan")
+ public Double getCDuan() {
+ return cDuan;
+ }
+
+ public void setCDuan(Double cDuan) {
+ this.cDuan = cDuan;
+ }
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
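
The move from primitive double fields to the wrapper types Double and Integer in SQRelationValue is what allows rows with empty optional columns (rsq, ntot, noutl, cferguson, cduan) to be stored: the wrappers can be null, which Hibernate maps to SQL NULL. A small sketch of the new constructor; the parameter name, the numbers and the arguments are placeholders only.

import de.intevation.flys.model.MeasurementStation;
import de.intevation.flys.model.SQRelation;
import de.intevation.flys.model.SQRelationValue;

public class SQRelationValueSketch {

    /** Builds a value whose optional statistics are simply left null. */
    public static SQRelationValue minimalValue(
        SQRelation relation,
        MeasurementStation station
    ) {
        return new SQRelationValue(
            relation,
            "A",      // parameter (placeholder)
            station,  // resolved from the km column
            1.2,      // a
            0.8,      // b
            3500.0,   // qMax
            null,     // rSq not present in the input row
            null,     // nTot
            null,     // nOutlier
            null,     // cFerguson
            null);    // cDuan
    }
}
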
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/SedimentDensity.java
--- a/flys-backend/src/main/java/de/intevation/flys/model/SedimentDensity.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SedimentDensity.java Wed Apr 03 16:00:21 2013 +0200
@@ -25,8 +25,6 @@
private Depth depth;
- private Unit unit;
-
private List<SedimentDensityValue> values;
private String description;
@@ -36,10 +34,9 @@
}
- public SedimentDensity(River river, Depth depth, Unit unit, String desc) {
+ public SedimentDensity(River river, Depth depth, String desc) {
this.river = river;
this.depth = depth;
- this.unit = unit;
this.description = desc;
}
@@ -80,16 +77,6 @@
this.depth = depth;
}
- @OneToOne
- @JoinColumn(name = "unit_id")
- public Unit getUnit() {
- return unit;
- }
-
- public void setUnit(Unit unit) {
- this.unit = unit;
- }
-
@Column(name = "description")
public String getDescription() {
return description;
diff -r 8d0af912351c -r 25c2505df28f flys-backend/src/main/java/de/intevation/flys/model/SedimentDensityValue.java
--- a/flys-backend/src/main/java/de/intevation/flys/model/SedimentDensityValue.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-backend/src/main/java/de/intevation/flys/model/SedimentDensityValue.java Wed Apr 03 16:00:21 2013 +0200
@@ -23,6 +23,7 @@
private SedimentDensity sedimentDensity;
private BigDecimal station;
+ private BigDecimal shoreOffset;
private BigDecimal density;
private BigDecimal year;
@@ -36,12 +37,14 @@
public SedimentDensityValue(
SedimentDensity sedimentDensity,
BigDecimal station,
+ BigDecimal shoreOffset,
BigDecimal density,
BigDecimal year,
String desc
) {
this.sedimentDensity = sedimentDensity;
this.station = station;
+ this.shoreOffset = shoreOffset;
this.density = density;
this.year = year;
this.description = desc;
@@ -83,6 +86,15 @@
this.station = station;
}
+ @Column(name = "shore_offset")
+ public BigDecimal getShoreOffset() {
+ return shoreOffset;
+ }
+
+ public void setShoreOffset(BigDecimal shoreOffset) {
+ this.shoreOffset = shoreOffset;
+ }
+
@Column(name = "density")
public BigDecimal getDensity() {
return density;
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/FLYS.java
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYS.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYS.java Wed Apr 03 16:00:21 2013 +0200
@@ -514,6 +514,60 @@
});
}
+ public void newSQRelation(String river, int measurementStation) {
+ Config config = Config.getInstance();
+
+ final String locale = config.getLocale();
+ final String riv = river;
+ final int mStation = measurementStation;
+ final FLYS flys = this;
+
+ User user = getCurrentUser();
+
+ if (user == null) {
+ SC.warn(MSG.error_not_logged_in());
+ return;
+ }
+
+ collectionService.create(locale, user.identifier(),
+ new AsyncCallback<Collection>() {
+ @Override
+ public void onFailure(Throwable caught) {
+ GWT.log("Could not create new collection.");
+ SC.warn(getExceptionString(MSG, caught));
+ }
+
+ @Override
+ public void onSuccess(Collection collection) {
+ GWT.log("Successfully created a new collection.");
+ final Collection col = collection;
+ artifactService.createSQRelationArtifact(
+ col, locale, riv, mStation,
+ new AsyncCallback<Artifact>() {
+ @Override
+ public void onFailure(Throwable caught) {
+ GWT.log("Could not create the new artifact.");
+ SC.warn(getExceptionString(MSG, caught));
+ }
+
+ @Override
+ public void onSuccess(Artifact artifact) {
+ GWT.log("Successfully created a new artifact.");
+ CollectionView view = new CollectionView(flys,
+ col, artifact);
+ workspace.addView(col.identifier(), view);
+
+ view.addCollectionChangeHandler(getProjectList());
+ view.addCloseClickHandler(
+ new CloseCollectionViewHandler(
+ FLYS.this, col.identifier()));
+ projectList.updateUserCollections();
+ }
+ });
+ }
+ });
+ }
+
@Override
public void onCollectionChange(CollectionChangeEvent event) {
Collection oldC = event.getOldValue();
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.java
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.java Wed Apr 03 16:00:21 2013 +0200
@@ -8,6 +8,8 @@
*/
public interface FLYSConstants extends ConstantsWithLookup {
+ String static_sqrelation();
+
String add();
String unexpected_exception();
@@ -854,7 +856,7 @@
String land();
- String potentiel();
+ String potential();
String rastermap();
@@ -1164,6 +1166,25 @@
String hws_fed_unknown();
+ String jetties();
+
+ String route_data();
+
+ String other();
+
+ String axis();
+
+ String bfg_model();
+
+ String federal();
+
+ String areas();
+
+ String sobek_areas();
+
+ String sobek_flooded();
+
+ String measurements();
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.properties
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.properties Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants.properties Wed Apr 03 16:00:21 2013 +0200
@@ -207,6 +207,7 @@
askThemeRemove = Are you sure that you want to remove the selected theme / themes?
add = Add
+static_sqrelation = SQ relation
discharge_curve = Discharge Curve at Gauge
discharge_curve_gaugeless = Discharge Curve
gauge_discharge_curve = Discharge Table at Gauge
@@ -437,7 +438,7 @@
uesk = Floodmaps
calculations = Calculations
current = Current
-potentiel = Potentiel
+potential = Potential
bfg = BfG
land = Land
rastermap = Rastermap
@@ -461,6 +462,16 @@
hws_lines = Lines
hws_points = Points
hws_fed_unknown = Unknown State
+jetties = Jetties
+route_data = Route Data
+other = Other
+axis = Axis
+bfg_model = BfG-Model
+federal = Federal States
+areas = Areas
+sobek_flooded = SOBEK-Boundary flooded / not flooded
+sobek_areas = SOBEK-Areas
+measurements = Measurements
startcolor = Colorrange start color
endcolor = Colorrange end color
@@ -560,7 +571,7 @@
wsplgen_cat3 = Fill Color 2.0 <= DIFF < 3
wsplgen_cat4 = Fill Color 3.0 <= DIFF < 4
wsplgen_cat5 = Fill Color 4.0 <= DIFF
-attribution = ©Intevation GmbH 2012<br>Data ©<a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>
+attribution = ©Intevation GmbH 2013<br>Data ©<a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>
# Manual Points Editor
addpoints = Add points
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_de.properties
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_de.properties Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_de.properties Wed Apr 03 16:00:21 2013 +0200
@@ -207,6 +207,7 @@
zoomboxTooltip = Ausschnitt vergr\u00f6\u00dfern
chartPropertiesTooltip = Diagrammeigenschaften
+static_sqrelation = Transport-Abfluss Beziehung
discharge_curve = Abflusskurve am Pegel
discharge_curve_gaugeless = Abflusskurve
gauge_discharge_curve = Abflusstafel am Pegel
@@ -239,7 +240,7 @@
historical_discharge_export = Historische Abflusskurven Export
showextramark = Zeige Anfang der Extrapolation
extreme_wq_curve = W/Q
-fix_wq_curve = W/Q-Diagram
+fix_wq_curve = W/Q-Diagramm
fix_deltawt_curve = \u0394 W/t
fix_longitudinal_section_curve = L\u00e4ngsschnitt
fix_derivate_curve = Ableitungskurve
@@ -409,7 +410,7 @@
requireDGM = Sie m\u00fcssen ein DGM ausw\u00e4hlen.
upload_file = hochladen
shape_file_upload = Shapedatei hochladen
-attribution = ©Intevation GmbH 2012<br>Data ©<a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>
+attribution = ©Intevation GmbH 2013<br>Data ©<a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>
# data cage
waterlevels = Wasserst\u00e4nde
@@ -438,7 +439,7 @@
uesk = \u00dcberschwemmungsfl\u00e4chen
calculations = Berechnungen
current = Aktuell
-potentiel = Potenziell
+potential = Potenziell
bfg = BfG
land = Land
rastermap = Rasterkarte
@@ -462,6 +463,16 @@
hws_lines = Liniendaten
hws_points = Punktdaten
hws_fed_unknown = Unbekanntes Bundesland
+jetties = Buhnen
+route_data = Streckendaten
+other = Sonstige
+axis = Achse
+bfg_model = BfG-Modell
+federal = Bundesländer
+areas = Flächen
+sobek_flooded = SOBEK-Grenzlinie durchflutet / nicht durchflutet
+sobek_areas = SOBEK-Bereiche
+measurements = Messungen
startcolor = Farbverlauf Startfarbe
endcolor = Farbverlauf Endfarbe
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_en.properties
--- a/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_en.properties Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/FLYSConstants_en.properties Wed Apr 03 16:00:21 2013 +0200
@@ -208,6 +208,7 @@
askThemeRemove = Are you sure that you want to remove the selected theme / themes?
add = Add
+static_sqrelation = SQ relation
discharge_curve = Discharge Curve at Gauge
discharge_curve_gaugeless = Discharge Curve
gauge_discharge_curve = Discharge Table at Gauge
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/services/ArtifactService.java
--- a/flys-client/src/main/java/de/intevation/flys/client/client/services/ArtifactService.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/services/ArtifactService.java Wed Apr 03 16:00:21 2013 +0200
@@ -46,5 +46,19 @@
String river,
Long gaugeref
) throws ServerException;
+
+ /**
+ * Create a new SQRelationArtifact
+ *
+ * @param collection the collection to add the artifact to
+ * @param river the river
+ * @param measurementStation the measurement station id
+ */
+ public Artifact createSQRelationArtifact(
+ Collection collection,
+ String locale,
+ String river,
+ int measurementStation
+ ) throws ServerException;
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/services/ArtifactServiceAsync.java
--- a/flys-client/src/main/java/de/intevation/flys/client/client/services/ArtifactServiceAsync.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/services/ArtifactServiceAsync.java Wed Apr 03 16:00:21 2013 +0200
@@ -28,5 +28,13 @@
Long gaugeref,
AsyncCallback<Artifact> callback
);
+
+ public void createSQRelationArtifact(
+ Collection collection,
+ String locale,
+ String river,
+ int measurementStation,
+ AsyncCallback<Artifact> callback
+ );
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
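
The two interface additions above form the usual GWT-RPC pair: the synchronous signature in ArtifactService and its asynchronous twin taking an AsyncCallback<Artifact> in ArtifactServiceAsync. A compact caller sketch; the locale, river name and station id are placeholder values, and the service instance is assumed to be obtained elsewhere.

import com.google.gwt.core.client.GWT;
import com.google.gwt.user.client.rpc.AsyncCallback;

import de.intevation.flys.client.client.services.ArtifactServiceAsync;
import de.intevation.flys.client.shared.model.Artifact;
import de.intevation.flys.client.shared.model.Collection;

public class SQRelationCaller {

    public static void create(ArtifactServiceAsync service, Collection collection) {
        service.createSQRelationArtifact(collection, "de", "Elbe", 42,
            new AsyncCallback<Artifact>() {
                @Override
                public void onFailure(Throwable caught) {
                    GWT.log("Could not create SQ relation artifact.");
                }

                @Override
                public void onSuccess(Artifact artifact) {
                    GWT.log("Created SQ relation artifact " + artifact.getUuid());
                }
            });
    }
}
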
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/ui/CollectionView.java
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/CollectionView.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/CollectionView.java Wed Apr 03 16:00:21 2013 +0200
@@ -39,6 +39,7 @@
import de.intevation.flys.client.client.services.DescribeCollectionServiceAsync;
import de.intevation.flys.client.client.services.LoadArtifactService;
import de.intevation.flys.client.client.services.LoadArtifactServiceAsync;
+import de.intevation.flys.client.client.ui.ScreenLock;
import de.intevation.flys.client.shared.model.Artifact;
import de.intevation.flys.client.shared.model.ArtifactDescription;
import de.intevation.flys.client.shared.model.Collection;
@@ -117,6 +118,7 @@
/** The layout. */
protected Layout layout;
+ /** Layout to show spinning wheel of joy. */
protected VLayout lockScreen;
protected int artifactsQueue;
@@ -296,36 +298,13 @@
/** Disables input, grey out, show spinning wheel of joy. */
public void lockUI() {
- if (lockScreen == null) {
- lockScreen = new VLayout();
- lockScreen.setWidth100();
- lockScreen.setHeight100();
- lockScreen.setBackgroundColor("#7f7f7f");
- lockScreen.setOpacity(50);
- lockScreen.setAlign(VerticalAlignment.CENTER);
- lockScreen.setDefaultLayoutAlign(VerticalAlignment.CENTER);
-
- HLayout inner = new HLayout();
- inner.setAlign(Alignment.CENTER);
- inner.setDefaultLayoutAlign(Alignment.CENTER);
- inner.setOpacity(100);
-
- Img img = new Img(
- GWT.getHostPageBaseURL() + messages.loadingImg(),
- 25, 25);
-
- inner.addMember(img);
-
- lockScreen.addMember(inner);
- }
-
- layout.addChild(lockScreen);
+ lockScreen = ScreenLock.lockUI(layout, lockScreen);
}
/** Enable input, remove grey, remove spinning wheel of joy. */
public void unlockUI() {
- layout.removeChild(lockScreen);
+ ScreenLock.unlockUI(layout, lockScreen);
}
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/ui/DatacageWidget.java
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/DatacageWidget.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/DatacageWidget.java Wed Apr 03 16:00:21 2013 +0200
@@ -24,6 +24,7 @@
import de.intevation.flys.client.client.event.DatacageHandler;
import de.intevation.flys.client.client.services.MetaDataService;
import de.intevation.flys.client.client.services.MetaDataServiceAsync;
+import de.intevation.flys.client.client.ui.ScreenLock;
import de.intevation.flys.client.shared.model.Artifact;
import de.intevation.flys.client.shared.model.AttrList;
import de.intevation.flys.client.shared.model.DataCageNode;
@@ -65,6 +66,9 @@
protected List<DatacageHandler> handlers;
protected List<DatacageDoubleClickHandler> doubleHandlers;
+ /** Layout to show spinning wheel of joy. */
+ protected VLayout lockScreen;
+
public DatacageWidget() {
handlers = new ArrayList<DatacageHandler>();
@@ -168,6 +172,14 @@
triggerTreeBuilding();
}
+ public void lockUI() {
+ lockScreen = ScreenLock.lockUI(this, lockScreen);
+ }
+
+ /** Enable input, remove grey, remove spinning wheel of joy. */
+ public void unlockUI() {
+ ScreenLock.unlockUI(this, lockScreen);
+ }
/**
* @param handler Handler to be added (notified on add-action).
@@ -382,6 +394,8 @@
String artifactId = artifact.getUuid();
String userId = (user != null) ? user.identifier() : null;
+ lockUI();
+
metaDataService.getMetaData(
locale,
artifactId,
@@ -393,6 +407,7 @@
public void onFailure(Throwable caught) {
GWT.log("Could not load meta data.");
SC.warn(caught.getMessage());
+ unlockUI();
}
@Override
@@ -405,9 +420,10 @@
tree.setRoot(root);
TreeNode[] nodes = tree.getChildren(root);
- for (int i = 0; i < nodes.length; i++) {
- if(!tree.hasChildren(nodes[i])) {
- nodes[i].setIsFolder(true);
+ for (TreeNode node: nodes) {
+ if (node.getAttribute("factory") == null &&
+ !tree.hasChildren(node)) {
+ node.setIsFolder(true);
}
}
@@ -415,6 +431,7 @@
tree.openAll();
}
treeGrid.setData(tree);
+ unlockUI();
}
});
}
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/ui/GaugeDischargeCurvePanel.java
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/GaugeDischargeCurvePanel.java Wed Apr 03 15:59:01 2013 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,68 +0,0 @@
-package de.intevation.flys.client.client.ui;
-
-import com.google.gwt.core.client.GWT;
-
-import com.smartgwt.client.widgets.Canvas;
-import com.smartgwt.client.widgets.Label;
-import com.smartgwt.client.widgets.form.DynamicForm;
-import com.smartgwt.client.widgets.layout.HLayout;
-import com.smartgwt.client.widgets.layout.VLayout;
-
-import de.intevation.flys.client.client.FLYSConstants;
-import de.intevation.flys.client.shared.model.Data;
-import de.intevation.flys.client.shared.model.DataItem;
-import de.intevation.flys.client.shared.model.DataList;
-
-
-/**
- * This UIProvider displays the old DataItems of GaugeDischargeCurveArtifact
- *
- * @author <a href="mailto:bjoern.ricks at intevation.de">Björn Ricks</a>
- */
-public class GaugeDischargeCurvePanel
-extends AbstractUIProvider
-{
- private static final long serialVersionUID = 7411866539525588336L;
-
- /** The message class that provides i18n strings.*/
- protected FLYSConstants messages = GWT.create(FLYSConstants.class);
-
- /** The combobox.*/
- protected DynamicForm form;
-
- @Override
- public Canvas create(DataList data) {
- VLayout layout = new VLayout();
- return layout;
- }
-
- @Override
- public Canvas createOld(DataList dataList) {
- VLayout vLayout = new VLayout();
- vLayout.setWidth("400px");
-
- int size = dataList.size();
- for (int i = 0; i < size; i++) {
- Data data = dataList.get(i);
- DataItem[] items = data.getItems();
-
- for (DataItem item: items) {
- HLayout hLayout = new HLayout();
-
- hLayout.addMember(new Label(item.getLabel()));
- hLayout.addMember(new Label(item.getStringValue()));
-
- vLayout.addMember(hLayout);
- vLayout.setWidth("130px");
- }
- }
-
- return vLayout;
- }
-
- @Override
- protected Data[] getData() {
- return null;
- }
-}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/ui/HWSDatacagePanel.java
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/HWSDatacagePanel.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/HWSDatacagePanel.java Wed Apr 03 16:00:21 2013 +0200
@@ -1,12 +1,10 @@
package de.intevation.flys.client.client.ui;
-import java.util.ArrayList;
-import java.util.List;
+import com.google.gwt.core.client.GWT;
-import com.google.gwt.core.client.GWT;
import com.smartgwt.client.widgets.Canvas;
import com.smartgwt.client.widgets.Label;
-import com.smartgwt.client.widgets.grid.ListGridRecord;
+
import com.smartgwt.client.widgets.layout.HLayout;
import com.smartgwt.client.widgets.layout.VLayout;
@@ -15,10 +13,11 @@
import de.intevation.flys.client.shared.model.DataList;
import de.intevation.flys.client.shared.model.DefaultData;
import de.intevation.flys.client.shared.model.DefaultDataItem;
-import de.intevation.flys.client.shared.model.Recommendation;
-import de.intevation.flys.client.shared.model.ToLoad;
import de.intevation.flys.client.shared.model.User;
+import java.util.ArrayList;
+import java.util.List;
+
public class HWSDatacagePanel
extends DatacagePanel
@@ -126,4 +125,4 @@
return data;
}
}
-// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
\ No newline at end of file
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/ui/ScreenLock.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/ScreenLock.java Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,55 @@
+package de.intevation.flys.client.client.ui;
+
+import com.google.gwt.core.client.GWT;
+
+import com.smartgwt.client.types.Alignment;
+import com.smartgwt.client.types.VerticalAlignment;
+import com.smartgwt.client.widgets.Img;
+import com.smartgwt.client.widgets.layout.HLayout;
+import com.smartgwt.client.widgets.layout.Layout;
+import com.smartgwt.client.widgets.layout.VLayout;
+
+import de.intevation.flys.client.client.FLYSConstants;
+
+/** Basic static functionality to show spinning wheel. */
+public class ScreenLock {
+
+ /** The message class that provides i18n strings. */
+ protected static FLYSConstants messages = GWT.create(FLYSConstants.class);
+
+ /** Disables input, grey out, show spinning wheel of joy. */
+ public static VLayout lockUI(Layout layout, VLayout lockScreen) {
+ if (lockScreen == null) {
+ lockScreen = new VLayout();
+ lockScreen.setWidth100();
+ lockScreen.setHeight100();
+ lockScreen.setBackgroundColor("#7f7f7f");
+ lockScreen.setOpacity(50);
+ lockScreen.setAlign(VerticalAlignment.CENTER);
+ lockScreen.setDefaultLayoutAlign(VerticalAlignment.CENTER);
+
+ HLayout inner = new HLayout();
+ inner.setAlign(Alignment.CENTER);
+ inner.setDefaultLayoutAlign(Alignment.CENTER);
+ inner.setOpacity(100);
+
+ Img img = new Img(
+ GWT.getHostPageBaseURL() + messages.loadingImg(),
+ 25, 25);
+
+ inner.addMember(img);
+
+ lockScreen.addMember(inner);
+ }
+
+ layout.addChild(lockScreen);
+ return lockScreen;
+ }
+
+ /** Enable input, remove grey, remove spinning wheel of joy. */
+ public static void unlockUI(Layout layout, VLayout lockScreen) {
+ layout.removeChild(lockScreen);
+ }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
+
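
ScreenLock pulls the overlay handling out of CollectionView so that any Layout-based widget can show the spinner while an asynchronous call runs, as DatacageWidget now does below. A minimal usage sketch; the AnyServiceAsync interface is hypothetical and merely stands in for a real GWT-RPC async interface.

import com.google.gwt.user.client.rpc.AsyncCallback;

import com.smartgwt.client.widgets.layout.VLayout;

import de.intevation.flys.client.client.ui.ScreenLock;

public class LockingPanel extends VLayout {

    /** Hypothetical async service, standing in for any GWT-RPC interface. */
    public interface AnyServiceAsync {
        void load(AsyncCallback<String> callback);
    }

    protected VLayout lockScreen;

    public void load(AnyServiceAsync service) {
        // Grey out and show the spinner until the call returns.
        lockScreen = ScreenLock.lockUI(this, lockScreen);

        service.load(new AsyncCallback<String>() {
            @Override
            public void onFailure(Throwable caught) {
                ScreenLock.unlockUI(LockingPanel.this, lockScreen);
            }

            @Override
            public void onSuccess(String result) {
                ScreenLock.unlockUI(LockingPanel.this, lockScreen);
            }
        });
    }
}
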
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/ui/StaticDataPanel.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/StaticDataPanel.java Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,68 @@
+package de.intevation.flys.client.client.ui;
+
+import com.google.gwt.core.client.GWT;
+
+import com.smartgwt.client.widgets.Canvas;
+import com.smartgwt.client.widgets.Label;
+import com.smartgwt.client.widgets.form.DynamicForm;
+import com.smartgwt.client.widgets.layout.HLayout;
+import com.smartgwt.client.widgets.layout.VLayout;
+
+import de.intevation.flys.client.client.FLYSConstants;
+import de.intevation.flys.client.shared.model.Data;
+import de.intevation.flys.client.shared.model.DataItem;
+import de.intevation.flys.client.shared.model.DataList;
+
+
+/**
+ * This UIProvider displays the static DataItems of an artifact (e.g. GaugeDischargeCurveArtifact).
+ *
+ * @author <a href="mailto:bjoern.ricks at intevation.de">Björn Ricks</a>
+ */
+public class StaticDataPanel
+extends AbstractUIProvider
+{
+ private static final long serialVersionUID = 7411866539525588336L;
+
+ /** The message class that provides i18n strings.*/
+ protected FLYSConstants messages = GWT.create(FLYSConstants.class);
+
+ /** The combobox.*/
+ protected DynamicForm form;
+
+ @Override
+ public Canvas create(DataList data) {
+ VLayout layout = new VLayout();
+ return layout;
+ }
+
+ @Override
+ public Canvas createOld(DataList dataList) {
+ VLayout vLayout = new VLayout();
+ vLayout.setWidth("400px");
+
+ int size = dataList.size();
+ for (int i = 0; i < size; i++) {
+ Data data = dataList.get(i);
+ DataItem[] items = data.getItems();
+
+ for (DataItem item: items) {
+ HLayout hLayout = new HLayout();
+
+ hLayout.addMember(new Label(item.getLabel()));
+ hLayout.addMember(new Label(item.getStringValue()));
+
+ vLayout.addMember(hLayout);
+ vLayout.setWidth("130px");
+ }
+ }
+
+ return vLayout;
+ }
+
+ @Override
+ protected Data[] getData() {
+ return null;
+ }
+}
+// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/ui/UIProviderFactory.java
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/UIProviderFactory.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/UIProviderFactory.java Wed Apr 03 16:00:21 2013 +0200
@@ -162,7 +162,7 @@
return new RadioPanel();
}
else if (uiProvider.equals("gauge_discharge_curve")) {
- return new GaugeDischargeCurvePanel();
+ return new StaticDataPanel();
}
else if (uiProvider.equals("minfo.sedimentload_distance_select")) {
return new SedLoadDistancePanel();
@@ -182,6 +182,9 @@
else if (uiProvider.equals("user_rgd_panel")) {
return new UserRGDProvider();
}
+ else if (uiProvider.equals("static_sqrelation")) {
+ return new StaticDataPanel();
+ }
else {
//GWT.log("Picked default provider.");
return new SelectProvider();
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/ui/WQAdaptedInputPanel.java
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/WQAdaptedInputPanel.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/WQAdaptedInputPanel.java Wed Apr 03 16:00:21 2013 +0200
@@ -11,7 +11,6 @@
import com.smartgwt.client.widgets.Canvas;
import com.smartgwt.client.widgets.Label;
import com.smartgwt.client.widgets.form.DynamicForm;
-import com.smartgwt.client.widgets.form.fields.FormItem;
import com.smartgwt.client.widgets.form.fields.RadioGroupItem;
import com.smartgwt.client.widgets.form.fields.events.BlurEvent;
import com.smartgwt.client.widgets.form.fields.events.BlurHandler;
@@ -95,6 +94,9 @@
/** Stores the input panels related to their keys.*/
protected Map<String, DoubleArrayPanel> wqranges;
+ /** List of doubleArrayPanels shown. */
+ protected ArrayList<DoubleArrayPanel> doubleArrayPanels;
+
/** Stores the min/max values for each q range.*/
protected Map<String, double[]> qranges;
@@ -119,6 +121,7 @@
public WQAdaptedInputPanel() {
wqranges = new HashMap<String, DoubleArrayPanel>();
+ doubleArrayPanels = new ArrayList<DoubleArrayPanel>();
qranges = new HashMap<String, double[]>();
wranges = new HashMap<String, double[]>();
qdTable = new QDTable();
@@ -190,6 +193,14 @@
if (itemWithFocus != null) {
itemWithFocus.setValues(new double[]{val});
+ // TODO 1133, show different data for each doublearraypanel
+ int i = doubleArrayPanels.indexOf(itemWithFocus);
+ if (i == doubleArrayPanels.size()-1) {
+ doubleArrayPanels.get(0).focusInItem(1);
+ }
+ else {
+ doubleArrayPanels.get(i+1).focusInItem(1);
+ }
}
}
};
@@ -197,6 +208,7 @@
qdTable.addCellClickHandler(handler);
}
+
@Override
public Canvas createOld(DataList dataList) {
List<Data> all = dataList.getAll();
@@ -315,6 +327,7 @@
}
}
+
protected List<String> validateRange(Map<String, double[]> ranges) {
List<String> errors = new ArrayList<String>();
NumberFormat nf = NumberFormat.getDecimalFormat();
@@ -467,6 +480,7 @@
label, null, this, this, TitleOrientation.LEFT);
wqranges.put(title, dap);
+ doubleArrayPanels.add(dap);
if (item instanceof WQDataItem) {
WQDataItem wq = (WQDataItem) item;
@@ -649,6 +663,7 @@
}
+ /** Get the WQD data from the service and put it into the tables. */
protected void fetchWQData() {
Config config = Config.getInstance();
String locale = config.getLocale ();
@@ -677,7 +692,6 @@
}
addWQInfo(wqi);
-
}
}
);
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/ui/stationinfo/MeasurementStationListGrid.java
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/stationinfo/MeasurementStationListGrid.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/stationinfo/MeasurementStationListGrid.java Wed Apr 03 16:00:21 2013 +0200
@@ -7,6 +7,8 @@
import com.smartgwt.client.widgets.WidgetCanvas;
import com.smartgwt.client.widgets.grid.ListGridField;
import com.smartgwt.client.widgets.grid.ListGridRecord;
+import com.smartgwt.client.widgets.grid.events.RecordClickEvent;
+import com.smartgwt.client.widgets.grid.events.RecordClickHandler;
import de.intevation.flys.client.client.FLYS;
import de.intevation.flys.client.shared.model.MeasurementStation;
@@ -18,7 +20,9 @@
/**
* @author <a href="mailto:bjoern.ricks at intevation.de">Björn Ricks</a>
*/
-public class MeasurementStationListGrid extends InfoListGrid {
+public class MeasurementStationListGrid
+extends InfoListGrid
+implements RecordClickHandler {
public MeasurementStationListGrid(FLYS flys) {
super(flys);
@@ -28,7 +32,9 @@
ListGridField stfield = new ListGridField("station", "Station [km]");
ListGridField lfield = new ListGridField("link", "Link");
lfield.setType(ListGridFieldType.LINK);
- this.setFields(nfield, sfield, efield, stfield, lfield);
+ ListGridField cfield = new ListGridField("curvelink", "SQ");
+ cfield.addRecordClickHandler(this);
+ this.setFields(nfield, sfield, efield, stfield, lfield, cfield);
}
/**
@@ -88,4 +94,12 @@
MeasurementStationRecord station = (MeasurementStationRecord)record;
return new WidgetCanvas(new MeasurementStationInfoPanel(station));
}
+
+ @Override
+ public void onRecordClick(RecordClickEvent event) {
+ MeasurementStationRecord station =
+ (MeasurementStationRecord)event.getRecord();
+ flys.newSQRelation(station.getRiverName(), station.getID());
+ }
+
}
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/client/ui/stationinfo/MeasurementStationRecord.java
--- a/flys-client/src/main/java/de/intevation/flys/client/client/ui/stationinfo/MeasurementStationRecord.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/client/ui/stationinfo/MeasurementStationRecord.java Wed Apr 03 16:00:21 2013 +0200
@@ -27,6 +27,7 @@
MSG.measurement_station_url();
this.setLink(link);
this.setLinkText(MSG.measurement_station_info_link());
+ this.setCurveLink(MSG.static_sqrelation());
this.setID(number);
this.setName(station.getName());
if (station.isKmUp()) {
@@ -172,4 +173,13 @@
public void setLink(String link) {
this.setAttribute("link", link);
}
+
+ public void setCurveLink(String link) {
+ this.setAttribute("curvelink", link);
+ }
+
+ public String getCurveLink() {
+ return this.getAttribute("curvelink");
+ }
+
}
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/server/ArtifactHelper.java
--- a/flys-client/src/main/java/de/intevation/flys/client/server/ArtifactHelper.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/server/ArtifactHelper.java Wed Apr 03 16:00:21 2013 +0200
@@ -39,7 +39,7 @@
* Name of the factory to generate a GaugeDischargeCurveArtifact
*/
private static final String GAUGE_DISCHARGE_CURVE_ARTIFACT = "gaugedischargecurve";
-
+ private static final String SQ_RELATION_ARTIFACT = "staticsqrelation";
private ArtifactHelper() {
}
@@ -162,5 +162,35 @@
return cf;
}
+
+
+ public static Artifact createSQRelationArtifact(
+ String serverUrl,
+ String locale,
+ String river,
+ int measurementStation)
+ throws ServerException
+ {
+ Document create = ClientProtocolUtils.newCreateDocument(
+ SQ_RELATION_ARTIFACT);
+
+ XMLUtils.ElementCreator ec = new XMLUtils.ElementCreator(
+ create,
+ ArtifactNamespaceContext.NAMESPACE_URI,
+ ArtifactNamespaceContext.NAMESPACE_PREFIX);
+
+ Element root = create.getDocumentElement();
+
+ Element eriver = ec.create("river");
+ ec.addAttr(eriver, "name", river);
+
+ Element estation = ec.create("measurement_station");
+ ec.addAttr(estation, "number", String.valueOf(measurementStation));
+
+ root.appendChild(eriver);
+ root.appendChild(estation);
+
+ return sendCreate(serverUrl, locale, create);
+ }
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
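Note: createSQRelationArtifact above assembles the create document for the "staticsqrelation" factory: a <river name="..."/> element and a <measurement_station number="..."/> element appended to the root built by ClientProtocolUtils.newCreateDocument. A standalone sketch of roughly that payload using plain JAXP DOM; the root element name "action", the river "Elbe" and the station number 42 are assumptions for illustration, and the real code goes through ClientProtocolUtils/XMLUtils and hands the document to sendCreate:

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;

import org.w3c.dom.Document;
import org.w3c.dom.Element;

public class SQRelationCreateDocSketch {

    public static void main(String[] args) throws Exception {
        // Plain DOM document; the real code gets it from
        // ClientProtocolUtils.newCreateDocument("staticsqrelation") and adds
        // the artifact namespace via XMLUtils.ElementCreator (omitted here).
        Document create = DocumentBuilderFactory.newInstance()
            .newDocumentBuilder().newDocument();

        Element root = create.createElement("action"); // assumed root element name
        create.appendChild(root);

        Element river = create.createElement("river");
        river.setAttribute("name", "Elbe");            // hypothetical river name

        Element station = create.createElement("measurement_station");
        station.setAttribute("number", String.valueOf(42)); // hypothetical station

        root.appendChild(river);
        root.appendChild(station);

        // Print the assembled document; the real code sends it with sendCreate().
        TransformerFactory.newInstance().newTransformer()
            .transform(new DOMSource(create), new StreamResult(System.out));
    }
}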
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/server/ArtifactServiceImpl.java
--- a/flys-client/src/main/java/de/intevation/flys/client/server/ArtifactServiceImpl.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/server/ArtifactServiceImpl.java Wed Apr 03 16:00:21 2013 +0200
@@ -78,5 +78,28 @@
return artifact;
}
+ @Override
+ public Artifact createSQRelationArtifact(
+ Collection collection,
+ String locale,
+ String river,
+ int measurementStation)
+ throws ServerException
+ {
+ logger.info("ArtifactServiceImpl.createSQRelationArtifact");
+ String url = getServletContext().getInitParameter("server-url");
+
+ Artifact artifact = ArtifactHelper.createSQRelationArtifact(url,
+ locale, river, measurementStation);
+ if (artifact == null) {
+ return null;
+ }
+ logger.info("SQRelationArtifact created successfully");
+
+ CollectionHelper.addArtifact(collection, artifact, url, locale);
+
+ return artifact;
+ }
+
}
// vim:set ts=4 sw=4 si et sta sts=4 fenc=utf8 :
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/server/FLYSArtifactCreator.java
--- a/flys-client/src/main/java/de/intevation/flys/client/server/FLYSArtifactCreator.java Wed Apr 03 15:59:01 2013 +0200
+++ b/flys-client/src/main/java/de/intevation/flys/client/server/FLYSArtifactCreator.java Wed Apr 03 16:00:21 2013 +0200
@@ -24,6 +24,7 @@
import de.intevation.flys.client.shared.model.GaugeDischargeCurveArtifact;
import de.intevation.flys.client.shared.model.MapArtifact;
import de.intevation.flys.client.shared.model.MINFOArtifact;
+import de.intevation.flys.client.shared.model.StaticSQRelationArtifact;
import de.intevation.flys.client.shared.model.WINFOArtifact;
@@ -149,6 +150,10 @@
logger.debug("+++++ NEW GAUGEDISCHARGECURVE ARTIFACT.");
return new GaugeDischargeCurveArtifact(uuid, hash, background, msg);
}
+ else if (name.length() > 0 && name.equals("staticsqrelation")) {
+ logger.debug("+++++ STATICSQRELATION ARTIFACT.");
+ return new StaticSQRelationArtifact(uuid, hash, background, msg);
+ }
return new DefaultArtifact(uuid, hash, background, msg);
}
diff -r 8d0af912351c -r 25c2505df28f flys-client/src/main/java/de/intevation/flys/client/shared/model/StaticSQRelationArtifact.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/flys-client/src/main/java/de/intevation/flys/client/shared/model/StaticSQRelationArtifact.java Wed Apr 03 16:00:21 2013 +0200
@@ -0,0 +1,36 @@
+package de.intevation.flys.client.shared.model;
+
+import java.util.List;
+
+
+public class StaticSQRelationArtifact
+extends DefaultArtifact
+{
+
+ /** The name of this artifact */
+ public static final String NAME = "static_sqrelation";
+
+
+
+ public StaticSQRelationArtifact() {
+ }
+
+ public StaticSQRelationArtifact(String uuid, String hash) {
+ super(uuid, hash);
+ }
+
+
+ public StaticSQRelationArtifact(
+ String uuid,
+ String hash,
+ boolean inBackground,
+ List<CalculationMessage> messages
+ ) {
+ super(uuid, hash, inBackground, messages);
+ }
+
+
+ public String getName() {
+ return NAME;
+ }
+}
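Note: two distinct names wire the new artifact into the client. The factory name "staticsqrelation" (SQ_RELATION_ARTIFACT, checked in FLYSArtifactCreator) selects StaticSQRelationArtifact, while the artifact/uiProvider name "static_sqrelation" (NAME above, also the key handled in UIProviderFactory) selects the StaticDataPanel. A compact plain-Java sketch of that dispatch, with string stand-ins for the real classes:

public class SQRelationWiringSketch {

    /** FLYSArtifactCreator: factory name -> client artifact class (stand-in). */
    static String artifactFor(String factoryName) {
        return "staticsqrelation".equals(factoryName)
            ? "StaticSQRelationArtifact"
            : "DefaultArtifact";
    }

    /** UIProviderFactory: uiProvider key -> panel class (stand-in). */
    static String providerFor(String uiProvider) {
        return "static_sqrelation".equals(uiProvider)
            ? "StaticDataPanel"
            : "SelectProvider";
    }

    public static void main(String[] args) {
        System.out.println(artifactFor("staticsqrelation"));  // StaticSQRelationArtifact
        System.out.println(providerFor("static_sqrelation")); // StaticDataPanel
    }
}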