[Formed-commits] r425 - in trunk: . formed/formed/plugins/export
scm-commit@wald.intevation.org
Wed Nov 24 17:33:15 CET 2010
Author: bh
Date: 2010-11-24 17:33:14 +0100 (Wed, 24 Nov 2010)
New Revision: 425
Modified:
trunk/ChangeLog
trunk/formed/formed/plugins/export/rules_sh.py
Log:
* formed/formed/plugins/export/rules_sh.py: Fix formatting.
Modified: trunk/ChangeLog
===================================================================
--- trunk/ChangeLog 2010-11-15 17:08:43 UTC (rev 424)
+++ trunk/ChangeLog 2010-11-24 16:33:14 UTC (rev 425)
@@ -1,3 +1,7 @@
+2010-11-24 Bernhard Herzog <bh at intevation.de>
+
+ * formed/formed/plugins/export/rules_sh.py: Fix formatting.
+
2010-11-15 Torsten Irlaender <torsten at intevation.de>
* formed/formed/model/data.py: Do not set attribute "repeat" for
Modified: trunk/formed/formed/plugins/export/rules_sh.py
===================================================================
--- trunk/formed/formed/plugins/export/rules_sh.py 2010-11-15 17:08:43 UTC (rev 424)
+++ trunk/formed/formed/plugins/export/rules_sh.py 2010-11-24 16:33:14 UTC (rev 425)
@@ -10,15 +10,16 @@
# Torsten Irländer <torsten.irlaender at intevation.de>
#
+import codecs
+from cgi import escape
+
from formed.plugins.ui.controls import FileDialogFilter
-from formed.model.exprtree import *
+from formed.model.exprtree import *
import formed.model.data as data
#from formed.model.misc import ModeChecker, checkMode
-import codecs
-from cgi import escape
def sql_today(node, document):
out = []
@@ -30,9 +31,9 @@
out.append("'%s'::date" % node.eval(None))
return " ".join(out)
-def sql_var(node, document):
- out = []
- out.append(node.var_name)
+def sql_var(node, document):
+ out = []
+ out.append(node.var_name)
return " ".join(out)
def sql_const(node, document):
@@ -43,14 +44,16 @@
def sql_And(node, document):
out = []
out.append("(")
- out.append("%s AND %s" % (recursive_parse(node.a, document), recursive_parse(node.b, document)))
+ out.append("%s AND %s" % (recursive_parse(node.a, document),
+ recursive_parse(node.b, document)))
out.append(")")
return " ".join(out)
def sql_Or(node, document):
out = []
out.append("(")
- out.append("%s OR %s" % (recursive_parse(node.a, document), recursive_parse(node.b, document)))
+ out.append("%s OR %s" % (recursive_parse(node.a, document),
+ recursive_parse(node.b, document)))
out.append(")")
return " ".join(out)
@@ -59,36 +62,47 @@
operator = ""
if node.operator.__name__ == "GT":
operator = ">"
- out.append("%s %s %s" % (recursive_parse(node.a, document), operator, recursive_parse(node.b, document)))
+ out.append("%s %s %s" % (recursive_parse(node.a, document),
+ operator, recursive_parse(node.b, document)))
elif node.operator.__name__ == "LT":
operator = "<"
- out.append("%s %s %s" % (recursive_parse(node.a, document), operator, recursive_parse(node.b, document)))
+ out.append("%s %s %s" % (recursive_parse(node.a, document),
+ operator, recursive_parse(node.b, document)))
elif node.operator.__name__ == "LE":
operator = "<="
- out.append("%s %s %s" % (recursive_parse(node.a, document), operator, recursive_parse(node.b, document)))
+ out.append("%s %s %s" % (recursive_parse(node.a, document), operator,
+ recursive_parse(node.b, document)))
elif node.operator.__name__ == "EQ":
operator = "="
- out.append("%s %s %s" % (recursive_parse(node.a, document), operator, recursive_parse(node.b, document)))
+ out.append("%s %s %s" % (recursive_parse(node.a, document), operator,
+ recursive_parse(node.b, document)))
elif node.operator.__name__ == "NE":
operator = "!="
- out.append("%s %s %s" % (recursive_parse(node.a, document), operator, recursive_parse(node.b, document)))
+ out.append("%s %s %s" % (recursive_parse(node.a, document), operator,
+ recursive_parse(node.b, document)))
elif node.operator.__name__ == "GE":
operator = ">="
- out.append("%s %s %s" % (recursive_parse(node.a, document), operator, recursive_parse(node.b, document)))
+ out.append("%s %s %s" % (recursive_parse(node.a, document), operator,
+ recursive_parse(node.b, document)))
elif node.operator.__name__ == "DAYS":
operator = "-"
- out.append("abs(%s %s %s)" % (recursive_parse(node.a, document), operator, recursive_parse(node.b, document)))
+ out.append("abs(%s %s %s)" % (recursive_parse(node.a, document),
+ operator, recursive_parse(node.b,
+ document)))
elif node.operator.__name__ == "ADD":
operator = "+"
- out.append("%s %s %s" % (recursive_parse(node.a, document), operator, recursive_parse(node.b, document)))
+ out.append("%s %s %s" % (recursive_parse(node.a, document), operator,
+ recursive_parse(node.b, document)))
elif node.operator.__name__ == "MINUS":
operator = "-"
- out.append("%s %s %s" % (recursive_parse(node.a, document), operator, recursive_parse(node.b, document)))
+ out.append("%s %s %s" % (recursive_parse(node.a, document), operator,
+ recursive_parse(node.b, document)))
elif node.operator.__name__ == "MUL":
operator = "*"
- out.append("%s %s %s" % (recursive_parse(node.a, document), operator, recursive_parse(node.b, document)))
+ out.append("%s %s %s" % (recursive_parse(node.a, document), operator,
+ recursive_parse(node.b, document)))
else:
- raise Exception('Can not parse %s' % node)
+ raise Exception('Can not parse %s' % node)
return " ".join(out)
def sql_OperatorUnary(node, document):
@@ -104,7 +118,7 @@
out.append(")")
elif node.operator.__name__ == "KNOWN":
if not isinstance(node.a, Var):
- raise Exception('Can not parse %s' % node)
+ raise Exception('Can not parse %s' % node)
n = document.getNodeComponentByName(node.a.var_name)
if isinstance(n, data.DateLeaf):
ukv = "'0001-01-01'"
@@ -137,15 +151,18 @@
elif isinstance(node, DateNode):
out.append(sql_date(node, document))
else:
- raise Exception('Can not parse %s' % node)
+ raise Exception('Can not parse %s' % node)
return " ".join(out)
def generate_analyse(rules, document):
out = []
# 1. Header
out.append('#!/bin/bash')
- out.append("""DATABASES=`psql -t -c "SELECT datname FROM pg_database WHERE datname LIKE 'ka_%';" postgres`""")
- out.append('echo "DB,%s"' % ",".join(["%s" % str(i) for i in range(0,len(rules))]))
+ out.append("DATABASES=`psql -t -c"
+ " \"SELECT datname FROM pg_database WHERE datname LIKE 'ka_%';\""
+ " postgres`")
+ out.append('echo "DB,%s"'
+ % ",".join(["%s" % str(i) for i in range(0,len(rules))]))
out.append("for DB in ${DATABASES}")
out.append("do")
@@ -156,12 +173,14 @@
rule_str = recursive_parse(rule.getExpr().prog, document)
out.append('# Eval %s' % num)
out.append('# %s' % rule.getDescription())
- out.append(command %
- (num, 'select count(id) from master_tbl where not (%s)' % rule_str))
+ out.append(command
+ % (num, ('select count(id) from master_tbl where not (%s)'
+ % rule_str)))
out.append('\n')
known_rules.append(rule_str)
- # 3. Footer
- out.append('echo "${DB},%s"' % ",".join(["${q%s}" % str(i) for i in range(0,len(rules))]))
+ # 3. Footer
+ out.append('echo "${DB},%s"'
+ % ",".join(["${q%s}" % str(i) for i in range(0, len(rules))]))
out.append("done")
return "\n".join(out)
@@ -193,7 +212,7 @@
date_rules = []
inserts = []
base_sql = "SELECT id from ( %%(table)s ) m WHERE ( %(where)s )"
- # 1. Checks
+ # 1. Checks
out.append('<checks>')
for num, rule in enumerate(rules):
try:
@@ -204,7 +223,8 @@
name = rule.getDescription().replace("'", '"')
desc = rule.getValue().replace("'", '"')
add_sql = base_sql % {
- 'where': "NOT ( %s )" % recursive_parse(rule.getExpr().prog, document)}
+ 'where': "NOT ( %s )" % recursive_parse(rule.getExpr().prog,
+ document)}
del_sql = base_sql % {
'where': "TRUE"}
if tag >= 1000:
@@ -214,8 +234,12 @@
# Added check for test if all date-sequence rules are ok
tag = "10"
name = "Datumsangaben konsistent"
- desc = "Alle Datumsangaben in der Fallakte befinden sich in chronologisch korrekter Reihenfolge und liegen nicht in der Zukunft"
- add_sql = "select id from ( %%(table)s ) m where not ARRAY(SELECT tag_id FROM nm_tags_cases_view WHERE case_id = m.id) && ARRAY[%s]" % ",".join([str(id) for id in date_rules])
+ desc = ("Alle Datumsangaben in der Fallakte befinden sich in"
+ " chronologisch korrekter Reihenfolge und liegen nicht in der"
+ " Zukunft")
+ add_sql = ("select id from ( %%(table)s ) m where not "
+ "ARRAY(SELECT tag_id FROM nm_tags_cases_view WHERE case_id=m.id)"
+ " && ARRAY[%s]" % ",".join([str(id) for id in date_rules]))
del_sql = base_sql % {'where': "TRUE"}
out.append(generate_check(tag, name, desc, add_sql, del_sql))
#inserts.append("""INSERT INTO tags (id, label, description) VALUES (%s, '%s','%s');""" % (tag, name.replace("'", '"'), desc.replace("'", '"')))
@@ -225,6 +249,7 @@
out.append('\n-->')
return "".join(out)
+
class ExportRulesForTagging(FileDialogFilter):
def __init__(self):
@@ -247,52 +272,57 @@
def doExport(self, path, main, dummy):
document = main.getDocument()
- root = document.root
- mode = main.getSelectedMode()
+ root = document.root
+ mode = main.getSelectedMode()
allModes = main.getAllModes()
- out = None
- sql = None
+ out = None
+ sql = None
try:
out = codecs.open(path, "wb", "UTF-8")
- all_rules = list(document.findAllByClass(data.RuleLeaf, mode, allModes))
+ all_rules = list(document.findAllByClass(data.RuleLeaf, mode,
+ allModes))
error_rules = []
known_rules = []
- rg_fields = []
+ rg_fields = []
for idx, rule in enumerate(all_rules):
ignore_rule = False
-
+
# If rule does not result in error, ignore
if rule.getMark().find('error') < 0:
ignore_rule = True
-
+
# If rule already generated, ignore
rule_str = recursive_parse(rule.getExpr().prog, document)
if rule_str in known_rules:
ignore_rule = True
-
+
# If field is in in a repeat group, ignore
# TODO: if this is needed, the SQL statements have to be adapted
for dependent_field in rule.getExpr().getDependencies():
- field = document.getNodeComponentByName(dependent_field, mode, allModes)
+ field = document.getNodeComponentByName(dependent_field,
+ mode, allModes)
if field in rg_fields:
ignore_rule = True
for parent in field.pathToRoot(True):
if isinstance(parent, data.RepeatNode):
rg_fields.append(field)
ignore_rule = True
-
+
if ignore_rule:
continue
-
+
error_rules.append(rule)
known_rules.append(rule_str)
sql = generate_tagging(error_rules, document)
print >> out, sql
finally:
if out:
- try: out.close()
- except: pass
+ try:
+ out.close()
+ except:
+ pass
+
class ExportRulesAsSH(FileDialogFilter):
def __init__(self):
@@ -315,28 +345,33 @@
def doExport(self, path, main, dummy):
document = main.getDocument()
- root = document.root
- mode = main.getSelectedMode()
+ root = document.root
+ mode = main.getSelectedMode()
allModes = main.getAllModes()
- out = None
- sql = None
+ out = None
+ sql = None
try:
out = codecs.open(path, "wb", "UTF-8")
- all_rules = list(document.findAllByClass(data.RuleLeaf, mode, allModes))
+ all_rules = list(document.findAllByClass(data.RuleLeaf, mode,
+ allModes))
error_rules = []
known_rules = []
for idx, rule in enumerate(all_rules):
- if rule.getMark().find('error') < 0: continue
+ if rule.getMark().find('error') < 0:
+ continue
# filter out already generated rules.
rule_str = recursive_parse(rule.getExpr().prog, document)
- if rule_str in known_rules: continue
+ if rule_str in known_rules:
+ continue
error_rules.append(rule)
known_rules.append(rule_str)
sql = generate_analyse(error_rules, document)
print >> out, sql
finally:
if out:
- try: out.close()
- except: pass
+ try:
+ out.close()
+ except:
+ pass
# vim:set ts=4 sw=4 si et sta sts=4 enc=utf-8 fenc=utf-8: