def deliver_custom():
    """Render the evolution view for a user-selected field/value pair.

    Reads ``field``, ``fieldname`` and ``date`` from the POSTed form,
    validates them and delegates to deliver_evolution().
    """
    try:
        field = request.form.get("field")
        fieldname = request.form.get("fieldname")
        date = request.form.get("date")
        if field == "" or fieldname == "" or date == "":
            emsg = "An empty parameter was provided."
            errormsg(emsg)
            # BUG FIX: create_program_meta was passed uncalled; every other
            # view passes the result of create_program_meta()
            return render_template('content.html', desc=create_program_meta(),
                                   params=build_params(), emsg=emsg)
        # By default the latest day is used. When another date was specified
        # this one is used
        today = get_latest_day()
        if date is not None:
            try:
                d = datetime.datetime.strptime(date, "%Y-%m-%d")
                today = d.strftime("%Y%m%d")
            except ValueError as e:
                errormsg("deliver_custom: Invalid timestamp. " + str(e))
        if red.sismember("FIELDS", fieldname):
            return deliver_evolution(today, fieldname, field)
        emsg = "An invalid parameter was provided"
        return render_template('content.html', desc=create_program_meta(),
                               params=build_params(), emsg=emsg)
    except redis.ConnectionError as err:
        # Outer handler mirrors the other views: show the offline page
        errormsg("deliver_custom: Cannot connect to redis " + str(err))
        return render_template('offline.html', prefix=prefix)
def send_settings():
    """Persist the set of enabled data fields chosen in the settings form.

    On POST the submitted checkboxes are validated against the known
    "FIELDS" set; valid ones are added to "ENFIELDS" and deselected ones
    are removed.  The settings page is re-rendered in all cases.
    """
    try:
        if request.method == 'POST':
            sfields = request.form.getlist('selectedfields')
            vfields = dict()
            # Check if the fields are valid
            for field in sfields:
                if red.sismember("FIELDS", field):
                    red.sadd("ENFIELDS", field)
                    vfields[field] = True
                # TODO log invalid fields
            # Find checkboxes that were not set or unticked and remove them
            for f in red.smembers('FIELDS'):
                # Idiom fix: "(f in vfields) is False" -> "f not in vfields"
                if f not in vfields:
                    # Found a field that was not selected but is marked as
                    # being set in a previous iteration
                    if red.sismember("ENFIELDS", f):
                        red.srem("ENFIELDS", f)
        fields = load_selected_fields()
        return render_template('settings.html', fields=fields,
                               desc=create_program_meta(),
                               params=build_params())
    except redis.ConnectionError as err:
        errormsg("Cannot connect to redis. " + str(err))
        return render_template('offline.html', prefix=prefix)
def deliver_evolution(date, field, key):
    """Render the time evolution of one field value over the coverage window.

    :param date: anchor day in YYYYMMDD format
    :param field: redis field name
    :param key: human readable key, translated to its redis member
    """
    try:
        desc = create_program_meta()
        params = build_params()
        if check_date(date) is False:
            emsg = "Invalid date specified"
            return render_template('content.html', desc=desc, params=params,
                                   emsg=emsg)
        data = []
        daterange = enum_last_days(date, coverage)
        rkey = translate_human_to_redis(field, key)
        # BUG FIX: the loop variable used to shadow the ``date`` parameter,
        # so the headline date shown afterwards was whatever day the range
        # ended on instead of the requested day.
        for day in daterange:
            k = sensorname + ":" + day + ":" + field
            score = red.zscore(k, rkey)
            if score is not None:
                entry = dict()
                entry['date'] = day
                entry['score'] = score
                data.append(entry)
        # Convert the requested date back to the display format
        d = datetime.datetime.strptime(date, "%Y%m%d")
        showdate = d.strftime("%Y-%m-%d")
        return render_template("evol.html", desc=desc, date=showdate,
                               field=field, key=key, data=data, params=params)
    except redis.ConnectionError as err:
        errormsg("Cannot connect to redis " + str(err))
        return render_template('offline.html', prefix=prefix)
def process_file(self):
    """Annotate the documents from ``self.sourceFile`` and store the result.

    The annotated documents are dumped to stdout when no target directory
    is configured, otherwise they are written to a path derived from the
    ``filename`` field of the first document.
    """
    # FIXME read from config
    with open(self.sourceFile, "r") as f:
        docs = json.load(f)
    docs = self.handle_docs(docs)
    # FIXME Two different dump functions one here one in potiron-an-all.py
    if self.directory is None:
        json.dump(docs, sys.stdout)
        return
    # FIXME assume that always the same filename
    filename = None
    # py3 fix: dict.has_key() no longer exists -> "in"
    if len(docs) > 0 and "filename" in docs[0]:
        filename = docs[0]["filename"]
    if filename is None:
        errormsg("Cannot store file as no filename was found")
        return
    fn = get_file_struct(self.directory, filename)
    # Create the parent directory of the target file when missing
    d = os.path.dirname(fn)
    if not os.path.exists(d):
        os.makedirs(d)
    if os.path.exists(fn):
        # FIXME Merge files?
        errormsg("Do not overwrite file " + fn)
        return
    # "with" closes the output handle, which the original never did
    with open(fn, "w") as f:
        json.dump(docs, f)
def deliver_custom():
    """Render the evolution view for a user-selected field/value pair.

    Reads ``field``, ``fieldname`` and ``date`` from the POSTed form,
    validates them and delegates to deliver_evolution().
    """
    try:
        field = request.form.get("field")
        fieldname = request.form.get("fieldname")
        date = request.form.get("date")
        if field == "" or fieldname == "" or date == "":
            emsg = "An empty parameter was provided."
            errormsg(emsg)
            # BUG FIX: create_program_meta was passed uncalled; every other
            # view passes the result of create_program_meta()
            return render_template('content.html', desc=create_program_meta(),
                                   params=build_params(), emsg=emsg)
        # By default the latest day is used. When another date was specified
        # this one is used
        today = get_latest_day()
        if date is not None:
            try:
                d = datetime.datetime.strptime(date, "%Y-%m-%d")
                today = d.strftime("%Y%m%d")
            except ValueError as e:
                errormsg("deliver_custom: Invalid timestamp. " + str(e))
        if red.sismember("FIELDS", fieldname):
            return deliver_evolution(today, fieldname, field)
        emsg = "An invalid parameter was provided"
        return render_template('content.html', desc=create_program_meta(),
                               params=build_params(), emsg=emsg)
    except redis.ConnectionError as err:
        # Outer handler mirrors the other views: show the offline page
        errormsg("deliver_custom: Cannot connect to redis " + str(err))
        return render_template('offline.html', prefix=prefix)
def send_settings():
    """Persist the set of enabled data fields chosen in the settings form.

    On POST the submitted checkboxes are validated against the known
    "FIELDS" set; valid ones are added to "ENFIELDS" and deselected ones
    are removed.  The settings page is re-rendered in all cases.
    """
    try:
        if request.method == 'POST':
            sfields = request.form.getlist('selectedfields')
            vfields = dict()
            # Check if the fields are valid
            for field in sfields:
                if red.sismember("FIELDS", field):
                    red.sadd("ENFIELDS", field)
                    vfields[field] = True
                # TODO log invalid fields
            # Find checkboxes that were not set or unticked and remove them
            for f in red.smembers('FIELDS'):
                # py3 fix: dict.has_key() no longer exists -> "not in"
                if f not in vfields:
                    # Found a field that was not selected but is marked as
                    # being set in a previous iteration
                    if red.sismember("ENFIELDS", f):
                        red.srem("ENFIELDS", f)
        fields = load_selected_fields()
        return render_template('settings.html', fields=fields,
                               desc=create_program_meta(),
                               params=build_params())
    except redis.ConnectionError as err:
        errormsg("Cannot connect to redis. " + str(err))
        return render_template('offline.html', prefix=prefix)
def process_file(self):
    """Annotate the documents from ``self.sourceFile`` and store the result.

    The annotated documents are dumped to stdout when no target directory
    is configured, otherwise they are written to a path derived from the
    ``filename`` field of the first document.
    """
    # FIXME read from config
    with open(self.sourceFile, "r") as f:
        docs = json.load(f)
    docs = self.handle_docs(docs)
    # FIXME Two different dump functions one here one in potiron-an-all.py
    if self.directory is None:
        json.dump(docs, sys.stdout)
        return
    # FIXME assume that always the same filename
    filename = None
    # py3 fix: dict.has_key() no longer exists -> "in"
    if len(docs) > 0 and "filename" in docs[0]:
        filename = docs[0]["filename"]
    if filename is None:
        errormsg("Cannot store file as no filename was found")
        return
    fn = get_file_struct(self.directory, filename)
    # Create the parent directory of the target file when missing
    d = os.path.dirname(fn)
    if not os.path.exists(d):
        os.makedirs(d)
    if os.path.exists(fn):
        # FIXME Merge files?
        errormsg("Do not overwrite file " + fn)
        return
    # "with" closes the output handle, which the original never did
    with open(fn, "w") as f:
        json.dump(docs, f)
def deliver_evolution(date, field, key):
    """Render the time evolution of one field value over the coverage window.

    :param date: anchor day in YYYYMMDD format
    :param field: redis field name
    :param key: human readable key, translated to its redis member
    """
    try:
        desc = create_program_meta()
        params = build_params()
        if check_date(date) is False:
            emsg = "Invalid date specified"
            return render_template('content.html', desc=desc, params=params,
                                   emsg=emsg)
        data = []
        daterange = enum_last_days(date, coverage)
        rkey = translate_human_to_redis(field, key)
        # BUG FIX: the loop variable used to shadow the ``date`` parameter,
        # so the headline date shown afterwards was whatever day the range
        # ended on instead of the requested day.
        for day in daterange:
            k = sensorname + ":" + day + ":" + field
            score = red.zscore(k, rkey)
            if score is not None:
                entry = dict()
                entry['date'] = day
                entry['score'] = score
                data.append(entry)
        # Convert the requested date back to the display format
        d = datetime.datetime.strptime(date, "%Y%m%d")
        showdate = d.strftime("%Y-%m-%d")
        return render_template("evol.html", desc=desc, date=showdate,
                               field=field, key=key, data=data, params=params)
    except redis.ConnectionError as err:
        errormsg("Cannot connect to redis " + str(err))
        return render_template('offline.html', prefix=prefix)
def welcome():
    """Render the landing page with the top-10 statistics for a single day."""
    try:
        desc = create_program_meta()
        params = build_params()
        emsg = check_database()
        if emsg is not None:
            return render_template('content.html', desc=desc, params=params,
                                   emsg=emsg)
        # Default to the most recent day that has data
        day = get_latest_day()
        if request.method == 'POST':
            # The JavaScript date picker also validates, but never trust
            # code running on client machines
            day = check_user_day(request.form.get('datepicker'))
            if day is None:
                return render_template('content.html', desc=desc,
                                       params=params,
                                       emsg="Invalid date specified")
        fields = list(red.smembers("ENFIELDS"))
        topdata = get_top_10_per_day(day, fields)
        # Convert the selected day back into display format
        selday = datetime.datetime.strptime(day,
                                            "%Y%m%d").strftime("%Y-%m-%d")
        # Warn the user when no data fields are enabled at all
        if get_enabled_fields_num() == 0:
            emsg = "No data fields are selected. Please select some fields in \
the settings menu."
            return render_template('content.html', desc=desc, fields=fields,
                                   topdata=topdata, params=build_params(),
                                   seldate=selday, emsg=emsg)
        return render_template('content.html', desc=desc, fields=fields,
                               topdata=topdata, params=build_params(),
                               seldate=selday)
    except redis.ConnectionError as err:
        errormsg("Could not connect to redis. " + str(err))
        return render_template('offline.html', prefix=prefix)
def get_asn(self, ipaddress, date):
    """Return a (cache id, ASN) tuple for *ipaddress* at *date*.

    Lookups are memoised in ``self.cache``; each fresh lookup receives a
    new, monotonically increasing cache id.
    """
    cached = self.cache.get(ipaddress)
    if cached is not None:
        return cached
    asn, returndate = ipasn.asn(ipaddress, date)
    # FIXME Cache is common between all annotations
    self.cacheid += 1
    self.cache[ipaddress] = (self.cacheid, asn)
    self.cache['type'] = potiron.TYPE_ASN_DICT
    if returndate != date:
        # FIXME Not tested
        potiron.errormsg(
            "Date mismatch between ASN database and encountered timestamp "
            "in packet capture. IP={}. Date={} Return date= {}".format(
                ipaddress, date, returndate))
    return (self.cacheid, asn)
def get_asn(self, ipaddress, date):
    """Return a (cache id, ASN) tuple for *ipaddress* at *date*.

    Lookups are memoised in ``self.cache``; each fresh lookup receives a
    new, monotonically increasing cache id.
    """
    # py3 fix: dict.has_key() no longer exists -> "in"
    if ipaddress in self.cache:
        return self.cache[ipaddress]
    asn, returndate = ipasn.asn(ipaddress, date)
    # FIXME Cache is common between all annotations
    self.cacheid = self.cacheid + 1
    self.cache[ipaddress] = (self.cacheid, asn)
    self.cache['type'] = potiron.TYPE_ASN_DICT
    if returndate != date:
        # FIXME Not tested
        potiron.errormsg("Date mismatch between ASN database and encountered "
                         "timestamp in packet capture. IP=" + ipaddress +
                         ". Date=" + date + " " + "Return date= " + returndate)
    return (self.cacheid, asn)
def __init__(self): self.mfields = [ "ipsrc", "ipdst", "packet_id", "timestamp", "sensorname", "filename" ] #Open the geoip database self.database = "/usr/share/GeoIP/GeoIPCity.dat" self.help=\ """potiron-json-geo.py [-h] [-r filename] [-d directory] [-k] [-c config] [-i index] -h Shows this screen -d directory Specify the directory where the files should be stored -k Sent log data also to console and not only to syslog -c Filename of the configuration file -i Put annotation data directly in the index instead of writing json files INPUT FILES This program reads json documents as input. The source IP addresses and destination IP addresses are annotated with a Geo lookup of each IP address. The following fields are required in the input json files KEY VALUE ipsrc Source IP address in dotted decimal notation ipdst Destination IP address in dotted decimal notation OUTPUT The following fields are added to the json document KEY VALUE sipcountry Country of the source IP sipcity City of the source IP dipcountry Country of the Destination IP address dipcity City of the Destination IP address """ try: self.gi = GeoIP.open(self.database, GeoIP.GEOIP_STANDARD) except Exception, e: potiron.errormsg("Failed to initialize GeoIP module. Cause=" + str(e)) self.gi = None
def get_asn(self, ipaddress, date):
    """Return a (cache id, ASN) tuple for *ipaddress* at *date*.

    Lookups are memoised in ``self.cache``; each fresh lookup receives a
    new, monotonically increasing cache id.
    """
    # py3 fix: dict.has_key() no longer exists -> "in"
    if ipaddress in self.cache:
        return self.cache[ipaddress]
    asn, returndate = ipasn.asn(ipaddress, date)
    # FIXME Cache is common between all annotations
    self.cacheid = self.cacheid + 1
    self.cache[ipaddress] = (self.cacheid, asn)
    self.cache['type'] = potiron.TYPE_ASN_DICT
    if returndate != date:
        # FIXME Not tested
        potiron.errormsg("Date mismatch between ASN database and encountered "
                         "timestamp in packet capture. IP=" + ipaddress +
                         ". Date=" + date + " " + "Return date= " + returndate)
    return (self.cacheid, asn)
def annoate_doc(self, doc):
    """Annotate *doc* with passive-DNS record ids for ipsrc/ipdst.

    NOTE(review): the method name looks like a typo for ``annotate_doc``
    but is kept unchanged for interface compatibility.
    """
    # py3 fix: dict.has_key() no longer exists -> "not in"
    if 'state' not in doc:
        doc['state'] = 0
    if doc['state'] & potiron.STATE_PDNS_AN:
        # The document was already annotated
        return doc
    try:
        (rid, name) = self.get_rrnames(doc["ipsrc"])
        if name != "":
            doc["a_" + str(potiron.TYPE_PDNS_DICT) + "_ipsrc"] = rid
        (rid, name) = self.get_rrnames(doc["ipdst"])
        if name != "":
            doc["a_" + str(potiron.TYPE_PDNS_DICT) + "_ipdst"] = rid
        doc["state"] = doc["state"] | potiron.STATE_PDNS_AN
    except Exception as e:
        potiron.errormsg("Failed to annotate with PDNS data. Cause=" + str(e))
    # BUG FIX: always return the (possibly annotated) document; the
    # original fell through and returned None on the success path
    return doc
def check_user_day(day):
    """Validate a user-supplied YYYY-MM-DD date and return it as YYYYMMDD.

    Returns None when the date is missing, malformed, oversized or has no
    data recorded for it.
    """
    try:
        if day is None:
            return None
        if len(day) > 20:
            raise ValueError("User day string is too large." + day)
        # Let the datetime library check if the parameters correspond to
        # the right date format. If bad parameters are specified,
        # the most recent date is used
        d = datetime.datetime.strptime(day, "%Y-%m-%d")
        day = d.strftime("%Y%m%d")
        # Check if there is data for this day
        if red.sismember("DAYS", day) == 1:
            return day
    except ValueError as error:
        errormsg("check_user_day: " + str(error))
    # Explicit None so callers' "is None" check is obviously served
    return None
def check_user_day(day):
    """Validate a user-supplied YYYY-MM-DD date and return it as YYYYMMDD.

    Returns None when the date is missing, malformed, oversized or has no
    data recorded for it.
    """
    try:
        if day is None:
            return None
        if len(day) > 20:
            raise ValueError("User day string is too large." + day)
        # Let the datetime library check if the parameters correspond to
        # the right date format. If bad parameters are specified,
        # the most recent date is used
        d = datetime.datetime.strptime(day, "%Y-%m-%d")
        day = d.strftime("%Y%m%d")
        # Check if there is data for this day
        if red.sismember("DAYS", day) == 1:
            return day
    except ValueError as error:
        errormsg("check_user_day: " + str(error))
    # Explicit None so callers' "is None" check is obviously served
    return None
def __init__(self): self.mfields = [ "ipsrc" , "ipdst", "packet_id", "timestamp", "sensorname", "filename"] #Open the geoip database self.database = "/usr/share/GeoIP/GeoIPCity.dat" self.help=\ """potiron-json-geo.py [-h] [-r filename] [-d directory] [-k] [-c config] [-i index] -h Shows this screen -d directory Specify the directory where the files should be stored -k Sent log data also to console and not only to syslog -c Filename of the configuration file -i Put annotation data directly in the index instead of writing json files INPUT FILES This program reads json documents as input. The source IP addresses and destination IP addresses are annotated with a Geo lookup of each IP address. The following fields are required in the input json files KEY VALUE ipsrc Source IP address in dotted decimal notation ipdst Destination IP address in dotted decimal notation OUTPUT The following fields are added to the json document KEY VALUE sipcountry Country of the source IP sipcity City of the source IP dipcountry Country of the Destination IP address dipcity City of the Destination IP address """ try: self.gi = GeoIP.open(self.database,GeoIP.GEOIP_STANDARD) except Exception,e: potiron.errormsg("Failed to initialize GeoIP module. Cause="+str(e)) self.gi = None
def annoate_doc(self, doc):
    """Annotate *doc* with passive-DNS record ids for ipsrc/ipdst.

    NOTE(review): the method name looks like a typo for ``annotate_doc``
    but is kept unchanged for interface compatibility.
    """
    # py3 fix: dict.has_key() no longer exists -> "not in"
    if 'state' not in doc:
        doc['state'] = 0
    if doc['state'] & potiron.STATE_PDNS_AN:
        # The document was already annotated
        return doc
    try:
        (rid, name) = self.get_rrnames(doc["ipsrc"])
        if name != "":
            doc["a_" + str(potiron.TYPE_PDNS_DICT) + "_ipsrc"] = rid
        (rid, name) = self.get_rrnames(doc["ipdst"])
        if name != "":
            doc["a_" + str(potiron.TYPE_PDNS_DICT) + "_ipdst"] = rid
        doc["state"] = doc["state"] | potiron.STATE_PDNS_AN
    except Exception as e:
        potiron.errormsg("Failed to annotate with PDNS data. Cause=" + str(e))
    # BUG FIX: always return the (possibly annotated) document; the
    # original fell through and returned None on the success path
    return doc
def annoate_doc(self, doc):
    """Attach passive-DNS record ids for ipsrc/ipdst to *doc* and return it.

    The ``state`` bitmask marks documents that were already processed so
    annotation is applied at most once.
    """
    doc.setdefault('state', 0)
    if doc['state'] & potiron.STATE_PDNS_AN:
        # Nothing to do: already carries PDNS annotations
        return doc
    try:
        rid, name = self.get_rrnames(doc["ipsrc"])
        if name != "":
            doc["a_{}_ipsrc".format(potiron.TYPE_PDNS_DICT)] = rid
        rid, name = self.get_rrnames(doc["ipdst"])
        if name != "":
            doc["a_{}_ipdst".format(potiron.TYPE_PDNS_DICT)] = rid
        doc["state"] |= potiron.STATE_PDNS_AN
    except Exception as e:
        potiron.errormsg("Failed to annotate with PDNS data. Cause={}".format(e))
    return doc
def welcome():
    """Render the landing page with the top-10 statistics for a single day."""
    try:
        desc = create_program_meta()
        params = build_params()
        emsg = check_database()
        if emsg is not None:
            return render_template('content.html', desc=desc, params=params,
                                   emsg=emsg)
        # By default use latest day
        day = get_latest_day()
        if request.method == 'POST':
            p = request.form.get('datepicker')
            # JavaScript Library does also some checks. Do not trust the
            # code running on client machines
            day = check_user_day(p)
            if day is None:
                return render_template('content.html', desc=desc,
                                       params=params,
                                       emsg="Invalid date specified")
        fields = list(red.smembers("ENFIELDS"))
        topdata = get_top_10_per_day(day, fields)
        # Convert back the selected date
        d = datetime.datetime.strptime(day, "%Y%m%d")
        selday = d.strftime("%Y-%m-%d")
        # Put a warning when no fields are selected
        if get_enabled_fields_num() == 0:
            emsg = "No data fields are selected. Please select some fields in \
the settings menu."
            return render_template('content.html', desc=desc, fields=fields,
                                   topdata=topdata, params=build_params(),
                                   seldate=selday, emsg=emsg)
        return render_template('content.html', desc=desc, fields=fields,
                               topdata=topdata, params=build_params(),
                               seldate=selday)
    except redis.ConnectionError as err:
        errormsg("Could not connect to redis. " + str(err))
        return render_template('offline.html', prefix=prefix)
def numerize_proto(pstr):
    """Translate an ipsumdump protocol marker into its numeric constant.

    :param pstr: protocol string: "I", "T", "U", "41", "-" or a plain
        protocol number
    :returns: numeric protocol, or potiron.PROTO_UNKNOWN when it cannot be
        determined
    """
    if pstr == "I":
        return potiron.PROTO_ICMP
    elif pstr == "T":
        return potiron.PROTO_TCP
    elif pstr == "U":
        return potiron.PROTO_UDP
    elif pstr == "41":
        return potiron.PROTO_ICMP6
    elif pstr == "-":
        # Avoid error messages when protocol is not set
        return potiron.PROTO_UNKNOWN
    # If there is a protocol number return it
    try:
        return int(pstr)
    except ValueError:
        errormsg("Unknown protocol " + pstr)
        return potiron.PROTO_UNKNOWN
    # DEAD-CODE FIX: removed an unreachable trailing
    # "return potiron.PROTO_UNKNOWN" (every path above already returns)
def numerize_proto(pstr):
    """Translate an ipsumdump protocol marker into its numeric constant.

    :param pstr: protocol string: "I", "T", "U", "41", "-" or a plain
        protocol number
    :returns: numeric protocol, or potiron.PROTO_UNKNOWN when it cannot be
        determined
    """
    if pstr == "I":
        return potiron.PROTO_ICMP
    elif pstr == "T":
        return potiron.PROTO_TCP
    elif pstr == "U":
        return potiron.PROTO_UDP
    elif pstr == "41":
        return potiron.PROTO_ICMP6
    elif pstr == "-":
        # Avoid error messages when protocol is not set
        return potiron.PROTO_UNKNOWN
    # If there is a protocol number return it
    try:
        return int(pstr)
    except ValueError:
        errormsg("Unknown protocol " + pstr)
        return potiron.PROTO_UNKNOWN
    # DEAD-CODE FIX: removed an unreachable trailing
    # "return potiron.PROTO_UNKNOWN" (every path above already returns)
def annoate_doc(self, doc):
    """Add GeoIP city/country annotations for ipdst and ipsrc to *doc*.

    Returns the document unchanged when no GeoIP handle is available.
    """
    if self.gi is None:
        # GeoIP initialisation failed earlier; degrade to a no-op
        return doc
    try:
        # Destination first, then source — same order as before
        for addr_key, out_prefix in (("ipdst", "dip"), ("ipsrc", "sip")):
            record = self.gi.record_by_addr(doc[addr_key])
            if record is None:
                continue
            city = record["city"]
            if city is not None and type(city) is str:
                doc[out_prefix + "city"] = unidecode(city)
            country = record["country_name"]
            if country is not None and type(country) is str:
                doc[out_prefix + "country"] = unidecode(country)
        doc['state'] = doc['state'] | potiron.STATE_GEO_AN
    except Exception as e:
        potiron.errormsg("Geoip annotation failed. Cause={}".format(e))
    return doc
def check_date(date):
    """Return True when *date* is a valid YYYYMMDD string, False otherwise."""
    try:
        if date is None:
            errormsg("check_date: No date was specified")
            return False
        if len(date) > 20:
            errormsg("check_date: Date field is too large")
            return False
        # Try to parse it. If it fails an exception is thrown
        datetime.datetime.strptime(date, "%Y%m%d")
        return True
    except ValueError as e:
        errormsg("check_date: Wrong date format." + str(e))
        # BUG FIX: used to fall through and return None, which slipped past
        # callers comparing the result with "is False"
        return False
def check_date(date):
    """Return True when *date* is a valid YYYYMMDD string, False otherwise."""
    try:
        if date is None:
            errormsg("check_date: No date was specified")
            return False
        if len(date) > 20:
            errormsg("check_date: Date field is too large")
            return False
        # Try to parse it. If it fails an exception is thrown
        datetime.datetime.strptime(date, "%Y%m%d")
        return True
    except ValueError as e:
        errormsg("check_date: Wrong date format." + str(e))
        # BUG FIX: used to fall through and return None, which slipped past
        # callers comparing the result with "is False"
        return False
# Command line setup for the all-in-one annotation driver.
parser = argparse.ArgumentParser(description="Do all potiron annotations")
parser.add_argument("-r", "--read", type=str, nargs=1,
                    help="Json document that should be annotated")
parser.add_argument("-d", "--directory", type=str, nargs=1,
                    help="Directory containing the annotated files")
parser.add_argument("-c", "--config", type=str, nargs=1, help="Config file")
args = parser.parse_args()

if args.config is None:
    errormsg("A config file must be specified")
    sys.exit(1)

# Load config file
config = configparser.ConfigParser()
# read_file() replaces the deprecated readfp(); the with-block also closes
# the handle, which the original never did
with open(args.config[0], 'r') as cfgfile:
    config.read_file(cfgfile)

# Access the fields; if not exists an exception is thrown
# FIXME implement cleaner error handling
config.get("pdns", "server")
config.getint("pdns", "port")
config.get("ipasn", "server")
config.getint("ipasn", "port")

# Read from stdin by default; -r switches to the given json document.
# NOTE(review): the handle is deliberately left open — the continuation of
# this script (not visible in this chunk) may keep using ``f``.
f = sys.stdin
if args.read is not None:
    f = open(args.read[0], "r")
docs = json.load(f)
# FIXME Mandatory fields are not checked
# Command line entry point: run the ipsumdump tool on a pcap file and
# transform its output into a json document.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Start the too ipsumpdump and\ transform the output in a json document")
    parser.add_argument("-r","--read", type=str, nargs=1, help = "Compressed pcap\ file or pcap filename")
    parser.add_argument("-c","--console", action='store_true', help="Log output also to console")
    parser.add_argument("-d","--directory", nargs=1, help="Result directory where\ the json documents are stored")
    args = parser.parse_args()
    # Mirror the console flag into the shared potiron logging module
    potiron.logconsole = args.console
    if args.read is not None:
        if os.path.exists(args.read[0]) is False:
            errormsg("The filename " + args.read[0] + " was not found")
            sys.exit(1)
    if args.directory is not None and os.path.isdir(args.directory[0]) is False:
        errormsg("The root directory is not a directory")
        sys.exit(1)
    if args.read is None:
        errormsg("At least a pcap file must be specified")
        sys.exit(1)
    try:
        rootdir = None
        if args.directory is not None:
            rootdir = args.directory[0]
        process_file(rootdir, args.read[0])
    # NOTE(review): chunk ends here — the body of this except clause is not
    # visible; the Python 2 "except E,e" spelling is kept verbatim and
    # needs "except OSError as e" under Python 3.
    except OSError,e:
# NOTE(review): fragment — this chunk opens inside the help string of a
# parser.add_argument("-r", "--read", ...) call whose first tokens are
# outside the visible range; the final try's except clause is also cut off.
file or pcap filename")
parser.add_argument("-c", "--console", action='store_true',
                    help="Log output also to console")
parser.add_argument("-d", "--directory", nargs=1, help="Result directory where\ the json documents are stored")
args = parser.parse_args()
# Mirror the console flag into the shared potiron logging module
potiron.logconsole = args.console
if args.read is not None:
    if os.path.exists(args.read[0]) is False:
        errormsg("The filename " + args.read[0] + " was not found")
        sys.exit(1)
if args.directory is not None and os.path.isdir(
        args.directory[0]) is False:
    errormsg("The root directory is not a directory")
    sys.exit(1)
if args.read is None:
    errormsg("At least a pcap file must be specified")
    sys.exit(1)
try:
    rootdir = None
    if args.directory is not None:
        rootdir = args.directory[0]
    process_file(rootdir, args.read[0])
# NOTE(review): fragment — this chunk opens inside a parser.add_argument
# call for "-i"/"--input" whose first tokens are outside the visible
# range; the final try block's except clause is also cut off.
nargs=1, help="Compressed pcap file or pcap filename")
parser.add_argument("-c", "--console", action='store_true',
                    help="Log output also to console")
parser.add_argument(
    "-o", "--outputdir", nargs=1,
    help="Output directory where the json documents are stored")
args = parser.parse_args()
# Mirror the console flag into the shared potiron logging module
potiron.logconsole = args.console
if args.input is None:
    errormsg("At least a pcap file must be specified")
    sys.exit(1)
else:
    if os.path.exists(args.input[0]) is False:
        errormsg("The filename {} was not found".format(args.input[0]))
        sys.exit(1)
if args.outputdir is not None and os.path.isdir(
        args.outputdir[0]) is False:
    errormsg("The root directory is not a directory")
    sys.exit(1)
try:
    rootdir = None
    if args.outputdir is not None:
        rootdir = args.outputdir[0]
# NOTE(review): fragment — opens inside process_file(): error handling of
# the ipsumdump subprocess, then stores the collected packets as json.
if proc.returncode != 0:
    errmsg = "".join(proc.stderr.readlines())
    raise OSError("ipsumdump failed. Return code {}. {}".format(proc.returncode, errmsg))
potiron.store_packet(rootdir, filename, json.dumps(allpackets))


# Command line entry point: run ipsumdump on a pcap and emit a json document.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Start the too ipsumpdump and transform the output in a json document")
    parser.add_argument("-i", "--input", type=str, nargs=1, help="Compressed pcap file or pcap filename")
    parser.add_argument("-c", "--console", action='store_true', help="Log output also to console")
    parser.add_argument("-o", "--outputdir", nargs=1, help="Output directory where the json documents are stored")
    args = parser.parse_args()
    # Mirror the console flag into the shared potiron logging module
    potiron.logconsole = args.console
    if args.input is None:
        errormsg("At least a pcap file must be specified")
        sys.exit(1)
    else:
        if os.path.exists(args.input[0]) is False:
            errormsg("The filename {} was not found".format(args.input[0]))
            sys.exit(1)
    if args.outputdir is not None and os.path.isdir(args.outputdir[0]) is False:
        errormsg("The root directory is not a directory")
        sys.exit(1)
    try:
        rootdir = None
        if args.outputdir is not None:
            rootdir = args.outputdir[0]
        process_file(rootdir, args.input[0])
    # NOTE(review): the except clause closing this try is outside the chunk.
# Project-local annotator imports (kept as-is; this verifies they resolve)
from PotironAnGeo import AnnotateGeo
from PotironAnPDNS import AnnotatePDNS
from PotironAnASN import AnnotateASN
from potiron import get_file_struct
from potiron import errormsg
import potiron
import configparser

# Command line setup for the all-in-one annotation driver.
parser = argparse.ArgumentParser(description="Do all potiron annotations")
parser.add_argument("-r", "--read", type=str, nargs=1,
                    help="Json document that should be annotated")
parser.add_argument("-d", "--directory", type=str, nargs=1,
                    help="Directory containing the annotated files")
parser.add_argument("-c", "--config", type=str, nargs=1, help="Config file")
args = parser.parse_args()

if args.config is None:
    errormsg("A config file must be specified")
    sys.exit(1)

# Load config file
config = configparser.ConfigParser()
# read_file() replaces the deprecated readfp(); the with-block also closes
# the handle, which the original never did
with open(args.config[0], 'r') as cfgfile:
    config.read_file(cfgfile)

# Access the fields; if not exists an exception is thrown
# FIXME implement cleaner error handling
config.get("pdns", "server")
config.getint("pdns", "port")
config.get("ipasn", "server")
config.getint("ipasn", "port")

# Read from stdin by default; -r switches to the given json document.
# NOTE(review): the handle is deliberately left open — the continuation of
# this script (not visible in this chunk) may keep using ``f``.
f = sys.stdin
if args.read is not None:
    f = open(args.read[0], "r")
docs = json.load(f)
# FIXME Mandatory fields are not checked
# NOTE(review): fragment — tail of deliver_custom(); the enclosing def and
# its outer try: are outside this chunk. Python 2 except syntax is kept
# verbatim ("except E,e" must become "except E as e" under Python 3).
today = get_latest_day()
if date is not None:
    try:
        d = datetime.datetime.strptime(date, "%Y-%m-%d")
        today = d.strftime("%Y%m%d")
    except ValueError,e:
        errormsg("deliver_custom: Invalid timestamp. "+str(e))
if red.sismember("FIELDS", fieldname):
    return deliver_evolution(today, fieldname, field)
emsg = "An invalid parameter was provided"
return render_template('content.html', desc=create_program_meta, params=build_params(), emsg=emsg)
except redis.ConnectionError,err:
    errormsg("deliver_custom: Cannot connect to redis "+str(err))
    return render_template('offline.html', prefix=prefix)


# Deliver all the files in static directory
# TODO ../../../etc/passwd seems not to work
@app.route('/static/<path:filename>')
def send_foo(filename):
    return send_from_directory('static/', filename)


def load_selected_fields():
    # NOTE(review): truncated — the remainder of this function is outside
    # the visible chunk.
    fields = []
    for field in red.smembers("FIELDS"):
        k = "ENFIELDS"
        obj = dict()
        obj['name'] = field
# NOTE(review): fragment — tail of deliver_custom(); the enclosing def,
# its outer try: and the surrounding conditional are outside this chunk.
# Python 2 except syntax is kept verbatim.
try:
    d = datetime.datetime.strptime(date, "%Y-%m-%d")
    today = d.strftime("%Y%m%d")
except ValueError, e:
    errormsg("deliver_custom: Invalid timestamp. " + str(e))
if red.sismember("FIELDS", fieldname):
    return deliver_evolution(today, fieldname, field)
emsg = "An invalid parameter was provided"
return render_template('content.html', desc=create_program_meta, params=build_params(), emsg=emsg)
except redis.ConnectionError, err:
    errormsg("deliver_custom: Cannot connect to redis " + str(err))
    return render_template('offline.html', prefix=prefix)


# Deliver all the files in static directory
# TODO ../../../etc/passwd seems not to work
@app.route('/static/<path:filename>')
def send_foo(filename):
    return send_from_directory('static/', filename)


def load_selected_fields():
    # NOTE(review): truncated — the remainder of this function is outside
    # the visible chunk.
    fields = []
    for field in red.smembers("FIELDS"):
        k = "ENFIELDS"
        obj = dict()