def subscription_event_delete(subscriber_id):
    """Return and delete all pending subscription events for a subscriber.

    Also touches the subscription's last_datetime so the subscriber is
    marked as recently seen.  Responds 404 when no subscription row exists
    for subscriber_id.
    """
    conn = util.getConn()
    cur = conn.cursor()
    row = util.getRowsFromTable("subscription",
                                extraWhere="subscriber_id=%s",
                                extraArgs=(subscriber_id, ),
                                conn=conn)
    if not row:
        # BUGFIX: close the cursor as well as the connection before aborting.
        cur.close()
        conn.close()
        abort(404, 'Subscriber not found')
    util.updateRowById("subscription", row[0]["id"],
                       {"last_datetime": util.getDateFromParam("now")},
                       cursor=cur)
    rows = util.getRowsFromTable('subscription_event',
                                 extraWhere="subscriber_id=%s",
                                 extraArgs=(subscriber_id, ),
                                 conn=conn)
    if rows:
        cur.execute("DELETE FROM subscription_event where subscriber_id = %s",
                    (subscriber_id, ))
    # Commit even when there were no events so the last_datetime update sticks.
    conn.commit()
    # BUGFIX: the cursor was never closed.
    cur.close()
    conn.close()
    return util.responseJSON({"events": rows})
def service_alarms_user_settings():
    """List alarm settings plus contact info for one user or all users.

    When the request carries a user, only that account is reported;
    otherwise every account is included.  Accounts with no stored alarm
    settings get the default {"all_disabled": True}.
    """
    if "user" in request:
        accounts = util.getRowsFromTable('account',
                                         extraWhere="id = %s",
                                         extraArgs=(request.user.id, ))
    else:
        accounts = util.getRowsFromTable('account')
    results = []
    for account in accounts:
        configs = json.loads(account["configs"]) if account["configs"] else None
        settings = {"all_disabled": True}
        if configs and "alarm_settings" in configs:
            settings = configs["alarm_settings"]
        results.append({
            "user_info": {
                "email": account["email"],
                "name": account["name"],
                "phone": account["phone"]
            },
            "alarm_settings": settings
        })
    return results
def luts_mc_maxtemp_fast():
    """Return every mc/maxtemp LUT together with its value rows ordered by mc."""
    conn = util.getConn()
    try:
        luts = util.getRowsFromTable("mc_maxtemp_lut", conn=conn)
        for lut in luts:
            lut["values"] = util.getRowsFromTable(
                "mc_maxtemp_lut_value",
                extraWhere="mc_maxtemp_lut_id=%s",
                extraArgs=(lut['id'], ),
                orderStatement=" ORDER BY mc ",
                conn=conn)
    finally:
        # BUGFIX: the connection was never closed, leaking it on every call.
        conn.close()
    return util.responseJSON({'luts': luts})
def fill_list_fast():
    """List fills, optionally filtered by bin id and/or a datetime span.

    Query params:
      bin_id        -- optional int; restrict to one bin (400 if not an int).
      begin_span1/2 -- optional datetimes; keep fills whose air-begin or
                       filled time falls inside [span1, span2].

    Each fill row is augmented with its during-drying MC readings and its
    sheller windows, ordered by fill datetime.
    """
    begin_span1 = util.getDateFromParam(request.params.get("begin_span1"))
    begin_span2 = util.getDateFromParam(request.params.get("begin_span2"))
    bin_id = request.params.get("bin_id", None)
    extra_where = ""
    extra_args = tuple()
    if bin_id:
        try:
            bin_id = int(bin_id)
        except (ValueError, TypeError):
            # BUGFIX: was a bare except, which would also swallow
            # KeyboardInterrupt/SystemExit.
            abort(400, 'Invalid bin_id')
        if extra_where:
            extra_where += " AND "
        extra_where += " bin_id = %s "
        extra_args += (bin_id, )
    if begin_span1 and begin_span2:
        if extra_where:
            extra_where += " AND "
        extra_where += "((air_begin_datetime >= %s AND air_begin_datetime <= %s) OR (filled_datetime >= %s AND filled_datetime <= %s) ) "
        extra_args += (begin_span1, begin_span2, begin_span1, begin_span2)
    conn = util.getConn()
    rows = util.getRowsFromTable(
        'fill',
        extraWhere=extra_where,
        extraArgs=extra_args,
        orderStatement=" ORDER by coalesce(filled_datetime, air_begin_datetime) ",
        conn=conn)
    cur = conn.cursor()
    for row in rows:
        cur.execute(
            "SELECT mc, datetime FROM fill_during_mc WHERE fill_id = %s ORDER BY datetime",
            (row["id"], ))
        row['during_mc'] = cur.fetchall()
        row['sheller_windows'] = util.getRowsFromTable(
            'fill_sheller_window',
            extraWhere=" fill_id = %s ",
            extraArgs=(row['id'], ),
            orderStatement=" ORDER BY begin_datetime ",
            conn=conn)
    cur.close()
    conn.close()
    return util.responseJSON({'fills': rows})
def air_deductions_fast():
    """Return all air-deduction windows, optionally limited to a begin-date span."""
    span_start = util.getDateFromParam(request.params.get("begin_span1"))
    span_end = util.getDateFromParam(request.params.get("begin_span2"))
    if span_start and span_end:
        rows = util.getRowsFromTable(
            "air_deduct",
            extraWhere=" ( begin_datetime >= %s AND begin_datetime <= %s ) ",
            extraArgs=(span_start, span_end),
            orderStatement=" ORDER BY begin_datetime ")
    else:
        # no complete span given: return everything
        rows = util.getRowsFromTable(
            "air_deduct", orderStatement=" ORDER BY begin_datetime ")
    return util.responseJSON({'air_deductions': rows})
def alarm_past_events_fast():
    """Return ended alarm events that began after ?after_datetime:
    all global events plus the requesting user's own alarm events."""
    after_dt = util.getDateFromParam(request.params.get("after_datetime"))
    if not after_dt:
        abort(400, 'No after_datetime argument.')
    conn = util.getConn()
    extra_where = " (begin_datetime > %s AND end_datetime IS NOT NULL) "
    global_events = util.getRowsFromTable("alarm_global_event",
                                          extraWhere=extra_where,
                                          extraArgs=(after_dt, ),
                                          conn=conn)
    # BUGFIX: the per-user query used begin_datetime < %s, returning events
    # *before* after_datetime while the global query above returns events
    # after it; use > so both halves of the response agree.
    sql = "SELECT alarm_event.id FROM alarm_event, alarm WHERE alarm_id = alarm.id AND alarm.account_id = %s AND (alarm_event.begin_datetime > %s AND alarm_event.end_datetime IS NOT NULL) "
    cur = conn.cursor(cursor_factory=dbapi2extras.DictCursor)
    cur.execute(sql, (request.user.id, after_dt))
    results = []
    for row in cur:
        # copy DictRow into a plain dict so it serializes cleanly
        results.append(dict((key, row[key]) for key in row.keys()))
    global_events.extend(results)
    cur.close()
    conn.close()
    return util.responseJSON({'events': global_events})
def r_alarm_current_events_fast(conn=None):
    """Return currently-active alarm events (begun, not yet ended):
    all global events plus the requesting user's own alarm events.

    conn -- optional open DB connection; when omitted one is opened and
    closed locally.
    """
    own_conn = not conn
    if own_conn:
        conn = util.getConn()
    now = util.getDateFromParam("now")
    events = util.getRowsFromTable(
        "alarm_global_event",
        extraWhere=" (begin_datetime < %s AND end_datetime IS NULL) ",
        extraArgs=(now, ),
        conn=conn)
    # TODO: Add user events
    sql = "SELECT alarm_event.* from alarm_event, alarm WHERE alarm_id = alarm.id AND alarm.account_id = %s AND (alarm_event.begin_datetime < %s AND alarm_event.end_datetime IS NULL) "
    cur = conn.cursor(cursor_factory=dbapi2extras.DictCursor)
    cur.execute(sql, (request.user.id, now))
    # copy each DictRow into a plain dict so it serializes cleanly
    for row in cur:
        events.append(dict((key, row[key]) for key in row.keys()))
    cur.close()
    if own_conn:
        conn.close()
    return util.responseJSON({'events': events})
def allFills_forBin_forYear(year, bin_id):
    """Return a sorted list of simplified fill dicts for one bin and year.

    A fill belongs to *year* when either its filled_datetime or its
    air_begin_datetime falls in that year.  Missing filled/emptied times
    fall back to the air start/stop times.  Result is sorted by fill_dt.
    """
    conn = util.getConn()
    dbFills = util.getRowsFromTable("fill",
                                    extraWhere="bin_id=%s",
                                    extraArgs=(bin_id, ),
                                    conn=conn)
    # BUGFIX: the connection was never closed, leaking it on every call.
    conn.close()
    # create a list of dicts, one for each fill in the requested year
    theFills = []
    for dbf in dbFills:
        if ((dbf["filled_datetime"] and dbf["filled_datetime"].year == year)
                or (dbf["air_begin_datetime"]
                    and dbf["air_begin_datetime"].year == year)):
            theFills.append({
                "fill_num": dbf["fill_number"],
                "fill_dt": dbf["filled_datetime"],
                "empty_dt": dbf["emptied_datetime"],
                "air_begin_dt": dbf["air_begin_datetime"],
                "air_end_dt": dbf["air_end_datetime"]
            })
    # if no filled/emptied dt, use the air start/stop dt
    for fill in theFills:
        if not fill["fill_dt"]:
            fill["fill_dt"] = fill["air_begin_dt"]
        if not fill["empty_dt"]:
            fill["empty_dt"] = fill["air_end_dt"]
    # sort list of fill dicts by fill date/time
    theFills.sort(key=lambda f: f["fill_dt"])
    return theFills
def addBinSectionNamesToConfig(cfg):
    """Populate cfg["binSectionNames"] with id -> name for every bin section.

    Assumes cfg already contains a "binSectionNames" mapping.
    """
    conn = util.getConn()
    sections = util.getRowsFromTable("bin_section", columns="id,name", conn=conn)
    names = cfg["binSectionNames"]
    for section in sections:
        names[section["id"]] = section["name"]
    conn.close()
def sensor_types():
    """List sensor-type resource links, optionally filtered by ?device_type_id."""
    device_type_id = request.params.get('device_type_id', None)
    if not device_type_id:
        # no filter: link every sensor type
        ids = util.getIdsFromTable("sensor_type")
        return {
            'xlink': ['/resources/conf/sensor_types/' + str(id) for id in ids]
        }
    try:
        device_type_id = int(device_type_id)
    except (ValueError, TypeError):
        abort(400, 'Invalid device_type_id')
    rows = util.getRowsFromTable("device_type_to_sensor_type",
                                 extraWhere="device_type_id=%s",
                                 extraArgs=(device_type_id, ))
    return {
        'xlink': [
            '/resources/conf/sensor_types/' + str(row["sensor_type_id"])
            for row in rows
        ]
    }
def accounts_recover_update():
    """Complete password recovery: set a new password for the account
    matching an unexpired (<24h) recovery code.

    Params: code (the recovery hash) and new_password.  204 on success,
    400 when parameters are missing or the code is invalid/expired.

    NOTE(review): passwords are salted SHA-1; a modern KDF (bcrypt/PBKDF2)
    would be preferable, but changing it would break existing login checks.
    """
    recovery_hash = request.params.get('code', None)
    new_password = request.params.get('new_password', None)
    if not recovery_hash or not new_password:
        abort(400, 'Missing parameters.')
    matches = util.getRowsFromTable(
        table="account",
        columns="id",
        extraWhere=
        "recovery_hash=%s AND recovery_datetime > now() - interval '24h'",
        extraArgs=(recovery_hash, ),
        checkEnabled=True)
    if not matches:
        abort(400, 'Invalid code')
    account = matches[0]
    seed = generatePassword() + generatePassword()
    util.updateRowById(
        "account", account["id"], {
            'seed': seed,
            'password': hashlib.sha1(seed + new_password).hexdigest(),
            'recovery_hash': None,
            'recovery_datetime': None
        })
    return HTTPResponse(output="Password updated", status=204)
def get_fill_by_id(fill_id, conn=None):
    """Fetch one fill row by id, augmented with its during-drying MC
    readings and its sheller windows.  Returns None when not found.

    conn -- optional open DB connection; opened and closed locally if omitted.
    """
    own_conn = not conn
    if own_conn:
        conn = util.getConn()
    fill = util.getRowFromTableById('fill', int(fill_id), conn=conn)
    if fill:
        cur = conn.cursor()
        cur.execute(
            "SELECT mc, datetime FROM fill_during_mc WHERE fill_id = %s ORDER BY datetime",
            (fill_id, ))
        fill['during_mc'] = cur.fetchall()
        cur.close()
        fill['sheller_windows'] = util.getRowsFromTable(
            'fill_sheller_window',
            extraWhere=" fill_id = %s ",
            extraArgs=(fill_id, ),
            orderStatement=" ORDER BY begin_datetime ",
            conn=conn)
    if own_conn:
        conn.close()
    return fill
def latex_create_fill_report_full(reportYear, fileNameBase=None, displayTZStr=None):
    """Build the full (one-page-per-fill) LaTeX fill report for a year.

    reportYear   -- include only fills whose air-begin (or filled) year matches.
    fileNameBase -- base name for the .tex in ./gen_data/; random temp name if None.
    displayTZStr -- not used inside this function; presumably a display
                    timezone for helpers -- TODO confirm.

    Returns fileNameBase; the PDF is produced by LatexFile as a side effect.
    """
    #
    # create random file name, open for writing, and print file preamble stuff
    #
    if not fileNameBase:
        fileNameBase = tempfile.mktemp(prefix="fillRpt_full_", dir=".")
    filePath = "./gen_data/" + fileNameBase + ".tex"
    fh = open(filePath, 'w')
    fh.write(
        "\\documentclass{IsadoreReportFillFull}\n\\rptTitle{Fill report (full)}\n\\begin{document}\n\n"
    )
    #
    # get all fills (will filter by year later)
    #
    conn = util.getConn()
    # TODO: sort by fill number (in SQL?)
    # TODO: have DB only return fills from proper year to save CPU?
    dbFills = util.getRowsFromTable("fill", conn=conn)
    for dbFi, dbF in enumerate(dbFills):
        # progress output to stdout (Python 2 print statement)
        print dbFi, "/", len(dbFills)
        # prefer the air-begin year; fall back to the filled year
        fillYear = dbF["air_begin_datetime"].year if dbF[
            "air_begin_datetime"] != None else dbF["filled_datetime"].year
        if fillYear == reportYear:
            #
            # create fill plot image
            #
            startDT = util.getStartAirDT(dbF)
            endDT = util.getStopAirDT(dbF)
            fillGraphPath = None
            try:
                # binSecID_upper / binSecID_lower are not defined in this
                # function -- presumably module-level bin-section ids; confirm.
                fillGraphPath = fillReport.createFillGraph(
                    startDT, endDT, dbF["bin_id"], binSecID_upper,
                    binSecID_lower)
            except:
                # TODO: log error
                # best-effort: the page is still printed without the graph
                print "plot of fill data not created!"
            #
            # print the fill rpt page for the given fill if
            # the fill is from the correct year
            #
            fillReport.printFillPage(dbF, fh, fillGraphPath)
            fh.write("\n\\newpage\n\n")
            fh.write("%%% next fill %%%\n\n")
    conn.close()
    #
    # end writing to file
    #
    fh.write("\n\n\\end{document}")
    fh.close()
    #
    # latex
    #
    LatexFile(LATEX_IO_DIR, fileNameBase)
    #
    # return PDF
    #
    return fileNameBase
def luts_mc_maxtemp_getsingle(id):
    """Return one mc/maxtemp LUT (by id) with its value rows ordered by mc.

    Responds 404 when the LUT does not exist.
    """
    conn = util.getConn()
    lut = util.getRowFromTableById("mc_maxtemp_lut", id, conn=conn)
    if not lut:
        # BUGFIX: a missing id used to raise TypeError (HTTP 500); return 404.
        conn.close()
        abort(404, 'LUT not found')
    lut["values"] = util.getRowsFromTable("mc_maxtemp_lut_value",
                                          extraWhere="mc_maxtemp_lut_id=%s",
                                          extraArgs=(lut['id'], ),
                                          orderStatement=" ORDER BY mc ",
                                          conn=conn)
    # BUGFIX: the connection was never closed, leaking it on every call.
    conn.close()
    return util.responseJSON(lut)
def devices_clone():
    """Clone all device rows (and their sensors) from one year to another.

    Query params from_year/to_year (ints).  Any existing devices for
    to_year are deleted first.  Responds 204 on success, 400 on bad or
    missing parameters or when from_year has no devices.
    """
    # TODO: Clone sensor mirrors even though not part of device anymore?
    from_year = request.params.get('from_year')
    to_year = request.params.get('to_year')
    if not from_year or not to_year:
        abort(400, 'Missing parameters')
    try:
        from_year = int(from_year)
        to_year = int(to_year)
    except ValueError:
        abort(400, 'Bad parameters')
    conn = util.getConn()
    cursor = conn.cursor()
    devicesl = util.getRowsFromTable("device",
                                     extraWhere='year=%s',
                                     extraArgs=(from_year, ),
                                     conn=conn)
    if len(devicesl) == 0:
        # BUGFIX: close the cursor and connection before aborting (both
        # were leaked on this path).
        cursor.close()
        conn.close()
        abort(400, 'Bad parameters')
    cursor.execute('DELETE FROM device WHERE year=%s', (to_year, ))
    for device in devicesl:
        nd = device.copy()
        del nd['id']
        nd['year'] = to_year
        nd['id'] = util.insertRow('device', nd, cursor)
        # Clone this device's sensors, re-pointing them at the new device row.
        sensors = util.getRowsFromTable('sensor',
                                        extraWhere='device_id=%s',
                                        extraArgs=(device['id'], ),
                                        conn=conn)
        for sensor in sensors:
            ns = sensor.copy()
            del ns['id']
            ns['device_id'] = nd['id']
            ns['id'] = util.insertRow('sensor', ns, cursor)
    conn.commit()
    cursor.close()
    conn.close()
    return HTTPResponse(output='devices configuration cloned.', status=204)
def sensors_get_fast():
    """List all sensor rows attached to the device given by ?device_id."""
    device_id = request.params.get("device_id", None)
    if not device_id:
        abort(400, "Bad parameter.")
    try:
        device_id = int(device_id)
    except (ValueError, TypeError):
        # BUGFIX: was a bare except, which would also swallow
        # KeyboardInterrupt/SystemExit.
        abort(400, "Bad parameter.")
    rows = util.getRowsFromTable("sensor",
                                 extraWhere="device_id = %s",
                                 extraArgs=(device_id, ))
    return {"sensors": rows}
def get_subscribers(event, conn):
    """Return subscriber_ids of every subscription whose stored JSON
    "subscriptions" entries match *event* (via is_subscribed).

    Malformed subscription JSON is logged and the row skipped.
    """
    subscribers = []
    for sub in util.getRowsFromTable("subscription", conn=conn):
        try:
            if not sub["subscribed"]:
                continue
            payload = json.loads(sub["subscribed"])
            for entry in payload["subscriptions"]:
                if is_subscribed(event, entry):
                    subscribers.append(sub["subscriber_id"])
        except:
            logging.exception("Error parsing subscribed.")
    return subscribers
def devices_fast():
    """List devices (each with its sensors) for a year, optionally filtered
    by ?bin_id and/or ?bin_section_id.  Year defaults to the current year.
    """
    bin_id = request.params.get('bin_id')
    bin_section_id = request.params.get('bin_section_id')
    year = request.params.get('year')
    if not year:
        year = datetime.datetime.now().year
    extra_where = "year = %s "
    extra_args = (year, )
    if bin_id:
        try:
            bin_id = int(bin_id)
            extra_where += " AND bin_id = %s "
            extra_args += (bin_id, )
        except ValueError:
            abort(400, 'Invalid bin_id')
    if bin_section_id:
        try:
            bin_section_id = int(bin_section_id)
            extra_where += " AND bin_section_id = %s "
            extra_args += (bin_section_id, )
        except ValueError:
            abort(400, 'Invalid bin_section_id')
    conn = util.getConn()
    devicesl = util.getRowsFromTable('device',
                                     extraWhere=extra_where,
                                     extraArgs=extra_args,
                                     conn=conn)
    for device in devicesl:
        device['sensors'] = util.getRowsFromTable('sensor',
                                                  extraWhere='device_id = %s ',
                                                  extraArgs=(device['id'], ),
                                                  conn=conn)
    # BUGFIX: the connection was never closed, leaking it on every call.
    conn.close()
    return {'devices': devicesl}
def latex_create_fill_report_hybrid(reportYear, fileNameBase=None):
    """Build the per-hybrid LaTeX fill report for a year.

    Emits one table per hybrid code seen among *reportYear*'s fills and
    returns the path of the generated PDF.

    reportYear   -- include only fills whose air-start year matches.
    fileNameBase -- base name for the .tex in ./gen_data/; random if None.
    """
    #
    # create random file name, open for writing, and print file preamble stuff
    #
    if not fileNameBase:
        fileNameBase = tempfile.mktemp(prefix="fillRpt_hyb_", dir=".")
    filePath = "./gen_data/" + fileNameBase + ".tex"
    fh = open(filePath, 'w')
    fh.write(
        "\\documentclass{IsadoreReportHybridTab}\n\\rptTitle{Fill report (hybrid)}\n\\begin{document}\n\n"
    )
    #
    # get all fills (will filter by year later)
    #
    conn = util.getConn()
    # TODO: have DB only return fills from proper year to save CPU?
    tmpDBFills = util.getRowsFromTable("fill", conn=conn)
    # BUGFIX: the connection was never closed, leaking it on every call.
    conn.close()
    #
    # get list of hybrids used during the report year
    #
    hybridSet = list(
        set([
            tdbf["hybrid_code"] for tdbf in tmpDBFills
            if util.getStartAirDT(tdbf).year == reportYear
        ]))
    hybridList = [hs for hs in hybridSet if hs]
    #
    # create one table for each hybrid found
    #
    if hybridList:
        # grab fills for the current year
        dbFills = [
            tdbf for tdbf in tmpDBFills
            if util.getStartAirDT(tdbf).year == reportYear
        ]
        for hyb in hybridSet:
            hybridReport.printHybridTable(hyb, dbFills, fh)
    #
    # end file
    #
    fh.write("\n\n\\end{document}")
    # BUGFIX: the file was never closed before LaTeX ran, so buffered output
    # could be missing from the .tex file.
    fh.close()
    #
    # latex
    #
    LatexFile(LATEX_IO_DIR, fileNameBase)
    #
    # return PDF (the trailing "return None" was unreachable and removed)
    #
    return LATEX_IO_DIR + "/" + fileNameBase + ".pdf"
def buildAllBurnerControlsJSON(mid_name):
    """Build the list of burner-control PV/SP sensor descriptors for the
    current year's enabled burner devices.

    mid_name -- optional; when given, restrict to devices with this mid_name.

    Returns a list of dicts with type (PV_ID/SP_ID), device_type, sensor_id,
    addy, mid_name, bin_id and bin_sec_id keys.
    """
    retVal = []
    conn = util.getConn()
    # Match any burner device type for the current year with a real port.
    # BUGFIX: the OR chain is now parenthesized -- previously
    # "a OR b AND year=%s AND port>=0" bound AND tighter than OR, so devices
    # of the first burner type matched regardless of year/port.
    extraWhere = "(" + " OR ".join(
        ["device_type_id=%s"] * len(BURNER_DEVICE_IDS)
    ) + ") AND year=%s AND port >= 0"
    extraArgs = BURNER_DEVICE_IDS + (CURRENT_YEAR, )
    if mid_name:
        extraWhere += " AND mid_name = %s "
        extraArgs += (mid_name, )
    rows = util.getRowsFromTable(
        "device",
        columns="id,address,port,bin_id,bin_section_id,device_type_id,mid_name",
        extraWhere=extraWhere,
        extraArgs=extraArgs,
        checkEnabled=True,
        conn=conn)  # BUGFIX: reuse the open connection instead of a second one
    sensorWhere = "device_type_id"  # placeholder removed below
    sensorWhere = "device_id=%s AND sensor_type_id=%s"
    for tr in rows:
        # one descriptor per PV/SP sensor found on the device
        for sensor_type in (PV_ID, SP_ID):
            sensor = util.getRowsFromTable("sensor",
                                           extraWhere=sensorWhere,
                                           extraArgs=(tr["id"], sensor_type),
                                           checkEnabled=True,
                                           conn=conn)
            if sensor:
                retVal.append({
                    "type": sensor_type,
                    "device_type": tr["device_type_id"],
                    "sensor_id": sensor[0]["id"],
                    "addy": tr["address"],
                    "mid_name": tr["mid_name"],
                    "bin_id": tr["bin_id"],
                    "bin_sec_id": tr["bin_section_id"]
                })
    conn.close()
    return retVal
def alarms_get(alarm_id):
    """Return one alarm row by id, augmented with its list of contact type
    ids.  Responds 404 when the alarm does not exist."""
    # TODO: Check that alarm belongs to you, or are power user.
    alarm = util.getRowFromTableById('alarm', int(alarm_id))
    if not alarm:
        abort(404, "Alarm not found.")
    contact_rows = util.getRowsFromTable('alarm_contact',
                                         columns='alarm_contact_type_id',
                                         extraWhere='alarm_id=%s ',
                                         extraArgs=(alarm_id, ))
    alarm['alarm_contact_type_ids'] = [
        crow['alarm_contact_type_id'] for crow in contact_rows
    ]
    return alarm
def fill_config_list():
    """Return the fill_config row for ?year, falling back to the year-0
    (default) row, or {} when neither exists.  400 on a non-integer year."""
    year = request.params.get("year", None)
    try:
        year = int(year)
    except (ValueError, TypeError):
        # BUGFIX: was a bare except (a missing year raises TypeError,
        # a malformed one ValueError).
        abort(400, 'Invalid year')
    conn = util.getConn()
    try:
        rows = util.getRowsFromTable('fill_config',
                                     extraWhere="year=%s",
                                     extraArgs=(year, ),
                                     checkEnabled=False,
                                     conn=conn)
        if rows:
            return rows[0]
        # fall back to the year-0 default configuration
        rows = util.getRowsFromTable('fill_config',
                                     extraWhere="year=%s",
                                     extraArgs=(0, ),
                                     checkEnabled=False,
                                     conn=conn)
        if rows:
            return rows[0]
        return {}
    finally:
        # BUGFIX: the connection was never closed, leaking it on every call.
        conn.close()
def accounts_recover_check():
    """Check whether ?code is a valid, unexpired (<24h) recovery code.

    204 when valid, 404 when not, 400 when the code parameter is missing.
    """
    code = request.params.get("code", None)
    if not code:
        abort(400, 'Missing code parameter.')
    matches = util.getRowsFromTable(
        table="account",
        columns="id",
        extraWhere=
        "recovery_hash=%s AND recovery_datetime > now() - interval '24h'",
        extraArgs=(code, ),
        checkEnabled=True)
    if not matches:
        abort(404, "Invalid code.")
    return HTTPResponse(output="Valid code.", status=204)
def r_readings_data_latest(conn=None):
    """Return the latest reading rows, filtered by any of the bin_id,
    bin_section_id and read_type_id query parameters.

    conn -- optional open DB connection passed through to the query helper.
    Responds 404 when nothing matches, 400 on non-integer filter values.
    """
    # get parameter values
    bin_id = request.params.get("bin_id")
    bin_section_id = request.params.get("bin_section_id")
    read_type_id = request.params.get("read_type_id")
    extra_where = ""
    extra_args = ()
    # BUGFIX: the parameters were never converted with int(), which made
    # every "except ValueError" below unreachable; validate them now.
    if bin_id:
        try:
            bin_id = int(bin_id)
        except ValueError:
            abort(400, 'Invalid bin_id.')
        extra_where += " bin_id = %s"
        extra_args += (bin_id, )
    if bin_section_id:
        try:
            bin_section_id = int(bin_section_id)
        except ValueError:
            abort(400, 'Invalid bin_section_id')
        if extra_where:
            extra_where += " AND "
        extra_where += " bin_section_id = %s "
        extra_args += (bin_section_id, )
    if read_type_id:
        try:
            read_type_id = int(read_type_id)
        except ValueError:
            # BUGFIX: message said 'Invalid sensor_type_id' for this param
            abort(400, 'Invalid read_type_id')
        if extra_where:
            extra_where += " AND "
        extra_where += " read_type_id = %s "
        extra_args += (read_type_id, )
    # find read data
    reading_data = util.getRowsFromTable('reading_data_latest',
                                         columns='*',
                                         extraWhere=extra_where,
                                         extraArgs=extra_args,
                                         conn=conn)
    if not reading_data:
        abort(404, 'reading data latest not found')
    # return the matching latest-reading rows
    return util.responseJSON({"results": reading_data})
def sensor_data_latest():
    """Return the latest data rows for the comma-separated ?sensor_ids list."""
    sensor_ids = request.params.get("sensor_ids")
    try:
        sensor_ids = [int(s) for s in sensor_ids.split(',')]
    except (AttributeError, ValueError):
        # BUGFIX: was a bare except; a missing parameter raises
        # AttributeError (None.split) and a non-numeric id ValueError.
        abort(400, 'Invalid sensor_ids')
    extra_where = ""
    extra_args = []
    for sensor_id in sensor_ids:
        if extra_where:
            extra_where += " OR "
        extra_where += " sensor_id = %s "
        extra_args.append(sensor_id)
    rows = util.getRowsFromTable('sensor_data_latest',
                                 extraWhere=extra_where,
                                 extraArgs=extra_args)
    return util.responseJSON({"results": rows})
def latex_create_fill_report_tab(reportYear, fileNameBase=None):
    """Build the tabulated (one-row-per-fill) LaTeX fill report for a year.

    reportYear   -- include only fills whose air-begin (or filled) year matches.
    fileNameBase -- base name for the .tex in ./gen_data/; random temp name if None.

    NOTE(review): unlike the other report builders this returns None;
    presumably callers reuse the fileNameBase they passed in -- confirm.
    """
    #
    # create random file name, open for writing, and print file preamble stuff
    #
    if not fileNameBase:
        fileNameBase = tempfile.mktemp(prefix="fillRpt_tab_", dir=".")
    filePath = "./gen_data/" + fileNameBase + ".tex"
    fh = open(filePath, 'w')
    fh.write(
        "\\documentclass{IsadoreReportFillTab}\n\\rptTitle{Fill report (tabulated)}\n\\begin{document}\n\n"
    )
    #
    # get all fills (will filter by year later)
    #
    conn = util.getConn()
    # TODO: have DB only return fills from proper year to save CPU?
    tmpDBFills = util.getRowsFromTable("fill", conn=conn)
    #
    # sort by fill number
    #
    dbFills = [tdbf for tdbf in tmpDBFills]
    dbFills.sort(key=lambda x: x["fill_number"])
    #
    # start table env and print table rows
    #
    fh.write("\\begin{fillTable}\n")
    for dbFi, dbF in enumerate(dbFills):
        # if correct year, add corresponding row to table
        # (prefer the air-begin year; fall back to the filled year)
        fillYear = dbF["air_begin_datetime"].year if dbF[
            "air_begin_datetime"] != None else dbF["filled_datetime"].year
        if fillYear == reportYear:
            fillReportTabulated.printRow(dbF, fh)
            fh.write("\\fillTableNextRow\n")
    conn.close()
    #
    # end env and file
    #
    fh.write("\\end{fillTable}\n\n\\end{document}")
    fh.close()
    #
    # latex
    #
    LatexFile(LATEX_IO_DIR, fileNameBase)
def service_alarms_history_get():
    """Return alarm_history info keyed by each key in the JSON list ?keys.

    Responds 400 when the keys parameter is missing or not valid JSON.
    """
    try:
        keys = json.loads(request.params.get('keys', None))
    except (TypeError, ValueError):
        # (narrowed from a bare except: None -> TypeError, bad JSON -> ValueError)
        abort(400, 'Bad arguments')
    where = ""
    whereargs = []
    for key in keys:
        # BUGFIX: the original "first" flag was never cleared, so multiple
        # keys produced " key = %s  key = %s " (invalid SQL).  Join on the
        # accumulated clause instead.
        if where:
            where += " OR "
        where += " key = %s "
        whereargs.append(key)
    rows = util.getRowsFromTable("alarm_history",
                                 extraWhere=where,
                                 extraArgs=whereargs)
    ret = {}
    for row in rows:
        # BUGFIX: was json.dumps(rows["info"]) -- indexing the *list* of
        # rows instead of the current row (TypeError at runtime).
        ret[row["key"]] = json.dumps(row["info"])
    return ret
def computeMaxTempsMulti(times, binId, conn=None):
    """Compute the max-temp limit for each datetime in *times* for one bin.

    For every fill overlapping [times[0], times[-1]] (in air-begin order),
    looks up the fill's starting MC and LUT and fills the matching slice of
    the result via computeMaxTempsDB.  Times covered by no fill stay None.

    times -- ascending sequence of datetimes (must be non-empty).
    conn  -- optional open DB connection; opened (and now also closed)
             locally when omitted.
    """
    conn_given = conn is not None
    if not conn_given:
        conn = util.getConn()
    startDatetime = times[0]
    endDatetime = times[len(times) - 1]
    # fills that start inside the window, span its start, or are still open
    extraWhere = ' bin_id = %s AND '+\
        '((air_begin_datetime >= %s AND air_begin_datetime < %s) OR '+\
        '(air_begin_datetime <= %s AND air_end_datetime > %s) OR '+ \
        '(air_begin_datetime < %s AND air_end_datetime IS NULL))'
    extraArgs = (binId, startDatetime, endDatetime, startDatetime,
                 startDatetime, endDatetime)
    fills = util.getRowsFromTable("fill",
                                  extraWhere=extraWhere,
                                  extraArgs=extraArgs,
                                  orderStatement=' ORDER BY air_begin_datetime ',
                                  conn=conn)
    result = numpy.array([None] * len(times))
    startTimesIdx = 0
    ntimes = numpy.array(times)
    for fill in fills:
        (startMC, lutId) = getFillStartMCLUTId(fill['id'], fill=fill, conn=conn)
        if startMC == None and fill["pre_mc"]:
            # fall back to the average of the pre-fill MC samples
            startMC = sum(fill["pre_mc"]) / len(fill["pre_mc"])
        if startMC == None or lutId == None:
            # cannot compute limits for this fill; leave its times as None
            continue
        filterLogic, s, l = filterFillTimes(fill, ntimes[startTimesIdx:])
        ftimes = ntimes[startTimesIdx:][filterLogic]
        if len(ftimes) > 0:
            result[startTimesIdx + s:startTimesIdx + l +
                   1] = computeMaxTempsDB(fill['air_begin_datetime'],
                                          ftimes[-1], lutId, startMC, ftimes)
            startTimesIdx = startTimesIdx + l + 1
            if startTimesIdx >= len(ntimes):
                break
    if not conn_given:
        # BUGFIX: a locally-opened connection was never closed.
        conn.close()
    return result.tolist()
def subscription_event_update(subscriber_id):
    """Store a new "subscribed" JSON blob for a subscriber and touch
    last_datetime.

    400 on a missing or non-JSON subscribed parameter, 404 on an unknown
    subscriber, 204 on success.
    """
    subscribed = request.params.get("subscribed", None)
    if not subscribed:
        abort(400, 'subscribed parameter missing')
    try:
        json.loads(subscribed)
    except ValueError:
        # BUGFIX: was a bare except; json.loads raises ValueError on bad JSON.
        abort(400, 'subscribed invalid json')
    conn = util.getConn()
    cur = conn.cursor()
    row = util.getRowsFromTable("subscription",
                                extraWhere="subscriber_id=%s",
                                extraArgs=(subscriber_id, ),
                                conn=conn)
    if not row:
        # BUGFIX: an unknown subscriber used to raise IndexError (HTTP 500);
        # mirror subscription_event_delete and respond 404.
        cur.close()
        conn.close()
        abort(404, 'Subscriber not found')
    util.updateRowById('subscription', row[0]["id"], {
        "last_datetime": util.getDateFromParam("now"),
        "subscribed": subscribed
    }, cur)
    conn.commit()
    # BUGFIX: the cursor was never closed.
    cur.close()
    conn.close()
    return HTTPResponse(output="Subscription updated", status=204)
def accounts_recover():
    """Start password recovery: generate a unique recovery code for the
    account matching ?email, store it on the account, and email reset
    instructions containing the code.

    Responds 204 on success; 400 when the email is missing or unknown.
    """
    email = request.params.get('email', None)
    if (not email):
        abort(400, 'Email parameter not given.')
    user = util.getAccountByEmail(email)
    #TODO: See if recovery has been tried lately and refuse new one if so?
    if (not user):
        abort(400, 'Email does not exist.')
    # random SHA-1 code; the loop guarantees it is unique among enabled accounts
    recovery_hash = hashlib.sha1(generatePassword() + user.email).hexdigest()
    while (len(
            util.getRowsFromTable(table="account",
                                  columns="id",
                                  extraWhere="recovery_hash = %s",
                                  extraArgs=(recovery_hash, ),
                                  checkEnabled=True)) > 0):
        recovery_hash = hashlib.sha1(generatePassword() +
                                     user.email).hexdigest()
    util.updateRowById("account", user.id, {'recovery_hash': recovery_hash})
    # NOTE: expiry is enforced by the 24h checks in accounts_recover_check/update
    message = """%s: A request has been sent to reset the password to your Isadore account. If you did not intend to reset your password you may ignore this message. To continue the reset process follow the instructions below: Do one of the following: 1) Goto the following: https://%s/isadore/s/login.html?c=%s#fs2 OR 2) Type in the reset code in the form at: https://%s/isadore/s/login.html#fs2 Using the code: %s After 24 hours the reset code will expire and will have to send a new reset request if you wish to reset your password.\n\n""" % \
     (user.name, request.urlparts[1], recovery_hash, request.urlparts[1], recovery_hash)
    # logging.debug(message)
    util.sendEmail(user.email, '*****@*****.**', 'Isadore Password Recovery',
                   message)
    return HTTPResponse(output="Recovery Email Sent", status=204)