Example #1
def reports(meterId=None):
    """ Creates excel .xlsx reports for a given date range
    """
    if meterId is None:
        return 'Report API'
    else:
        if request.method == 'GET':
            params = request.args.to_dict()
        elif request.method == 'POST':
            params = request.form.to_dict()
        else:
            params = {}
        try:
            sDate = params['sDate']
            eDate = params['eDate']    
        except KeyError:
            return 'ERROR: URL must be in form meterNo?sDate=2014-06-01&eDate=2014-06-02'

        settings_filename = os.path.abspath('settings/dbExample.json')
        query_filename = os.path.abspath('sql/MeterReadings.sql')
        params_dict = {'METERID': meterId, 'SDATE':sDate, 'EDATE':eDate}
        hProfile, dProfile = database.run_query(settings_filename, query_filename, params_dict)

        OnePhase = False
        
        settings_filename = os.path.abspath('settings/dbExample.json')
        query_filename = os.path.abspath('sql/MeterEvents.sql')
        params_dict = {'METERID': meterId, 'SDATE':sDate, 'EDATE':eDate}
        hEvents, dEvents = database.run_query(settings_filename, query_filename, params_dict)
        filePath = excel.create_excel_report(meterId, sDate, eDate, OnePhase, hProfile, dProfile, 
                       hEvents, dEvents)
                       
        return render_template('report_download.html', filePath=filePath)
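Examples #1, #2, #6, #7 and #8 call database.run_query(settings_filename, query_filename, params_dict) and expect a (headings, rows) pair back. A minimal sketch of what such a helper might look like, purely for illustration; it assumes sqlite3, a 'dbPath' key in the settings JSON, and named :PARAM placeholders in the .sql files, none of which are confirmed by the original project:

import json
import sqlite3

def run_query(settings_filename, query_filename, params_dict):
    # Load connection settings; the 'dbPath' key is an assumed layout of the JSON file.
    with open(settings_filename) as f:
        settings = json.load(f)
    # Read the SQL text; the .sql files are assumed to use named placeholders
    # such as :METERID, :SDATE, :EDATE.
    with open(query_filename) as f:
        sql = f.read()
    conn = sqlite3.connect(settings['dbPath'])
    try:
        curs = conn.cursor()
        curs.execute(sql, params_dict)
        headings = [col[0] for col in curs.description]
        rows = curs.fetchall()
    finally:
        conn.close()
    return headings, rows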
Example #2
def meters(meterId=None):
    """ Generates the required data for the meters page
    """
    if meterId is None:
        h, d = get_meter_list()
        tableMeterList = {'headings':h, 'data':d}
        return render_template('meters.html', tableMeterList=tableMeterList)
    else:
        lat, lon = get_meter_coords(meterId)
        location = {'lat':lat, 'lon':lon}
        
        settings_filename = os.path.abspath('settings/dbExample.json')
        query_filename = os.path.abspath('sql/MonthlyReports.sql')
        params_dict = {'METERID': meterId}
        h, d = database.run_query(settings_filename, query_filename, params_dict)
        reports = {'headings':h, 'data':d}
        
        settings_filename = os.path.abspath('settings/dbExample.json')
        query_filename = os.path.abspath('sql/Last10Events.sql')
        params_dict = {'METERID': meterId}
        h, d = database.run_query(settings_filename, query_filename, params_dict)
        tableLast10Events = {'headings':h, 'data':d}        
        
        return render_template('meter.html', meterId=meterId,
                             tableLast10Events=tableLast10Events, 
                             location=location, reports=reports)
Example #3
def purge_ip(ip):
    yield database.run_query("""DELETE FROM reports
        WHERE cracker_id IN (
            SELECT id FROM crackers WHERE ip_address=?
            )""", ip)
    yield database.run_query("DELETE FROM crackers WHERE ip_address=?", ip)
    yield database.run_query("DELETE FROM legacy WHERE ip_address=?", ip)
    returnValue(0)
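purge_ip above (and several functions below) uses Twisted's inlineCallbacks style: each yield waits on a Deferred and returnValue() supplies the result; the decorator itself is not shown in these snippets. A minimal standalone sketch of the pattern, with a stubbed query function that is an assumption rather than the project's database module:

from twisted.internet import defer

def stub_run_query(sql, *args):
    # Stand-in for database.run_query: returns an already-fired Deferred.
    return defer.succeed([])

@defer.inlineCallbacks
def purge_ip_sketch(ip):
    # Each yield suspends the generator until the query's Deferred fires.
    yield stub_run_query("DELETE FROM crackers WHERE ip_address=?", ip)
    defer.returnValue(0)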
Example #4
def upload_data(clean_data):
    try:
        for i in range(len(clean_data)):
            # NOTE: building SQL by string concatenation is fragile and open to SQL
            # injection; see the parameterized sketch below for a safer pattern.
            sql_command = ('INSERT INTO dbo.player_data_complete (FullName,GameDate,HomeTeam,VisitorTeam,DayofWeek,WL,Min,FgPct,FtPct,Reb,Ast,Stl,Blk,Tov,Pts) VALUES ('
                           + "'" + str(player_name) + "','" + str(clean_data["gameDate"][i]) + "','" + str(clean_data["home"][i]) + "','"
                           + str(clean_data["away"][i]) + "','" + str(clean_data["DayofWeek"][i]) + "','" + str(clean_data["wl"][i]) + "'," + str(clean_data["min"][i]) + ",'" + str(clean_data["fgPct"][i]) + "','" + str(clean_data["ftPct"][i]) + "',"
                           + str(clean_data["reb"][i]) + "," + str(clean_data["ast"][i]) + "," + str(clean_data["stl"][i]) + "," + str(clean_data["blk"][i]) + "," + str(clean_data["tov"][i]) + ","
                           + str(clean_data["pts"][i]) + ")")
            print sql_command
            db.run_query(sql_command)
    except Exception:
        # Swallowing all exceptions hides failed inserts; logging the error would be safer.
        pass
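A safer sketch of the same insert using query parameters instead of string concatenation; the connection argument and ?-style placeholders (sqlite3/pyodbc style) are assumptions for illustration, not the project's db.run_query helper:

def upload_data_parameterized(conn, player_name, clean_data):
    # One parameter tuple per game; placeholders avoid the quoting and injection
    # problems of hand-built SQL strings.
    sql = ("INSERT INTO dbo.player_data_complete "
           "(FullName,GameDate,HomeTeam,VisitorTeam,DayofWeek,WL,Min,FgPct,FtPct,Reb,Ast,Stl,Blk,Tov,Pts) "
           "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)")
    rows = [(player_name, clean_data["gameDate"][i], clean_data["home"][i],
             clean_data["away"][i], clean_data["DayofWeek"][i], clean_data["wl"][i],
             clean_data["min"][i], clean_data["fgPct"][i], clean_data["ftPct"][i],
             clean_data["reb"][i], clean_data["ast"][i], clean_data["stl"][i],
             clean_data["blk"][i], clean_data["tov"][i], clean_data["pts"][i])
            for i in range(len(clean_data["gameDate"]))]
    conn.cursor().executemany(sql, rows)
    conn.commit()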
Example #5
def download_from_legacy_server():
    if config.legacy_server is None or config.legacy_server == "":
        returnValue(0)

    logging.info("Downloading hosts from legacy server...")
    rows = yield database.run_query('SELECT `value` FROM info WHERE `key`="last_legacy_sync"')
    last_legacy_sync_time = int(rows[0][0])

    try:
        server = yield deferToThread(xmlrpclib.ServerProxy, config.legacy_server)

        response = yield deferToThread(server.get_new_hosts, 
            last_legacy_sync_time, config.legacy_threshold, [],
            config.legacy_resiliency)
        try:
            last_legacy_sync_time = int(response["timestamp"])
        except (KeyError, ValueError):
            logging.error("Illegal timestamp {} from legacy server".format(response.get("timestamp")))
        #Registry.DBPOOL.runOperation('UPDATE info SET `value`=%s WHERE `key`="last_legacy_sync"', (str(last_legacy_sync_time),))
        database.run_operation('UPDATE info SET `value`=? WHERE `key`="last_legacy_sync"', str(last_legacy_sync_time))
        now = time.time()
        logging.debug("Got {} hosts from legacy server".format(len(response["hosts"])))
        for host in response["hosts"]:
            legacy = yield Legacy.find(where=["ip_address=?",host], limit=1)
            if legacy is None:
                logging.debug("New host from legacy server: {}".format(host))
                legacy = Legacy(ip_address=host, retrieved_time=now)
            else:
                logging.debug("Known host from legacy server: {}".format(host))
                legacy.retrieved_time = now
            yield legacy.save()
    except Exception, e:
        logging.error("Error retrieving info from legacy server: {}".format(e))
Example #6
def get_meter_list():
    settings_filename = os.path.abspath('settings/dbExample.json')
    query_filename = os.path.abspath('sql/ListMeters.sql')
    params_dict = {}
    headings, rows = database.run_query(settings_filename, query_filename, params_dict)    

    return headings, rows
Example #7
def get_meter_coords(meterId):
    settings_filename = os.path.abspath('settings/dbExample.json')
    query_filename = os.path.abspath('sql/MeterCoords.sql')
    params_dict = {'METERID': meterId}
    headings, rows = database.run_query(settings_filename, query_filename, params_dict)

    lat = rows[0][0]
    lon = rows[0][1]

    return lat, lon
Example #8
def get_unbal_chart_data(meterId):
    """ Return json object for flot chart
    """
    query_filename = os.path.abspath('sql/Last50UnbalReadings.sql')
    params_dict = {'METERID': meterId}
    h, d = database.run_query(SETTINGS_FILENAME, query_filename, params_dict)
    
    chartdata = {}
    chartdata['label'] = 'Unbalance Profile'
    chartdata['a'] = [] 
    
    for row in d:
        # flot expects x values as milliseconds since the epoch
        dTime = datetime.datetime.strptime(row[0], '%Y-%m-%d %H:%M:%S')
        ts = int(time.mktime(dTime.timetuple()) * 1000)
        chartdata['a'].append([ts, row[1]])
        
    return chartdata  
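A hedged usage sketch showing how get_unbal_chart_data might be served to the flot chart; the Flask app and route name are assumptions, not part of the original code:

from flask import Flask, jsonify

app = Flask(__name__)

@app.route('/meters/<meterId>/unbalance')
def unbalance_chart(meterId):
    # Serialise the flot-ready dict built by get_unbal_chart_data.
    return jsonify(get_unbal_chart_data(meterId))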
Example #9
def get_qualifying_crackers(min_reports, min_resilience, previous_timestamp,
        max_crackers, latest_added_hosts):
    # Thanks to Anne Bezemer for the algorithm in this function.
    # See https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=622697
   
    # This query takes care of conditions (a) and (b)
    # cracker_ids = yield database.runGetPossibleQualifyingCrackerQuery(min_reports, min_resilience, previous_timestamp)
    cracker_ids = yield database.run_query("""
            SELECT DISTINCT c.id, c.ip_address 
            FROM crackers c 
            WHERE (c.current_reports >= ?)
                AND (c.resiliency >= ?)
                AND (c.latest_time >= ?)
            ORDER BY c.first_time DESC
            """, min_reports, min_resilience, previous_timestamp)
  
    if cracker_ids is None:
        returnValue([])

    # Now look for conditions (c) and (d)
    result = []
    for c in cracker_ids:
        cracker_id = c[0]
        if c[1] in latest_added_hosts:
            logging.debug("Skipping {}, just reported by client".format(c[1]))
            continue
        cracker = yield Cracker.find(cracker_id)
        if cracker is None:
            continue
        logging.debug("Examining cracker:")
        logging.debug(cracker)
        reports = yield cracker.reports.get(orderby="first_report_time ASC")
        #logging.debug("reports:")
        #for r in reports:
        #    logging.debug("    "+str(r))
        logging.debug("r[m-1].first, prev: {}, {}".format(reports[min_reports-1].first_report_time, previous_timestamp))
        if (len(reports)>=min_reports and 
            reports[min_reports-1].first_report_time >= previous_timestamp): 
            # condition (c) satisfied
            logging.debug("c")
            result.append(cracker.ip_address)
        else:
            logging.debug("checking (d)...")
            satisfied = False
            for report in reports:
                #logging.debug("    "+str(report))
                if (not satisfied and 
                    report.latest_report_time>=previous_timestamp and
                    report.latest_report_time-cracker.first_time>=min_resilience):
                    logging.debug("    d1")
                    satisfied = True
                if (report.latest_report_time<=previous_timestamp and 
                    report.latest_report_time-cracker.first_time>=min_resilience):
                    logging.debug("    d2 failed")
                    satisfied = False
                    break
            if satisfied:
                logging.debug("Appending {}".format(cracker.ip_address))
                result.append(cracker.ip_address)
            else:
                logging.debug("    skipping")
        if len(result)>=max_crackers:
            break

    if len(result) < max_crackers:
        # Add results from legacy server
        extras = yield Legacy.find(where=["retrieved_time>?", previous_timestamp],
            orderby="retrieved_time DESC", limit=max_crackers-len(result))
        result = result + [extra.ip_address for extra in extras]

    logging.debug("Returning {} hosts".format(len(result)))
    returnValue(result)
Example #10
def purge_legacy_addresses():
    yield database.run_truncate_query('legacy')
    yield database.run_query('UPDATE info SET `value`=0 WHERE `key`="last_legacy_sync"')
    returnValue(0)
Example #11
def update_stats_cache():
    global _stats_busy
    global _cache
    if _stats_busy:
        logging.debug("Already updating statistics cache, exiting")
        returnValue(None)
    _stats_busy = True

    logging.debug("Updating statistics cache...")

    # Fill history table for yesterday, when necessary
    yield update_recent_history()
    yield update_country_history()

    now = time.time()
    stats = {}
    stats["last_updated"] = now
    stats["has_hostnames"] = config.stats_resolve_hostnames
    # Note paths configured in main.py by the Resource objects
    stats["static_base"] = "../static"
    stats["graph_base"] = "../static/graph"
    stats["server_version"] = __init__.version
    try:
        #rows = yield database.run_query("SELECT num_hosts,num_reports, num_clients, new_hosts FROM stats ORDER BY time DESC LIMIT 1")
        stats["num_hosts"] = yield models.Cracker.count()
        stats["num_reports"] = yield models.Report.count()

        rows = yield database.run_query("SELECT count(DISTINCT ip_address) FROM reports") 
        if len(rows)>0:
            stats["num_clients"] = rows[0][0]
        else:
            stats["num_clients"] = 0

        yesterday = now - 24*3600
        stats["daily_reports"] = yield models.Report.count(where=["first_report_time>?", yesterday])
        stats["daily_new_hosts"] = yield models.Cracker.count(where=["first_time>?", yesterday])

        recent_hosts = yield models.Cracker.find(orderby="latest_time DESC", limit=10)
        yield threads.deferToThread(fixup_crackers, recent_hosts)
        stats["recent_hosts"] = recent_hosts

        most_reported_hosts = yield models.Cracker.find(orderby="total_reports DESC", limit=10)
        yield threads.deferToThread(fixup_crackers, most_reported_hosts)
        stats["most_reported_hosts"] = most_reported_hosts

        logging.info("Stats: {} reports for {} hosts from {} reporters".format(
            stats["num_reports"], stats["num_hosts"], stats["num_clients"]))

        if stats["num_reports"] > 0:
            yield Registry.DBPOOL.runInteraction(make_daily_graph)
            yield Registry.DBPOOL.runInteraction(make_monthly_graph)
            yield Registry.DBPOOL.runInteraction(make_contrib_graph)
            yield Registry.DBPOOL.runInteraction(make_history_graph)
            yield Registry.DBPOOL.runInteraction(make_country_bargraph)

        if _cache is None:
            _cache = {}
        _cache["stats"] = stats
        _cache["time"] = time.time()
        logging.debug("Finished updating statistics cache...")
    except Exception, e:
        log.err(_why="Error updating statistics: {}".format(e))
        logging.warning("Error updating statistics: {}".format(e))

    # Clear the flag so the next call can refresh the cache.
    _stats_busy = False
Example #12
def get_estimate():
  global zips

  house_types = ['S', 'F', 'T', 'O', 'D']
  build_types = ['Y', 'N']
  est_types = ['L', 'F']
  type_offset = 3
  build_offset = 8
  est_offset = 10

  try:
    if request.method == 'GET':
      if request.args is not None:
        home_args = dict(request.args.to_dict().items())

        # Run db query
        result_db = database.run_query(input_args=home_args)

        zipcode = home_args['zip']
        type_   = home_args['type']
        newbuild = home_args['newbuild']
        estatetype = home_args['esttype']

        day = 365.0*6.5
        #zips = pd.read_pickle("data/ukpostcodes.pkl")
        #zips.drop('id',axis=1,inplace=True)
        try:
            zipdf = zips.loc[zips['postcode'] == str(zipcode).upper()]
            lat = zipdf.iloc[0]['latitude']
            long_ = zipdf.iloc[0]['longitude']
        except (KeyError, IndexError):
            return jsonify(result=0)
        # One hot encoding for type newbuild and estatetype
        # List indices as follows
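        #   [0] latitude, [1] longitude, [2] day
        #   [3..7]  one-hot house type   (S, F, T, O, D) -> type_offset = 3
        #   [8..9]  one-hot new build    (Y, N)          -> build_offset = 8
        #   [10..11] one-hot estate type (L, F)          -> est_offset = 10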
        inputs = np.zeros(12)
        inputs[0] = lat
        inputs[1] = long_
        inputs[2] = day
        type_idx = house_types.index(type_)
        build_idx = build_types.index(newbuild)
        est_idx = est_types.index(estatetype)

        inputs[type_offset+type_idx] = 1.0
        inputs[build_offset+build_idx] = 1.0
        inputs[est_offset+est_idx] = 1.0

        # Run ML part
        tmp_price = run_mlp.run_once(web_input=inputs)
      else:
        print "Args not found"
        return jsonify(result=0)
  except Exception:
    # Bail out with the error payload used above instead of falling through to the
    # packing code below with undefined names.
    return jsonify(result=0)


  # Pack up ML results
  result_ml = {}
  result_ml['latitude'] = lat
  result_ml['longitude'] = long_
  #tmp_price = run_mlp.run_once(web_input=inputs)
  result_ml['price'] = int(tmp_price) - int(tmp_price)%100

  # Pack up DB results
  # Get data from result tuple
  #myio, myo, mtio, mto, vyio, vyo = result_db[0], result_db[1], result_db[2], result_db[3], result_db[4], result_db[5]
  comps = result_db[6]
  comps_ll = []

  hist_data = result_db[7]

  try:
    for address in comps:
      zipdf = zips.loc[zips['postcode'] == str(address[2]).strip()]
      lat = zipdf.iloc[0]['latitude']
      long_ = zipdf.iloc[0]['longitude']
      tmplist = list(address)
      tmplist.append(lat)
      tmplist.append(long_)
      comps_ll.append(tuple(tmplist))
  except Exception:
    # Skip comparables whose postcode lookup fails; comps_ll keeps whatever succeeded.
    pass

  result_full = (result_db[0], result_db[1], result_db[2], result_db[3], result_db[4], result_db[5], comps_ll, hist_data, result_ml)

  return jsonify(result=result_full)
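For illustration, a request against this view might look like the following; the /estimate route and host are assumptions, while the zip, type, newbuild, and esttype parameters come from the code above:

import requests

resp = requests.get("http://localhost:5000/estimate",
                    params={"zip": "SW1A 1AA", "type": "F",
                            "newbuild": "N", "esttype": "L"})
print(resp.json())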