def main():
    """Retrieve the freebox statistics page and persist it.

    Parses command-line overrides for the database path and the stats
    URL, creates the schema if needed, then stores stats, logs and
    netlinks into the sqlite3 database.
    """
    parser = argparse.ArgumentParser(description="Retrieve freebox statistics")
    parser.add_argument(
        "-d",
        "--database",
        help="Sqlite3 database where to store the statistics (default: %s)" %
        config.database)
    parser.add_argument(
        "-u",
        "--url",
        help="URL where the statistics are retrieved (default: %s)" %
        config.statsurl)

    args = parser.parse_args()

    # Command-line options override the defaults from the config module.
    if args.database is not None:
        config.database = args.database
    if args.url is not None:
        config.statsurl = args.url

    cnx = db.new_connection()
    try:
        cur = cnx.cursor()
        db.create_tables(cur)

        p = StatsPage()
        store_stats(p, cur)
        store_logs(p, cur)
        store_netlinks(p, cur)

        # Let sqlite refresh its query-planner statistics before exiting.
        cur.execute("PRAGMA optimize")
        cnx.commit()
    finally:
        # The connection was previously leaked; always release it.
        cnx.close()
示例#2
0
def main():
    """Import every CSV file found in the given data directory.

    Usage: <prog> datadir.  Each file is imported and committed
    individually, so a failure mid-run loses at most one file's worth
    of work.
    """
    if len(sys.argv) != 2:
        print("usage: %s datadir" % sys.argv[0], file=sys.stderr)
        return

    datadir = sys.argv[1]

    cnx = db.new_connection()
    try:
        cur = cnx.cursor()
        db.create_tables(cur)

        # Sorted order makes the import deterministic (chronological when
        # the file names embed a date).
        for f in sorted(glob.glob(datadir + "/*.csv")):
            import_data(cur, f)
            cnx.commit()  # commit per file, not per run

        # Refresh the optimizer statistics after the bulk load.
        cur.execute("ANALYZE")
        cnx.commit()
    finally:
        # The connection was previously leaked; always release it.
        cnx.close()
示例#3
0
def main():
    """Plot the per-country statistics stored in the database.

    With -l/--list, only prints the available countries.  Otherwise
    validates the requested countries and produces the raw-data,
    regression and metrics-evolution figures.
    """
    parser = argparse.ArgumentParser(description="Plot the statistics")
    parser.add_argument("-f", "--figdir", help="Directory where to store the output figures (default: %s)" % config.figdir)
    parser.add_argument("-g", "--gnuplotdir", help="Directory where the gnuplot scripts are located (default: %s)" % config.gnuplotdir)
    parser.add_argument("-t", "--tmpdir", help="Directory where to store the temporary data files (default to system temporary directory)")
    parser.add_argument("-l", "--list", action='store_true', help="List available countries and exit")
    parser.add_argument("-c", "--country", action='append', help="Countries to plot")
    parser.add_argument("-N", "--ndaysavg", default=5, type=int, help="Average the number of confirmed case over the N previous days")

    args = parser.parse_args()

    # Command-line options override the defaults from the config module.
    if args.figdir is not None:
        config.figdir = args.figdir
    if args.gnuplotdir is not None:
        # BUG FIX: this line read "congnuplot.gnuplotdir", a typo that
        # raised NameError whenever -g/--gnuplotdir was given.
        config.gnuplotdir = args.gnuplotdir
    if args.tmpdir is not None:
        config.tmpdir = args.tmpdir

    if args.country is None:
        countries = ["France", "Chine"]  # default selection
    else:
        countries = args.country

    ndaysavg = args.ndaysavg

    cnx = db.new_connection()
    try:
        cur = cnx.cursor()

        if args.list:
            list_countries(cur)
        else:
            # Refuse to plot anything if a requested country is unknown.
            if check_countries(cur, countries):
                plot_raw_data(cur, countries, ndaysavg)
                plot_regression(cnx, countries, ndaysavg)
                plot_metrics_evolution(cnx, countries, ndaysavg)

        cur.execute("PRAGMA optimize")
    finally:
        # The connection was previously leaked; always release it.
        cnx.close()
示例#4
0
from db import new_connection
from mysql.connector import ProgrammingError

# DDL for the two tables this script maintains; both statements are
# idempotent thanks to IF NOT EXISTS.
table_contacts = """
    CREATE TABLE IF NOT EXISTS contacts(
        name VARCHAR(50),
        tel VARCHAR(40)
    )
"""
table_emails = """
    CREATE TABLE IF NOT EXISTS emails(
        id INT AUTO_INCREMENT PRIMARY KEY,
        dono VARCHAR(50)
    )
"""

# Create the tables, reporting (but not re-raising) any SQL syntax /
# schema error from the server.
with new_connection() as connection:
    try:
        cursor = connection.cursor()
        for statement in (table_contacts, table_emails):
            cursor.execute(statement)
    except ProgrammingError as e:
        print(f'Error: {e.msg}')
示例#5
0
      col="#606000")
lines(x=d$time,
      y=d$ip24,
      type="h",
      col="#006000")
legend("topright",
       legend=c('in transit (<24h)', 
                'in queue (<24h)',
                'in transit (24-48h)',
                'in queue (24-48h)',
                'in transit (>48h)',
                'in queue (>48h)'),
       ncol=3,
       col=c("#006000", "#00E000", "#606000", "#D0D000", "#800000", "#FF0000"),
       lty=1)
'''.format(**locals())

# Script entry point: load the current queue state and recent history
# from the database, then render the dashboard graphs.
if __name__ == '__main__':
    import queue, db
    # Load everything inside one connection and close it as soon as the
    # data is in memory -- graph rendering below needs no database access.
    with closing(db.new_connection(initdb=False)) as conn:
        last_update, queue_contracts, accepted_contracts = queue.load(conn)
        # Contracts completed within the last day and the last week.
        done_1day = db.Contract.load_completed_after(conn = conn, cutoff = last_update - datetime.timedelta(days=1))
        done_7day = db.Contract.load_completed_after(conn = conn, cutoff = last_update - datetime.timedelta(days=7))
        history = db.load_queue_history(conn = conn, first_update = last_update - datetime.timedelta(days=7), last_update = last_update)

    # Queue graphs at two bucket sizes (seconds per bucket).
    make_queue_graph(last_update, queue_contracts, accepted_contracts, scale=3600, filename="queue_3600.png")
    make_queue_graph(last_update, queue_contracts, accepted_contracts, scale=900, filename="queue_900.png")
    # Delivery-time histograms over the last day and the last week.
    make_delivery_graph(last_update, done_1day, scale=3600, filename="delivery_1day.png", title="Red Frog delivery times - last day")
    make_delivery_graph(last_update, done_7day, scale=3600, filename="delivery_7day.png", title="Red Frog delivery times - last week")
    make_history_graph(history, filename="queue_history_7day.png", title="Red Frog queue size - last week")
示例#6
0
def run_one_cycle():
    """Run a single scrape/update cycle.

    Scrapes the NCF and tracker pages, reconciles the contracts they
    list against the database, marks contracts that disappeared as
    GONE/DONE, records queue history, and regenerates the graphs.

    Returns a datetime.timedelta telling the caller how long to wait
    before the next cycle (a short retry interval on failure).

    Note: the original code used bare ``except:`` clauses, which also
    swallowed KeyboardInterrupt/SystemExit and made the daemon hard to
    stop; they are narrowed to ``except Exception:`` here.
    """
    log("Starting an update cycle")

    conn = db.new_connection()
    try:
        previous_update = db.get_last_update(conn)

        try:
            log("Scraping NCF page")
            ncf_results = scrape_ncf.scrape_ncf(write_copy = 'log/ncf-latest.html',
                                                write_error = datetime.datetime.now().strftime('log/ncf-missing-%Y%m%d%H%M%S.html'))
            log("NCF: Server time: {t}", t = ncf_results['server_time'])
            log("NCF: Last update: {t}", t = ncf_results['last_update'])
            log("NCF: Next update: {t}", t = ncf_results['next_update'])
            log("NCF: Outstanding: {n}", n = ncf_results['outstanding'])
            log("NCF: In progress: {n}", n = ncf_results['inprogress'])
            log("NCF: Refresh:     {r}", r = ncf_results['refresh'])
            if ncf_results['missing'] > 0:
                log("NCF: *MISSING*:   {n}", n = ncf_results['missing'])
        except Exception:
            log("Caught an exception processing the NCF page")
            traceback.print_exc()
            return datetime.timedelta(minutes=5)  # retry soon

        # Skip the cycle entirely if the page has not changed since the
        # last update we already stored.
        last_update = ncf_results['last_update']
        if previous_update is not None and last_update <= previous_update:
            log("NCF update ({last_update}) is not newer than database's last update ({previous_update}); skipping this update",
                **locals())
            return ncf_results['refresh']

        try:
            log("Scraping tracker page")
            tracker_results = scrape_tracker.scrape_tracker(write_copy = 'log/tracker-latest.html',
                                                            write_error = datetime.datetime.now().strftime('log/tracker-missing-%Y%m%d%H%M%S.html'))
            log("Tracker: Server time: {t}", t = tracker_results['server_time'])
            log("Tracker: Last update: {t}", t = tracker_results['last_update'])
            log("Tracker: Next update: {t}", t = tracker_results['next_update'])
            log("Tracker: Outstanding: {n}", n = tracker_results['outstanding'])
            log("Tracker: In progress: {n}", n = tracker_results['inprogress'])
            log("Tracker: Refresh:     {r}", r = tracker_results['refresh'])
            if tracker_results['missing'] > 0:
                log("Tracker: *MISSING*:   {n}", n = tracker_results['missing'])
        except Exception:
            log("Caught an exception processing the tracker page")
            traceback.print_exc()
            return datetime.timedelta(minutes=5)  # retry soon

        # The two pages must describe the same snapshot; if not, one of
        # them refreshed mid-scrape -- retry shortly.
        if last_update != tracker_results['last_update']:
            log("NCF and Tracker pages had different last update times. Retrying.")
            return datetime.timedelta(minutes=1)

        seen_contracts = set()

        # Merge every scraped contract into the database, inserting new
        # ones and updating known ones.
        for contract in ncf_results['contracts'] + tracker_results['contracts']:
            try:
                cid = contract.contract_id
                seen_contracts.add(cid)

                db_contract = db.Contract.load(conn = conn, contract_id = cid)
                if not db_contract:
                    if contract.state == db.Contract.IN_PROGRESS and contract.created_min is None:
                        # Missed it in queue. It must have been created between the last update we processed and the time it was accepted
                        contract.created_min = previous_update
                        contract.created_max = contract.accepted

                    log("New:  {contract}", **locals())
                    contract.insert_row(conn = conn)
                else:
                    merged_contract = db.Contract.merge(db_contract, contract)
                    if db_contract.state != merged_contract.state:
                        log("Upd:  {merged_contract}", **locals())
                    merged_contract.update_row(conn = conn)

            except Exception:
                # Add context to the traceback, then let the outer
                # handler decide (it aborts the cycle).
                log("Exception processing contract: {contract}", **locals())
                raise

        # Anything we knew as queued but did not see this time has left
        # the queue without being accepted.
        log("Looking for contracts that disappeared from the queue")
        for contract in db.Contract.load_by_state(conn = conn, state = db.Contract.IN_QUEUE):
            if contract.contract_id not in seen_contracts:
                contract.state = db.Contract.GONE
                log("Gone: {contract}", **locals())
                contract.update_row(conn = conn)

        # Anything we knew as in progress but did not see this time has
        # been delivered.
        log("Looking for contracts that disappeared from the tracker")
        for contract in db.Contract.load_by_state(conn = conn, state = db.Contract.IN_PROGRESS):
            if contract.contract_id not in seen_contracts:
                contract.state = db.Contract.DONE
                log("Done: {contract}", **locals())
                contract.update_row(conn = conn)

        log("Updating queue history")
        active_contracts = db.Contract.load_where(conn = conn, clause = "state IN (:inqueue,:inprogress)", inqueue = db.Contract.IN_QUEUE, inprogress = db.Contract.IN_PROGRESS)
        history.update(conn = conn,
                       active_contracts = active_contracts,
                       update_time = last_update)

        db.add_update_info(conn,
                           update_time = last_update,
                           queue_count = len(ncf_results['contracts']),
                           in_progress_count = len(tracker_results['contracts']))
        db.set_last_update(conn, ncf_results['last_update'])
        conn.commit()

        # Graph generation is best-effort: a rendering failure must not
        # abort an otherwise-committed cycle.
        try:
            regenerate_graphs(conn)
        except Exception:
            log("Exception generating graphs (ignored)")
            traceback.print_exc()

    except Exception:
        log("Exception in update cycle")
        traceback.print_exc()
        return datetime.timedelta(minutes=5)  # retry soon

    finally:
        conn.close()

    # Wait for whichever page refreshes later.
    refresh = max(ncf_results['refresh'], tracker_results['refresh'])
    log("Update cycle is done, next refresh in {refresh}", **locals())
    return refresh