def run():
    """Write one CSV row (uid, JSON data) per user, where data maps each
    semantic tag to a 48-slot weighted probability vector built from the
    user's location logs.

    The output path is taken from sys.argv[1].
    """
    # 'with' guarantees the CSV is flushed and closed even if a fetch fails.
    with open(sys.argv[1], 'w') as output:
        writer = DictWriter(output, fieldnames=['uid', 'data'])
        writer.writeheader()
        db = DB(dbconfig)

        for uid in fetch_users(db):
            data = fetch_user_location_logs(uid, db)
            locations = merge_locations(data)
            matrix = generate_matrix(locations)
            semantic_data = fetch_semantic_data(list(matrix.keys()))
            # location -> {tag: count}, keeping at most 5 cleaned tags each
            semantic_dict = {
                row['location']: clean_tags(row['tags'], 5)
                for row in semantic_data
            }
            tag_matrix = {}
            for location, proba in matrix.items():
                tag_dict = semantic_dict[location]
                tag_weight = sum(tag_dict.values())
                if tag_weight == 0:
                    continue  # no weighted tags for this location
                for tag, cnt in tag_dict.items():
                    slots = tag_matrix.setdefault(tag, [0] * 48)
                    for i in range(48):
                        # +0.001 smoothing keeps entries strictly positive
                        slots[i] += (proba[i] * cnt + 0.001) / (tag_weight + 0.001)
            writer.writerow({
                'uid': uid,
                'data': json.dumps(tag_matrix)
            })
# Example 2
def run():
    """Emit a CSV of (uid, JSON tag matrix) rows to the path in sys.argv[1].

    For each user: merge location logs into a 48-slot probability matrix,
    then spread each location's probabilities over its semantic tags,
    weighted by tag counts.
    """
    # Context manager closes the output even on an exception mid-export.
    with open(sys.argv[1], 'w') as output:
        writer = DictWriter(output, fieldnames=['uid', 'data'])
        writer.writeheader()
        db = DB(dbconfig)

        for uid in fetch_users(db):
            data = fetch_user_location_logs(uid, db)
            locations = merge_locations(data)
            matrix = generate_matrix(locations)
            semantic_data = fetch_semantic_data(list(matrix.keys()))
            # Map each location to its (at most 5) cleaned tag counts.
            semantic_dict = {
                row['location']: clean_tags(row['tags'], 5)
                for row in semantic_data
            }
            tag_matrix = {}
            for location, proba in matrix.items():
                tag_dict = semantic_dict[location]
                tag_weight = sum(tag_dict.values())
                if tag_weight == 0:
                    continue  # skip locations whose tags carry no weight
                for tag, cnt in tag_dict.items():
                    slots = tag_matrix.setdefault(tag, [0] * 48)
                    for i in range(48):
                        # 0.001 additive smoothing avoids exact zeros
                        slots[i] += (proba[i] * cnt +
                                     0.001) / (tag_weight + 0.001)
            writer.writerow({'uid': uid, 'data': json.dumps(tag_matrix)})
# Example 3
def run(outputfile):
    """Write (uid, JSON app-activity matrix) CSV rows to *outputfile*.

    Bug fix: the original never closed the file, so buffered rows could be
    lost; 'with' guarantees flush/close.
    """
    cols = ['uid', 'data']
    with open(outputfile, 'w') as f:
        writer = DictWriter(f, cols)
        writer.writeheader()
        db = DB(dbconfig)

        for uid in fetch_users(db):
            logs = fetch_uid_app_data(uid, db)
            writer.writerow({'uid': uid, 'data': json.dumps(active_matrix(logs))})
# Example 4
def run():
    """Write (uid, JSON business-stop areas) CSV rows to sys.argv[1]."""
    # 'with' ensures the file is closed even if a DB call raises.
    with open(sys.argv[1], 'w') as output:
        writer = DictWriter(output, fieldnames=['uid', 'data'])
        writer.writeheader()
        db = DB(dbconfig)

        for uid in fetch_users(db):
            locations = area_by_uid_stop(uid, db, get_business_logs)
            writer.writerow({'uid': uid, 'data': json.dumps(locations)})
# Example 5
def run():
    """Write (uid, JSON merged locations) CSV rows to sys.argv[1]."""
    # Context manager replaces the manual open/close pair so the file is
    # closed even when an exception interrupts the export.
    with open(sys.argv[1], 'w') as output:
        writer = DictWriter(output, fieldnames=['uid', 'data'])
        writer.writeheader()
        db = DB(dbconfig)

        for uid in fetch_users(db):
            data = fetch_user_location_logs(uid, db)
            locations = merge_locations(data)
            writer.writerow({'uid': uid, 'data': json.dumps(locations)})
def run():
    """Write (uid, JSON location matrix) CSV rows to sys.argv[1]."""
    # 'with' guarantees the output file is flushed and closed on any exit.
    with open(sys.argv[1], "w") as output:
        writer = DictWriter(output, fieldnames=["uid", "data"])
        writer.writeheader()
        db = DB(dbconfig)

        for uid in fetch_users(db):
            logs = fetch_user_location_logs(uid, db)
            locations = merge_locations(logs)
            matrix = generate_matrix(locations)
            writer.writerow({"uid": uid, "data": json.dumps(matrix)})
def run(outputfile):
    """Write (uid, JSON app-activity matrix) CSV rows to *outputfile*.

    Bug fix: the original left the file handle open; 'with' guarantees the
    buffered CSV rows are flushed and the handle is released.
    """
    cols = ['uid', 'data']
    with open(outputfile, 'w') as f:
        writer = DictWriter(f, cols)
        writer.writeheader()
        db = DB(dbconfig)

        for uid in fetch_users(db):
            logs = fetch_uid_app_data(uid, db)
            writer.writerow({
                'uid': uid,
                'data': json.dumps(active_matrix(logs))
            })
# Example 8
def run():
    """Write (uid, JSON business-stop areas) CSV rows to sys.argv[1]."""
    # Use a context manager so the file closes even if fetching fails.
    with open(sys.argv[1], 'w') as output:
        writer = DictWriter(output, fieldnames=['uid', 'data'])
        writer.writeheader()
        db = DB(dbconfig)

        for uid in fetch_users(db):
            locations = area_by_uid_stop(uid, db, get_business_logs)
            writer.writerow({
                'uid': uid,
                'data': json.dumps(locations)
            })
def run():
    """Write one CSV line per (user, day) of December 2013 speeds.

    Each line is ``uid,day,speed1,speed2,...``; the output path comes
    from sys.argv[1].
    """
    db = DB(dbconfig)
    # 'with' guarantees the file is closed even on a mid-export failure.
    with open(sys.argv[1], 'w') as output:
        for uid in fetch_users(db):
            data = fetch_user_location_logs(uid, db)
            for day_num in range(1, 32):
                # Distinct name avoids shadowing the loop counter, which the
                # original rebound to the formatted string each iteration.
                day = '201312%02d' % day_num  # e.g. '20131205'
                rows_by_day = [x for x in data if x['day'] == day]
                if not rows_by_day:
                    continue  # user has no logs on this day
                points = [[x['start_time'], x['location']] for x in rows_by_day]
                speeds = [x['speed'] for x in get_speed_by_day(points, day)]
                output.write(','.join(map(str, [uid, day] + speeds)) + '\n')
def run(outputfile):
    """Write (uid, JSON move/stop status matrix) CSV rows to *outputfile*.

    Bug fix: the original never closed the file; 'with' guarantees the
    buffered rows are flushed to disk.
    """
    cols = ['uid', 'data']
    with open(outputfile, 'w') as f:
        writer = DictWriter(f, cols)
        writer.writeheader()
        db = DB(dbconfig)

        for uid in fetch_users(db):
            logs = fetch_user_location_logs(uid, db)
            results = merge_locations(logs)
            get_delta(results)  # mutates results in place with time deltas — TODO confirm
            moves = get_moves(results)
            stops = get_stop(results)
            user_status = generate_status_matrix(moves, stops)
            writer.writerow({'uid': uid, 'data': json.dumps(user_status)})
def run():
    """Write ``uid,day,speed1,...`` lines for each December 2013 day.

    Output path is taken from sys.argv[1].
    """
    db = DB(dbconfig)
    # Context manager guarantees close/flush even if a fetch raises.
    with open(sys.argv[1], 'w') as output:
        for uid in fetch_users(db):
            data = fetch_user_location_logs(uid, db)
            for day_num in range(1, 32):
                # Separate name instead of rebinding the loop counter.
                day = '201312%02d' % day_num
                rows_by_day = [x for x in data if x['day'] == day]
                if not rows_by_day:
                    continue  # no logs for this user on this day
                points = [[x['start_time'], x['location']]
                          for x in rows_by_day]
                speeds = [x['speed'] for x in get_speed_by_day(points, day)]
                output.write(','.join(map(str, [uid, day] + speeds)) + '\n')
def run(outputfile):
    """Write (uid, JSON move/stop status matrix) CSV rows to *outputfile*.

    Bug fix: the file handle was never closed; 'with' ensures buffered
    rows reach disk and the descriptor is released.
    """
    cols = ['uid', 'data']
    with open(outputfile, 'w') as f:
        writer = DictWriter(f, cols)
        writer.writeheader()
        db = DB(dbconfig)

        for uid in fetch_users(db):
            logs = fetch_user_location_logs(uid, db)
            results = merge_locations(logs)
            get_delta(results)  # presumably annotates results in place — verify
            moves = get_moves(results)
            stops = get_stop(results)
            user_status = generate_status_matrix(moves, stops)
            writer.writerow({
                'uid': uid,
                'data': json.dumps(user_status)
            })