Example #3
def user_status(uid):
    # Fetch the raw location log for this user and collapse consecutive
    # records at the same place into single entries.
    logs = fetch_uid_location_data(uid)
    results = merge_locations(logs)
    # get_delta annotates the merged records in place; get_moves/get_stop then
    # split them into movement and stop segments.
    get_delta(results)
    moves = get_moves(results)
    stops = get_stop(results)
    # Serialise the derived status and wrap it in an HTTP response.
    return make_response(dumps(get_status(moves, stops)))
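make_response and dumps in the example above point to a Flask-style view that returns the status as a JSON string. A minimal sketch of wiring it up, assuming Flask (the URL rule below is illustrative and not taken from the source project):

from flask import Flask

app = Flask(__name__)
# Hypothetical route; user_status is the view function shown above.
app.add_url_rule('/status/<uid>', view_func=user_status)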
Example #4
def run():
    # Requires: import sys, import json, from csv import DictWriter.
    output = open(sys.argv[1], 'w')
    writer = DictWriter(output, fieldnames=['uid', 'data'])
    writer.writeheader()
    db = DB(dbconfig)

    for uid in fetch_users(db):
        # Merge the raw log into distinct locations and build a 48-bucket
        # vector per location (presumably half-hour slots over a day).
        data = fetch_user_location_logs(uid, db)
        locations = merge_locations(data)
        matrix = generate_matrix(locations)
        # Map each location to cleaned tag counts; the 5 presumably caps how
        # many tags are kept per place.
        semantic_data = fetch_semantic_data(list(matrix.keys()))
        semantic_dict = {}
        for row in semantic_data:
            semantic_dict[row['location']] = clean_tags(row['tags'], 5)
        # Fold the per-location vectors into per-tag profiles, weighting each
        # tag by its share of the location's tag counts; the 0.001 terms act
        # as additive smoothing.
        tag_matrix = {}
        for location, proba in matrix.items():
            tag_dict = semantic_dict[location]
            tag_weight = sum(tag_dict.values())
            if tag_weight == 0:
                continue
            for tag, cnt in tag_dict.items():
                tag_matrix.setdefault(tag, [0] * 48)
                for i in range(48):
                    tag_matrix[tag][i] += (proba[i] * cnt + 0.001) / (tag_weight + 0.001)
        writer.writerow({'uid': uid, 'data': json.dumps(tag_matrix)})
    output.close()
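To make the tag weighting concrete, here is a small self-contained sketch of the inner accumulation loop with invented numbers (the probability vector is shortened to 4 slots instead of 48; the tags and counts are made up for illustration):

# Hypothetical data: one location, a 4-slot probability vector, two tags.
proba = [0.5, 0.1, 0.0, 0.9]
tag_dict = {'food': 3, 'coffee': 1}

tag_matrix = {}
tag_weight = sum(tag_dict.values())  # 4
for tag, cnt in tag_dict.items():
    tag_matrix.setdefault(tag, [0] * len(proba))
    for i in range(len(proba)):
        # e.g. slot 0 for 'food': (0.5 * 3 + 0.001) / (4 + 0.001) ~= 0.375
        tag_matrix[tag][i] += (proba[i] * cnt + 0.001) / (tag_weight + 0.001)

print(tag_matrix)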
Example #6
def area_by_uid_stop(uid, area_func=fetch_uid_business_data):
    results = area_func(uid)
    # Flag and drop erroneous points before merging the remaining records.
    invalids = check_error_points(raw_merge_locations_by_date(results))
    results = [x for x in results if (x['location'], x['start_time']) not in invalids]
    locations = merge_locations(results)
    get_delta(locations)
    # Keep only stop segments; the 30 is presumably a minimum stop duration.
    locations = get_stop(locations, 30)
    return locations
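Because the data source is injected through the area_func parameter, the function can be exercised without a live backend. A minimal sketch, assuming the records only need the 'location' and 'start_time' keys that the example itself filters on (the uid, helper name and values below are invented):

def fake_area_func(uid):
    # Hypothetical records shaped like the ones the example filters on.
    return [
        {'location': 'cafe_01', 'start_time': '2015-06-01 09:00:00'},
        {'location': 'cafe_01', 'start_time': '2015-06-01 09:40:00'},
        {'location': 'office_07', 'start_time': '2015-06-01 11:00:00'},
    ]

stops = area_by_uid_stop('user-123', area_func=fake_area_func)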
Example #7
def run():
    output = open(sys.argv[1], 'w')
    writer = DictWriter(output, fieldnames=['uid', 'data'])
    writer.writeheader()
    db = DB(dbconfig)

    for uid in fetch_users(db):
        data = fetch_user_location_logs(uid, db)
        locations = merge_locations(data)
        # One CSV row per user; the merged locations are stored as a JSON string.
        writer.writerow({'uid': uid, 'data': json.dumps(locations)})
    output.close()
def run():
    output = open(sys.argv[1], "w")
    writer = DictWriter(output, fieldnames=["uid", "data"])
    writer.writeheader()
    db = DB(dbconfig)

    for uid in fetch_users(db):
        logs = fetch_user_location_logs(uid, db)
        locations = merge_locations(logs)
        # Same pipeline as above, but the per-location matrix from
        # generate_matrix is written out instead of the merged locations.
        matrix = generate_matrix(locations)
        writer.writerow({"uid": uid, "data": json.dumps(matrix)})
    output.close()
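All of the run() variants write the same two-column CSV layout: a uid column and a data column holding a JSON-encoded payload. A minimal sketch of reading such a file back with the standard library (the filename is illustrative):

import json
from csv import DictReader

with open('output.csv') as f:  # hypothetical file produced by run()
    for row in DictReader(f):
        payload = json.loads(row['data'])
        print(row['uid'], type(payload))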
def run(outputfile):
    cols = ['uid', 'data']
    f = open(outputfile, 'w')
    writer = DictWriter(f, cols)
    writer.writeheader()
    db = DB(dbconfig)

    for uid in fetch_users(db):
        logs = fetch_user_location_logs(uid, db)
        results = merge_locations(logs)
        get_delta(results)
        # Split the annotated records into movement and stop segments, then
        # collapse them into a per-user status matrix.
        moves = get_moves(results)
        stops = get_stop(results)
        user_status = generate_status_matrix(moves, stops)
        writer.writerow({'uid': uid, 'data': json.dumps(user_status)})
    # Close the output file once all users have been written.
    f.close()
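Unlike the earlier variants, this run() takes the output path as an argument instead of reading sys.argv inside the function, so a thin command-line wrapper is enough. A minimal sketch (the entry-point layout is assumed, not from the source):

if __name__ == '__main__':
    import sys
    run(sys.argv[1])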
Example #13
def _location_by_uid_stop(uid):
    results = fetch_uid_location_data(uid)
    locations = merge_locations(results)
    get_delta(locations)
    # Same stop extraction as in area_by_uid_stop, but on the raw uid location data.
    locations = get_stop(locations, 30)
    return locations
Example #14
def location_by_uid(uid):
    # Return the user's merged locations as a JSON HTTP response.
    results = fetch_uid_location_data(uid)
    return make_response(dumps(merge_locations(results)))