def get_speed_by_day(all_rows, day):
    """Compute a movement-speed time series for one day at half-hour steps.

    A fixed window of ``delta_t`` minutes is centred on each half-hourly
    timestamp of *day*; the rows falling inside the window are merged and
    scored with ``entropy()``. Empty windows score 0.

    Args:
        all_rows: iterable of ``(start_time_str, location)`` tuples; the
            first element must be lexicographically comparable to the
            strings produced by ``date2str()``.
        day: date prefix string (e.g. ``'20131201'``) used to build the
            half-hourly timestamp grid.

    Returns:
        list of ``{'time': str, 'speed': number}`` dicts, one per timestamp.
    """
    # Half-hourly sample points spanning the day (first one at 00:15:00).
    timestamps = pd.date_range(start=day + '001500', end=day + '235959', freq='30Min')
    cols = ['start_time', 'location']
    speeds = []
    if not all_rows:
        return speeds
    delta_t = 60  # window width in minutes
    half_window = datetime.timedelta(minutes=delta_t / 2)  # hoisted: loop-invariant
    for ts in timestamps:
        # Fix: Timestamp.to_datetime() was removed from pandas; the
        # supported conversion is to_pydatetime().
        center = ts.to_pydatetime()
        start_time = center - half_window
        end_time = center + half_window
        # Hoist the window bounds out of the comprehension: date2str() was
        # previously re-evaluated for every row.
        lo = date2str(start_time)
        hi = date2str(end_time)
        rows = [x for x in all_rows if lo <= x[0] <= hi]
        if not rows:
            speeds.append({'time': date2str(ts), 'speed': 0})
            continue
        rows = merge_locations_by_date([dict(zip(cols, row)) for row in rows])
        get_delta_by_day(rows)  # annotates rows in place with time deltas
        speed = entropy(rows, delta_t, [start_time, end_time])
        speeds.append({'time': date2str(ts), 'speed': speed})
    return speeds
def get_speed_by_day_at_change_point(all_rows, day):
    """Compute movement-speed samples at each location change point.

    Unlike ``get_speed_by_day``, sample points are the start/end times of
    the merged location segments rather than a fixed half-hour grid. Each
    point gets a ``delta_t``-minute window centred on it, scored with
    ``entropy()``; empty windows score 0.

    Args:
        all_rows: iterable of ``(start_time_str, location)`` tuples.
        day: unused; kept for signature compatibility with
            ``get_speed_by_day`` callers.

    Returns:
        list of ``{'time': str, 'speed': number}`` dicts, one per point.
    """
    cols = ['start_time', 'location']
    speeds = []
    # Fix: guard first. The original merged all_rows and collected change
    # points BEFORE checking for empty input, doing wasted work.
    if not all_rows:
        return speeds
    results = merge_locations_by_date([dict(zip(cols, row)) for row in all_rows])
    # Every segment boundary (start or end) is a candidate sample point.
    points = set()
    for location in results:
        points.add(location['start_time'])
        points.add(location['end_time'])
    points = [str2date(x) for x in sorted(points)]
    delta_t = 60  # window width in minutes
    half_window = datetime.timedelta(minutes=delta_t / 2)  # hoisted: loop-invariant
    for point in points:
        start_time = point - half_window
        end_time = point + half_window
        # Hoisted window bounds: date2str() was re-evaluated per row before.
        lo = date2str(start_time)
        hi = date2str(end_time)
        rows = [x for x in all_rows if lo <= x[0] <= hi]
        if not rows:
            speeds.append({'time': date2str(point), 'speed': 0})
            continue
        rows = merge_locations_by_date([dict(zip(cols, row)) for row in rows])
        get_delta_by_day(rows)  # annotates rows in place with time deltas
        speed = entropy(rows, delta_t, [start_time, end_time])
        speeds.append({'time': date2str(point), 'speed': speed})
    return speeds
def location_by_uid_day_stop(uid, day):
    """Return the stop locations for user *uid* on a December-2013 day.

    Args:
        uid: user id, bound into the parameterized query.
        day: two-digit day-of-month string; prefixed with '201312' to form
            the full log_date.

    Returns:
        An HTTP response whose body is the serialized stop list.
    """
    day = '201312' + day  # callers pass only the day-of-month
    cols = ['start_time', 'location']
    db.ping(True)  # reconnect if the MySQL connection has timed out
    cursor = db.cursor()
    try:
        # Parameterized query — uid/day are bound, not interpolated.
        prepare_sql = """select start_time, location from location_logs_with_date where uid = %s and log_date = %s order by start_time"""
        cursor.execute(prepare_sql, (uid, day))
        rows = cursor.fetchall()
    finally:
        # Fix: the original never closed the cursor (resource leak).
        cursor.close()
    results = merge_locations_by_date([dict(zip(cols, row)) for row in rows])
    get_delta_by_day(results)  # annotates results in place with time deltas
    results = get_stop_by_day(results)
    return make_response(dumps(results))
def entropy_by_uid_day(uid, day):
    """Return per-location transient entropy for user *uid* on a December-2013 day.

    Fetches the user's location log, merges it into segments, groups the
    segments into moves, and scores each location within its move with
    ``transient_entropy()``.

    Args:
        uid: user id, bound into the parameterized query.
        day: two-digit day-of-month string; prefixed with '201312' to form
            the full log_date.

    Returns:
        An HTTP response whose body is a serialized list of
        ``{'entropy': ..., 'time': ...}`` dicts.
    """
    day = '201312' + day  # callers pass only the day-of-month
    cols = ['start_time', 'location']
    db.ping(True)  # reconnect if the MySQL connection has timed out
    cursor = db.cursor()
    try:
        # Parameterized query — uid/day are bound, not interpolated.
        prepare_sql = """select start_time, location from location_logs_with_date where uid = %s and log_date = %s order by start_time"""
        cursor.execute(prepare_sql, (uid, day))
        rows = cursor.fetchall()
    finally:
        # Fix: the original never closed the cursor (resource leak).
        cursor.close()
    results = merge_locations_by_date([dict(zip(cols, row)) for row in rows])
    get_delta_by_day(results)  # annotates results in place with time deltas
    moves = get_moves_by_day(results)
    # Flatten moves -> per-location entropy records (was a nested append loop).
    result = [
        {
            'entropy': transient_entropy(location, move),
            'time': location['start_time'],
        }
        for move in moves
        for location in move
    ]
    return make_response(dumps(result))
if len(cur_move) > 1: moves.append(cur_move) cur_move = [] last_endtime = None else: start_time = str2date(record['start_time']) end_time = str2date(record['end_time']) if last_endtime is None: cur_move.append(record) last_endtime = end_time else: delta = (start_time - last_endtime).total_seconds() / 60 if delta < H2: cur_move.append(record) last_endtime = end_time else: if len(cur_move) > 1: moves.append(cur_move) cur_move = [] last_endtime = None return moves if __name__ == '__main__': import json from get_stop import get_delta_by_day import pprint data = json.load(open('31430787_01.json')) get_delta_by_day(data) pprint.pprint(get_moves(data))