def handle(self, *args, **options):
    """Generate one gzipped TSV report per (device, probe) pair covering the
    last 21 days, excluding Northwestern-internal probes, and attach each
    file to a new PurpleRobotReport row.
    """
    hashes = REPORT_DEVICES  # PurpleRobotPayload.objects.order_by().values('user_id').distinct()

    start = datetime.datetime.now() - datetime.timedelta(days=21)

    labels = PurpleRobotReading.objects.exclude(probe__startswith='edu.northwestern').values('probe').distinct()

    for user_hash in hashes:
        for label in labels:
            slug_label = slugify(label['probe'])

            payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=label['probe'], logged__gte=start).order_by('logged')

            count = payloads.count()

            if count > 0:
                temp_file = tempfile.TemporaryFile()
                gzf = gzip.GzipFile(mode='wb', fileobj=temp_file)

                gzf.write('User ID\tTimestamp\tValue\n')

                index = 0

                # Page through the queryset 100 rows at a time to bound memory use.
                while index < count:
                    end = index + 100

                    if end > count:
                        end = count

                    for payload in payloads[index:end]:
                        reading_json = json.loads(payload.payload)

                        # BUG FIX: FEATURE_VALUE was concatenated raw, which raises
                        # TypeError whenever the probe reports a numeric value.
                        gzf.write(user_hash + '\t' + str(reading_json['TIMESTAMP']) + '\t' + str(reading_json['FEATURE_VALUE']) + '\n')

                    index += 100

                gzf.flush()
                gzf.close()

                temp_file.seek(0)

                # BUG FIX: was user_id=hash — the *builtin* hash function, not the
                # device identifier — so reports were stored under a function repr.
                report = PurpleRobotReport(generated=timezone.now(), mime_type='application/x-gzip', probe=slug_label, user_id=user_hash)
                report.save()

                report.report_file.save(user_hash + '-' + slug_label + '.txt.gz', File(temp_file))
                report.save()
def handle(self, *args, **options):
    """Export each device's significant-motion probe readings for a fixed
    one-day window (2015-11-10, US/Central) as a gzipped TSV report.
    """
    hashes = REPORT_DEVICES  # PurpleRobotPayload.objects.order_by().values('user_id').distinct()

    # start = datetime.datetime.now() - datetime.timedelta(days=120)
    start_ts = datetime.datetime(2015, 11, 10, 0, 0, 0, 0, tzinfo=pytz.timezone('US/Central'))
    end_ts = start_ts + datetime.timedelta(days=1)

    for user_hash in hashes:
        payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start_ts, logged__lt=end_ts).order_by('logged')

        count = payloads.count()

        if count > 0:
            temp_file = tempfile.TemporaryFile()
            gzf = gzip.GzipFile(mode='wb', fileobj=temp_file)

            gzf.write('User ID\tTimestamp\n')

            index = 0

            # Page through the queryset 100 rows at a time to bound memory use.
            while index < count:
                end = index + 100

                if end > count:
                    end = count

                for payload in payloads[index:end]:
                    reading_json = json.loads(payload.payload)

                    # BUG FIX: was hash (the builtin function) — rows carried a
                    # function repr instead of the device identifier.
                    gzf.write(user_hash + '\t' + str(reading_json['TIMESTAMP']) + '\n')

                index += 100

            gzf.flush()
            gzf.close()

            temp_file.seek(0)

            report = PurpleRobotReport(generated=timezone.now(), mime_type='application/x-gzip', probe=PROBE_NAME, user_id=user_hash)
            report.save()

            # BUG FIX: filename likewise used the builtin hash instead of user_hash.
            report.report_file.save(user_hash + '-significant-motion.txt.gz', File(temp_file))
            report.save()
def handle(self, *args, **options):
    """Export accelerometer probe readings for a fixed one-hour window
    (2015-07-03 05:00, US/Central) as a gzipped TSV report per device.

    Sensor timestamps (nanoseconds since boot) are normalized onto the CPU
    clock: whenever the sensor clock appears to restart (value decreases),
    the offset is re-derived from the matching CPU timestamp.
    """
    hashes = REPORT_DEVICES

    # start = datetime.datetime.now() - datetime.timedelta(days=120)
    start_ts = datetime.datetime(2015, 7, 3, 5, 0, 0, 0, tzinfo=pytz.timezone('US/Central'))
    end_ts = start_ts + datetime.timedelta(hours=1)

    # print(start_ts.isoformat())
    # print(end_ts.isoformat())

    for user_hash in hashes:
        payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start_ts, logged__lt=end_ts).order_by('logged')

        count = payloads.count()

        if count > 0:
            temp_file = tempfile.TemporaryFile()
            gzf = gzip.GzipFile(mode='wb', fileobj=temp_file)

            gzf.write('User ID\tSensor Timestamp\tNormalized Timestamp\tCPU Timestamp\tX\tY\tZ\n')

            index = 0

            last_sensor = sys.maxint
            base_ts = 0

            # Page through the queryset 100 rows at a time to bound memory use.
            while index < count:
                end = index + 100

                if end > count:
                    end = count

                # BUG FIX: last_sensor/base_ts were also reset here, inside the
                # batching loop, forcing a spurious re-baseline of normalized
                # timestamps every 100 payloads. The gyroscope exporter resets
                # them once, before the loop; this now matches it.

                for payload in payloads[index:end]:
                    reading_json = json.loads(payload.payload)

                    normal_times = []
                    sensor_times = []
                    cpu_times = []
                    x_readings = []
                    y_readings = []
                    z_readings = []

                    has_sensor = False

                    if 'SENSOR_TIMESTAMP' in reading_json:
                        has_sensor = True

                        for sensor_time in reading_json['SENSOR_TIMESTAMP']:
                            sensor_times.append(sensor_time)

                    for event_time in reading_json['EVENT_TIMESTAMP']:
                        cpu_times.append(event_time)

                    if has_sensor is False:
                        # BUG FIX: a single -1 was appended even though the
                        # output loop below walks len(cpu_times) entries,
                        # raising IndexError for multi-sample payloads. Pad
                        # both columns to the full width instead.
                        sensor_times = [-1] * len(cpu_times)
                        normal_times = [-1] * len(cpu_times)

                    if has_sensor:
                        for i in range(0, len(sensor_times)):
                            sensor_ts = float(sensor_times[i])

                            # Sensor clock is in nanoseconds; convert to seconds.
                            normalized_ts = sensor_ts / (1000 * 1000 * 1000)

                            if normalized_ts < last_sensor:
                                # Sensor clock restarted — re-anchor to CPU time.
                                cpu_time = cpu_times[i]
                                base_ts = cpu_time - normalized_ts

                            normal_times.append(base_ts + normalized_ts)

                            last_sensor = normalized_ts

                    for x_reading in reading_json['X']:
                        x_readings.append(x_reading)

                    for y_reading in reading_json['Y']:
                        y_readings.append(y_reading)

                    for z_reading in reading_json['Z']:
                        z_readings.append(z_reading)

                    for i in range(0, len(cpu_times)):
                        x_reading = x_readings[i]
                        y_reading = y_readings[i]
                        z_reading = z_readings[i]
                        cpu_time = cpu_times[i]
                        sensor_time = sensor_times[i]
                        normal_time = normal_times[i]

                        gzf.write(user_hash + '\t' + str(sensor_time) + '\t' + str(normal_time) + '\t' + str(cpu_time) + '\t' + str(x_reading) + '\t' + str(y_reading) + '\t' + str(z_reading) + '\n')

                index += 100

            gzf.flush()
            gzf.close()

            temp_file.seek(0)

            report = PurpleRobotReport(generated=timezone.now(), mime_type='application/x-gzip', probe=PROBE_NAME, user_id=user_hash)
            report.save()

            report.report_file.save(user_hash + '-accelerometer.txt.gz', File(temp_file))
            report.save()
def handle(self, *args, **options):
    """Export the last 21 days of gyroscope probe readings as a gzipped TSV
    report per device.

    Sensor timestamps (nanoseconds since boot) are normalized onto the CPU
    clock: whenever the sensor clock appears to restart (value decreases),
    the offset is re-derived from the matching CPU timestamp.
    """
    hashes = REPORT_DEVICES  # PurpleRobotPayload.objects.order_by().values('user_id').distinct()

    start = datetime.datetime.now() - datetime.timedelta(days=21)

    for user_hash in hashes:
        # hash = hash['user_id']

        payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start).order_by('logged')

        count = payloads.count()

        if count > 0:
            temp_file = tempfile.TemporaryFile()
            gzf = gzip.GzipFile(mode='wb', fileobj=temp_file)

            gzf.write('User ID\tSensor Timestamp\tNormalized Timestamp\tCPU Timestamp\tX\tY\tZ\n')

            index = 0

            last_sensor = sys.maxint
            base_ts = 0

            # Page through the queryset 100 rows at a time to bound memory use.
            while index < count:
                end = index + 100

                if end > count:
                    end = count

                for payload in payloads[index:end]:
                    reading_json = json.loads(payload.payload)

                    ns = []  # normalized timestamps
                    ss = []  # raw sensor timestamps
                    ts = []  # CPU (event) timestamps
                    xs = []
                    ys = []
                    zs = []

                    has_sensor = False

                    if 'SENSOR_TIMESTAMP' in reading_json:
                        has_sensor = True

                        for s in reading_json['SENSOR_TIMESTAMP']:
                            ss.append(s)

                    for t in reading_json['EVENT_TIMESTAMP']:
                        ts.append(t)

                    if has_sensor is False:
                        # BUG FIX: a single -1 was appended even though the
                        # output loop below walks len(ts) entries, raising
                        # IndexError for multi-sample payloads. Pad both
                        # columns to the full width instead.
                        ss = [-1] * len(ts)
                        ns = [-1] * len(ts)

                    if has_sensor:
                        for i in range(0, len(ss)):
                            sensor_ts = float(ss[i])

                            # Sensor clock is in nanoseconds; convert to seconds.
                            normalized_ts = sensor_ts / (1000 * 1000 * 1000)

                            if normalized_ts < last_sensor:
                                # Sensor clock restarted — re-anchor to CPU time.
                                cpu_time = ts[i]
                                base_ts = cpu_time - normalized_ts

                            ns.append(base_ts + normalized_ts)

                            last_sensor = normalized_ts

                    for x in reading_json['X']:
                        xs.append(x)

                    for y in reading_json['Y']:
                        ys.append(y)

                    for z in reading_json['Z']:
                        zs.append(z)

                    for i in range(0, len(ts)):
                        x = xs[i]
                        y = ys[i]
                        z = zs[i]
                        t = ts[i]
                        s = ss[i]
                        n = ns[i]

                        gzf.write(user_hash + '\t' + str(s) + '\t' + str(n) + '\t' + str(t) + '\t' + str(x) + '\t' + str(y) + '\t' + str(z) + '\n')

                index += 100

            gzf.flush()
            gzf.close()

            temp_file.seek(0)

            report = PurpleRobotReport(generated=timezone.now(), mime_type='application/x-gzip', probe=PROBE_NAME, user_id=user_hash)
            report.save()

            report.report_file.save(user_hash + '-gyroscope.txt.gz', File(temp_file))
            report.save()
def handle(self, *args, **options):
    """Export the last 120 days of FallNet probe readings as a gzipped TSV
    report per device, one row per reading with the fall-detection fields.
    """
    hashes = REPORT_DEVICES  # PurpleRobotPayload.objects.order_by().values('user_id').distinct()

    start_ts = timezone.now() - datetime.timedelta(days=120)
    # start_ts = datetime.datetime(2015, 7, 3, 5, 0, 0, 0, tzinfo=pytz.timezone('US/Central'))
    end_ts = timezone.now()  # start_ts + datetime.timedelta(hours=1)

    # print('HASHES: ' + str(hashes))

    def optional_str(reading, key):
        # Empty string keeps the TSV column aligned when the key is absent.
        if key in reading:
            return str(reading[key])

        return ''

    for user_hash in hashes:
        # hash = hash['user_id']

        payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start_ts, logged__lt=end_ts).order_by('logged')

        count = payloads.count()

        # print(user_hash + ' -- ' + str(count))

        if count > 0:
            temp_file = tempfile.TemporaryFile()
            gzf = gzip.GzipFile(mode='wb', fileobj=temp_file)

            gzf.write('User ID\tTimestamp\tACCELEROMETER_READING_COUNT\tGYROSCOPE_READING_COUNT\tBAROMETER_READING_COUNT\tIS_FALL\tNOT_FALL_ODDS\tNOT_FALL_SUM\tNOT_FALL_PROBABILITY\tEVALUATION_WINDOW_START\tEVALUATION_WINDOW_END\tEVALUATION_WINDOW_SIZE\n')

            index = 0

            # Page through the queryset 100 rows at a time to bound memory use.
            while index < count:
                end = index + 100

                if end > count:
                    end = count

                for payload in payloads[index:end]:
                    reading_json = json.loads(payload.payload)

                    is_fall = 0

                    if reading_json['IS_FALL']:
                        is_fall = 1

                    accel_count = optional_str(reading_json, 'ACCELEROMETER_READING_COUNT')
                    gyro_count = optional_str(reading_json, 'GYROSCOPE_READING_COUNT')
                    baro_count = optional_str(reading_json, 'BAROMETER_READING_COUNT')
                    eval_start = optional_str(reading_json, 'EVALUATION_WINDOW_START')
                    eval_end = optional_str(reading_json, 'EVALUATION_WINDOW_END')

                    # BUG FIX: EVALUATION_WINDOW_SIZE was assigned to eval_end,
                    # clobbering the window-end column and leaving the size
                    # column permanently empty.
                    eval_size = optional_str(reading_json, 'EVALUATION_WINDOW_SIZE')

                    gzf.write(user_hash + '\t' + str(reading_json['TIMESTAMP']) + '\t' + accel_count + '\t' + gyro_count + '\t' + baro_count + '\t' + str(is_fall) + '\t' + str(reading_json['NOT_FALL_ODDS']) + '\t' + str(reading_json['NOT_FALL_SUM']) + '\t' + str(reading_json['NOT_FALL_PROBABILITY']) + '\t' + eval_start + '\t' + eval_end + '\t' + eval_size + '\n')

                index += 100

            gzf.flush()
            gzf.close()

            temp_file.seek(0)

            report = PurpleRobotReport(generated=timezone.now(), mime_type='application/x-gzip', probe=PROBE_NAME, user_id=user_hash)
            report.save()

            report.report_file.save(user_hash + '-fallnet.txt.gz', File(temp_file))
            report.save()