def userBuild(request, users_to_return, decrypted=False, own_data=False, roles=None):
    """Return the list of known users from the device inventory.

    Only users present in ``users_to_return`` are included; the special
    value 'all' in ``users_to_return`` returns every user.  Responds with
    JSON, or a pretty-printed HTML page when ?pretty= is truthy.

    ``decrypted``, ``own_data`` and ``roles`` are accepted for signature
    compatibility with the sibling *Build views but are not used here.
    """
    # Default was a mutable `roles=[]` (shared across calls); use None sentinel.
    if roles is None:
        roles = []
    _start_time = time.time()
    pretty = booleanize(request.REQUEST.get('pretty', False))
    response = {'meta': {}}
    db = db_wrapper.DatabaseHelper()
    response['results'] = [
        x['user']
        for x in db.execute_named_query(
            NAMED_QUERIES["get_unique_users_in_device_inventory"], None)
        if x['user'] in users_to_return or 'all' in users_to_return
    ]
    response['meta']['execution_time_seconds'] = time.time() - _start_time
    response['meta']['status'] = {'status': 'OK', 'code': 200, 'desc': ''}
    # Removed: a no-op `if decrypted: pass` branch and an unreachable
    # trailing `return HttpResponse('hello decrypted')`.
    log.info(audit.message(request, response['meta']))
    if pretty:
        return render_to_response('pretty_json.html',
                                  {'response': json.dumps(response, indent=2)})
    return HttpResponse(json.dumps(response),
                        content_type="application/json",
                        status=response['meta']['status']['code'])
def stops_answer(request, user, scopes, users_to_return, user_roles, own_data):
    """Fetch stop-location rows for the requested users.

    Query parameters: ``page`` (cursor), ``start_date``/``end_date``
    (epoch seconds; default 0..now) and ``limit`` (clamped to 1..1000).
    When ``own_data`` is set, the query is restricted to the caller's own
    researcher/developer role rows.  Returns the rows as a list.
    """
    page = request.GET.get('page', 0)
    start_date = request.GET.get('start_date', 0)
    end_date = request.GET.get('end_date', time.time())
    try:
        limit = int(request.GET.get('limit', 1000))
        if limit <= 0 or limit > 1000:
            limit = 1000
    except (TypeError, ValueError):
        # Non-numeric limit parameter: fall back to the maximum.
        # (Was a bare `except:`, which also swallowed SystemExit etc.)
        limit = 1000
    roles_to_use = []
    if own_data and 'researcher' in user_roles:
        roles_to_use = ['researcher']
    if own_data and 'developer' in user_roles:
        roles_to_use = ['developer']
    db = db_wrapper.DatabaseHelper()
    # NOTE: float(start_date)/float(end_date) will raise on non-numeric
    # input; preserved from the original behavior.
    cur = db.retrieve(
        {
            'limit': limit,
            'after': page,
            'fields': ['user', 'arrival', 'departure', 'lon', 'lat', 'label'],
            'start_date': float(start_date),
            'end_date': float(end_date),
            'users': users_to_return,
            'sortby': 'timestamp',
        },
        'question_stop_locations',
        roles=roles_to_use)
    rows = [r for r in cur]
    return rows
def upload(request):
    """Accept a questionnaire document upload.

    Authenticates the token, decodes the URL-quoted JSON ``doc`` parameter,
    stamps it with the username, writes a backup copy, and inserts it into
    the questionnaire collection.  Duplicate inserts are answered with the
    sentinel id '00'.  Returns a JSON ``{'ok': <id>}`` response.
    """
    auth = authorization_manager.authenticate_token(request, 'connector_questionnaire.input_form_data')
    if 'error' in auth:
        return HttpResponse(json.dumps(auth), status=401)
    user = auth['user']
    roles = None
    try:
        roles = [x.role for x in UserRole.objects.get(user=user).roles.all()]
    except Exception:
        # Best-effort: users without a UserRole record keep roles=None.
        # (Was a bare `except:`, which also swallowed SystemExit etc.)
        pass
    doc = urllib.unquote(request.REQUEST.get('doc'))
    doc = json.loads(doc)
    doc['user'] = user.username
    probe = 'dk_dtu_compute_questionnaire'
    # Persist a raw backup before attempting the DB insert.
    backup.backupValue(data=doc, probe=probe, user=user.username)
    database_helper = db_wrapper.DatabaseHelper()
    try:
        doc_id = database_helper.insert(doc, collection=probe, roles=roles)
    except pymongo.errors.DuplicateKeyError:
        # Document already stored; report the sentinel id instead of failing.
        doc_id = '00'
    return HttpResponse(json.dumps({'ok':str(doc_id)}), status=200)
def __init__(self):
    """Load the device inventory and index it two ways: by anonymized
    bluetooth MAC and by username."""
    self.mapping = defaultdict(list)
    self.user_mapping = defaultdict(list)
    self.db = db_wrapper.DatabaseHelper()
    records = self.db.execute_named_query(
        NAMED_QUERIES["get_device_inventory"], None)
    for record in records:
        self.mapping[record['a_bt_mac']].append(record)
        self.user_mapping[record['user']].append(record)
def handle_noargs(self, **options):
    """Rebuild the device-inventory mapping of which user owned which
    device during which time interval, enriched with hardware MAC info.

    Ownership intervals are derived from Authorization activation times:
    per device, each user's interval starts at their earliest activation
    and ends where the next (later) owner's interval begins.  The sentinel
    9999999999999 means 'still active / far future'.
    """
    anonymizerObject = anonymizer.Anonymizer()
    db = db_wrapper.DatabaseHelper()
    authorizations = Authorization.objects.filter(application=Application.objects.get(name="Phone Data Collector"))
    print len(authorizations)
    # devices[device_id][activated_at] -> username
    devices = defaultdict(lambda: defaultdict(str))
    for a in authorizations:
        try:
            devices[a.device.device_id][a.activated_at] = a.user.username
        except:
            # Authorization rows missing a device/user are only logged.
            print a.device, a.user.username
    # mapping[device_id][username] -> {'start': epoch, 'end': epoch}
    mapping = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: 9999999999999)))
    for d in devices:
        # users[username] -> earliest activation time on this device
        users = defaultdict(lambda: 9999999999999)
        timestamps = defaultdict(str)
        for t in devices[d]:
            if users[devices[d][t]] > t:
                users[devices[d][t]] = t
        # Invert to timestamps[earliest_activation] -> username.
        for u in users:
            timestamps[users[u]] = u
        # Walk activations newest-first: each user's interval ends where the
        # previously visited (i.e. later) activation starts.
        previous_t = -1
        for t in sorted(timestamps, reverse=True):
            mapping[d][timestamps[t]]['start'] = t
            if not previous_t == -1:
                mapping[d][timestamps[t]]['end'] = previous_t
            else:
                mapping[d][timestamps[t]]['end'] = 9999999999999
            previous_t = t
    for device_id in mapping:
        for u in mapping[device_id]:
            a_device_id = anonymizerObject.anonymizeValue('device_id', device_id)
            hardware_info = None
            try:
                # Most recent plausible HardwareInfo row for this device;
                # rows with timestamp_added < timestamp are skipped
                # (presumably clock-skewed records -- verify).
                for v in db.retrieve({'where': {'device_id':[a_device_id]}, "sortby": "timestamp", "order": -1, "limit": 10000}, 'edu_mit_media_funf_probe_builtin_HardwareInfoProbe'):
                    if v['timestamp_added'] < v['timestamp']:
                        continue
                    hardware_info = v
                    break
            except:
                continue
            if not hardware_info:
                continue
            doc = {}
            doc['_id'] = a_device_id + '_' + u
            doc['device_id'] = device_id
            doc['a_device_id'] = a_device_id
            doc['user'] = u
            doc['start'] = mapping[device_id][u]['start']
            doc['end'] = mapping[device_id][u]['end']
            doc['bt_mac'] = hardware_info['device_bt_mac']
            doc['a_bt_mac'] = hardware_info['bt_mac']
            doc['a_wifi_mac'] = hardware_info['wifi_mac']
            # uuid assumed to end '-<sensible_version>-<funf_version>' -- TODO confirm.
            doc['sensible_version'] = hardware_info['uuid'].split('-')[-2]
            doc['funf_version'] = hardware_info['uuid'].split('-')[-1]
            print doc
            db.update_device_info(doc)
def run_for_role(role):
    """Recompute stop locations for every user under the given role.

    Per user: deletes existing stop rows, pages through that user's
    resampled locations, runs DBSCAN-based stop detection, and inserts the
    resulting stops back into 'question_stop_locations'.
    """
    log.debug({'type': 'stops_question', 'message': 'starting'})
    db = db_wrapper.DatabaseHelper()
    users = get_users(db, role)
    for idx, username in enumerate(users):
        # Was `print('%d/%d') % (idx, len(users))`: valid Python 2 only by
        # accident of print-statement parsing, a TypeError under Python 3.
        print('%d/%d' % (idx, len(users)))
        start_time = time.time()
        # Clear previous stops so the insert below cannot duplicate.
        db.execute_named_query(NAMED_QUERIES['delete_stops_' + role],
                               (username, ), readonly=False)
        page = 0
        rows = []
        while True:
            # Page through resampled locations 100k rows at a time.
            cur = db.retrieve(
                {
                    'limit': 100000,
                    'after': page,
                    'fields': ['timestamp', 'lon', 'lat'],
                    'users': [username]
                },
                'resampled_location',
                roles=[role])
            newrows = [r for r in cur]
            if len(newrows) == 0:
                break
            rows.extend(newrows)
            page += 1
        if len(rows) > 0:
            epoch = [
                dateutils.mysql_datetime_to_epoch(r['timestamp']) for r in rows
            ]
            df = pd.DataFrame(rows)
            df['timestamp'] = epoch
            stops = getstops_dbscan(username, df)
            if stops is not None:
                # Stops are timestamped by their arrival time.
                stops['timestamp'] = [
                    dateutils.epoch_to_mysql_string(t) for t in stops.arrival
                ]
                db.insert_rows(stops.to_dict(outtype='records'),
                               'question_stop_locations', roles=[role])
                log.debug({
                    'type': 'stops_question',
                    'message': '%s: found %d stops in %ds' %
                               (username, len(stops), int(time.time() - start_time))
                })
    log.debug({'type': 'stops_question', 'message': 'done'})
def facebook_request(request):
    """Summarise Facebook connector coverage: for every data type, report
    the stored document count and the number of distinct contributing
    users, as a JSON object."""
    fb_data_types = [
        'birthday', 'education', 'feed', 'friendlists', 'friendrequests',
        'friends', 'groups', 'hometown', 'interests', 'likes', 'location',
        'locations', 'political', 'religion', 'statuses', 'work'
    ]
    db = db_wrapper.DatabaseHelper()
    values = {}
    for name in fb_data_types:
        doc_count = db.retrieve({}, 'dk_dtu_compute_facebook_' + name).rowcount
        user_row = db.execute_named_query(
            NAMED_QUERIES["count_unique_facebook_users"], (name, )).fetchone()
        values[name + '_doc'] = doc_count
        values[name + '_users'] = user_row.values()[0]
    return HttpResponse(json.dumps(values))
def recalculate(start, end, role):
    """Recompute the 'resampled_location' collection for [start, end).

    Processes one 24h window at a time: deletes the window's existing
    resampled rows, pages through the raw LocationProbe rows, resamples
    each user's positions to 15-minute medians with pandas, and re-inserts.
    ``start``/``end`` are epoch seconds; ``role`` selects the DB role the
    rows are stored under.
    """
    log.debug({'type': 'resampled_location', 'message': 'starting'})
    db = db_wrapper.DatabaseHelper()
    current = start
    DELTAT = 24*60*60  # one-day processing window
    t = time.time()
    while current < end:
        # Clear previously resampled rows for this window first so the
        # insert below cannot create duplicates.
        db.execute_named_query(NAMED_QUERIES['delete_resampled_location_' + role],
                               (dateutils.epoch_to_mysql_string(current),
                                dateutils.epoch_to_mysql_string(current + DELTAT)),
                               readonly=False)
        rows = []
        page = 0
        while True:
            # Page through raw locations 100k rows at a time.
            cur = db.retrieve({'limit': 100000,
                               'start_date': current,
                               'end_date': current + DELTAT,
                               'after': page,
                               'fields': ['timestamp', 'lon', 'lat', 'user'],
                               'sortby': 'timestamp'},
                              'edu_mit_media_funf_probe_builtin_LocationProbe',
                              roles=[role]
                              )
            newrows = [r for r in cur]
            if len(newrows) == 0:
                break
            rows.extend(newrows)
            page += 1
        # NOTE(review): the rows/s figure divides by the elapsed time since
        # function start; could divide by ~zero on an instant first fetch.
        log.debug({'type': 'resampled_location', 'message': 'fetched %d (%d rows/s)' % (len(rows), len(rows)/(time.time() - t))})
        if len(rows) > 0:
            alllocs = pd.DataFrame(rows)
            resampled_rows = []
            for uid, grp in alllocs.groupby('user'):
                # 15-minute median positions per user.  Uses the legacy
                # pandas API (resample(how=...), to_dict(outtype=...)).
                resampled = grp.drop('user',1).set_index('timestamp').resample('15min', how='median').dropna()
                resampled['user'] = uid
                resampled['timestamp'] = resampled.index
                resampled['timestamp'] = resampled['timestamp'].apply(dateutils.datetime_to_string)
                resampled_rows.extend(resampled.to_dict(outtype='records'))
            db.insert_rows(resampled_rows, 'resampled_location', roles=[role])
        current += DELTAT
    log.debug({'type': 'resampled_location', 'message': 'done in %ds' % (int(time.time() - t))})
def handle_noargs(self, **options):
    """Scan all questionnaire answers and report duplicates vs conflicts.

    Keeps the first answer seen per (user, variable_name); a later answer
    is a duplicate when its response matches the stored one, otherwise a
    conflict.  Prints a progress counter and the final tallies; nothing is
    written back to the database.
    """
    # responses[user][variable_name] -> first answer document seen
    responses = defaultdict(lambda: defaultdict(dict))
    conflicts = []
    duplicates = []
    jj = 0
    db = db_wrapper.DatabaseHelper()
    for doc in db.execute_named_query(NAMED_QUERIES["select_questionnaires"], None):
        jj += 1
        if not jj%1000:
            # Progress marker every 1000 documents.
            print jj
        try:
            # EAFP: KeyError means this (user, variable) pair is new.  Note
            # the defaultdict indexing inserts empty dicts as a side effect,
            # so it is the final ['response'] lookup that actually raises.
            if responses[doc['user']][doc['variable_name']]['response'] == doc['response']:
                duplicates.append(doc)
            else:
                conflicts.append((responses[doc['user']][doc['variable_name']], doc))
        except KeyError:
            responses[doc['user']][doc['variable_name']] = doc
    print len(duplicates), len(conflicts)
def questionnaires_request(request):
    """Report questionnaire statistics as JSON: total documents, distinct
    users, how many finished (submitted), and the male/female split."""
    db = db_wrapper.DatabaseHelper()

    def count_query(name, params):
        # Run a named counting query and unwrap its single scalar result.
        return db.execute_named_query(NAMED_QUERIES[name], params).fetchone().values()[0]

    values = {}
    values['documents_no'] = db.retrieve({}, "dk_dtu_compute_questionnaire").rowcount
    values['users'] = count_query("count_unique_questionnaire_users", None)
    values['finished'] = count_query("count_questionnaire_users_with_variable", ("_submitted", ))
    values['male'] = count_query("count_questionnaire_users_by_sex", ("mand", ))
    values['female'] = count_query("count_questionnaire_users_by_sex", ("kvinde", ))
    return HttpResponse(json.dumps(values))
def mobile_request(request):
    """Summarise mobile (funf) probe coverage: per probe type, report the
    stored document count and the number of distinct contributing users,
    as a JSON object.  Collection errors leave the summary partial."""
    values = {}
    sections = [
        'BluetoothProbe', 'CallLogProbe', 'CellProbe', 'ContactProbe',
        'HardwareInfoProbe', 'LocationProbe', 'ScreenProbe', 'SMSProbe',
        'TimeOffsetProbe', 'WifiProbe'
    ]
    try:
        db = db_wrapper.DatabaseHelper()
        for x in sections:
            collection = 'edu_mit_media_funf_probe_builtin_' + x
            values[x + '_doc'] = db.retrieve({}, collection).rowcount
            # Copy the named query before setting its target database: the
            # original mutated the shared NAMED_QUERIES entry in place, a
            # side effect that leaked across requests.
            count_funf_users_query = dict(
                NAMED_QUERIES["count_funf_unique_users_by_probe"])
            count_funf_users_query["database"] = collection
            values[x + '_users'] = db.execute_named_query(
                count_funf_users_query, None).fetchone().values()[0]
    except Exception:
        # Best-effort summary; was a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt.
        pass
    return HttpResponse(json.dumps(values))
def handle_noargs(self, **options):
    """Push a re-authentication notification to users whose Facebook token
    has expired and who still actively use a recent enough app version.

    For each user the latest token expiry is taken across all facebook_in
    authorizations; expired users are notified on every registered device
    whose inventory entry is still active and runs at least
    MIN_SENSIBLE_VERSION.
    """
    # user_auths[user]['expires_at'] -> latest token expiry epoch seen
    user_auths = defaultdict(lambda: defaultdict(int))
    authorizations = Authorization.objects.filter(application=Application.objects.get(connector_type='facebook_in'))
    db = db_wrapper.DatabaseHelper()
    MIN_SENSIBLE_VERSION = 'v0.3.2.3'
    for a in authorizations:
        user = a.user
        expires_at = json.loads(a.payload)['expires_at']
        if expires_at > user_auths[user]['expires_at']:
            user_auths[user]['expires_at'] = expires_at
    now = time.time()
    for user in user_auths:
        expired = (user_auths[user]['expires_at'] < now)
        if expired:
            #TODO: base this on connector_type
            gcm_registrations = GcmRegistration.objects.filter(user=user, application=Application.objects.get(name='Phone Data Collector'))
            for gr in gcm_registrations:
                for d in db.execute_named_query(NAMED_QUERIES["get_device_inventory_with_device_id"], (gr.device.device_id, )):
                    try:
                        sensible_version = d['sensible_version']
                    except KeyError:
                        # Inventory rows without a version are skipped.
                        continue
                    print sensible_version
                    # NOTE(review): version gate is a plain string compare;
                    # it misorders multi-digit components (e.g. v0.3.10.x).
                    if d['end'] > now and d['user'] == user.username and sensible_version >= MIN_SENSIBLE_VERSION:
                        self.sendNotification(gr.gcm_id, user)
def calculate_epi_summary():
    """Build per-user daily epidemic-game summaries and write them, one
    JSON object per user per line, to epi_summary.json next to this script.

    Pipeline:
      1. Collect each user's self-reported game states per day (EpidemicProbe).
      2. Reduce to one final state per user/day, priority I (or E) > VS > V > S.
      3. Compute the global fraction of users in each state per day.
      4. Rebuild proximity interactions from BluetoothProbe records.
      5. Per user, write daily fractions of infected / vaccinated /
         vaccinated-with-side-effects / susceptible contacts plus globals.
    """
    this_path = os.path.split(os.path.realpath(__file__))[0] + '/'
    db = db_wrapper.DatabaseHelper()
    waves = read_waves()
    an = anonymizer.Anonymizer()  # unused here; kept from the _OLD variant
    inventory = device_inventory.DeviceInventory()
    localtz = pytz.timezone('Europe/Copenhagen')
    f = open(this_path+'epi_summary.json', 'w')
    print waves
    # states[user][date] -> set of raw self-reported states seen that day
    states = defaultdict(lambda: defaultdict(set))
    for wave in sorted(waves):
        print wave
        last_id = 0
        next_wave_begins = waves[wave]['next_wave_begins']
        while True:
            # Page through the wave's EpidemicProbe rows; `after` appears to
            # act as a page index (incremented per fetch) -- TODO confirm
            # against db.retrieve's paging contract.
            cur = db.retrieve(params={'limit':100000, 'sortby':'timestamp', 'order':1, 'after': last_id, 'start_date': wave, 'end_date': next_wave_begins}, collection='edu_mit_media_funf_probe_builtin_EpidemicProbe', roles='')
            if cur.rowcount == 0:
                break
            last_id += 1
            for row in cur:
                try:
                    # Skip rows whose upload time precedes the sample time
                    # (presumably clock-skewed records -- verify).
                    if row['timestamp_added'] < row['timestamp'] :
                        continue
                    user = row['user']
                    data = row['data']
                    data = json.loads(base64.b64decode(data))
                    #if not data['wave_no'] == int(waves[wave]['wave_no']): continue
                    timestamp = data['TIMESTAMP']
                    date = localtz.localize(datetime.datetime.fromtimestamp(timestamp)).date()
                    state = data['self_state']
                    #try: side_effects_lost_points = int(data['side_effects_lost_points'])
                    #except: continue
                    #print user, timestamp, state, side_effects_lost_points, date
                    #if state == 'V' and side_effects_lost_points > 0: state = 'VS'
                    states[user][date].add(state)
                except:
                    # Best-effort: malformed rows are skipped silently.
                    continue
    # user_day_states[user][date] -> final state for the user on a given day
    user_day_states = defaultdict(lambda: defaultdict(str))
    all_users_in_day = defaultdict(int)
    all_days = set()
    all_users = set()
    for user in states:
        all_users.add(user)
        for date in states[user]:
            # Priority: infected ('I', with exposed 'E' counted as infected),
            # then vaccinated-with-side-effects, vaccinated, susceptible.
            if 'I' in states[user][date]:
                user_day_state = 'I'
            elif 'E' in states[user][date]:
                user_day_state = 'I'
            elif 'VS' in states[user][date]:
                user_day_state = 'VS'
            elif 'V' in states[user][date]:
                user_day_state = 'V'
            elif 'S' in states[user][date]:
                user_day_state = 'S'
            else:
                continue
            user_day_states[user][date] = user_day_state
            all_users_in_day[date] += 1
    # global_stats[date][state] -> fraction of that day's users in the state
    global_stats = defaultdict(lambda: defaultdict(float))
    for user in user_day_states:
        for date in user_day_states[user]:
            all_days.add(date)
            global_stats[date][user_day_states[user][date]] += 1
    for date in global_stats:
        for status in global_stats[date]:
            global_stats[date][status] /= float(all_users_in_day[date])
            print date, status, global_stats[date][status]
    # interactions[user] -> set of '<epoch>_<peer>' strings from bluetooth scans
    interactions = defaultdict(set)
    for wave in sorted(waves):
        last_id = 0
        next_wave_begins = waves[wave]['next_wave_begins']
        while True:
            cur = db.retrieve(params={'limit':100000, 'sortby':'timestamp', 'order':1, 'after': last_id, 'start_date': wave, 'end_date': next_wave_begins}, collection='edu_mit_media_funf_probe_builtin_BluetoothProbe', roles='')
            if cur.rowcount == 0:
                break
            last_id += 1
            for row in cur:
                if row['timestamp_added'] < row['timestamp'] :
                    continue
                user = row['user']
                timestamp = row['timestamp']
                bt_mac = row['bt_mac']
                if bt_mac == '-1':
                    continue
                # Map the sighted MAC to a username; mapBtToUser returns the
                # MAC itself when no owner is known, and such rows are dropped.
                userB = inventory.mapBtToUser(bt_mac, long(timestamp.strftime('%s')))
                if userB == bt_mac:
                    continue
                interactions[user].add(timestamp.strftime('%s')+'_'+userB)
    for user in all_users:
        infected_interactions = defaultdict(set)
        vaccinated_interactions = defaultdict(set)
        vaccinated_side_interactions = defaultdict(set)
        susceptible_interactions = defaultdict(set)
        for interaction in interactions[user]:
            t = long(interaction.split('_')[0])/int(300)  # 5-min bucket (unused here)
            tt = long(interaction.split('_')[0])
            date = localtz.localize(datetime.datetime.fromtimestamp(tt)).date()
            userB = interaction.split('_')[1]
            all_days.add(date)
            # Bucket the peer by that peer's final state for the day.
            if user_day_states[userB][date] == 'I':
                infected_interactions[date].add(userB)
            if user_day_states[userB][date] == 'VS':
                vaccinated_side_interactions[date].add(userB)
            if user_day_states[userB][date] == 'V':
                vaccinated_interactions[date].add(userB)
            if user_day_states[userB][date] == 'S':
                susceptible_interactions[date].add(userB)
        user_dict = {}
        user_dict['user'] = user
        user_dict['values'] = {}
        for day in sorted(all_days):
            # Only report recent days (from 4 days ago onwards).
            if day < (datetime.datetime.now().date() + datetime.timedelta(days=-4)):
                continue
            all_l = float(len(infected_interactions[day]) + len(vaccinated_interactions[day]) + len(vaccinated_side_interactions[day]) + len(susceptible_interactions[day]))
            user_dict['values'][str(day)] = {}
            if all_l == 0:
                # No classified contacts that day: report zero fractions.
                user_dict['values'][str(day)]['infected_interactions'] = 0
                user_dict['values'][str(day)]['vaccinated_interactions'] = 0
                user_dict['values'][str(day)]['vaccinated_side_interactions'] = 0
                user_dict['values'][str(day)]['susceptible_interactions'] = 0
            else:
                user_dict['values'][str(day)]['infected_interactions'] = len(infected_interactions[day])/all_l
                user_dict['values'][str(day)]['vaccinated_interactions'] = len(vaccinated_interactions[day])/all_l
                user_dict['values'][str(day)]['vaccinated_side_interactions'] = len(vaccinated_side_interactions[day])/all_l
                user_dict['values'][str(day)]['susceptible_interactions'] = len(susceptible_interactions[day])/all_l
            user_dict['values'][str(day)]['infected_all'] = global_stats[day]['I']
            user_dict['values'][str(day)]['vaccinated_all'] = global_stats[day]['V']
            user_dict['values'][str(day)]['vaccinated_side_all'] = global_stats[day]['VS']
            user_dict['values'][str(day)]['susceptible_all'] = global_stats[day]['S']
            user_dict['values'][str(day)]['day'] = str(day)
        f.write(json.dumps(user_dict) + '\n')
    f.close()
def calculate_epi_summary_OLD():
    """Previous (superseded) epidemic summary: classifies bluetooth
    contacts by the tag at the end of the peer's *advertised device name*
    (infected/vaccinated) instead of the self-reported game state, and
    writes per-user daily counts, one JSON object per user per wave, to
    epi_summary.json next to this script.
    """
    this_path = os.path.split(os.path.realpath(__file__))[0] + '/'
    db = db_wrapper.DatabaseHelper()
    waves = read_waves()
    an = anonymizer.Anonymizer()
    inventory = device_inventory.DeviceInventory()
    localtz = pytz.timezone('Europe/Copenhagen')
    f = open(this_path+'epi_summary.json', 'w')
    for wave in sorted(waves):
        last_id = 0
        next_wave_begins = waves[wave]['next_wave_begins']
        # interactions[user] -> set of '<epoch>_<peer>_<bt_name>' strings
        interactions = defaultdict(set)
        while True:
            cur = db.retrieve(params={'limit':100000, 'sortby':'timestamp', 'order':1, 'after': last_id, 'start_date': wave, 'end_date': next_wave_begins}, collection='edu_mit_media_funf_probe_builtin_BluetoothProbe', roles='')
            if cur.rowcount == 0:
                break
            last_id += 1
            for row in cur:
                # Skip rows uploaded before they were sampled (presumably
                # clock-skewed records -- verify).
                if row['timestamp_added'] < row['timestamp'] :
                    continue
                user = row['user']
                timestamp = row['timestamp']
                bt_mac = row['bt_mac']
                bt_name = row['name']
                if bt_mac == '-1':
                    continue
                if bt_name:
                    # Names are stored anonymized; recover the clear text so
                    # the infected/vaccinated tag suffix can be matched.
                    bt_name = an.deanonymizeValue('bluetooth_name', bt_name)
                else:
                    bt_name = ''
                userB = inventory.mapBtToUser(bt_mac, long(timestamp.strftime('%s')))
                if userB == bt_mac:
                    # Unknown MAC: no owning user in the inventory.
                    continue
                interactions[user].add(timestamp.strftime('%s')+'_'+userB+'_'+bt_name.decode('utf-8'))
        for user in interactions:
            save_string = ''  # NOTE(review): never used afterwards
            infected_interactions = defaultdict(set)
            vaccinated_interactions = defaultdict(set)
            susceptible_interactions = defaultdict(set)
            all_days = set()
            for interaction in interactions[user]:
                t = long(interaction.split('_')[0])/int(300)  # 5-minute bucket
                tt = long(interaction.split('_')[0])
                date = localtz.localize(datetime.datetime.fromtimestamp(tt)).date()
                userB = interaction.split('_')[1]
                name = interaction.split('_')[2]
                all_days.add(date)
                # Classify by the tag the peer's device name ends with.
                if name.endswith(waves[wave]['infected_tag'].decode('utf-8')):
                    infected_interactions[date].add(str(t) + userB)
                elif name.endswith(waves[wave]['vaccinated_tag']):
                    vaccinated_interactions[date].add(str(t) + userB)
                else:
                    susceptible_interactions[date].add(str(t) + userB)
            user_dict = {}
            user_dict['user'] = user
            user_dict['values'] = {}
            for day in sorted(all_days):
                user_dict['values'][str(day)] = {}
                user_dict['values'][str(day)]['wave_no'] = int(waves[wave]['wave_no'])
                user_dict['values'][str(day)]['infected_interactions'] = len(infected_interactions[day])
                user_dict['values'][str(day)]['vaccinated_interactions'] = len(vaccinated_interactions[day])
                user_dict['values'][str(day)]['susceptible_interactions'] = len(susceptible_interactions[day])
            f.write(json.dumps(user_dict) + '\n')
    f.close()
import json
from db_access.named_queries.named_queries import NAMED_QUERIES

# Endpoint identifier for this named-query module.
NAME = "aggregate_questionnaire_question"

from utils import db_wrapper

# Shared module-level DB helper, reused across requests.
db_helper = db_wrapper.DatabaseHelper()


def get_aggregated_questionnaire_data(request, user, scopes, users_to_return, user_roles, own_data):
    """Aggregate questionnaire answers for one form version.

    Expects a ``form_version`` request parameter; returns a 400-style
    status dict when it is missing.  Expands the special user list
    ['all'] to every user in the device inventory, then builds a header of
    variable names for the requested form version.

    NOTE(review): this excerpt is truncated -- the function continues past
    ``header.extend(...)`` in the full source.
    """
    form_version = request.REQUEST.get("form_version", "")
    if not form_version:
        return {
            "status": {
                "code": 400,
                "message": "Please select a questionnaire version"
            }
        }
    if "all" in users_to_return:
        users_to_return = [
            x['user'] for x in db_helper.execute_named_query(
                NAMED_QUERIES["get_unique_users_in_device_inventory"], None)
        ]
    response = []
    header = ['user']
    # NOTE(review): (form_version) is NOT a tuple -- other call sites pass
    # (param, ).  If execute_named_query expects a parameter sequence this
    # likely needs to be (form_version, ); confirm before changing.
    header.extend([
        doc['variable_name'] for doc in db_helper.execute_named_query(
            NAMED_QUERIES["get_variable_names"], (form_version)).fetchall()
    ])
import authorization_manager.authorization_manager as authorization_manager
from django.conf import settings
import connectors.connectors_config
import traceback
import time
from accounts.models import UserRole
from sensible_audit import audit
from utils import db_wrapper

# Module-level audit logger for this connector.
log = audit.getLogger(__name__)

# Configuration entry for the Funf connector.
myConnector = connectors.connectors_config.CONNECTORS['ConnectorFunf'][
    'config']
db = db_wrapper.DatabaseHelper()
# Token cache -- populated elsewhere in this module (not visible in this excerpt).
valid_tokens = {}


def populate(documents_to_insert):
    """Insert batched probe documents, grouped by probe and by role key.

    ``documents_to_insert`` maps probe -> role-key -> list of documents,
    where a role key is an '@'-separated composite of role names.

    NOTE(review): this excerpt is truncated -- the loop body continues
    past ``roles = role.split('@')`` in the full source.
    """
    inserted_counter = 0
    # (commented-out debug dump retained)
    # for probe in documents_to_insert:
    #     for role in documents_to_insert[probe]:
    #         for doc in documents_to_insert[probe][role]:
    #             print json.dumps({'probe': probe, 'role': role, 'doc':doc})
    for probe in documents_to_insert:
        for role in documents_to_insert[probe]:
            population_start = time.time()
            # Split the composite role key into individual role names.
            roles = role.split('@')
def analyse_epidemic():
    """Ad-hoc analysis of one day of the epidemic game (hard-coded epoch
    window 1407794400..1407880800): tallies users' final states globally,
    then prints the final state of everyone USER interacted with over
    bluetooth.

    NOTE(review): relies on a module-level USER constant defined elsewhere
    in the file (not visible in this excerpt).
    """
    db = db_wrapper.DatabaseHelper()
    #cur = db.retrieve(params={'limit':1000, 'sortby':'timestamp', 'order':-1, 'where': {'user':[USER]}}, collection='edu_mit_media_funf_probe_builtin_EpidemicProbe', roles='')
    cur = db.retrieve(
        params={
            'limit': 10000000,
            'sortby': 'timestamp',
            'order': -1,
            'start_date': 1407794400,
            'end_date': 1407880800
        },
        collection='edu_mit_media_funf_probe_builtin_EpidemicProbe',
        roles='')
    lt = 0
    inventory = device_inventory.DeviceInventory()
    # states[user] -> set of every self-reported state seen in the window
    states = defaultdict(set)
    for row in cur:
        data = json.loads(base64.b64decode(row['data']))
        user = row['user']
        timestamp = row['timestamp']
        timestamp_2 = data['TIMESTAMP']
        # (debug filters/prints retained for reference)
        # if not (timestamp_2 >= 1407708000 and timestamp_2 <= 1407794400): continue
        # print user, timestamp, timestamp_2, lt - timestamp_2, data['self_state'], data['infected_tag'], data['to_recover_time'],
        # try: print data['self_state'], timestamp
        # except: print ''
        # except: pass
        states[user].add(data['self_state'])
        lt = timestamp_2
        #if timestamp < datetime.datetime(2014, 07, 29): continue
    # Reduce each user's observed states to one final state, priority
    # I (or exposed E, counted as infected) > V > S.
    final_states = defaultdict(str)
    for user in states:
        if 'I' in states[user]:
            final_states[user] = 'I'
        elif 'E' in states[user]:
            final_states[user] = 'I'
        elif 'V' in states[user]:
            final_states[user] = 'V'
        elif 'S' in states[user]:
            final_states[user] = 'S'
    global_states = defaultdict(int)
    for user in final_states:
        global_states[final_states[user]] += 1
    print global_states
    cur = db.retrieve(
        params={
            'limit': 10000000,
            'sortby': 'timestamp',
            'order': -1,
            'start_date': 1407794400,
            'end_date': 1407880800,
            'where': {
                'user': [USER]
            }
        },
        collection='edu_mit_media_funf_probe_builtin_BluetoothProbe',
        roles='')
    interactions = set()
    for row in cur:
        # Skip rows uploaded before they were sampled (presumably
        # clock-skewed records -- verify).
        if row['timestamp_added'] < row['timestamp']:
            continue
        user = row['user']
        timestamp = row['timestamp']
        bt_mac = row['bt_mac']
        if bt_mac == '-1':
            continue
        # mapBtToUser returns the MAC itself when no owner is known.
        userB = inventory.mapBtToUser(bt_mac, long(timestamp.strftime('%s')))
        if userB == bt_mac:
            continue
        interactions.add(userB)
    for user in interactions:
        print user, final_states[user]
def dataBuild(request, probe_settings, users_to_return, decrypted = False, own_data = False, roles = None):
    """Generic probe-data endpoint: query one probe collection and render
    the result as JSON, JSONP (``callback`` parameter), pretty HTML, or
    CSV, according to the processed request's ``format``.

    Errors raised as BadRequestException are folded into the response's
    meta.status and rendered through the same paths (forced JSON format).
    """
    # None sentinel avoids the mutable-default-argument pitfall.
    if roles == None:
        roles = []
    _start_time = time.time()
    results = None
    # NOTE(review): `query` is never assigned after this, so
    # meta['query'] is always None in this excerpt.
    query = None
    proc_req = None
    response = {}
    response['meta'] = {}
    try:
        if not users_to_return:
            raise BadRequestException('error',403,'The current token does not allow to view data from any users')
        proc_req = processApiCall(request, probe_settings, users_to_return)
        # Restrict to the caller's own researcher/developer rows when
        # own_data is requested.
        roles_to_use = []
        if own_data and 'researcher' in roles:
            roles_to_use = ['researcher']
        if own_data and 'developer' in roles:
            roles_to_use = ['developer']
        try:
            db = db_wrapper.DatabaseHelper()
            docs = db.retrieve(proc_req, probe_settings['collection'], roles_to_use)
            #raise BadRequestException('error',200,json.dumps({'proc_req':proc_req,'probe_settings':probe_settings['collection'],'roles':roles}))
            results = cursorToArray(docs, decrypted = decrypted,
                                    probe=probe_settings['collection'],
                                    is_researcher = ('researcher' in roles),
                                    map_to_users=proc_req['map_to_users'])
        except Exception as e:
            # Any DB/conversion failure becomes a 500 carried in the body.
            raise BadRequestException('error',500,'The request caused a DB malfunction: ' + str(e))
        results_count = len(results)
        response['meta']['status'] = {'status':'success','code':200}
        response['meta']['results_count'] = len(results)
        response['meta']['api_call'] = proc_req
        response['meta']['query'] = query
        response['results'] = results
        # Emit paging links only when the page came back full, i.e. there
        # may be more rows after this page.
        if len(results) > 0:
            if results_count == proc_req['limit']:
                response['meta']['paging'] = {}
                response['meta']['paging']['cursors'] = {}
                if proc_req['after'] is not None:
                    response['meta']['paging']['cursors']['after'] = str(int(proc_req['after']) + 1)
                    # Rewrite the &after= value in the current URL.
                    response['meta']['paging']['links'] = {
                        'next':re.sub('&after=[^ &]+','&after=' + str(response['meta']['paging']['cursors']['after']),request.build_absolute_uri())}
                else:
                    response['meta']['paging']['links'] = {
                        'next':request.build_absolute_uri() + '&after=' + str(1)}
    except BadRequestException as e:
        # Fold the error into meta.status; force plain JSON rendering below.
        response['meta']['status'] = e.value
        proc_req = {'format':'json'}
    response['meta']['execution_time_seconds'] = time.time()-_start_time
    callback = request.REQUEST.get('callback','')
    if len(callback) > 0:
        # JSONP: wrap the JSON payload in the requested callback.
        data = '%s(%s);' % (callback, json.dumps(response))
        return HttpResponse(data, content_type="text/javascript", status=response['meta']['status']['code'])
    if decrypted:
        pass
        # (commented-out decrypted-users post-processing retained)
        # users_return=[]
        # users_results = cursorToArray(results, decrypted = decrypted, probe=probe_settings['collection'])
        # for data_users in users_results:
        #     if data_users['user'] not in users_return:
        #         users_return.append(data_users['user'])
    if proc_req['format'] == 'pretty':
        return render_to_response('pretty_json.html', {'response': json.dumps(response, indent=2)})
    elif proc_req['format'] == 'csv':
        # CSV: meta is emitted as '#'-prefixed comment lines above the data.
        output = '#' + json.dumps(response['meta'], indent=2).replace('\n','\n#') + '\n'
        output += array_to_csv(results)
        return HttpResponse(output, content_type="text/plain; charset=utf-8", status=response['meta']['status']['code'])
    else:
        return HttpResponse(json.dumps(response), content_type="application/json; charset=utf-8", status=response['meta']['status']['code'])