def vehicles():
    from models import Agency, Route, VehicleLocation, Prediction
    # TODO: Somehow bundle these queries into the object model definitions? So messy :(

    # 1. Select the latest vehicle locations for each vehicle. (The DB may have old ones too).
    v_inner = db.session.query(VehicleLocation.vehicle, db.func.max(VehicleLocation.time).label("time"))\
        .group_by(VehicleLocation.vehicle).subquery()
    vehicle_locations = db.session.query(VehicleLocation).join(v_inner, db.and_(
        v_inner.c.vehicle == VehicleLocation.vehicle,
        v_inner.c.time == VehicleLocation.time
    )).filter(Agency.tag == agency).all()

    # 2. Select the predictions for each vehicle:stop pair which came from the most recent
    # API call for that vehicle:stop pair. Old predictions may be stored but we don't want them.
    now = datetime.now()
    p_inner = db.session.query(Prediction.vehicle, Prediction.stop_id,
                               db.func.max(Prediction.api_call_id).label("api_call_id"))\
        .group_by(Prediction.vehicle, Prediction.stop_id)\
        .subquery()
    predictions = db.session.query(Prediction).join(p_inner, db.and_(
        p_inner.c.api_call_id == Prediction.api_call_id,
        p_inner.c.vehicle == Prediction.vehicle,
        p_inner.c.stop_id == Prediction.stop_id
    )).filter(
        Agency.tag == agency,
        Prediction.prediction >= now)\
        .group_by(Prediction.id, Prediction.vehicle, Prediction.stop_id)\
        .all()

    z = {
        "locations": {v.vehicle: v.serialize() for v in vehicle_locations},
        "predictions": {p.id: p.serialize() for p in predictions}
    }
    return z
def generate_summary(self, survey_name, start, end):
    survey = Survey.query.filter_by(name=survey_name.lower()).one_or_none()
    new_users_query = survey.mobile_users.filter(
        db.and_(MobileUser.created_at >= start,
                MobileUser.created_at <= end))
    new_points_query = (MobileCoordinate.query.filter(
        db.and_(MobileCoordinate.survey_id == survey.id,
                MobileCoordinate.timestamp >= start,
                MobileCoordinate.timestamp <= end)))
    active_users_query = (survey.mobile_coordinates.filter(
        MobileCoordinate.timestamp >= start,
        MobileCoordinate.timestamp <= end).distinct(
            MobileCoordinate.mobile_id))
    new_prompts_query = PromptResponse.query.filter(
        db.and_(PromptResponse.survey_id == survey.id,
                PromptResponse.displayed_at >= start,
                PromptResponse.displayed_at <= end))

    summary = {
        'new_users': new_users_query.count(),
        'active_users': active_users_query.with_entities(
            MobileCoordinate.mobile_id).count(),
        'num_points': new_points_query.count(),
        'num_prompts': new_prompts_query.count()
    }
    return summary
async def post(self, request):
    try:
        request_data = await request.post()
        camera_id = int(request.match_info['camera_id'])
        access_token = request_data.get('access_token')
        label = request_data.get('label')
        x = request_data.get('x')
        y = request_data.get('y')

        if not access_token:
            raise MissingParameter('access_token')
        if not label:
            raise MissingParameter('label')
        if not x:
            raise MissingParameter('x')
        if not y:
            raise MissingParameter('y')
        if not x.isdigit():
            raise InvalidParameter('x')
        if not y.isdigit():
            raise InvalidParameter('y')

        account_id = await Account.redis_get_id(request.app['redis'], access_token)
        if account_id is None:
            raise InvalidAccessToken()

        user_camera = await db.select([Camera.id])\
            .where(db.and_(Camera.id == camera_id, Camera.account_id == account_id))\
            .gino\
            .model(Camera)\
            .all()
        if not user_camera:
            raise InvalidParameter('camera_id')

        status, result = await Place.update\
            .values(x=int(x), y=int(y))\
            .where(db.and_(Place.camera_id == camera_id, Place.label == label))\
            .gino\
            .status()
        if status == 'UPDATE 0':
            place = await Place.create(camera_id=camera_id, label=label, x=int(x), y=int(y))
        else:
            place = await db.select([Place.id])\
                .where(db.and_(Place.camera_id == camera_id, Place.label == label))\
                .gino\
                .model(Place)\
                .first()

        return aiohttp.web.json_response({'status': True, 'id': place.id})
    except APIException as api_exception:
        raise api_exception
    except:
        raise InternalServerError()
def get_applicant_list(current_user=None):
    if not (is_recruiter(current_user) or is_admin(current_user)):
        raise ForbiddenException('User must be recruiter or admin.')
    result = {}
    for user in User.query.join(
            Application, Application.user_id == User.id).filter(
                db.and_(
                    Application.is_submitted,
                    db.or_(
                        db.not_(Application.is_concluded),
                        db.and_(Application.is_accepted,
                                db.not_(Application.is_invited))))):
        recruiter_name = None
        application = Application.query.filter_by(
            user_id=user.id, is_concluded=False).one_or_none()
        if application is None:
            application = Application.query.filter_by(
                user_id=user.id, is_accepted=True, is_invited=False).one_or_none()
        application_status = 'new'
        if application:
            recruiter_id = application.recruiter_id
            if recruiter_id:
                recruiter = User.get(recruiter_id)
                recruiter_name = recruiter.name
                application_status = 'claimed'
            if application.is_accepted:
                application_status = 'accepted'
        applicant_visible = False
        # senior recruiters see all
        if is_senior_recruiter(current_user) or is_admin(current_user):
            applicant_visible = True
        elif is_recruiter(current_user):
            if application and application.recruiter_id == current_user.id:
                applicant_visible = True
            else:
                applicant_visible = application_status == 'new'
        if applicant_visible:
            result[user.id] = {
                'user_id': user.id,
                'recruiter_id': application.recruiter_id if application else None,
                'recruiter_name': recruiter_name,
                'status': application_status,
                'name': user.name,
            }
    return {'info': result}
async def post(self, request):
    try:
        request_data = await request.post()
        access_token = request_data.get('access_token')
        name = request_data.get('name')

        if not access_token:
            raise MissingParameter('access_token')
        if not name:
            raise MissingParameter('name')

        account_id = await Account.redis_get_id(request.app['redis'], access_token)
        if account_id is None:
            raise InvalidAccessToken()

        camera = await db.select([Camera.id])\
            .where(db.and_(Camera.account_id == account_id, Camera.name == name))\
            .gino\
            .model(Camera)\
            .all()
        if camera:
            camera = camera[0]
        else:
            camera = await Camera.create(account_id=account_id, name=name)

        return aiohttp.web.json_response({"id": camera.id})
    except APIException as api_exception:
        raise api_exception
    except:
        raise InternalServerError()
def action_batch_alter(self, ids):
    try:
        if request.form['promotion_id'] == '':
            raise Exception(lazy_gettext('No new promotion ID entered.'))
        new_promotion_id = int(request.form['promotion_id'])
        new_promotion = Promotion.query.filter_by(id=new_promotion_id).one_or_none()
        if new_promotion is None:
            raise Exception(lazy_gettext('New promotion not found.'))

        user_promotion_ids = [(id_pair.split(',')[0], id_pair.split(',')[1])
                              for id_pair in ids]
        # unpack the generated AND clauses so db.or_ receives them as separate arguments
        user_promotions = UserPromotion.query.filter(
            db.or_(*(
                db.and_(UserPromotion.user_id == user_id,
                        UserPromotion.promotion_id == promotion_id)
                for user_id, promotion_id in user_promotion_ids
            )))

        count = 0
        for user_promotion in user_promotions.all():
            user_promotion.promotion_id = new_promotion_id
            self.session.add(user_promotion)
            count += 1
        self.session.commit()
        flash(lazy_gettext("Successfully Changed") + f" {count} " + lazy_gettext("Records"))
        logger.info(f'Batch UserPromotion change by admin {auth_get_current_username()} changed {count} UserPromotions')
    except Exception as ex:
        self.session.rollback()
        flash(lazy_gettext("Error ") + f"{str(ex)}")
        logger.exception(f'Batch UserPromotion change by admin {auth_get_current_username()} raised exception')
async def get(self, request):
    try:
        camera_id = int(request.match_info['camera_id'])
        access_token = request.rel_url.query.get('access_token')

        if not access_token:
            raise MissingParameter('access_token')

        account_id = await Account.redis_get_id(request.app['redis'], access_token)
        if account_id is None:
            raise InvalidAccessToken()

        places = await Place.join(Camera, db.and_(Camera.id == Place.camera_id,
                                                  Camera.account_id == account_id))\
            .select()\
            .where(Place.camera_id == camera_id)\
            .gino\
            .model(Place)\
            .all()

        place_json = [{
            'id': place.id,
            'label': place.label,
            'status': place.status,
            'x': place.x,
            'y': place.y
        } for place in places]

        return aiohttp.web.json_response({'place': place_json})
    except APIException as api_exception:
        raise api_exception
    except:
        raise InternalServerError()
def validate(self):
    rv = Form.validate(self)
    if not rv:
        return False
    current_time = int(time.time())
    if self.team_id.data:
        team = Team.query.get(self.team_id.data)
    else:
        team = Team()
    team.admin_id = self.admin_user.id
    team.name = self.name.data
    team.created_at = current_time
    if self.status.data:
        team.status = 1
    else:
        team.status = 0
    db.session.add(team)
    db.session.commit()

    if not self.team_id.data:
        team_user = TeamUser()
        team_user.team_id = team.id
        team_user.user_id = self.admin_user.id
        db.session.add(team_user)
        db.session.commit()

    if self.status.data:
        team_user = TeamUser.query.filter(
            db.and_(TeamUser.team_id == team.id,
                    TeamUser.user_id == self.admin_user.id)).first()
        if team_user:
            db.session.delete(team_user)
            db.session.commit()
        db.session.query(Project).filter(Project.team_id == team.id).update({Project.status: 1})
        db.session.commit()

    self.team = team
    return True
async def delete_by_id(self, request):
    try:
        camera_id = int(request.match_info['camera_id'])
        place_id = int(request.match_info['place_id'])
        request_data = await request.post()
        access_token = request_data.get('access_token')

        if not access_token:
            raise MissingParameter('access_token')

        account_id = await Account.redis_get_id(request.app['redis'], access_token)
        if account_id is None:
            raise InvalidAccessToken()

        status, result = await Place.delete\
            .where(db.and_(Place.id == place_id,
                           Place.camera_id == camera_id,
                           Camera.id == Place.camera_id,
                           Camera.account_id == account_id))\
            .gino\
            .status()
        if status == 'DELETE 0':
            raise InvalidParameter('camera_id or place_id')

        return aiohttp.web.json_response({'status': True})
    except APIException as api_exception:
        raise api_exception
    except:
        raise InternalServerError()
def build_query(self, resource_type, params, id_only=False):
    '''
    Compile a SQL query from a set of FHIR search params

    If `id_only` is true, a SQL query that selects only `resource_id` will be returned
    '''
    query_args = [Resource.visible == True,
                  Resource.resource_type == resource_type,
                  Resource.owner_id == self.owner_id]
    valid_search_params = SPECS[resource_type]['searchParams']
    make_pred = partial(self.make_pred_from_param,
                        resource_type,
                        possible_param_types=valid_search_params)
    predicates = [pred
                  for pred in map(make_pred, iterdict(params))
                  if pred is not None]

    # customized coordinate search parameter
    if 'coordinate' in params and resource_type == 'Sequence':
        coords = params['coordinate'].split(',')
        coord_preds = map(make_coord_pred, coords)
        query_args.append(db.or_(*coord_preds))

    if len(predicates) > 0:
        query_args.append(
            Resource.resource_id.in_(intersect_predicates(predicates).alias()))
    if '_id' in params:
        query_args.append(Resource.resource_id.in_(params['_id'].split(',')))

    if id_only:
        return db.select([Resource.resource_id]).\
            select_from(Resource).\
            where(db.and_(*query_args)).alias()
    else:
        return Resource.query.filter(*query_args)
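# Illustration only, not part of the source: `intersect_predicates` is called by
# `build_query` above but never defined here. A minimal sketch, assuming each predicate
# is a clause over SearchParam and that SearchParam has a `resource_id` column, would
# INTERSECT one SELECT per predicate so a resource must satisfy every search parameter.
def intersect_predicates_sketch(predicates):
    # hypothetical helper; name and column choices are assumptions
    selects = [db.select([SearchParam.resource_id]).where(pred) for pred in predicates]
    return db.intersect(*selects)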
def prompts_by_times(self, start, end):
    query = (PromptResponse.query.filter(
        db.and_(PromptResponse.timestamp >= start,
                PromptResponse.timestamp <= end)).order_by(
                    PromptResponse.mobile_id,
                    PromptResponse.timestamp.desc()))
    return query
async def post(self, request):
    try:
        request_data = await request.post()
        camera_id = int(request.match_info['camera_id'])
        access_token = request_data.get('access_token')

        if not access_token:
            raise MissingParameter('access_token')

        account_id = await Account.redis_get_id(request.app['redis'], access_token)
        if account_id is None:
            raise InvalidAccessToken()

        user_camera = await db.select([Camera.delay])\
            .where(db.and_(Camera.id == camera_id, Camera.account_id == account_id))\
            .gino\
            .model(Camera)\
            .all()
        if not user_camera:
            raise InvalidParameter('camera_id')

        frame = request_data.get('frame')
        frame_path = os.path.join(request.app['config']['path_camera_captures'],
                                  'capture_%d.jpg' % camera_id)
        if not frame:
            raise MissingParameter('frame')
        with open(frame_path, 'wb') as frame_file:
            frame_file.write(frame.file.read())

        await Camera.update \
            .values(is_complete=False, last_update=int(time.time())) \
            .where(Camera.id == camera_id) \
            .gino \
            .status()

        return aiohttp.web.json_response({'status': True, 'next_delay': user_camera[0].delay})
    except APIException as api_exception:
        raise api_exception
    except:
        raise InternalServerError()
async def post(self, request):
    try:
        request_data = await request.post()
        camera_id = int(request.match_info['camera_id'])
        access_token = request_data.get('access_token')
        status = request_data.get('status')

        if not access_token:
            raise MissingParameter('access_token')
        if not status:
            raise MissingParameter('status')
        if status not in ['true', 'false']:
            raise InvalidParameter('status')

        account_id = await Account.redis_get_id(request.app['redis'], access_token)
        if account_id is None:
            raise InvalidAccessToken()

        status, result = await Camera.update\
            .values(status=bool(distutils.util.strtobool(status)))\
            .where(db.and_(Camera.id == camera_id, Camera.account_id == account_id))\
            .gino\
            .status()
        if status == 'UPDATE 0':
            raise InvalidParameter('camera_id')

        return aiohttp.web.json_response({'status': True})
    except APIException as api_exception:
        raise api_exception
    except:
        raise InternalServerError()
def coordinates_csv(survey, active_users, start, end):
    def _tz_formatters(table, cols):
        formatters = []
        for col_name in cols:
            col = getattr(table, col_name)
            timestamp = db.func.timezone('UTC', col).label('timestamp_UTC')
            timestamp_epoch = db.func.extract('epoch', col).cast(
                db.Integer).label('timestamp_epoch')
            formatters.append(timestamp)
            formatters.append(timestamp_epoch)
        return formatters

    coordinates_csv = io.BytesIO()
    coordinates_csv.write(codecs.BOM_UTF8)
    active_mobile_ids = [u.id for u in active_users]
    formatters = _tz_formatters(MobileCoordinate, ['timestamp'])
    coordinates = (db.session.query(
        MobileUser, MobileCoordinate, *formatters).join(MobileCoordinate).filter(
            db.and_(MobileCoordinate.mobile_id.in_(active_mobile_ids),
                    MobileCoordinate.timestamp >= start,
                    MobileCoordinate.timestamp <= end)).options(
                        db.Load(MobileUser).defer('id').load_only('uuid'),
                        db.Load(MobileCoordinate).defer('mobile_id').defer(
                            'survey_id').defer('timestamp')).order_by(
                                MobileCoordinate.id))
    postgres_copy.copy_to(coordinates,
                          coordinates_csv,
                          db.engine,
                          format='csv',
                          header=True)
    return coordinates_csv
def coordinates_by_times(self, start, end):
    query = (MobileCoordinate.query.filter(
        db.and_(MobileCoordinate.timestamp >= start,
                MobileCoordinate.timestamp <= end)).order_by(
                    MobileCoordinate.mobile_id,
                    MobileCoordinate.timestamp.desc()).distinct(
                        MobileCoordinate.mobile_id))
    return query
def cancelled_prompts(self, survey, uuid, start_time=None, end_time=None):
    user = survey.mobile_users.filter_by(uuid=uuid).one_or_none()
    if user:
        results = user.cancelled_prompts
        if start_time and end_time:
            results = results.filter(
                db.and_(CancelledPromptResponse.displayed_at >= start_time,
                        CancelledPromptResponse.displayed_at <= end_time))
        return results.order_by(CancelledPromptResponse.displayed_at.asc())
def make_reference_pred(self, param_data, param_val, resource_type):
    '''
    Make a predicate based on a ResourceReference

    Implemented as a method here because `make_reference_pred`, unlike the other
    `make_**_pred`s, can potentially be dealing with a chained query -- which requires
    us to reinvoke the `build_query` method recursively.
    '''
    # a reference search must have exactly ONE resource type,
    # which is either specified via a modifier
    # or implied because the resource definition says it can only be one resource type.
    # either way, we have to figure it out.
    modifier = param_data['modifier']
    possible_reference_types = REFERENCE_TYPES[resource_type][
        param_data['param']]
    #print possible_reference_types
    if (modifier is not None and modifier not in possible_reference_types
        ) or (modifier is None and len(possible_reference_types) > 1):
        # either we can't deduce the type of the referenced resource
        # or the modifier supplied is an invalid type
        raise InvalidQuery

    referenced_type = (modifier
                       if modifier is not None and modifier not in NON_TYPE_MODIFIERS
                       else possible_reference_types[0])

    chained_param = param_data['chained_param']
    if chained_param is not None:
        # we have a chained query...
        chained_query = {chained_param: param_val}
        # make a subquery that finds referenced resources that fit the description
        reference_query = self.build_query(referenced_type, chained_query, id_only=True)
        pred = db.and_(SearchParam.referenced_type == referenced_type,
                       SearchParam.referenced_id.in_(reference_query))
    else:
        pred = db.and_(SearchParam.referenced_id == param_val,
                       SearchParam.referenced_type == referenced_type)

    return pred
def make_reference_pred(self, param_data, param_val, resource_type):
    '''
    Make a predicate based on a ResourceReference

    Implemented as a method here because `make_reference_pred`, unlike the other
    `make_**_pred`s, can potentially be dealing with a chained query -- which requires
    us to reinvoke the `build_query` method recursively.
    '''
    # a reference search must have exactly ONE resource type,
    # which is either specified via a modifier
    # or implied because the resource definition says it can only be one resource type.
    # either way, we have to figure it out.
    modifier = param_data['modifier']
    possible_reference_types = REFERENCE_TYPES[resource_type][param_data['param']]
    if (modifier is not None and modifier not in possible_reference_types) or (
            modifier is None and len(possible_reference_types) > 1):
        # either we can't deduce the type of the referenced resource
        # or the modifier supplied is an invalid type
        raise InvalidQuery

    referenced_type = (modifier
                       if modifier is not None and modifier not in NON_TYPE_MODIFIERS
                       else possible_reference_types[0])

    chained_param = param_data['chained_param']
    if chained_param is not None:
        # we have a chained query...
        chained_query = {chained_param: param_val}
        # make a subquery that finds referenced resources that fit the description
        reference_query = self.build_query(referenced_type, chained_query, id_only=True)
        pred = db.and_(SearchParam.referenced_type == referenced_type,
                       SearchParam.referenced_id.in_(reference_query))
    else:
        pred = db.and_(SearchParam.referenced_id == param_val,
                       SearchParam.referenced_type == referenced_type)

    return pred
def is_applicant_user_id(user_id):
    user = db.session.query(User).filter(User.id == user_id).join(
        Application,
        Application.user_id == User.id,
    ).filter(
        db.or_(
            db.not_(Application.is_concluded),
            db.and_(Application.is_accepted,
                    db.not_(Application.is_invited)))).one_or_none()
    return user is not None
def make_coord_pred(coord):
    coord_match = COORD_RE.match(coord)
    if coord_match is None:
        raise InvalidQuery
    chrom = coord_match.group('chrom')
    start = coord_match.group('start')
    end = coord_match.group('end')
    return db.and_(Resource.resource_type == 'Sequence',
                   Resource.chromosome == chrom,
                   Resource.start <= end,
                   Resource.end >= start)
def test_promote_active_recruiter_to_senior(self):
    response = set_roles(self.recruiter.id,
                         is_senior_recruiter=True,
                         current_user=self.admin)
    self.assertEqual(response, {'status': 'ok'})
    self.assertTrue(self.recruiter.recruiter.is_senior)
    self.assertTrue(
        db.session.query(db.exists().where(
            db.and_(Application.user_id == self.applicant.id,
                    Application.is_concluded == False))).scalar())
def is_applicant_character_id(character_id):
    base = db.session.query(Character).filter(Character.id == character_id, )
    character = base.join(User, User.id == Character.user_id).join(
        Application,
        Application.user_id == User.id,
    ).filter(
        db.or_(
            db.not_(Application.is_concluded),
            db.and_(Application.is_accepted,
                    db.not_(Application.is_invited)))).first()
    return character is not None
async def get_camera_id(detector_id):
    camera_id_query = db.select([Camera.id]) \
        .order_by(Camera.last_update.asc()) \
        .where(db.and_(Camera.detector_id == 0,
                       Camera.status == True,
                       Camera.is_complete == False)) \
        .limit(1)

    status, result = await Camera.update \
        .values(detector_id=detector_id) \
        .where(Camera.id == camera_id_query) \
        .gino \
        .status()
    if status == 'UPDATE 0':
        return None

    camera = await db.select([Camera.id])\
        .where(db.and_(Camera.detector_id == detector_id, Camera.is_complete == False))\
        .gino\
        .model(Camera)\
        .first()
    return camera.id
def make_token_pred(param_data, param_val):
    '''
    Compile a token search parameter into a SQL predicate
    '''
    token = TOKEN_RE.match(param_val)
    if token is None:
        raise InvalidQuery
    pred = (SearchParam.code == token.group('code'))
    if token.group('system') is not None:
        pred = db.and_(pred, SearchParam.system == token.group('system'))
    return pred
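# Illustration only, not part of the source: TOKEN_RE is not shown above. Assuming it
# follows FHIR's `system|code` token syntax, a regex along these lines would expose the
# `system` and `code` groups that `make_token_pred` reads (hypothetical sketch):
import re
TOKEN_RE_EXAMPLE = re.compile(r'^(?:(?P<system>[^|]*)\|)?(?P<code>[^|]+)$')
# e.g. TOKEN_RE_EXAMPLE.match('http://loinc.org|8480-6') yields
# system='http://loinc.org', code='8480-6'; a bare '8480-6' leaves system as None.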
def make_coord_pred(coord):
    coord_match = COORD_RE.match(coord)
    if coord_match is None:
        raise InvalidQuery
    chrom = coord_match.group('chrom')
    start = coord_match.group('start')
    end = coord_match.group('end')
    return db.and_(
        Resource.resource_type == 'Sequence',
        Resource.chromosome == chrom,
        Resource.start <= end,
        Resource.end >= start)
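# Illustration only, not part of the source: COORD_RE is not shown above. Assuming
# coordinates arrive as `chromosome:start-end`, a regex of this shape would yield the
# `chrom`, `start`, and `end` groups that `make_coord_pred` consumes (hypothetical sketch):
import re
COORD_RE_EXAMPLE = re.compile(r'^(?P<chrom>[^:]+):(?P<start>\d+)-(?P<end>\d+)$')
# e.g. COORD_RE_EXAMPLE.match('chr7:140453130-140453140')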
def make_quantity_pred(param_data, param_val):
    '''
    Compile a quantity search parameter into a SQL predicate
    '''
    quantity = QUANTITY_RE.match(param_val)
    if quantity is None:
        raise InvalidQuery

    preds = []
    if quantity.group('code') is not None:
        preds.append(SearchParam.code == quantity.group('code'))
    if quantity.group('system') is not None:
        preds.append(SearchParam.system == quantity.group('system'))

    # tough stuff here... because a quantity stored in the database can also have a
    # comparator, we have to build the query based on the comparators from both the
    # search and the stored value
    value = quantity.group('number')
    comparator = quantity.group('comparator')
    if comparator is None:
        comparator = '='
    val_preds = []
    if '<' in comparator:
        val_preds = [
            SearchParam.comparator.in_(['<', '<=']),
            SearchParam.quantity < value
        ]
    elif '>' in comparator:
        val_preds = [
            SearchParam.comparator.in_(['>', '>=']),
            SearchParam.quantity > value
        ]
    if '=' in comparator:
        val_preds.append(
            db.and_(SearchParam.comparator.in_([None, '<=', '>=']),
                    SearchParam.quantity == value))
    preds.append(db.or_(*val_preds))

    return db.and_(*preds)
def cancelled_prompts_csv(survey, active_users, start, end):
    cancelled_prompts_csv = io.BytesIO()
    cancelled_prompts_csv.write(codecs.BOM_UTF8)
    active_mobile_ids = [u.id for u in active_users]
    cancelled_prompts = (db.session.query(
        MobileUser, CancelledPromptResponse).join(CancelledPromptResponse).filter(
            db.and_(CancelledPromptResponse.mobile_id.in_(active_mobile_ids)),
            CancelledPromptResponse.displayed_at >= start,
            CancelledPromptResponse.displayed_at <= end).order_by(
                CancelledPromptResponse.id))

    ignored_columns = ['survey_id', 'mobile_id']
    columns = []
    for c in CancelledPromptResponse.__table__.columns:
        if c.name in ignored_columns:
            continue
        if c.type.python_type == datetime:
            columns.append(c.name + '_UTC')
            columns.append(c.name + '_epoch')
        else:
            columns.append(c.name)

    # create csv file in memory with list responses expanded to string
    csv_writer = csv.writer(cancelled_prompts_csv)
    headers = list(columns)
    headers.insert(1, 'uuid')
    csv_writer.writerow(headers)
    for user, cancelled in cancelled_prompts:
        cancelled.displayed_at_UTC = _utc_timestamp(cancelled.displayed_at)
        cancelled.displayed_at_epoch = _epoch_timestamp(cancelled.displayed_at)
        if cancelled.cancelled_at:
            cancelled.cancelled_at_UTC = _utc_timestamp(cancelled.cancelled_at)
            cancelled.cancelled_at_epoch = _epoch_timestamp(cancelled.cancelled_at)
        else:
            cancelled.cancelled_at_UTC = None
            cancelled.cancelled_at_epoch = None

        cancelled_row = []
        for col in columns:
            # make uuid second column value
            if len(cancelled_row) == 1:
                cancelled_row.append(user.uuid)
            cancelled_row.append(getattr(cancelled, col))
        csv_writer.writerow(cancelled_row)
    return cancelled_prompts_csv
def make_quantity_pred(param_data, param_val):
    '''
    Compile a quantity search parameter into a SQL predicate
    '''
    quantity = QUANTITY_RE.match(param_val)
    if quantity is None:
        raise InvalidQuery

    preds = []
    if quantity.group('code') is not None:
        preds.append(SearchParam.code == quantity.group('code'))
    if quantity.group('system') is not None:
        preds.append(SearchParam.system == quantity.group('system'))

    # tough stuff here... because a quantity stored in the database can also have a
    # comparator, we have to build the query based on the comparators from both the
    # search and the stored value
    value = quantity.group('number')
    comparator = quantity.group('comparator')
    if comparator is None:
        comparator = '='
    val_preds = []
    if '<' in comparator:
        val_preds = [
            SearchParam.comparator.in_(['<', '<=']),
            SearchParam.quantity < value]
    elif '>' in comparator:
        val_preds = [
            SearchParam.comparator.in_(['>', '>=']),
            SearchParam.quantity > value]
    if '=' in comparator:
        val_preds.append(db.and_(
            SearchParam.comparator.in_([None, '<=', '>=']),
            SearchParam.quantity == value))
    preds.append(db.or_(*val_preds))

    return db.and_(*preds)
def trips_csv(survey, active_users, start, end):
    def _process_trip_points(uuid, points, headers):
        rows = []
        for pt in points:
            pt_row = [uuid]
            for h in headers:
                if h == 'uuid':
                    continue
                value = pt.get(h)
                if h == 'timestamp_UTC':
                    value = _utc_timestamp(pt['timestamp'])
                elif h == 'timestamp_epoch':
                    value = _epoch_timestamp(pt['timestamp'])
                elif isinstance(value, Decimal):
                    value = float(value)
                pt_row.append(value)
            rows.append(pt_row)
        return rows

    parameters = {
        'break_interval_seconds': survey.trip_break_interval,
        'subway_buffer_meters': survey.trip_subway_buffer,
        'cold_start_distance_meters': survey.trip_break_cold_start_distance,
        'accuracy_cutoff_meters': survey.gps_accuracy_threshold
    }

    trips_csv = io.BytesIO()
    trips_csv.write(codecs.BOM_UTF8)
    writer = csv.writer(trips_csv)
    headers = [
        'uuid', 'trip', 'latitude', 'longitude', 'h_accuracy', 'timestamp_UTC',
        'timestamp_epoch', 'trip_distance', 'distance', 'break_period', 'trip_code'
    ]
    writer.writerow(headers)
    for user in active_users:
        user_coordinates = (user.mobile_coordinates.filter(
            db.and_(MobileCoordinate.timestamp >= start,
                    MobileCoordinate.timestamp <= end)))
        trips, segments = tripbreaker.run(parameters, survey.subway_stops,
                                          user_coordinates)
        for trip_id, points in trips.iteritems():
            point_rows = _process_trip_points(user.uuid, points, headers)
            writer.writerows(point_rows)
    return trips_csv
def vehicles():
    from models import Agency, Route, VehicleLocation, Prediction
    # TODO: Somehow bundle these queries into the object model definitions? So messy :(

    # 1. Select the latest vehicle locations for each vehicle. (The DB may have old ones too).
    v_inner = db.session.query(VehicleLocation.vehicle, db.func.max(VehicleLocation.time).label("time"))\
        .group_by(VehicleLocation.vehicle).subquery()
    vehicle_locations = db.session.query(VehicleLocation).join(
        v_inner, db.and_(v_inner.c.vehicle == VehicleLocation.vehicle,
                         v_inner.c.time == VehicleLocation.time)).filter(
                             Agency.tag == agency).all()

    # 2. Select the predictions for each vehicle:stop pair which came from the most recent
    # API call for that vehicle:stop pair. Old predictions may be stored but we don't want them.
    now = datetime.now()
    p_inner = db.session.query(Prediction.vehicle, Prediction.stop_id,
                               db.func.max(Prediction.api_call_id).label("api_call_id"))\
        .group_by(Prediction.vehicle, Prediction.stop_id)\
        .subquery()
    predictions = db.session.query(Prediction).join(p_inner, db.and_(
        p_inner.c.api_call_id == Prediction.api_call_id,
        p_inner.c.vehicle == Prediction.vehicle,
        p_inner.c.stop_id == Prediction.stop_id
    )).filter(
        Agency.tag == agency,
        Prediction.prediction >= now)\
        .group_by(Prediction.id, Prediction.vehicle, Prediction.stop_id)\
        .all()

    z = {
        "locations": {v.vehicle: v.serialize() for v in vehicle_locations},
        "predictions": {p.id: p.serialize() for p in predictions}
    }
    return z
def make_pred_from_param(self, resource_type, param_and_val, possible_param_types):
    '''
    Compile a FHIR search parameter into a SQL predicate

    This is the "master" function that invokes the other `make_*_pred` functions.
    `param_and_val` is the key-value pair of a parameter and its value.
    `possible_param_types` is a dictionary maintaining the mapping between
    the name of a search parameter and its type (string, number, etc.).
    '''
    raw_param, param_val = param_and_val
    matched_param = PARAM_RE.match(raw_param)
    if matched_param is None:
        # invalid search param
        return None

    param_data = matched_param.groupdict()
    param = param_data['param']
    modifier = param_data['modifier']
    if param not in possible_param_types:
        # an undefined search parameter is supplied
        return None
    param_type = possible_param_types[
        param] if modifier != 'text' else 'string'

    if modifier == 'missing':
        pred = ((SearchParam.missing == True)
                if param_val == 'true'
                else (SearchParam.missing == False))
    else:
        if param_type == 'reference':
            pred_maker = partial(self.make_reference_pred,
                                 resource_type=resource_type)
        elif param_type in PRED_MAKERS:
            pred_maker = PRED_MAKERS[param_type]
            if pred_maker is None:
                raise InvalidQuery
        else:
            pred_maker = make_string_pred
        # deal with FHIR's union search (e.g. `abc=x,y,z`) here
        alts = param_val.split(',')
        preds = [pred_maker(param_data, alt) for alt in alts]
        pred = db.or_(*preds)

    return db.and_(
        pred,
        SearchParam.name == param,
        SearchParam.param_type == possible_param_types[param],
        #SearchParam.owner_id==self.owner_id
    )
def cart():
    form = OrderForm()
    data = []
    sum = 0
    selected_meals = {}
    user_id = 1
    if 'data' in session:
        data = session['data']
        selected_meals = {
            int(key): int(value)
            for key, value in data.items() if value != 0
        }
        meals = db.session.query(Meal).filter(
            Meal.id.in_(selected_meals.keys())).all()
        data = []
        for meal in meals:
            sum += int(meal.price) * int(selected_meals[meal.id])
            data.append({
                'id': meal.id,
                'title': meal.title,
                'price': meal.price,
                'qnt': selected_meals[meal.id]
            })
    if request.method == "POST":
        if 'logged_in' in session and session['logged_in']:
            form.name.data = session['name']
            form.email.data = session["username"]
            user_id = session['user_id']
        order = Order(date=datetime.now(),
                      sum=sum,
                      status='ordered',
                      user_id=user_id,
                      mail=form.email.data,
                      phone=form.phone.data,
                      address=form.address.data)
        db.session.add(order)
        for key in selected_meals.keys():
            meal = db.session.query(Meal).filter(Meal.id == key).first()
            order.meals.append(meal)
        db.session.commit()
        for key, value in selected_meals.items():
            db.session.query(Association).filter(
                db.and_(Association.order_id == order.id,
                        Association.meal_id == key)).update({'counter': value})
        db.session.commit()
        return redirect('/order_done/')
    return render_template("cart.html", data=data, form=form)
def prompts_csv(survey, active_users, start, end):
    prompts_csv = io.BytesIO()
    prompts_csv.write(codecs.BOM_UTF8)
    active_mobile_ids = [u.id for u in active_users]
    prompts = (db.session.query(
        MobileUser, PromptResponse).join(PromptResponse).filter(
            db.and_(PromptResponse.mobile_id.in_(active_mobile_ids)),
            PromptResponse.displayed_at >= start,
            PromptResponse.displayed_at <= end).order_by(
                PromptResponse.id))

    ignored_columns = ['survey_id', 'mobile_id']
    columns = []
    for c in PromptResponse.__table__.columns:
        if c.name in ignored_columns:
            continue
        if c.type.python_type == datetime:
            columns.append(c.name + '_UTC')
            columns.append(c.name + '_epoch')
        else:
            columns.append(c.name)

    # create csv file in memory with list responses expanded to string
    csv_writer = csv.writer(prompts_csv)
    headers = list(columns)
    headers.insert(1, 'uuid')
    csv_writer.writerow(headers)
    for user, prompt in prompts:
        prompt.displayed_at_UTC = _utc_timestamp(prompt.displayed_at)
        prompt.displayed_at_epoch = _epoch_timestamp(prompt.displayed_at)
        prompt.recorded_at_UTC = _utc_timestamp(prompt.recorded_at)
        prompt.recorded_at_epoch = _epoch_timestamp(prompt.recorded_at)
        prompt.edited_at_UTC = _utc_timestamp(prompt.edited_at)
        prompt.edited_at_epoch = _epoch_timestamp(prompt.edited_at)

        prompt_row = []
        for col in columns:
            # make uuid second column value
            if len(prompt_row) == 1:
                prompt_row.append(user.uuid)
            value = getattr(prompt, col)
            if isinstance(value, list):
                value = '; '.join([v.strip() for v in value])
            prompt_row.append(value)
        csv_writer.writerow(prompt_row)
    return prompts_csv
def make_pred_from_param(self, resource_type, param_and_val, possible_param_types):
    '''
    Compile a FHIR search parameter into a SQL predicate

    This is the "master" function that invokes the other `make_*_pred` functions.
    `param_and_val` is the key-value pair of a parameter and its value.
    `possible_param_types` is a dictionary maintaining the mapping between
    the name of a search parameter and its type (string, number, etc.).
    '''
    raw_param, param_val = param_and_val
    matched_param = PARAM_RE.match(raw_param)
    if matched_param is None:
        # invalid search param
        return None

    param_data = matched_param.groupdict()
    param = param_data['param']
    modifier = param_data['modifier']
    if param not in possible_param_types:
        # an undefined search parameter is supplied
        return None
    param_type = possible_param_types[param] if modifier != 'text' else 'string'

    if modifier == 'missing':
        pred = ((SearchParam.missing == True)
                if param_val == 'true'
                else (SearchParam.missing == False))
    else:
        if param_type == 'reference':
            pred_maker = partial(self.make_reference_pred,
                                 resource_type=resource_type)
        elif param_type in PRED_MAKERS:
            pred_maker = PRED_MAKERS[param_type]
            if pred_maker is None:
                raise InvalidQuery
        else:
            pred_maker = make_string_pred
        # deal with FHIR's union search (e.g. `abc=x,y,z`) here
        alts = param_val.split(',')
        preds = [pred_maker(param_data, alt) for alt in alts]
        pred = db.or_(*preds)

    return db.and_(pred,
                   SearchParam.name == param,
                   SearchParam.param_type == possible_param_types[param],
                   SearchParam.owner_id == self.owner_id)
def coordinates(self, survey, uuid, start_time=None, end_time=None, limit=None,
                min_accuracy=100):
    user = survey.mobile_users.filter_by(uuid=uuid).one_or_none()
    if user:
        results = user.mobile_coordinates.filter(
            MobileCoordinate.h_accuracy <= min_accuracy)
        if start_time and end_time:
            results = results.filter(
                db.and_(MobileCoordinate.timestamp >= start_time,
                        MobileCoordinate.timestamp <= end_time))
        # get the most recent points
        return results.order_by(
            MobileCoordinate.timestamp.asc()).limit(limit)
def build_query(self, resource_type, params, id_only=False):
    '''
    Compile a SQL query from a set of FHIR search params

    If `id_only` is true, a SQL query that selects only `resource_id` will be returned
    '''
    query_args = [
        Resource.visible == True,
        Resource.resource_type == resource_type,
        #Resource.owner_id == self.owner_id or Resource.owner_id=='0'
    ]
    valid_search_params = SPECS[resource_type]['searchParams']
    make_pred = partial(self.make_pred_from_param,
                        resource_type,
                        possible_param_types=valid_search_params)
    predicates = [
        pred for pred in map(make_pred, iterdict(params)) if pred is not None
    ]
    #print params
    # customized coordinate search parameter
    if 'coordinate' in params and resource_type == 'Sequence':
        coords = params['coordinate'].split(',')
        coord_preds = map(make_coord_pred, coords)
        query_args.append(db.or_(*coord_preds))

    if len(predicates) > 0:
        query_args.append(
            Resource.resource_id.in_(
                intersect_predicates(predicates).alias()))
    if '_id' in params:
        query_args.append(
            Resource.resource_id.in_(params['_id'].split(',')))

    if id_only:
        return db.select([Resource.resource_id]).\
            select_from(Resource).\
            where(db.and_(*query_args)).alias()
    else:
        return Resource.query.filter(*query_args)
def booking(id, day, time):
    profile_data = Teacher.query.get(id)
    timetable = Timetable.query.filter(
        db.and_(Timetable.teacher_id == id,
                Timetable.weekday == day,
                Timetable.time == '{}:00:00.000000'.format(time))).first()
    print(50 * '*')
    print(timetable.id)
    day = DAYS[day]
    form = userForm()
    return render_template('booking.html',
                           profile_data=profile_data,
                           profile_id=id,
                           day=day,
                           time=time,
                           form=form,
                           timetable_id=timetable.id)
def make_date_pred(param_data, param_val):
    '''
    Compile a date search into a SQL predicate
    '''
    date = DATE_RE.match(param_val)
    if date is None:
        raise InvalidQuery
    try:
        value = dateutil.parser.parse(date.group('date'))
        comparator = date.group('comparator')
        if comparator is None:
            pred = db.and_(SearchParam.start_date <= value,
                           SearchParam.end_date >= value)
        elif comparator in ('<', '<='):
            pred = (SearchParam.end_date <= value)
        elif comparator in ('>', '>='):
            pred = (SearchParam.start_date >= value)
        return pred
    except ValueError:
        raise InvalidQuery
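# Illustration only, not part of the source: DATE_RE is not shown above. Assuming the
# search value is an optional comparator followed by a date string (e.g. '>=2014-01-01'),
# a regex of this shape would supply the `comparator` and `date` groups that
# `make_date_pred` uses (hypothetical sketch, not the project's actual pattern):
import re
DATE_RE_EXAMPLE = re.compile(r'^(?P<comparator><=|>=|<|>)?(?P<date>.+)$')
# e.g. DATE_RE_EXAMPLE.match('>=2014-01-01') yields comparator='>=', date='2014-01-01'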
def index(self):
    one_day_ago = datetime.utcnow() - timedelta(hours=24)
    comments = Comment.query.filter(
        db.and_(Comment.created >= one_day_ago,
                Comment.is_visible == False,
                Comment.is_deleted != True)
    ).all()
    return self.render('admin/moderation.html', comments=comments)
def get_games(user_id):
    games = db.session.query(Game, Forecast).\
        outerjoin(Forecast, db.and_(Forecast.user_id == user_id,
                                    Forecast.game_id == Game.id)).\
        all()
    return games