def percentage_by_district(self):
    """ Returns the percentage of votes aggregated by the district.

    Includes uncounted districts and districts with no results available.
    """
    # First pass: per-district aggregate for districts where this candidate
    # has results — entity ids, counted flag (True only if *all* entities
    # are counted), accounted ballots and the candidate's votes.
    results = self.election.results
    results = results.join(ElectionResult.candidate_results)
    results = results.filter(CandidateResult.candidate_id == self.id)
    results = results.with_entities(
        ElectionResult.district.label('name'),
        func.array_agg(ElectionResult.entity_id).label('entities'),
        func.coalesce(
            func.bool_and(ElectionResult.counted), False
        ).label('counted'),
        func.sum(ElectionResult.accounted_ballots).label('total'),
        func.sum(CandidateResult.votes).label('votes'),
    )
    results = results.group_by(ElectionResult.district)
    results = results.order_by(None)
    results = results.all()
    percentage = {
        r.name: {
            'counted': r.counted,
            'entities': r.entities,
            # Guard against division by zero for districts with no
            # accounted ballots yet.
            'percentage': 100 * (r.votes / r.total) if r.total else 0.0
        } for r in results
    }

    # Second pass over *all* districts of the election (no candidate join):
    # fill in districts missing above, or whose entity set differs (e.g.
    # only part of the district has candidate results), with 0.0%.
    empty = self.election.results
    empty = empty.with_entities(
        ElectionResult.district.label('name'),
        func.array_agg(ElectionResult.entity_id).label('entities'),
        func.coalesce(
            func.bool_and(ElectionResult.counted), False
        ).label('counted')
    )
    empty = empty.group_by(ElectionResult.district)
    empty = empty.order_by(None)
    for result in empty:
        update = (
            result.name not in percentage
            or (
                set(percentage[result.name]['entities'])
                != set(result.entities)
            )
        )
        if update:
            percentage[result.name] = {
                'counted': result.counted,
                'entities': result.entities,
                'percentage': 0.0
            }
    return percentage
def results_by_district(self):
    """ Returns the results aggregated by the district. """
    # A district counts as counted only when every entity result in it is
    # counted (bool_and); coalesce yields False for empty groups.
    counted = func.coalesce(func.bool_and(BallotResult.counted), False)
    yeas = func.sum(BallotResult.yeas)
    nays = func.sum(BallotResult.nays)
    # nullif/coalesce avoid division by zero when yeas + nays == 0;
    # cast to Float forces floating point division in SQL.
    yeas_percentage = 100 * yeas / (
        cast(func.coalesce(func.nullif(yeas + nays, 0), 1), Float)
    )
    nays_percentage = 100 - yeas_percentage
    # accepted resolves to yeas > nays when counted is True, NULL otherwise.
    accepted = case({True: yeas > nays}, counted)
    results = self.results.with_entities(
        BallotResult.district.label('name'),
        counted.label('counted'),
        accepted.label('accepted'),
        yeas.label('yeas'),
        nays.label('nays'),
        yeas_percentage.label('yeas_percentage'),
        nays_percentage.label('nays_percentage'),
        func.sum(BallotResult.empty).label('empty'),
        func.sum(BallotResult.invalid).label('invalid'),
        func.sum(BallotResult.eligible_voters).label('eligible_voters'),
        func.array_agg(BallotResult.entity_id).label('entity_ids')
    )
    results = results.group_by(BallotResult.district)
    # Drop any inherited ordering, then order by district name.
    results = results.order_by(None).order_by(BallotResult.district)
    return results
def get_chart_stats_for_team(self, id_: str,
                             session=None) -> List[DatabaseObjectDataPoint]:
    """Return per-team aggregated chart stats for the game ``id_``.

    :param id_: hash of the game to look up.
    :param session: database session (``None`` default kept for interface
        compatibility; callers must supply a live session).
    :raises ReplayNotFound: if no game with that hash exists.
    """
    game: Game = session.query(Game).filter(Game.hash == id_).first()
    if game is None:
        raise ReplayNotFound()
    # Aggregate by team: first column is the team colour (bool_and over
    # is_orange), followed by the configured individual stat queries.
    # FIX: the comprehension variable previously shadowed the list it
    # iterated (`for team_game in team_game`); renamed for clarity.
    team_rows: List = session.query(
        func.bool_and(TeamStat.is_orange),
        *self.team_stats.individual_query).filter(
            TeamStat.game == id_).group_by(TeamStat.is_orange).all()
    wrapped_team: List[DatabaseObjectDataPoint] = [
        DatabaseObjectDataPoint(
            id=0,
            is_orange=row[0],
            name=('Orange' if row[0] == 1 else 'Blue'),
            stats=self.get_wrapped_stats(row[1:], self.team_stats))
        for row in team_rows
    ]
    # Stable double sort: by id, then team colour (blue before orange).
    wrapped_team = sorted(sorted(wrapped_team, key=lambda x: x.id),
                          key=lambda x: x.is_orange)
    return wrapped_team
def get_locations(self):
    """Return SQL query of locations table for given date range."""
    log.debug('get_locations')
    # One row per document: location_publish is True only if every
    # location row for the document is publishable (bool_and); the
    # address and detail fields are string-aggregated with '; '.
    subquery = SESSION.query(
        Location.document_id,
        func.bool_and(Location.location_publish).label('location_publish'),
        func.string_agg(
            cast(Location.street_number, Text) + ' ' +
            cast(Location.address, Text),
            '; ').label('address'),
        func.string_agg(
            'Unit: ' + cast(Location.unit, Text) + ', ' +
            'Condo: ' + cast(Location.condo, Text) + ', ' +
            'Weeks: ' + cast(Location.weeks, Text) + ', ' +
            'Subdivision: ' + cast(Location.subdivision, Text) + ', ' +
            'District: ' + cast(Location.district, Text) + ', ' +
            'Square: ' + cast(Location.square, Text) + ', ' +
            'Lot: ' + cast(Location.lot, Text),
            '; ').label('location_info')
        # todo: Once SQLAlchemy supports this, add these fields this way.
        # 'mode() WITHIN GROUP (ORDER BY locations.zip_code) AS zip_code',
        # 'mode() WITHIN GROUP (ORDER BY locations.latitude) AS latitude',
        # 'mode() WITHIN GROUP (ORDER BY locations.longitude) ' +
        # ' AS longitude',
        # 'mode() WITHIN GROUP (ORDER BY locations.neighborhood) ' +
        # 'AS neighborhood'
    ).group_by(Location.document_id).subquery()
    # log.debug(subquery)
    SESSION.close()
    return subquery
def get_chart_stats_for_player(self, id_: str,
                               session=None
                               ) -> List[DatabaseObjectDataPoint]:
    """Return per-player aggregated chart stats for the game ``id_``.

    Raises ReplayNotFound if no game with that hash exists.
    """
    game: Game = session.query(Game).filter(Game.hash == id_).first()
    if game is None:
        raise ReplayNotFound()
    # this is weird because we need to do aggregate
    # One row per player: id, team colour (bool_and over is_orange),
    # name, then the configured individual stat queries.
    playergames: List = session.query(
        func.max(PlayerGame.id), func.bool_and(PlayerGame.is_orange),
        func.max(PlayerGame.name),
        *self.player_stats.individual_query).filter(
            PlayerGame.game == id_).group_by(PlayerGame.player).all()
    wrapped_playergames: List[DatabaseObjectDataPoint] = [
        DatabaseObjectDataPoint(id=playergame[0],
                                is_orange=playergame[1],
                                name=playergame[2],
                                stats=self.get_wrapped_stats(
                                    playergame[3:], self.player_stats))
        for playergame in playergames
    ]
    # Stable double sort: by id within each team, blue team first.
    wrapped_playergames = sorted(sorted(wrapped_playergames,
                                        key=lambda x: x.id),
                                 key=lambda x: x.is_orange)
    return wrapped_playergames
def counted(cls):
    """ SQL expression: True only when every result of this election has
    been counted; False when there are no results at all. """
    return select([
        func.coalesce(func.bool_and(ElectionResult.counted), False)
    ]).where(
        ElectionResult.election_id == cls.id
    ).label('counted')
def counted(self):
    """ True if all results have been counted. """
    query = self.results.with_entities(
        func.coalesce(func.bool_and(BallotResult.counted), False)
    ).order_by(None)
    row = query.first()
    return row[0] if row else False
def get_only_incremental_since(self, timestamp):
    """Return names of models whose every run completed after
    ``timestamp`` (with rows processed) was incremental — i.e. no run
    since then was a full refresh."""
    session = self.session_maker()
    results = (session.query(ExecutionModelEntity.model_name).filter(
        ExecutionModelEntity.completed_on > timestamp,
        ExecutionModelEntity.rows_processed > 0,
    ).distinct(ExecutionModelEntity.model_name).group_by(
        ExecutionModelEntity.model_name).having(
            # bool_and: True only if *all* matching runs are incremental.
            func.bool_and(
                ExecutionModelEntity.is_full_refresh == False)).all())
    session.close()
    # Unpack single-column rows into a plain list of names.
    return [r for (r, ) in results]
def load(session, user):
    """Clicker load handler.

    Accepts at most one of ``pid`` (photo id) or ``csid`` (click session
    id). With neither, picks a random photo the user has not yet worked
    on. Returns a JSON payload with the photo and any existing points.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('pid', None, type=int)
    parser.add_argument('csid', None, type=int)
    args = parser.parse_args()
    if args.pid is not None and args.csid is not None:
        return 'only pid or csid allowed', 400
    if args.pid is None and args.csid is None:
        # Random photo with no click session, excluding the user's own.
        # pylint: disable-msg=E1101,E1103
        photo = session.query(database.Photo) \
            .outerjoin(database.ClickSession) \
            .group_by(database.Photo.id) \
            .having(func.bool_and(or_(
                database.ClickSession.uid != user.id,
                database.ClickSession.uid == None
            ))) \
            .having(func.count(database.ClickSession.id) < 1) \
            .order_by(desc(func.count(database.ClickSession.id))) \
            .order_by(func.random()) \
            .first()
        # pylint: enable-msg=E1101,E1103
        points = []
    elif args.pid is not None and args.csid is None:
        photo = session.query(database.Photo) \
            .filter_by(id=args.pid) \
            .one()
        points = []
    elif args.csid is not None:
        clicker_session = session.query(database.ClickSession) \
            .filter_by(id=args.csid) \
            .one()
        photo = clicker_session.photo
        points = [{
            'x': point.x,
            'y': point.y
        } for point in clicker_session.clicks]
    # FIX: check for a missing photo BEFORE dereferencing it; previously
    # the data dict read photo.id first, so a None photo raised
    # AttributeError and the status='empty' reply was unreachable.
    if photo is None:
        return jsonify(status='empty')
    data = {
        'pid': photo.id,
        'image': os.path.join(APP.config['HOST'], photo.name),
        'points': points,
    }
    return jsonify(status='ok', data=data)
def load(session, user):
    """Day/Night load handler.

    Accepts at most one of ``pid`` (photo id) or ``dnid`` (day/night
    label id). With neither, picks a random ground-visible photo the
    user has not yet labeled. Returns a JSON payload with the photo and
    any existing label.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('pid', None, type=int)
    parser.add_argument('dnid', None, type=int)
    args = parser.parse_args()
    if args.pid is not None and args.dnid is not None:
        return 'only pid or dnid allowed', 400
    if args.pid is None and args.dnid is None:
        # Random annotated photo with < 2 day/night labels, excluding
        # the user's own labels.
        # pylint: disable-msg=E1101,E1103
        photo = session.query(database.Photo) \
            .join(database.Annotation) \
            .outerjoin(database.DayNight) \
            .filter(database.Photo.seesground == True) \
            .group_by(database.Photo.id) \
            .having(func.bool_and(or_(
                database.DayNight.uid != user.id,
                database.DayNight.uid == None
            ))) \
            .having(func.count(database.DayNight.id) < 2) \
            .order_by(desc(func.count(database.DayNight.id))) \
            .order_by(func.random()) \
            .first()
        # pylint: enable-msg=E1101,E1103
        label = None
    elif args.pid is not None and args.dnid is None:
        photo = session.query(database.Photo) \
            .filter_by(id=args.pid) \
            .one()
        label = None
    elif args.dnid is not None:
        daynight = session.query(database.DayNight) \
            .filter_by(id=args.dnid) \
            .one()
        photo = daynight.photo
        label = daynight.daynight
    # FIX: check for a missing photo BEFORE dereferencing it; previously
    # the data dict read photo.id first, so a None photo raised
    # AttributeError and the status='empty' reply was unreachable.
    if photo is None:
        return jsonify(status='empty')
    data = {
        'pid': photo.id,
        'image': os.path.join(APP.config['HOST'], photo.name),
        'label': label,
    }
    return jsonify(status='ok', data=data)
def transactions_all_json():
    """Return all transactions (optionally date-bounded via ``after`` /
    ``before`` query params, ISO dates) as JSON. With the default
    ``filter=nonuser``, only transactions none of whose splits belong to
    a user account are included."""
    lower = request.args.get('after', "")
    upper = request.args.get('before', "")
    filter = request.args.get('filter', "nonuser")
    if filter == "nonuser":
        # Transactions where every split's account has no user attached
        # (bool_and over the LEFT-joined User.id being NULL).
        non_user_transactions = (select([Split.transaction_id])
                                 .select_from(
                                     Join(Split, User,
                                          (User.account_id == Split.account_id),
                                          isouter=True))
                                 .group_by(Split.transaction_id)
                                 .having(func.bool_and(User.id == None))
                                 .alias("nut"))
        tid = literal_column("nut.transaction_id")
        transactions = non_user_transactions.join(Transaction,
                                                  Transaction.id == tid)
    else:
        transactions = Transaction.__table__
    q = (select([Transaction.id,
                 Transaction.valid_on,
                 Split.account_id,
                 Account.type,
                 Split.amount])
         .select_from(transactions
                      .join(Split, Split.transaction_id == Transaction.id)
                      .join(Account, Account.id == Split.account_id)))
    try:
        datetime.strptime(lower, "%Y-%m-%d").date()
    except ValueError:
        # A non-empty but unparsable bound is a client error (422);
        # an empty bound simply means "unbounded".
        not lower or abort(422)
    else:
        q = q.where(Transaction.valid_on >= lower)
    try:
        datetime.strptime(upper, "%Y-%m-%d").date()
    except ValueError:
        not upper or abort(422)
    else:
        q = q.where(Transaction.valid_on <= upper)
    res = session.execute(json_agg_core(q)).fetchone()[0] or []
    return jsonify(items=res)
def transactions_all_json():
    """Return all transactions as JSON, optionally bounded by the
    ``after`` / ``before`` ISO-date query parameters. With the default
    ``filter=nonuser``, only transactions whose splits all reference
    accounts without an attached user are returned."""
    lower = request.args.get('after', "")
    upper = request.args.get('before', "")
    filter_mode = request.args.get('filter', "nonuser")

    if filter_mode == "nonuser":
        # Transaction ids where, across all splits, no joined user exists.
        non_user_transactions = (
            select([Split.transaction_id])
            .select_from(
                Join(Split, User, (User.account_id == Split.account_id),
                     isouter=True))
            .group_by(Split.transaction_id)
            .having(func.bool_and(User.id == None))
            .alias("nut"))
        tid = literal_column("nut.transaction_id")
        transactions = non_user_transactions.join(
            Transaction, Transaction.id == tid)
    else:
        transactions = Transaction.__table__

    q = (select([
        Transaction.id, Transaction.valid_on, Split.account_id,
        Account.type, Split.amount
    ]).select_from(
        transactions
        .join(Split, Split.transaction_id == Transaction.id)
        .join(Account, Account.id == Split.account_id)))

    try:
        datetime.strptime(lower, "%Y-%m-%d").date()
    except ValueError:
        # Non-empty but unparsable bound -> client error; empty -> unbounded.
        if lower:
            abort(422)
    else:
        q = q.where(Transaction.valid_on >= lower)

    try:
        datetime.strptime(upper, "%Y-%m-%d").date()
    except ValueError:
        if upper:
            abort(422)
    else:
        q = q.where(Transaction.valid_on <= upper)

    items = session.execute(json_agg_core(q)).fetchone()[0] or []
    return jsonify(items=items)
def get_locations(self):
    """Return SQL query of locations table for given date range."""
    log.debug('get_locations')
    # Collapse multiple location rows per document into one row:
    # location_publish is True only if all rows are publishable
    # (bool_and); addresses and details are string-aggregated with '; '.
    subquery = SESSION.query(
        Location.document_id,
        func.bool_and(Location.location_publish).label('location_publish'),
        func.string_agg(
            cast(Location.street_number, Text) + ' ' +
            cast(Location.address, Text),
            '; '
        ).label('address'),
        func.string_agg(
            'Unit: ' + cast(Location.unit, Text) + ', ' +
            'Condo: ' + cast(Location.condo, Text) + ', ' +
            'Weeks: ' + cast(Location.weeks, Text) + ', ' +
            'Subdivision: ' + cast(Location.subdivision, Text) + ', ' +
            'District: ' + cast(Location.district, Text) + ', ' +
            'Square: ' + cast(Location.square, Text) + ', ' +
            'Lot: ' + cast(Location.lot, Text),
            '; '
        ).label('location_info')
        # todo: Once SQLAlchemy supports this, add these fields this way.
        # 'mode() WITHIN GROUP (ORDER BY locations.zip_code) AS zip_code',
        # 'mode() WITHIN GROUP (ORDER BY locations.latitude) AS latitude',
        # 'mode() WITHIN GROUP (ORDER BY locations.longitude) ' +
        # ' AS longitude',
        # 'mode() WITHIN GROUP (ORDER BY locations.neighborhood) ' +
        # 'AS neighborhood'
    ).group_by(
        Location.document_id
    ).subquery()
    # log.debug(subquery)
    SESSION.close()
    return subquery
def get_stats_query(stat_list: List[QueryFieldWrapper]):
    """Build the three parallel column lists (averages, standard
    deviations, per-row aggregates) for the given stat wrappers.

    Returns a tuple ``(avg_list, std_list, individual_list)`` with one
    entry per wrapper, chosen by the wrapper's kind flags.
    """
    avg_list = []
    std_list = []
    individual_list = []
    for wrapper in stat_list:
        column = wrapper.query
        if wrapper.is_cumulative:
            # Cumulative stats pass through unchanged; stddev is a dummy 1.
            avg = column
            std = literal(1)
            individual = column
        elif wrapper.is_averaged or wrapper.is_percent:
            avg = func.avg(column)
            std = func.stddev_samp(column)
            individual = func.sum(column)
        elif wrapper.is_boolean:
            # Booleans: count occurrences; AND-reduce for the individual view.
            avg = func.count()
            std = literal(1)
            individual = func.bool_and(column)
        else:
            # Rate stats normalised to a 300-second (5 minute) game.
            avg = 300 * func.sum(column) / safe_divide(
                func.sum(PlayerGame.time_in_game), default=300)
            std = func.stddev_samp(column)
            individual = func.sum(column)
        avg_list.append(avg)
        std_list.append(std)
        individual_list.append(individual)
    return avg_list, std_list, individual_list
def get_group_stats(self, replay_ids, session=None):
    """Compute grouped stats over the given replays: per-player stats for
    players with more than one replay, an 'ensemble' bucket for the rest,
    and — when every player appears in every replay — per-player team
    colour and a colour-ordered sort."""
    return_obj = {}
    # Players: (player id, a name, number of replays they appear in).
    player_tuples: List[Tuple[str, str, int]] = session.query(
        PlayerGame.player, func.min(PlayerGame.name),
        func.count(PlayerGame.player)).filter(
            PlayerGame.game.in_(replay_ids)).group_by(
                PlayerGame.player).all()
    return_obj['playerStats'] = []
    # ensemble are the players that do not have enough replays to make
    # an individual analysis for them
    ensemble = []
    for player_tuple in player_tuples:
        player, name, count = player_tuple
        if count > 1:
            player_stats = self._create_group_stats(
                session, player_filter=player, replay_ids=replay_ids)
            player_stats['name'] = name
            player_stats['player'] = player
            return_obj['playerStats'].append(player_stats)
        else:
            ensemble.append(player)
    if len(ensemble) > 0:
        # create stats that only includes the ensemble
        ensemble_stats = self._create_group_stats(
            session, player_filter=ensemble, replay_ids=replay_ids)
        return_obj['ensembleStats'] = ensemble_stats
    # STATS
    # Global
    # create stats that include all the players in the game
    # global_stats = self._create_group_stats(session, ids=replay_ids)
    # return_obj['globalStats'] = global_stats
    num_replays = len(replay_ids)
    if num_replays > 1 and all(
            [player_tuple[2] == num_replays
             for player_tuple in player_tuples]):
        # All players have played every game, so none went to the
        # ensemble bucket.
        assert 'ensembleStats' not in return_obj
        is_orange = {player_tuple[0]: [] for player_tuple in player_tuples}
        for replay_id in replay_ids:
            game: Game = session.query(Game).filter(
                Game.hash == replay_id).first()
            if game is None:
                raise ReplayNotFound()
            playergames: List = session.query(
                func.max(PlayerGame.player),
                func.bool_and(PlayerGame.is_orange)).filter(
                    PlayerGame.game == replay_id).group_by(
                        PlayerGame.player).all()
            for playergame in playergames:
                assert len(playergame) == 2
                player, is_orange_game = playergame
                assert player in is_orange
                is_orange[player].append(is_orange_game)
        # if the player is always in the same team
        if all([
                len(set(player_is_orange)) == 1
                for player_is_orange in is_orange.values()
        ]):
            for i in range(len(return_obj['playerStats'])):
                player = return_obj['playerStats'][i]['player']
                return_obj['playerStats'][i]['is_orange'] = is_orange[
                    player][0]
            # NOTE(review): this sort depends on the 'is_orange' key set
            # just above, so it is nested inside this branch — confirm
            # intended nesting against the original (indentation was
            # lost in this source).
            return_obj['playerStats'] = sorted(
                sorted(return_obj['playerStats'],
                       key=lambda x: x['name'].lower()),
                key=lambda x: x['is_orange'])
    return return_obj
def load(session, user):
    """Occlusion ranking load handler.

    Accepts at most one of ``vids`` (dash-separated vehicle ids) or
    ``osid`` (occlusion session id). With neither, picks up to 24 random
    cropped vehicles the user has not yet ranked.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('vids', None, type=str)
    parser.add_argument('osid', None, type=int)
    args = parser.parse_args()
    if args.vids is not None and args.osid is not None:
        return 'only vids or osid allowed', 400
    if args.vids is not None:
        args.vids = [int(vid) for vid in args.vids.split('-')]
    if args.vids is None and args.osid is None:
        # Random final-revision cropped vehicles with < 2 occlusion
        # sessions, excluding the user's own sessions.
        # pylint: disable-msg=E1101,E1103
        vehicles = session.query(database.Vehicle) \
            .outerjoin(database.OcclusionRanking) \
            .outerjoin(database.OcclusionSession) \
            .join(database.Revision) \
            .group_by(database.Vehicle.id) \
            .having(func.bool_and(or_(
                database.OcclusionSession.uid != user.id,
                database.OcclusionSession.uid == None
            ))) \
            .filter(database.Vehicle.cropped != None) \
            .filter(database.Revision.final == True) \
            .having(func.count(database.OcclusionSession.id) < 2) \
            .order_by(desc(func.count(database.OcclusionSession.id))) \
            .order_by(func.random()) \
            .limit(24)
        # pylint: disable-msg=E1101,E1103
        labels = [{
            'vid': vehicle.id,
            'image': os.path.join(APP.config['HOST'], vehicle.cropped)
        } for vehicle in vehicles]
    elif args.vids is not None and args.osid is None:
        # pylint: disable-msg=E1101
        vehicles = session.query(database.Vehicle) \
            .filter(database.Vehicle.id.in_(args.vids))
        # pylint: disable-msg=E1101
        labels = [{
            'vid': vehicle.id,
            'image': os.path.join(APP.config['HOST'], vehicle.cropped),
            'label': None,
        } for vehicle in vehicles]
    elif args.osid is not None:
        occlusion_session = session.query(database.OcclusionSession) \
            .filter_by(id=args.osid) \
            .one()
        labels = [{
            'vid': occlusion.vid,
            'image': os.path.join(
                APP.config['HOST'], occlusion.vehicle.cropped
            ),
            'label': occlusion.category,
        } for occlusion in occlusion_session.occlusions]
    # NOTE(review): labels is always a list here (possibly empty), never
    # None, so the 'empty' reply below is effectively unreachable.
    if labels is None:
        return jsonify(status='empty')
    else:
        return jsonify(status='ok', data=labels)
def load(session, user):
    """BBox load handler.

    Accepts at most one of ``vid`` (vehicle id) or ``bbsid`` (bounding
    box session id). With neither, picks a random final-revision vehicle
    the user has not yet boxed. Returns the vehicle's bounding box.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('vid', None, type=int)
    parser.add_argument('bbsid', None, type=int)
    args = parser.parse_args()
    if args.vid is not None and args.bbsid is not None:
        return 'only vid or bbsid allowed', 400
    if args.vid is None and args.bbsid is None:
        # pylint: disable-msg=E1101,E1103
        vehicle = session.query(database.Vehicle) \
            .join(database.Revision) \
            .outerjoin(database.BoundingBoxSession) \
            .filter(database.Revision.final == True) \
            .group_by(database.Vehicle.id) \
            .having(func.bool_and(or_(
                database.BoundingBoxSession.uid != user.id,
                database.BoundingBoxSession.uid == None
            ))) \
            .having(func.count(database.BoundingBoxSession.id) < 2) \
            .order_by(desc(func.count(database.BoundingBoxSession.id))) \
            .order_by(func.random()) \
            .first()
        # pylint: enable-msg=E1101,E1103
        # FIX: .first() may return None; previously vehicle.revision was
        # dereferenced unconditionally, raising AttributeError and making
        # the status='empty' reply unreachable.
        if vehicle is None:
            return jsonify(status='empty')
        photo = vehicle.revision.annotation.photo
        image = os.path.join(APP.config['HOST'], photo.name)
        bbox = {
            'x1': vehicle.x1,
            'x2': vehicle.x2,
            'y1': vehicle.y1,
            'y2': vehicle.y2,
            'vid': vehicle.id,
            'image': image,
        }
    elif args.vid is not None and args.bbsid is None:
        vehicle = session.query(database.Vehicle) \
            .filter_by(id=args.vid) \
            .one()
        photo = vehicle.revision.annotation.photo
        image = os.path.join(APP.config['HOST'], photo.name)
        bbox = {
            'x1': vehicle.x1,
            'x2': vehicle.x2,
            'y1': vehicle.y1,
            'y2': vehicle.y2,
            'vid': vehicle.id,
            'image': image,
        }
    elif args.bbsid is not None:
        bbox_session = session.query(database.BoundingBoxSession) \
            .filter_by(id=args.bbsid) \
            .one()
        vehicle = bbox_session.vehicle
        photo = vehicle.revision.annotation.photo
        image = os.path.join(APP.config['HOST'], photo.name)
        bbox = {
            'x1': bbox_session.x1,
            'x2': bbox_session.x2,
            'y1': bbox_session.y1,
            'y2': bbox_session.y2,
            'vid': vehicle.id,
            'image': image,
        }
    return jsonify(status='ok', data=bbox)
def load(session, user):
    """Approve pairs load handler.

    Accepts ``vids`` (exactly two dash-separated vehicle ids) or
    ``apsid`` (approve pair session id). With neither, picks a random
    unpaired-approval vehicle pair for the user.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('vids', None, type=str)
    parser.add_argument('apsid', None, type=int)
    args = parser.parse_args()
    if args.vids is not None:
        args.vids = [int(vid) for vid in args.vids.split('-')]
        if len(args.vids) != 2:
            return 'vids must be two elements', 400
    if args.vids is None and args.apsid is None:
        # Random final-revision vehicle with a partner and no approve
        # pair session yet, excluding the user's own sessions.
        # pylint: disable-msg=E1101,E1103
        vehicle = session.query(database.Vehicle) \
            .join(database.Revision) \
            .outerjoin(database.ApprovePairToVehicleAssociation) \
            .outerjoin(database.ApprovePairSession) \
            .group_by(database.Vehicle.id) \
            .having(func.bool_and(or_(
                database.ApprovePairSession.uid != user.id,
                database.ApprovePairSession.uid == None
            ))) \
            .filter(database.Revision.final == True) \
            .filter(database.Vehicle.cropped != None) \
            .filter(database.Vehicle.partner_id != None) \
            .having(func.count(database.ApprovePairSession.id) < 1) \
            .order_by(desc(func.count(database.ApprovePairSession.id))) \
            .order_by(func.random()) \
            .first()
        # pylint: enable-msg=E1101,E1103
        if vehicle:
            image1 = os.path.join(APP.config['HOST'], vehicle.cropped)
            image2 = os.path.join(APP.config['HOST'],
                                  vehicle.partner.cropped)
            labels = {
                'vehicle1': {
                    'vid': vehicle.id,
                    'image': image1,
                },
                'vehicle2': {
                    'vid': vehicle.partner_id,
                    'image': image2,
                }
            }
        else:
            labels = None
    elif args.vids is not None and args.apsid is None:
        vehicle = session.query(database.Vehicle) \
            .filter_by(id=args.vids[0]) \
            .one()
        # The two requested vehicles must actually be partners.
        if vehicle.partner_id != args.vids[1]:
            return 'not a valid vid pair', 400
        image1 = os.path.join(APP.config['HOST'], vehicle.cropped)
        image2 = os.path.join(APP.config['HOST'], vehicle.partner.cropped)
        labels = {
            'vehicle1': {
                'vid': vehicle.id,
                'image': image1,
            },
            'vehicle2': {
                'vid': vehicle.partner_id,
                'image': image2,
            }
        }
    elif args.apsid is not None:
        # Replay an existing session, including its recorded answer.
        approve_pair_session = session.query(database.ApprovePairSession) \
            .filter_by(id=args.apsid) \
            .one()
        image1 = os.path.join(
            APP.config['HOST'],
            approve_pair_session.vehicles[0].cropped
        )
        image2 = os.path.join(
            APP.config['HOST'],
            approve_pair_session.vehicles[1].cropped
        )
        labels = {
            'vehicle1': {
                'vid': approve_pair_session.vehicles[0].id,
                'image': image1,
            },
            'vehicle2': {
                'vid': approve_pair_session.vehicles[1].id,
                'image': image2,
            },
            'answer': approve_pair_session.answer
        }
    if labels is None:
        return jsonify(status='empty')
    else:
        return jsonify(status='ok', data=labels)
def recurse_availability_up_tree(channel_id):
    """Propagate availability (and the topic coach_content flag) from leaf
    ContentNodes up through their ancestor topics, one tree level at a
    time, inside a single transaction."""
    bridge = Bridge(app_name=CONTENT_APP_NAME)
    ContentNodeClass = bridge.get_class(ContentNode)
    ContentNodeTable = bridge.get_table(ContentNode)
    connection = bridge.get_connection()
    node_depth = bridge.session.query(func.max(
        ContentNodeClass.level)).scalar()
    logging.info(
        'Setting availability of ContentNode objects with children for {levels} levels'
        .format(levels=node_depth))
    # Self-join alias: `child` rows are children of the row being updated.
    child = ContentNodeTable.alias()
    # start a transaction
    trans = connection.begin()
    # Go from the deepest level to the shallowest
    start = datetime.datetime.now()
    for level in range(node_depth, 0, -1):
        available_nodes = select([child.c.available]).where(
            and_(
                child.c.available == True,  # noqa
                ContentNodeTable.c.id == child.c.parent_id))
        # Create an expression that will resolve a boolean value for all the available children
        # of a content node, whereby if they all have coach_content flagged on them, it will be true,
        # but otherwise false.
        # Everything after the select statement should be identical to the available_nodes expression above.
        if bridge.engine.name == 'sqlite':
            # sqlite has no bool_and; min() over 0/1 simulates an AND.
            coach_content_nodes = select([func.min(
                child.c.coach_content)]).where(
                    and_(
                        child.c.available == True,  # noqa
                        ContentNodeTable.c.id == child.c.parent_id))
        elif bridge.engine.name == 'postgresql':
            coach_content_nodes = select(
                [func.bool_and(child.c.coach_content)]).where(
                    and_(
                        child.c.available == True,  # noqa
                        ContentNodeTable.c.id == child.c.parent_id))
        # NOTE(review): coach_content_nodes is only defined for sqlite and
        # postgresql engines; any other backend would raise NameError below.
        logging.info(
            'Setting availability of ContentNode objects with children for level {level}'
            .format(level=level))
        # Only modify topic availability here
        connection.execute(ContentNodeTable.update().where(
            and_(ContentNodeTable.c.level == level - 1,
                 ContentNodeTable.c.channel_id == channel_id,
                 ContentNodeTable.c.kind == content_kinds.TOPIC)).values(
                     available=exists(available_nodes)))
        # Update all ContentNodes
        connection.execute(ContentNodeTable.update().where(
            and_(
                # In this level
                ContentNodeTable.c.level == level - 1,
                # In this channel
                ContentNodeTable.c.channel_id == channel_id,
                # That are topics, and that have children that are flagged as available, with the coach content expression above
                ContentNodeTable.c.kind == content_kinds.TOPIC)).where(
                    exists(available_nodes)).values(
                        coach_content=coach_content_nodes))
    # commit the transaction
    trans.commit()
    elapsed = (datetime.datetime.now() - start)
    logging.debug("Availability annotation took {} seconds".format(
        elapsed.seconds))
    bridge.end()
def get_channel_annotation_stats(channel_id, checksums=None):
    """Temporarily annotate a channel's ContentNode tree (availability,
    coach content counts, on-device resources) inside a transaction,
    collect per-node stats, then roll the annotation back.

    :param channel_id: channel whose tree is annotated.
    :param checksums: optional collection of file checksums to treat as
        available in addition to already-available local files.
    :return: dict mapping node id -> {coach_content, num_coach_contents,
        total_resources}.
    """
    bridge = Bridge(app_name=CONTENT_APP_NAME)
    ContentNodeTable = bridge.get_table(ContentNode)
    FileTable = bridge.get_table(File)
    LocalFileTable = bridge.get_table(LocalFile)
    if checksums is not None:
        # Join files to local files that either match the given checksums
        # or are already available.
        file_table = FileTable.join(
            LocalFileTable,
            and_(
                FileTable.c.local_file_id == LocalFileTable.c.id,
                or_(
                    filter_by_checksums(LocalFileTable.c.id, checksums),
                    LocalFileTable.c.available == True,  # noqa
                ),
            ),
        )
    else:
        file_table = FileTable.join(
            LocalFileTable,
            FileTable.c.local_file_id == LocalFileTable.c.id)
    # Correlated subquery: content nodes that have at least one
    # non-supplementary, renderable file in file_table.
    contentnode_statement = (
        select([FileTable.c.contentnode_id]).select_from(file_table).where(
            FileTable.c.supplementary == False)  # noqa
        .where(
            or_(*(FileTable.c.preset == preset
                  for preset in renderable_files_presets))).where(
                      ContentNodeTable.c.id == FileTable.c.contentnode_id))
    connection = bridge.get_connection()
    # start a transaction
    trans = connection.begin()
    # Leaf availability: a non-topic node is available iff it has a
    # renderable file per the statement above.
    connection.execute(ContentNodeTable.update().where(
        and_(
            ContentNodeTable.c.kind != content_kinds.TOPIC,
            ContentNodeTable.c.channel_id == channel_id,
        )).values(available=exists(contentnode_statement)))
    ContentNodeClass = bridge.get_class(ContentNode)
    node_depth = (bridge.session.query(func.max(
        ContentNodeClass.level)).filter_by(channel_id=channel_id).scalar())
    # Self-join alias: `child` rows are children of the row being updated.
    child = ContentNodeTable.alias()
    # Update all leaf ContentNodes to have num_coach_content to 1 or 0
    # Update all leaf ContentNodes to have on_device_resources to 1 or 0
    connection.execute(ContentNodeTable.update().where(
        and_(
            # In this channel
            ContentNodeTable.c.channel_id == channel_id,
            # That are not topics
            ContentNodeTable.c.kind != content_kinds.TOPIC,
        )).values(
            num_coach_contents=cast(ContentNodeTable.c.coach_content,
                                    Integer()),
            on_device_resources=cast(ContentNodeTable.c.available,
                                     Integer()),
        ))
    # Before starting set availability to False on all topics.
    connection.execute(ContentNodeTable.update().where(
        and_(
            # In this channel
            ContentNodeTable.c.channel_id == channel_id,
            # That are topics
            ContentNodeTable.c.kind == content_kinds.TOPIC,
        )).values(available=False))
    # Expression to capture all available child nodes of a contentnode
    available_nodes = select([child.c.available]).where(
        and_(
            child.c.available == True,  # noqa
            ContentNodeTable.c.id == child.c.parent_id,
        ))
    # Expressions for annotation of coach content
    # Expression that will resolve a boolean value for all the available children
    # of a content node, whereby if they all have coach_content flagged on them, it will be true,
    # but otherwise false.
    # Everything after the select statement should be identical to the available_nodes expression above.
    if bridge.engine.name == "sqlite":
        # Use a min function to simulate an AND.
        coach_content_nodes = select([func.min(child.c.coach_content)]).where(
            and_(
                child.c.available == True,  # noqa
                ContentNodeTable.c.id == child.c.parent_id,
            ))
    elif bridge.engine.name == "postgresql":
        # Use the postgres boolean AND operator
        coach_content_nodes = select([func.bool_and(
            child.c.coach_content)]).where(
                and_(
                    child.c.available == True,  # noqa
                    ContentNodeTable.c.id == child.c.parent_id,
                ))
    # Expression that sums the total number of coach contents for each child node
    # of a contentnode
    coach_content_num = select([func.sum(child.c.num_coach_contents)]).where(
        and_(
            child.c.available == True,  # noqa
            ContentNodeTable.c.id == child.c.parent_id,
        ))
    # Expression that sums the total number of on_device_resources for each child node
    # of a contentnode
    on_device_num = select([func.sum(child.c.on_device_resources)]).where(
        and_(
            child.c.available == True,  # noqa
            ContentNodeTable.c.id == child.c.parent_id,
        ))
    stats = {}
    # Go from the deepest level to the shallowest
    for level in range(node_depth, 0, -1):
        # Only modify topic availability here
        connection.execute(
            ContentNodeTable.update().where(
                and_(
                    ContentNodeTable.c.level == level - 1,
                    ContentNodeTable.c.channel_id == channel_id,
                    ContentNodeTable.c.kind == content_kinds.TOPIC,
                ))
            # Because we have set availability to False on all topics as a starting point
            # we only need to make updates to topics with available children.
            .where(exists(available_nodes)).values(
                available=exists(available_nodes),
                coach_content=coach_content_nodes,
                num_coach_contents=coach_content_num,
                on_device_resources=on_device_num,
            ))
        # Collect stats for the level just used as the source of the
        # roll-up (i.e. the children of the nodes updated above).
        level_stats = connection.execute(
            select([
                ContentNodeTable.c.id,
                ContentNodeTable.c.coach_content,
                ContentNodeTable.c.num_coach_contents,
                ContentNodeTable.c.on_device_resources,
            ]).where(
                and_(
                    ContentNodeTable.c.level == level,
                    ContentNodeTable.c.channel_id == channel_id,
                    ContentNodeTable.c.available == True,  # noqa
                )))
        for stat in level_stats:
            stats[stat[0]] = {
                "coach_content": bool(stat[1]),
                "num_coach_contents": stat[2] or 0,
                "total_resources": stat[3] or 0,
            }
    # The root node is recorded regardless of availability.
    root_node_stats = connection.execute(
        select([
            ContentNodeTable.c.id,
            ContentNodeTable.c.coach_content,
            ContentNodeTable.c.num_coach_contents,
            ContentNodeTable.c.on_device_resources,
        ]).where(
            and_(
                ContentNodeTable.c.level == 0,
                ContentNodeTable.c.channel_id == channel_id,
            ))).fetchone()
    stats[root_node_stats[0]] = {
        "coach_content": root_node_stats[1],
        "num_coach_contents": root_node_stats[2],
        "total_resources": root_node_stats[3],
    }
    # rollback the transaction to undo the temporary annotation
    trans.rollback()
    bridge.end()
    return stats
def load(session, user):
    """Labeler load handler.

    Accepts at most one of ``pid`` (photo id) or ``rid`` (revision id).
    With neither, picks a random ground-visible photo the user has not
    yet annotated. Returns the photo's camera parameters plus any
    existing revision (cars, camera height, comment, flag reason).
    """
    parser = reqparse.RequestParser()
    parser.add_argument('pid', None, type=int)
    parser.add_argument('rid', None, type=int)
    args = parser.parse_args()
    if args.pid is not None and args.rid is not None:
        return 'only pid or rid allowed', 400
    if args.pid is None and args.rid is None:
        # pylint: disable-msg=E1101,E1103
        photo = session.query(database.Photo) \
            .outerjoin(database.Annotation) \
            .group_by(database.Photo.id) \
            .having(func.bool_and(or_(
                database.Annotation.uid != user.id,
                database.Annotation.uid == None))) \
            .having(func.count(database.Annotation.id) < 2) \
            .order_by(func.random()) \
            .filter(database.Photo.seesground == True) \
            .first()
        # pylint: enable-msg=E1101,E1103
        # FIX: .first() may return None; previously photo.aboveground was
        # read before any None check, raising AttributeError and making
        # the status='empty' reply unreachable.
        if photo is None:
            return jsonify(status='empty')
        # Fall back to a 1.75m camera height when none is recorded.
        cameraheight = photo.aboveground if photo.aboveground > 0 else 1.75
        result = {
            'cameraheight': cameraheight,
            'cars': [],
        }
    elif args.pid and not args.rid:
        photo = session.query(database.Photo) \
            .filter_by(id=args.pid) \
            .one()
        cameraheight = photo.aboveground if photo.aboveground > 0 else 1.75
        result = {
            'cameraheight': cameraheight,
            'cars': [],
        }
    elif args.rid:
        revision = session.query(database.Revision) \
            .filter_by(id=args.rid) \
            .one()
        photo = revision.annotation.photo
        cars = [{
            'x': vehicle.x,
            'z': vehicle.z,
            'theta': vehicle.theta,
            'type': vehicle.type,
        } for vehicle in revision.vehicles]
        # NOTE: the original also built a throwaway result dict from
        # photo.aboveground here that was immediately overwritten; that
        # dead assignment is removed.
        result = {
            'cameraheight': revision.cameraheight,
            'cars': cars,
            'comment': revision.comment,
        }
        # If the annotation was flagged, report the (last) flag reason.
        for flag in revision.annotation.flags:
            result['reason'] = flag.reason
    # Vertical field of view from the photo's focal length.
    fov = math.degrees(2 * math.atan2(photo.height, 2 * photo.focal))
    data = {
        'id': photo.id,
        'image': os.path.join(APP.config['HOST'], photo.name),
        'width': photo.width,
        'height': photo.height,
        'focal': photo.focal,
        'upx': photo.r21,
        'upy': photo.r22,
        'upz': -photo.r23,
        'forwardx': -photo.r31,
        'forwardy': -photo.r32,
        'forwardz': photo.r33,
        'lat': photo.lat,
        'lon': photo.lon,
        'fov': fov,
        'aspect': float(photo.width) / photo.height,
        'result': result,
    }
    return jsonify(status='ok', data=data)
def load(session, user):
    """BBox load handler.

    Accepts at most one of ``vid`` (vehicle id) or ``bbsid`` (bounding
    box session id). With neither, picks a random final-revision vehicle
    the user has not yet boxed. Returns the vehicle's bounding box.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('vid', None, type=int)
    parser.add_argument('bbsid', None, type=int)
    args = parser.parse_args()
    if args.vid is not None and args.bbsid is not None:
        return 'only vid or bbsid allowed', 400
    if args.vid is None and args.bbsid is None:
        # pylint: disable-msg=E1101,E1103
        vehicle = session.query(database.Vehicle) \
            .join(database.Revision) \
            .outerjoin(database.BoundingBoxSession) \
            .filter(database.Revision.final == True) \
            .group_by(database.Vehicle.id) \
            .having(func.bool_and(or_(
                database.BoundingBoxSession.uid != user.id,
                database.BoundingBoxSession.uid == None
            ))) \
            .having(func.count(database.BoundingBoxSession.id) < 2) \
            .order_by(desc(func.count(database.BoundingBoxSession.id))) \
            .order_by(func.random()) \
            .first()
        # pylint: enable-msg=E1101,E1103
        # FIX: .first() may return None; previously vehicle.revision was
        # dereferenced unconditionally, raising AttributeError and making
        # the status='empty' reply unreachable.
        if vehicle is None:
            return jsonify(status='empty')
        photo = vehicle.revision.annotation.photo
        image = os.path.join(
            APP.config['HOST'],
            photo.name
        )
        bbox = {
            'x1': vehicle.x1,
            'x2': vehicle.x2,
            'y1': vehicle.y1,
            'y2': vehicle.y2,
            'vid': vehicle.id,
            'image': image,
        }
    elif args.vid is not None and args.bbsid is None:
        vehicle = session.query(database.Vehicle) \
            .filter_by(id=args.vid) \
            .one()
        photo = vehicle.revision.annotation.photo
        image = os.path.join(
            APP.config['HOST'],
            photo.name
        )
        bbox = {
            'x1': vehicle.x1,
            'x2': vehicle.x2,
            'y1': vehicle.y1,
            'y2': vehicle.y2,
            'vid': vehicle.id,
            'image': image,
        }
    elif args.bbsid is not None:
        bbox_session = session.query(database.BoundingBoxSession) \
            .filter_by(id=args.bbsid) \
            .one()
        vehicle = bbox_session.vehicle
        photo = vehicle.revision.annotation.photo
        image = os.path.join(
            APP.config['HOST'],
            photo.name
        )
        bbox = {
            'x1': bbox_session.x1,
            'x2': bbox_session.x2,
            'y1': bbox_session.y1,
            'y2': bbox_session.y2,
            'vid': vehicle.id,
            'image': image,
        }
    return jsonify(status='ok', data=bbox)
def recurse_annotation_up_tree(channel_id):
    """Annotate availability and coach-content metadata up a channel's tree.

    Works bottom-up with bulk UPDATE statements inside one transaction:
    leaf ContentNodes first get their counters derived from their own
    boolean flags, topic availability is reset to False, then each topic
    level (deepest first) aggregates its available children.

    :param channel_id: id of the channel whose ContentNodes are annotated.
    :raises NotImplementedError: for database engines other than sqlite
        and postgresql (no boolean-AND aggregate strategy is defined).
    """
    bridge = Bridge(app_name=CONTENT_APP_NAME)

    ContentNodeClass = bridge.get_class(ContentNode)

    ContentNodeTable = bridge.get_table(ContentNode)

    connection = bridge.get_connection()

    node_depth = (bridge.session.query(func.max(
        ContentNodeClass.level)).filter_by(channel_id=channel_id).scalar())
    # BUG FIX: scalar() returns None for an empty channel, and
    # range(None, 0, -1) would raise TypeError below.
    node_depth = node_depth or 0

    logger.info(
        "Annotating ContentNode objects with children for {levels} levels".
        format(levels=node_depth))

    child = ContentNodeTable.alias()

    # start a transaction

    trans = connection.begin()

    start = datetime.datetime.now()

    try:
        # Update all leaf ContentNodes to have num_coach_content to 1 or 0
        # Update all leaf ContentNodes to have on_device_resources to 1 or 0
        connection.execute(ContentNodeTable.update().where(
            and_(
                # In this channel
                ContentNodeTable.c.channel_id == channel_id,
                # That are not topics
                ContentNodeTable.c.kind != content_kinds.TOPIC,
            )).values(
                num_coach_contents=cast(ContentNodeTable.c.coach_content,
                                        Integer()),
                on_device_resources=cast(ContentNodeTable.c.available,
                                         Integer()),
            ))

        # Before starting set availability to False on all topics.
        connection.execute(ContentNodeTable.update().where(
            and_(
                # In this channel
                ContentNodeTable.c.channel_id == channel_id,
                # That are topics
                ContentNodeTable.c.kind == content_kinds.TOPIC,
            )).values(available=False))

        # Expression to capture all available child nodes of a contentnode
        available_nodes = select([child.c.available]).where(
            and_(
                child.c.available == True,  # noqa
                ContentNodeTable.c.id == child.c.parent_id,
            ))

        # Expressions for annotation of coach content
        # Expression that will resolve a boolean value for all the available
        # children of a content node: true only when every available child
        # has coach_content flagged.
        # Everything after the select statement should be identical to the
        # available_nodes expression above.
        if bridge.engine.name == "sqlite":
            # SQLite has no bool_and aggregate; min() over 0/1 values
            # simulates an AND.
            coach_content_nodes = select(
                [func.min(child.c.coach_content)]).where(
                    and_(
                        child.c.available == True,  # noqa
                        ContentNodeTable.c.id == child.c.parent_id,
                    ))
        elif bridge.engine.name == "postgresql":
            # Use the postgres boolean AND operator
            coach_content_nodes = select([func.bool_and(
                child.c.coach_content)]).where(
                    and_(
                        child.c.available == True,  # noqa
                        ContentNodeTable.c.id == child.c.parent_id,
                    ))
        else:
            # BUG FIX: an unsupported dialect previously left
            # coach_content_nodes unbound, surfacing as a confusing
            # NameError inside the update loop.
            raise NotImplementedError(
                "No coach_content aggregation defined for engine "
                "{}".format(bridge.engine.name))

        # Expression that sums the total number of coach contents for each
        # child node of a contentnode
        coach_content_num = select(
            [func.sum(child.c.num_coach_contents)]).where(
                and_(
                    child.c.available == True,  # noqa
                    ContentNodeTable.c.id == child.c.parent_id,
                ))

        # Expression that sums the total number of on_device_resources for
        # each child node of a contentnode
        on_device_num = select([func.sum(child.c.on_device_resources)]).where(
            and_(
                child.c.available == True,  # noqa
                ContentNodeTable.c.id == child.c.parent_id,
            ))

        # Go from the deepest level to the shallowest
        for level in range(node_depth, 0, -1):

            logger.info(
                "Annotating ContentNode objects with children for level "
                "{level}".format(level=level))
            # Only modify topic availability here
            connection.execute(
                ContentNodeTable.update().where(
                    and_(
                        ContentNodeTable.c.level == level - 1,
                        ContentNodeTable.c.channel_id == channel_id,
                        ContentNodeTable.c.kind == content_kinds.TOPIC,
                    ))
                # Because we have set availability to False on all topics as
                # a starting point we only need to make updates to topics
                # with available children.
                .where(exists(available_nodes)).values(
                    available=exists(available_nodes),
                    coach_content=coach_content_nodes,
                    num_coach_contents=coach_content_num,
                    on_device_resources=on_device_num,
                ))

        # commit the transaction
        trans.commit()
    except Exception:
        # BUG FIX: roll back instead of leaking an open transaction when
        # any of the updates above fails.
        trans.rollback()
        raise

    elapsed = datetime.datetime.now() - start
    logger.debug("Recursive topic tree annotation took {} seconds".format(
        elapsed.seconds))

    bridge.end()
def load(session, user):
    """Occlusion ranking load handler.

    Selects the payload from the query arguments:

    * neither ``vids`` nor ``osid``: sample up to 24 random cropped
      final-revision vehicles the user has not yet ranked (fewer than two
      prior occlusion sessions, none of them by this user);
    * ``vids``: dash-separated vehicle ids to reload, with empty labels;
    * ``osid``: replay a saved occlusion session, including stored labels.

    Returns a JSON status response, or a 400 error when both arguments
    are supplied.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('vids', None, type=str)
    parser.add_argument('osid', None, type=int)
    args = parser.parse_args()
    if args.vids is not None and args.osid is not None:
        return 'only vids or osid allowed', 400
    if args.vids is not None:
        # 'vids' arrives as a dash-separated string, e.g. "12-34-56".
        args.vids = [int(vid) for vid in args.vids.split('-')]
    if args.vids is None and args.osid is None:
        # pylint: disable-msg=E1101,E1103
        vehicles = session.query(database.Vehicle) \
            .outerjoin(database.OcclusionRanking) \
            .outerjoin(database.OcclusionSession) \
            .join(database.Revision) \
            .group_by(database.Vehicle.id) \
            .having(func.bool_and(or_(
                database.OcclusionSession.uid != user.id,
                database.OcclusionSession.uid == None
            ))) \
            .filter(database.Vehicle.cropped != None) \
            .filter(database.Revision.final == True) \
            .having(func.count(database.OcclusionSession.id) < 2) \
            .order_by(desc(func.count(database.OcclusionSession.id))) \
            .order_by(func.random()) \
            .limit(24)
        # pylint: enable-msg=E1101,E1103
        labels = [{
            'vid': vehicle.id,
            'image': os.path.join(APP.config['HOST'], vehicle.cropped)
        } for vehicle in vehicles]
    elif args.vids is not None and args.osid is None:
        # pylint: disable-msg=E1101
        vehicles = session.query(database.Vehicle) \
            .filter(database.Vehicle.id.in_(args.vids))
        # pylint: enable-msg=E1101
        labels = [{
            'vid': vehicle.id,
            'image': os.path.join(APP.config['HOST'], vehicle.cropped),
            'label': None,
        } for vehicle in vehicles]
    elif args.osid is not None:
        occlusion_session = session.query(database.OcclusionSession) \
            .filter_by(id=args.osid) \
            .one()
        labels = [{
            'vid': occlusion.vid,
            'image': os.path.join(APP.config['HOST'],
                                  occlusion.vehicle.cropped),
            'label': occlusion.category,
        } for occlusion in occlusion_session.occlusions]
    # NOTE(review): every branch above binds labels to a list, so this
    # None check looks unreachable -- presumably kept for symmetry with
    # the sibling handlers; confirm before removing.
    if labels is None:
        return jsonify(status='empty')
    else:
        return jsonify(status='ok', data=labels)
def load(session, user):
    """Approve pairs load handler.

    Selects the payload from the query arguments:

    * neither ``vids`` nor ``apsid``: pick a random cropped final-revision
      vehicle with a partner that this user has not yet judged (no prior
      approve-pair session);
    * ``vids``: exactly two dash-separated vehicle ids forming a pair;
    * ``apsid``: replay a saved approve-pair session, including its answer.

    Returns a JSON status response, or a 400 error for invalid argument
    combinations.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('vids', None, type=str)
    parser.add_argument('apsid', None, type=int)
    args = parser.parse_args()
    # CONSISTENCY FIX: the sibling load handlers reject requests that
    # supply both selectors; previously this one silently preferred apsid.
    if args.vids is not None and args.apsid is not None:
        return 'only vids or apsid allowed', 400
    if args.vids is not None:
        args.vids = [int(vid) for vid in args.vids.split('-')]
        if len(args.vids) != 2:
            return 'vids must be two elements', 400

    def _pair_payload(vid1, image1, vid2, image2):
        """Response dict for a vehicle pair."""
        return {
            'vehicle1': {
                'vid': vid1,
                'image': image1,
            },
            'vehicle2': {
                'vid': vid2,
                'image': image2,
            },
        }

    if args.vids is None and args.apsid is None:
        # pylint: disable-msg=E1101,E1103
        vehicle = session.query(database.Vehicle) \
            .join(database.Revision) \
            .outerjoin(database.ApprovePairToVehicleAssociation) \
            .outerjoin(database.ApprovePairSession) \
            .group_by(database.Vehicle.id) \
            .having(func.bool_and(or_(
                database.ApprovePairSession.uid != user.id,
                database.ApprovePairSession.uid == None
            ))) \
            .filter(database.Revision.final == True) \
            .filter(database.Vehicle.cropped != None) \
            .filter(database.Vehicle.partner_id != None) \
            .having(func.count(database.ApprovePairSession.id) < 1) \
            .order_by(desc(func.count(database.ApprovePairSession.id))) \
            .order_by(func.random()) \
            .first()
        # pylint: enable-msg=E1101,E1103
        if vehicle:
            labels = _pair_payload(
                vehicle.id,
                os.path.join(APP.config['HOST'], vehicle.cropped),
                vehicle.partner_id,
                os.path.join(APP.config['HOST'], vehicle.partner.cropped),
            )
        else:
            labels = None
    elif args.vids is not None:
        vehicle = session.query(database.Vehicle) \
            .filter_by(id=args.vids[0]) \
            .one()
        if vehicle.partner_id != args.vids[1]:
            return 'not a valid vid pair', 400
        labels = _pair_payload(
            vehicle.id,
            os.path.join(APP.config['HOST'], vehicle.cropped),
            vehicle.partner_id,
            os.path.join(APP.config['HOST'], vehicle.partner.cropped),
        )
    else:
        # Only apsid remains possible after the guards above.
        approve_pair_session = session.query(database.ApprovePairSession) \
            .filter_by(id=args.apsid) \
            .one()
        first, second = approve_pair_session.vehicles[:2]
        labels = _pair_payload(
            first.id,
            os.path.join(APP.config['HOST'], first.cropped),
            second.id,
            os.path.join(APP.config['HOST'], second.cropped),
        )
        labels['answer'] = approve_pair_session.answer
    if labels is None:
        return jsonify(status='empty')
    else:
        return jsonify(status='ok', data=labels)